net.manub.embeddedkafka.EmbeddedKafka Scala Examples
The following examples show how to use net.manub.embeddedkafka.EmbeddedKafka.
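All of the examples share the same basic shape: build an EmbeddedKafkaConfig, bring up a broker (either explicitly with EmbeddedKafka.start()/EmbeddedKafka.stop() around a suite, or scoped with withRunningKafka), then publish and consume messages. Here is a minimal sketch of that core pattern, assuming the library's default String serdes; the topic name and ports are illustrative, not taken from any project below:

import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}

object MinimalEmbeddedKafka extends App with EmbeddedKafka {
  // Explicit ports; omitting the config falls back to the library defaults (6001/6000).
  implicit val config: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = 6001, zooKeeperPort = 6000)

  // withRunningKafka starts ZooKeeper and Kafka, runs the block, then stops both.
  withRunningKafka {
    publishStringMessageToKafka("example-topic", "hello")
    assert(consumeFirstStringMessageFrom("example-topic") == "hello")
  }
}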
Example 1
Source File: KafkaSpec.scala From kmq with Apache License 2.0
package com.softwaremill.kmq.redelivery.infrastructure

import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.common.serialization.StringDeserializer
import org.scalatest.{BeforeAndAfterEach, Suite}

trait KafkaSpec extends BeforeAndAfterEach { self: Suite =>

  val testKafkaConfig = EmbeddedKafkaConfig(9092, 2182)
  private implicit val stringDeserializer = new StringDeserializer()

  def sendToKafka(topic: String, message: String): Unit = {
    EmbeddedKafka.publishStringMessageToKafka(topic, message)(testKafkaConfig)
  }

  def consumeFromKafka(topic: String): String = {
    EmbeddedKafka.consumeFirstStringMessageFrom(topic)(testKafkaConfig)
  }

  override def beforeEach(): Unit = {
    super.beforeEach()
    EmbeddedKafka.start()(testKafkaConfig)
  }

  override def afterEach(): Unit = {
    super.afterEach()
    EmbeddedKafka.stop()
  }
}
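The trait is meant to be mixed into concrete suites, which then get a fresh broker per test via beforeEach/afterEach. A hypothetical spec using it might look like this (the class and topic name are illustrative, not part of kmq):

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

// Hypothetical suite mixing in the KafkaSpec trait above.
class RedeliverySmokeSpec extends AnyFlatSpec with Matchers with KafkaSpec {
  "the embedded broker" should "round-trip a message" in {
    sendToKafka("user-events", "signed-up")
    consumeFromKafka("user-events") shouldBe "signed-up"
  }
}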
Example 2
Source File: EmbeddedKafkaStreams.scala From scalatest-embedded-kafka with MIT License
package net.manub.embeddedkafka.streams

import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig, UUIDs}
import org.apache.kafka.streams.{KafkaStreams, Topology}

trait EmbeddedKafkaStreams extends EmbeddedKafka with TestStreamsConfig {

  def runStreams[T](topicsToCreate: Seq[String],
                    topology: Topology,
                    extraConfig: Map[String, AnyRef] = Map.empty)(block: => T)(
      implicit config: EmbeddedKafkaConfig): T =
    withRunningKafka {
      topicsToCreate.foreach(topic => createCustomTopic(topic))
      val streamId = UUIDs.newUuid().toString
      val streams =
        new KafkaStreams(topology, streamConfig(streamId, extraConfig))
      streams.start()
      try {
        block
      } finally {
        streams.close()
      }
    }(config)
}
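runStreams wraps broker startup, topic creation, and the KafkaStreams lifecycle around a test block. A sketch of calling it with a trivial pass-through topology, assuming String default serdes are supplied through extraConfig (the class and topic names are illustrative):

import net.manub.embeddedkafka.EmbeddedKafkaConfig
import net.manub.embeddedkafka.streams.EmbeddedKafkaStreams
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.{StreamsBuilder, StreamsConfig}
import org.scalatest.{FlatSpec, Matchers}

// Hypothetical spec, not part of the library.
class PassThroughSpec extends FlatSpec with Matchers with EmbeddedKafkaStreams {

  implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

  // Default serdes for the topology, passed through extraConfig.
  val serdes: Map[String, AnyRef] = Map(
    StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG -> Serdes.String().getClass.getName,
    StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG -> Serdes.String().getClass.getName
  )

  "a pass-through topology" should "forward messages unchanged" in {
    val builder = new StreamsBuilder()
    builder.stream[String, String]("input-topic").to("output-topic")

    runStreams(Seq("input-topic", "output-topic"), builder.build(), serdes) {
      publishStringMessageToKafka("input-topic", "ping")
      consumeFirstStringMessageFrom("output-topic") shouldBe "ping"
    }
  }
}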
Example 3
Source File: KafkaSinkTest.scala From eel-sdk with Apache License 2.0
package io.eels.component.kafka

import java.util
import java.util.{Properties, UUID}

import io.eels.Row
import io.eels.datastream.DataStream
import io.eels.schema.{Field, StringType, StructType}
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.common.serialization.{Deserializer, Serializer}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.collection.JavaConverters._
import scala.util.Try

class KafkaSinkTest extends FlatSpec with Matchers with BeforeAndAfterAll {

  implicit val kafkaConfig = EmbeddedKafkaConfig(
    kafkaPort = 6001,
    zooKeeperPort = 6000
  )
  Try {
    EmbeddedKafka.start()
  }

  val schema = StructType(
    Field("name", StringType, nullable = true),
    Field("location", StringType, nullable = true)
  )

  val ds = DataStream.fromValues(
    schema,
    Seq(
      Vector("clint eastwood", UUID.randomUUID().toString),
      Vector("elton john", UUID.randomUUID().toString)
    )
  )

  "KafkaSink" should "support default implicits" ignore {

    val topic = "mytopic-" + System.currentTimeMillis()

    val properties = new Properties()
    properties.put("bootstrap.servers", s"localhost:${kafkaConfig.kafkaPort}")
    properties.put("group.id", "test")
    properties.put("auto.offset.reset", "earliest")

    val producer = new KafkaProducer[String, Row](properties, StringSerializer, RowSerializer)
    val sink = KafkaSink(topic, producer)

    val consumer = new KafkaConsumer[String, String](properties, StringDeserializer, StringDeserializer)
    consumer.subscribe(util.Arrays.asList(topic))

    ds.to(sink)
    producer.close()

    val records = consumer.poll(4000)
    records.iterator().asScala.map(_.value).toList shouldBe ds.collect.map {
      case Row(_, values) => values.mkString(",")
    }.toList
  }
}

object RowSerializer extends Serializer[Row] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def serialize(topic: String, data: Row): Array[Byte] = data.values.mkString(",").getBytes
  override def close(): Unit = ()
}

object StringSerializer extends Serializer[String] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def serialize(topic: String, data: String): Array[Byte] = data.getBytes
}

object StringDeserializer extends Deserializer[String] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def deserialize(topic: String, data: Array[Byte]): String = new String(data)
}
Example 4
Source File: KafkaAdminAlgebraSpec.scala From hydra with Apache License 2.0
package hydra.kafka.algebras

import akka.actor.ActorSystem
import cats.effect.{ContextShift, IO}
import cats.implicits._
import hydra.kafka.util.KafkaUtils.TopicDetails
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.ExecutionContext

final class KafkaAdminAlgebraSpec
    extends AnyWordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with EmbeddedKafka {

  private val port = 8023

  implicit private val kafkaConfig: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = port, zooKeeperPort = 3027)

  implicit private val contextShift: ContextShift[IO] =
    IO.contextShift(ExecutionContext.global)

  implicit private val system: ActorSystem = ActorSystem(
    "kafka-client-spec-system"
  )

  override def beforeAll(): Unit = {
    super.beforeAll()
    EmbeddedKafka.start()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    EmbeddedKafka.stop()
  }

  (for {
    live <- KafkaAdminAlgebra.live[IO](s"localhost:$port")
    test <- KafkaAdminAlgebra.test[IO]
  } yield {
    runTests(live)
    runTests(test, isTest = true)
  }).unsafeRunSync()

  private def runTests(kafkaClient: KafkaAdminAlgebra[IO], isTest: Boolean = false): Unit = {
    (if (isTest) "KafkaAdmin#test" else "KafkaAdmin#live") must {
      "create a topic" in {
        val topicName = "Topic1"
        val topicDetails = TopicDetails(3, 1.toShort)
        (kafkaClient.createTopic(topicName, topicDetails) *> kafkaClient
          .describeTopic(topicName)
          .map {
            case Some(topic) =>
              topic.name shouldBe topicName
              topic.numberPartitions shouldBe topicDetails.numPartitions
            case None => fail("Found None when a Topic was Expected")
          }).unsafeRunSync()
      }

      "list all topics" in {
        kafkaClient.getTopicNames.unsafeRunSync() shouldBe List("Topic1")
      }

      "delete a topic" in {
        val topicToDelete = "topic_to_delete"
        (for {
          _ <- kafkaClient.createTopic(topicToDelete, TopicDetails(1, 1))
          _ <- kafkaClient.deleteTopic(topicToDelete)
          maybeTopic <- kafkaClient.describeTopic(topicToDelete)
        } yield maybeTopic should not be defined).unsafeRunSync()
      }
    }
  }
}
Example 5
Source File: KafkaConsumerProxySpec.scala From hydra with Apache License 2.0
package hydra.kafka.consumer

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit}
import hydra.kafka.consumer.KafkaConsumerProxy._
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.common.TopicPartition
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpecLike
import org.scalatest.BeforeAndAfterAll

import scala.concurrent.duration._

class KafkaConsumerProxySpec
    extends TestKit(ActorSystem("test"))
    with Matchers
    with AnyFunSpecLike
    with BeforeAndAfterAll
    with ImplicitSender {

  implicit val config =
    EmbeddedKafkaConfig(kafkaPort = 8092, zooKeeperPort = 3181)

  override def beforeAll() = {
    super.beforeAll()
    EmbeddedKafka.start()
    EmbeddedKafka.createCustomTopic("test-consumer1")
    EmbeddedKafka.createCustomTopic("test-consumer2")
  }

  override def afterAll() = {
    super.afterAll()
    EmbeddedKafka.stop()
    TestKit.shutdownActorSystem(system, verifySystemShutdown = true)
  }

  lazy val kafkaProxy = system.actorOf(Props[KafkaConsumerProxy])

  describe("When using KafkaConsumerProxy") {
    it("gets latest offsets for a topic") {
      kafkaProxy ! GetLatestOffsets("test-consumer1")
      expectMsg(
        10.seconds,
        LatestOffsetsResponse(
          "test-consumer1",
          Map(new TopicPartition("test-consumer1", 0) -> 0L)
        )
      )
    }

    it("lists topics") {
      kafkaProxy ! ListTopics
      expectMsgPF(10.seconds) {
        case ListTopicsResponse(topics) =>
          topics.keys should contain allOf ("test-consumer1", "test-consumer2")
      }
    }

    it("gets partition info") {
      kafkaProxy ! GetPartitionInfo("test-consumer2")
      expectMsgPF(10.seconds) {
        case PartitionInfoResponse(topic, response) =>
          topic shouldBe "test-consumer2"
          response.map(p => p.partition()) shouldBe Seq(0)
      }
    }

    it("handles errors") {
      kafkaProxy ! GetPartitionInfo("test-consumer-unknown")
      expectMsgPF(10.seconds) {
        case PartitionInfoResponse(topic, response) =>
          response(0).leader().idString shouldBe "0"
          topic should startWith("test-consumer-unknown")
      }
    }
  }
}
Example 6
Source File: KafkaMetricsSpec.scala From hydra with Apache License 2.0
package hydra.kafka.transport

import akka.actor.ActorSystem
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import hydra.core.transport.AckStrategy
import hydra.kafka.producer.KafkaRecordMetadata
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpecLike
import org.scalatest.BeforeAndAfterAll
import spray.json.DefaultJsonProtocol

class KafkaMetricsSpec
    extends TestKit(ActorSystem("hydra"))
    with Matchers
    with AnyFunSpecLike
    with BeforeAndAfterAll
    with DefaultJsonProtocol {

  import KafkaRecordMetadata._

  implicit val config = EmbeddedKafkaConfig(
    kafkaPort = 8092,
    zooKeeperPort = 3181,
    customBrokerProperties = Map(
      "auto.create.topics.enable" -> "false",
      "offsets.topic.replication.factor" -> "1"
    )
  )

  override def afterAll() = {
    super.afterAll()
    EmbeddedKafka.stop()
    TestKit.shutdownActorSystem(system, verifySystemShutdown = true)
  }

  override def beforeAll() = {
    super.beforeAll()
    EmbeddedKafka.start()
    EmbeddedKafka.createCustomTopic("metrics_topic")
  }

  describe("When using the KafkaMetrics object") {

    it("uses the NoOpMetrics") {
      KafkaMetrics(ConfigFactory.empty()) shouldBe NoOpMetrics
      KafkaMetrics(
        ConfigFactory.parseString("transports.kafka.metrics.enabled=false")
      ) shouldBe NoOpMetrics
    }

    it("uses the PublishMetrics") {
      import spray.json._
      val cfg = ConfigFactory.parseString(s"""
           | transports.kafka.metrics.topic = metrics_topic
           | transports.kafka.metrics.enabled=true""".stripMargin)
      val pm = KafkaMetrics(cfg)
      pm shouldBe a[PublishMetrics]
      val kmd = KafkaRecordMetadata(1, 1, "topic", 1, 1, AckStrategy.NoAck)
      pm.saveMetrics(kmd)
      EmbeddedKafka
        .consumeFirstStringMessageFrom("metrics_topic")
        .parseJson shouldBe kmd.toJson
    }
  }
}
Example 7
Source File: AuditLogProviderItTest.scala From rokku with Apache License 2.0
package com.ing.wbaa.rokku.proxy.provider

import java.net.InetAddress

import akka.actor.ActorSystem
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, RemoteAddress, StatusCodes}
import com.ing.wbaa.rokku.proxy.config.KafkaSettings
import com.ing.wbaa.rokku.proxy.data._
import com.ing.wbaa.rokku.proxy.handler.parsers.RequestParser.RequestTypeUnknown
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.ExecutionContext

class AuditLogProviderItTest extends AnyWordSpecLike with Diagrams with EmbeddedKafka with AuditLogProvider {

  implicit val testSystem: ActorSystem = ActorSystem("kafkaTest")

  private val testKafkaPort = 9093

  override def auditEnabled = true

  override implicit val kafkaSettings: KafkaSettings = new KafkaSettings(testSystem.settings.config) {
    override val bootstrapServers: String = s"localhost:$testKafkaPort"
  }

  override implicit val executionContext: ExecutionContext = testSystem.dispatcher
  implicit val requestId: RequestId = RequestId("test")

  val s3Request = S3Request(AwsRequestCredential(AwsAccessKey("a"), None), Some("demobucket"), Some("s3object"), Read())
    .copy(headerIPs = HeaderIPs(
      Some(RemoteAddress(InetAddress.getByName("127.0.0.1"))),
      Some(Seq(RemoteAddress(InetAddress.getByName("1.1.1.1")))),
      Some(RemoteAddress(InetAddress.getByName("2.2.2.2")))))

  "AuditLogProvider" should {
    "send audit" in {
      implicit val config = EmbeddedKafkaConfig(kafkaPort = testKafkaPort)

      withRunningKafka {
        Thread.sleep(3000)
        val createEventsTopic = "audit_events"
        createCustomTopic(createEventsTopic)
        auditLog(s3Request, HttpRequest(HttpMethods.PUT, "http://localhost", Nil), "testUser", RequestTypeUnknown(), StatusCodes.Processing)
        val result = consumeFirstStringMessageFrom(createEventsTopic)
        assert(result.contains("\"eventName\":\"PUT\""))
        assert(result.contains("\"sourceIPAddress\":\"ClientIp=unknown|X-Real-IP=127.0.0.1|X-Forwarded-For=1.1.1.1|Remote-Address=2.2.2.2\""))
        assert(result.contains("\"x-amz-request-id\":\"test\""))
        assert(result.contains("\"principalId\":\"testUser\""))
      }
    }
  }
}
Example 8
Source File: MessageProviderKafkaItTest.scala From rokku with Apache License 2.0
package com.ing.wbaa.rokku.proxy.provider

import java.net.InetAddress

import akka.actor.ActorSystem
import akka.http.scaladsl.model.{HttpMethods, RemoteAddress}
import com.ing.wbaa.rokku.proxy.config.KafkaSettings
import com.ing.wbaa.rokku.proxy.data._
import com.ing.wbaa.rokku.proxy.handler.parsers.RequestParser.RequestTypeUnknown
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.scalatest.RecoverMethods._
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.ExecutionContext

class MessageProviderKafkaItTest extends AnyWordSpecLike with Diagrams with EmbeddedKafka with MessageProviderKafka {

  implicit val testSystem: ActorSystem = ActorSystem("kafkaTest")

  private val testKafkaPort = 9093

  override implicit val kafkaSettings: KafkaSettings = new KafkaSettings(testSystem.settings.config) {
    override val bootstrapServers: String = s"localhost:$testKafkaPort"
  }

  override implicit val executionContext: ExecutionContext = testSystem.dispatcher
  implicit val requestId: RequestId = RequestId("test")

  val s3Request = S3Request(AwsRequestCredential(AwsAccessKey("a"), None), Some("demobucket"), Some("s3object"), Read())
    .copy(clientIPAddress = RemoteAddress(InetAddress.getByName("127.0.0.1")))

  "KafkaMessageProvider" should {
    "Send message to correct topic with Put or Post" in {
      implicit val config = EmbeddedKafkaConfig(kafkaPort = testKafkaPort)

      withRunningKafka {
        Thread.sleep(3000)
        val createEventsTopic = "create_events"
        createCustomTopic(createEventsTopic)
        emitEvent(s3Request, HttpMethods.PUT, "testUser", RequestTypeUnknown())
        val result = consumeFirstStringMessageFrom(createEventsTopic)
        assert(result.contains("s3:ObjectCreated:PUT"))
      }
    }

    "Send message to correct topic with Delete" in {
      implicit val config = EmbeddedKafkaConfig(kafkaPort = testKafkaPort)

      withRunningKafka {
        Thread.sleep(3000)
        val deleteEventsTopic = "delete_events"
        createCustomTopic(deleteEventsTopic)
        emitEvent(s3Request, HttpMethods.DELETE, "testUser", RequestTypeUnknown())
        assert(consumeFirstStringMessageFrom(deleteEventsTopic).contains("s3:ObjectRemoved:DELETE"))
      }
    }

    "fail on incomplete data" in {
      recoverToSucceededIf[Exception](emitEvent(s3Request.copy(s3Object = None), HttpMethods.PUT, "testUser", RequestTypeUnknown()))
    }
  }
}
Example 9
Source File: KafkaSpecBase.scala From openwhisk with Apache License 2.0
package org.apache.openwhisk.core.monitoring.metrics

import akka.kafka.testkit.scaladsl.{EmbeddedKafkaLike, ScalatestKafkaSpec}
import akka.stream.ActorMaterializer
import net.manub.embeddedkafka.EmbeddedKafka
import org.scalatest._
import org.scalatest.concurrent.{Eventually, IntegrationPatience, ScalaFutures}

import scala.concurrent.duration.{DurationInt, FiniteDuration}

abstract class KafkaSpecBase
    extends ScalatestKafkaSpec(6065)
    with Matchers
    with ScalaFutures
    with FlatSpecLike
    with EmbeddedKafka
    with EmbeddedKafkaLike
    with IntegrationPatience
    with Eventually
    with EventsTestHelper { this: Suite =>
  implicit val timeoutConfig: PatienceConfig = PatienceConfig(1.minute)
  implicit val materializer: ActorMaterializer = ActorMaterializer()
  override val sleepAfterProduce: FiniteDuration = 10.seconds
  override protected val topicCreationTimeout = 60.seconds
}
Example 10
Source File: package.scala From zio-kafka with Apache License 2.0
package zio.kafka

import net.manub.embeddedkafka.{EmbeddedK, EmbeddedKafka, EmbeddedKafkaConfig}
import zio._

package object embedded {
  type Kafka = Has[Kafka.Service]

  object Kafka {
    trait Service {
      def bootstrapServers: List[String]
      def stop(): UIO[Unit]
    }

    case class EmbeddedKafkaService(embeddedK: EmbeddedK) extends Service {
      override def bootstrapServers: List[String] = List(s"localhost:${embeddedK.config.kafkaPort}")
      override def stop(): UIO[Unit]              = ZIO.effectTotal(embeddedK.stop(true))
    }

    case object DefaultLocal extends Service {
      override def bootstrapServers: List[String] = List(s"localhost:9092")
      override def stop(): UIO[Unit]              = UIO.unit
    }

    val embedded: ZLayer[Any, Throwable, Kafka] = ZLayer.fromManaged {
      implicit val embeddedKafkaConfig = EmbeddedKafkaConfig(
        customBrokerProperties = Map("group.min.session.timeout.ms" -> "500", "group.initial.rebalance.delay.ms" -> "0")
      )
      ZManaged.make(ZIO.effect(EmbeddedKafkaService(EmbeddedKafka.start())))(_.stop())
    }

    val local: ZLayer[Any, Nothing, Kafka] = ZLayer.succeed(DefaultLocal)
  }
}
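Code consumes these layers like any other ZLayer. A minimal sketch, assuming ZIO 1.x (the object name is illustrative, not part of zio-kafka): the program reads the broker addresses out of the Kafka service, and provideLayer picks between the throwaway embedded broker and one already running on 9092.

import zio._
import zio.kafka.embedded.Kafka

object KafkaLayerExample extends zio.App {
  // Read the broker list out of whatever Kafka.Service the layer provides.
  val program: ZIO[Kafka, Nothing, List[String]] =
    ZIO.access[Kafka](_.get.bootstrapServers)

  def run(args: List[String]): URIO[ZEnv, ExitCode] =
    program
      .provideLayer(Kafka.embedded) // or Kafka.local for an external broker
      .flatMap(servers => UIO(println(servers)))
      .exitCode
}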
Example 11
Source File: KafkaExample.scala From cornichon with Apache License 2.0
package com.github.agourlay.kafka.kafka

import com.github.agourlay.cornichon.CornichonFeature
import com.github.agourlay.cornichon.kafka.KafkaDsl
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}

class KafkaExample extends CornichonFeature with KafkaDsl {

  override lazy val kafkaBootstrapServersHost = "localhost"
  override lazy val kafkaBootstrapServersPort = 9092

  def feature = Feature("Kafka DSL") {

    Scenario("write and read arbitrary Strings to/from topic") {
      Given I put_topic(
        topic = "cornichon",
        key = "success",
        message = "I am a plain string"
      )

      When I read_from_topic("cornichon")

      Then assert kafka("cornichon").topic_is("cornichon")
      Then assert kafka("cornichon").key_is("success")
      Then assert kafka("cornichon").message_value.is("I am a plain string")
    }

    Scenario("use cornichon jsonAssertions on the message value") {
      Given I put_topic(
        topic = "cornichon",
        key = "json",
        message = """{ "coffee": "black", "cornichon": "green" }"""
      )

      When I read_from_topic("cornichon")
      Then assert kafka("cornichon").key_is("json")
      Then assert kafka("cornichon").message_value.ignoring("coffee").is("""
        { "cornichon": "green" }
      """)
    }
  }

  beforeFeature {
    // start an embedded kafka for the tests
    EmbeddedKafka.start() {
      EmbeddedKafkaConfig(
        kafkaPort = kafkaBootstrapServersPort,
        customBrokerProperties = Map("group.initial.rebalance.delay.ms" -> "10")
      )
    }
    ()
  }

  afterFeature {
    EmbeddedKafka.stop()
  }
}
Example 12
Source File: WebSocketRoutesSpec.scala From sync3k-server with Apache License 2.0
package sync3k.routes

import akka.http.scaladsl.testkit.{ ScalatestRouteTest, WSProbe }
import net.manub.embeddedkafka.EmbeddedKafka
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec }

class WebSocketRoutesSpec extends WordSpec with Matchers with ScalatestRouteTest with WebSocketRoutes with EmbeddedKafka with BeforeAndAfterAll {
  override implicit var kafkaServer: String = "localhost:6001"

  val baseRoots = Table(
    "base url",
    "/ws",
    "/kafka/test-1",
    "/kafka/test-2"
  )

  override def beforeAll(): Unit = {
    super.beforeAll()
    EmbeddedKafka.start()
  }

  override def afterAll(): Unit = {
    EmbeddedKafka.stop()
    super.afterAll()
  }

  forAll(baseRoots) { baseRoot =>
    baseRoot should {
      "echo updates" in {
        val wsClient = WSProbe()

        WS(s"$baseRoot/0", wsClient.flow) ~> webSocketRoutes ~> check {
          isWebSocketUpgrade shouldBe true

          wsClient.sendMessage("test1")
          wsClient.expectMessage("""{"id":0,"message":"test1"}""")

          wsClient.sendMessage("test2")
          wsClient.expectMessage("""{"id":1,"message":"test2"}""")

          wsClient.sendCompletion()
          wsClient.expectCompletion()
        }
      }

      "replay updates" in {
        val wsClient2 = WSProbe()

        WS(s"$baseRoot/0", wsClient2.flow) ~> webSocketRoutes ~> check {
          isWebSocketUpgrade shouldBe true

          wsClient2.expectMessage("""{"id":0,"message":"test1"}""")
          wsClient2.expectMessage("""{"id":1,"message":"test2"}""")

          wsClient2.sendCompletion()
          wsClient2.expectCompletion()
        }
      }

      "skip to offset" in {
        val wsClient3 = WSProbe()

        WS(s"$baseRoot/1", wsClient3.flow) ~> webSocketRoutes ~> check {
          isWebSocketUpgrade shouldBe true

          wsClient3.expectMessage("""{"id":1,"message":"test2"}""")

          wsClient3.sendCompletion()
          wsClient3.expectCompletion()
        }
      }
    }
  }
}
Example 13
Source File: EmbeddedKafkaTest.scala From apache-spark-test with Apache License 2.0
package com.github.dnvriend.spark.kafka

import com.github.dnvriend.TestSpec
import net.manub.embeddedkafka.EmbeddedKafka
import org.apache.kafka.common.serialization.{ Deserializer, Serializer }

class EmbeddedKafkaTest extends TestSpec with EmbeddedKafka {
  final val TopicName = "MyTopic"

  def publish[T: Serializer](msg: T): Unit =
    publishToKafka(TopicName, msg)

  def consume[T: Deserializer]: T =
    consumeFirstMessageFrom(TopicName)

  import net.manub.embeddedkafka.Codecs._

  it should "set up an embedded kafka, create a topic, send a message and receive a message from the same topic" in withRunningKafka {
    publish("foo")
    consume[String] shouldBe "foo"

    publish("bar".getBytes)
    consume[Array[Byte]] shouldBe "bar".getBytes()
  }
}
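The Codecs._ import is what satisfies the Serializer/Deserializer context bounds on publish and consume; any implicit pair in scope works equally well. For instance, a hedged equivalent for the String case (not from the original project):

import org.apache.kafka.common.serialization.{ Deserializer, Serializer, StringDeserializer, StringSerializer }

// Explicit implicits in place of Codecs._; import StringCodecs._ where
// publish/consume are called.
object StringCodecs {
  implicit val stringSerializer: Serializer[String] = new StringSerializer
  implicit val stringDeserializer: Deserializer[String] = new StringDeserializer
}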
Example 14
Source File: KafkaProducerSpec.scala From freestyle-kafka with Apache License 2.0
package freestyle
package kafka

import freestyle.free._
import net.manub.embeddedkafka.EmbeddedKafka
import org.scalatest.WordSpec

import scala.concurrent.duration._

class KafkaProducerSpec extends WordSpec with FSKafkaAlgebraSpec {

  "Producer can be reused after closed" in {
    withProducer[String].apply { producer =>
      for {
        _ <- producer.close()
        isClosed <- producer.isClosed
        _ <- producer.metrics
        isClosedAfterUsed <- producer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Producer can be reused after closed with a timeout" in {
    withProducer[String].apply { producer =>
      for {
        _ <- producer.closeWaitingFor(5.seconds)
        isClosed <- producer.isClosed
        _ <- producer.metrics
        isClosedAfterUsed <- producer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Producer can send a message to a topic" in {
    withProducer[String].apply { producer =>
      for {
        _ <- producer.sendToTopic("mytopic", ("key", "mymessage"))
        _ <- producer.flush()
        message <- FreeS.pure(EmbeddedKafka.consumeFirstStringMessageFrom("mytopic", true))
      } yield message
    } shouldBe Right("mymessage")
  }

  "Producer can send many messages to a topic" in {
    val records = List("key" -> "mymessage1", "key2" -> "mymessage2")
    withProducer[String].apply { producer =>
      for {
        _ <- producer.sendManyToTopic("mytopic", records)
        _ <- producer.flush()
        messages <- FreeS.pure(EmbeddedKafka.consumeNumberStringMessagesFrom("mytopic", 2, true))
      } yield messages
    } shouldBe Right(List("mymessage1", "mymessage2"))
  }

  "Producer can obtain metrics" in {
    withProducer[String].apply { _.metrics }.isRight shouldBe true
  }
}
Example 15
Source File: KafkaConsumerSpec.scala From freestyle-kafka with Apache License 2.0
package freestyle
package kafka

import freestyle.free._
import net.manub.embeddedkafka.EmbeddedKafka
import org.scalatest.WordSpec

import scala.concurrent.duration._
import cats.implicits._
import org.apache.kafka.clients.consumer.ConsumerRecords

import scala.collection.JavaConverters._

class KafkaConsumerSpec extends WordSpec with FSKafkaAlgebraSpec {

  "Consumer can be reused after closed" in {
    withConsumer[String].apply { consumer =>
      for {
        _ <- consumer.close()
        isClosed <- consumer.isClosed
        _ <- consumer.metrics
        isClosedAfterUsed <- consumer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Consumer can be reused after closed with a timeout" in {
    withConsumer[String].apply { consumer =>
      for {
        _ <- consumer.closeWaitingFor(5.seconds)
        isClosed <- consumer.isClosed
        _ <- consumer.metrics
        isClosedAfterUsed <- consumer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Consumer can subscribe to topics" in {
    val topics = "topicsubscription" :: Nil
    createCustomTopic(topics.head)
    withConsumer[String].apply { consumer =>
      for {
        _ <- consumer.subscribe(topics)
        topics <- consumer.subscription
      } yield topics
    } shouldBe Right(topics)
  }

  "Consumer can read a message from a topic" in {
    val topic = "mytopic"
    val key = "key"
    val message = "mymessage"
    withProducerAndConsumer[String].apply { (producer, consumer) =>
      for {
        _ <- producer.sendToTopic(topic, (key, message))
        _ <- producer.flush()
        _ <- consumer.subscribe(topic :: Nil)
        _ <- consumer.commitSync()
        records <- consumer.poll(10.seconds)
        message = records.records(topic).asScala.toList.headOption.map(_.value)
      } yield message
    } shouldBe Right(Some("mymessage"))
  }

  "Consumer can obtain metrics" in {
    withProducer[String].apply { _.metrics }.isRight shouldBe true
  }
}