net.manub.embeddedkafka.EmbeddedKafkaConfig Scala Examples
The following examples show how to use net.manub.embeddedkafka.EmbeddedKafkaConfig.
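Most of the examples follow the same basic pattern: define an (often implicit) EmbeddedKafkaConfig with the ports and broker properties you need, start the embedded broker, exchange messages, and stop it. Below is a minimal sketch of that pattern, distilled from the examples that follow; the object name and port choices are arbitrary.

import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}

// Minimal sketch (hypothetical object name, arbitrary ports): start a broker,
// publish a string message, consume it back, and shut down.
object EmbeddedKafkaSketch extends App {
  implicit val config: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = 6001, zooKeeperPort = 6000)

  EmbeddedKafka.start()
  EmbeddedKafka.publishStringMessageToKafka("sketch-topic", "hello")
  println(EmbeddedKafka.consumeFirstStringMessageFrom("sketch-topic"))
  EmbeddedKafka.stop()
}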
Example 1
Source File: KafkaSpec.scala From kmq with Apache License 2.0
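A reusable test mixin: the embedded broker is started before and stopped after each test, and the publish/consume helpers pass an explicit EmbeddedKafkaConfig (Kafka on 9092, ZooKeeper on 2182).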
package com.softwaremill.kmq.redelivery.infrastructure

import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.common.serialization.StringDeserializer
import org.scalatest.{BeforeAndAfterEach, Suite}

trait KafkaSpec extends BeforeAndAfterEach { self: Suite =>

  val testKafkaConfig = EmbeddedKafkaConfig(9092, 2182)
  private implicit val stringDeserializer = new StringDeserializer()

  def sendToKafka(topic: String, message: String): Unit = {
    EmbeddedKafka.publishStringMessageToKafka(topic, message)(testKafkaConfig)
  }

  def consumeFromKafka(topic: String): String = {
    EmbeddedKafka.consumeFirstStringMessageFrom(topic)(testKafkaConfig)
  }

  override def beforeEach(): Unit = {
    super.beforeEach()
    EmbeddedKafka.start()(testKafkaConfig)
  }

  override def afterEach(): Unit = {
    super.afterEach()
    EmbeddedKafka.stop()
  }
}
Example 2
Source File: config.scala From scalatest-embedded-kafka with MIT License
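This example extends EmbeddedKafkaConfig itself, adding a schema-registry port alongside the Kafka and ZooKeeper ports and providing an implicit default configuration (ports 6000-6002) in the companion object.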
package net.manub.embeddedkafka.schemaregistry

import net.manub.embeddedkafka.EmbeddedKafkaConfig

trait EmbeddedKafkaConfigWithSchemaRegistry extends EmbeddedKafkaConfig {
  def schemaRegistryPort: Int
}

case class EmbeddedKafkaConfigWithSchemaRegistryImpl(
    kafkaPort: Int,
    zooKeeperPort: Int,
    schemaRegistryPort: Int,
    customBrokerProperties: Map[String, String],
    customProducerProperties: Map[String, String],
    customConsumerProperties: Map[String, String]
) extends EmbeddedKafkaConfigWithSchemaRegistry {
  override val numberOfThreads: Int = 3
}

object EmbeddedKafkaConfigWithSchemaRegistry {
  implicit val defaultConfig: EmbeddedKafkaConfig = apply()

  def apply(
      kafkaPort: Int = 6001,
      zooKeeperPort: Int = 6000,
      schemaRegistryPort: Int = 6002,
      customBrokerProperties: Map[String, String] = Map.empty,
      customProducerProperties: Map[String, String] = Map.empty,
      customConsumerProperties: Map[String, String] = Map.empty
  ): EmbeddedKafkaConfigWithSchemaRegistry =
    EmbeddedKafkaConfigWithSchemaRegistryImpl(
      kafkaPort,
      zooKeeperPort,
      schemaRegistryPort,
      customBrokerProperties,
      customProducerProperties,
      customConsumerProperties
    )
}
Example 3
Source File: EmbeddedKafkaStreams.scala From scalatest-embedded-kafka with MIT License
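A lifecycle helper for Kafka Streams tests: it starts an embedded broker, creates the requested topics, runs a topology under a freshly generated application id for the duration of a code block, and closes the streams afterwards.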
package net.manub.embeddedkafka.streams

import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig, UUIDs}
import org.apache.kafka.streams.{KafkaStreams, Topology}

// The enclosing trait declaration was dropped in the original extract;
// in the library this method lives in a trait along these lines.
trait EmbeddedKafkaStreams extends EmbeddedKafka with TestStreamsConfig {

  def runStreams[T](
      topicsToCreate: Seq[String],
      topology: Topology,
      extraConfig: Map[String, AnyRef] = Map.empty
  )(block: => T)(implicit config: EmbeddedKafkaConfig): T =
    withRunningKafka {
      topicsToCreate.foreach(topic => createCustomTopic(topic))
      val streamId = UUIDs.newUuid().toString
      val streams =
        new KafkaStreams(topology, streamConfig(streamId, extraConfig))
      streams.start()
      try {
        block
      } finally {
        streams.close()
      }
    }(config)
}
Example 4
Source File: TestStreamsConfig.scala From scalatest-embedded-kafka with MIT License
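The configuration helper used by the previous example: it derives a StreamsConfig from the implicit EmbeddedKafkaConfig, pointing at the embedded broker, with a temporary state directory and consumers reset to the earliest offset.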
package net.manub.embeddedkafka.streams

import java.nio.file.Files

import net.manub.embeddedkafka.EmbeddedKafkaConfig
import org.apache.kafka.clients.consumer.{ConsumerConfig, OffsetResetStrategy}
import org.apache.kafka.streams.StreamsConfig

// The enclosing trait declaration was dropped in the original extract;
// in the library this method lives in a trait along these lines.
trait TestStreamsConfig {

  def streamConfig(
      streamName: String,
      extraConfig: Map[String, AnyRef] = Map.empty
  )(implicit kafkaConfig: EmbeddedKafkaConfig): StreamsConfig = {
    import scala.collection.JavaConverters._

    val defaultConfig = Map(
      StreamsConfig.APPLICATION_ID_CONFIG -> streamName,
      StreamsConfig.BOOTSTRAP_SERVERS_CONFIG -> s"localhost:${kafkaConfig.kafkaPort}",
      StreamsConfig.STATE_DIR_CONFIG -> Files
        .createTempDirectory(streamName)
        .toString,
      // force stream consumers to start reading from the beginning so as not to lose messages
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> OffsetResetStrategy.EARLIEST.toString.toLowerCase
    )
    val configOverwrittenByExtra = defaultConfig ++ extraConfig
    new StreamsConfig(configOverwrittenByExtra.asJava)
  }
}
Example 5
Source File: ExampleKafkaStreamsSpec.scala From scalatest-embedded-kafka with MIT License
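A full Kafka Streams spec on non-default ports (7000/7001), exercising runStreams together with managed, custom, and string-based consumers.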
package net.manub.embeddedkafka.streams

import net.manub.embeddedkafka.Codecs._
import net.manub.embeddedkafka.ConsumerExtensions._
import net.manub.embeddedkafka.EmbeddedKafkaConfig
import org.apache.kafka.common.serialization.{Serde, Serdes}
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.{Consumed, KStream, Produced}
import org.scalatest.{Matchers, WordSpec}

class ExampleKafkaStreamsSpec
    extends WordSpec
    with Matchers
    with EmbeddedKafkaStreamsAllInOne {

  import net.manub.embeddedkafka.Codecs.stringKeyValueCrDecoder

  implicit val config =
    EmbeddedKafkaConfig(kafkaPort = 7000, zooKeeperPort = 7001)

  val (inTopic, outTopic) = ("in", "out")

  val stringSerde: Serde[String] = Serdes.String()

  "A Kafka streams test" should {
    "be easy to run with streams and consumer lifecycle management" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        publishToKafka(inTopic, "baz", "yaz")
        withConsumer[String, String, Unit] { consumer =>
          val consumedMessages: Stream[(String, String)] =
            consumer.consumeLazily(outTopic)
          consumedMessages.take(2) should be(
            Seq("hello" -> "world", "foo" -> "bar"))
          consumedMessages.drop(2).head should be("baz" -> "yaz")
        }
      }
    }

    "allow support creating custom consumers" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        val consumer = newConsumer[String, String]()
        consumer.consumeLazily[(String, String)](outTopic).take(2) should be(
          Seq("hello" -> "world", "foo" -> "bar"))
        consumer.close()
      }
    }

    "allow for easy string based testing" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreamsWithStringConsumer(Seq(inTopic, outTopic),
                                   streamBuilder.build()) { consumer =>
        publishToKafka(inTopic, "hello", "world")
        consumer.consumeLazily[(String, String)](outTopic).head should be(
          "hello" -> "world")
      }
    }
  }
}
Example 6
Source File: KafkaSinkTest.scala From eel-sdk with Apache License 2.0
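Here the broker is started once for the whole suite (wrapped in Try, so an already-running broker is tolerated), and rows are round-tripped through a Kafka sink with hand-rolled serializers.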
package io.eels.component.kafka

import java.util
import java.util.{Properties, UUID}

import io.eels.Row
import io.eels.datastream.DataStream
import io.eels.schema.{Field, StringType, StructType}
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.common.serialization.{Deserializer, Serializer}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.collection.JavaConverters._
import scala.util.Try

class KafkaSinkTest extends FlatSpec with Matchers with BeforeAndAfterAll {

  implicit val kafkaConfig = EmbeddedKafkaConfig(
    kafkaPort = 6001,
    zooKeeperPort = 6000
  )

  Try {
    EmbeddedKafka.start()
  }

  val schema = StructType(
    Field("name", StringType, nullable = true),
    Field("location", StringType, nullable = true)
  )

  val ds = DataStream.fromValues(
    schema,
    Seq(
      Vector("clint eastwood", UUID.randomUUID().toString),
      Vector("elton john", UUID.randomUUID().toString)
    )
  )

  "KafkaSink" should "support default implicits" ignore {
    val topic = "mytopic-" + System.currentTimeMillis()

    val properties = new Properties()
    properties.put("bootstrap.servers", s"localhost:${kafkaConfig.kafkaPort}")
    properties.put("group.id", "test")
    properties.put("auto.offset.reset", "earliest")

    val producer = new KafkaProducer[String, Row](properties, StringSerializer, RowSerializer)
    val sink = KafkaSink(topic, producer)

    val consumer = new KafkaConsumer[String, String](properties, StringDeserializer, StringDeserializer)
    consumer.subscribe(util.Arrays.asList(topic))

    ds.to(sink)
    producer.close()

    val records = consumer.poll(4000)
    records.iterator().asScala.map(_.value).toList shouldBe ds.collect.map {
      case Row(_, values) => values.mkString(",")
    }.toList
  }
}

object RowSerializer extends Serializer[Row] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def serialize(topic: String, data: Row): Array[Byte] = data.values.mkString(",").getBytes
  override def close(): Unit = ()
}

object StringSerializer extends Serializer[String] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def serialize(topic: String, data: String): Array[Byte] = data.getBytes
}

object StringDeserializer extends Deserializer[String] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def deserialize(topic: String, data: Array[Byte]): String = new String(data)
}
Example 7
Source File: KafkaAdminAlgebraSpec.scala From hydra with Apache License 2.0
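This cats-effect based spec starts the broker in beforeAll and runs the same suite of topic-management tests against both the live Kafka-backed algebra and its in-memory test implementation.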
package hydra.kafka.algebras

import akka.actor.ActorSystem
import cats.effect.{ContextShift, IO}
import cats.implicits._
import hydra.kafka.util.KafkaUtils.TopicDetails
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.ExecutionContext

final class KafkaAdminAlgebraSpec
    extends AnyWordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with EmbeddedKafka {

  private val port = 8023

  implicit private val kafkaConfig: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = port, zooKeeperPort = 3027)

  implicit private val contextShift: ContextShift[IO] =
    IO.contextShift(ExecutionContext.global)

  implicit private val system: ActorSystem = ActorSystem(
    "kafka-client-spec-system"
  )

  override def beforeAll(): Unit = {
    super.beforeAll()
    EmbeddedKafka.start()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    EmbeddedKafka.stop()
  }

  (for {
    live <- KafkaAdminAlgebra.live[IO](s"localhost:$port")
    test <- KafkaAdminAlgebra.test[IO]
  } yield {
    runTests(live)
    runTests(test, isTest = true)
  }).unsafeRunSync()

  private def runTests(kafkaClient: KafkaAdminAlgebra[IO], isTest: Boolean = false): Unit = {
    (if (isTest) "KafkaAdmin#test" else "KafkaAdmin#live") must {
      "create a topic" in {
        val topicName = "Topic1"
        val topicDetails = TopicDetails(3, 1.toShort)
        (kafkaClient.createTopic(topicName, topicDetails) *> kafkaClient
          .describeTopic(topicName)
          .map {
            case Some(topic) =>
              topic.name shouldBe topicName
              topic.numberPartitions shouldBe topicDetails.numPartitions
            case None => fail("Found None when a Topic was Expected")
          }).unsafeRunSync()
      }

      "list all topics" in {
        kafkaClient.getTopicNames.unsafeRunSync() shouldBe List("Topic1")
      }

      "delete a topic" in {
        val topicToDelete = "topic_to_delete"
        (for {
          _ <- kafkaClient.createTopic(topicToDelete, TopicDetails(1, 1))
          _ <- kafkaClient.deleteTopic(topicToDelete)
          maybeTopic <- kafkaClient.describeTopic(topicToDelete)
        } yield maybeTopic should not be defined).unsafeRunSync()
      }
    }
  }
}
Example 8
Source File: KafkaConsumerProxySpec.scala From hydra with Apache License 2.0
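An Akka TestKit spec: topics are created up front in beforeAll, and a consumer-proxy actor is queried for latest offsets, topic lists, and partition info, with responses asserted via expectMsg.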
package hydra.kafka.consumer

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit}
import hydra.kafka.consumer.KafkaConsumerProxy._
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.common.TopicPartition
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpecLike
import org.scalatest.BeforeAndAfterAll

import scala.concurrent.duration._

class KafkaConsumerProxySpec
    extends TestKit(ActorSystem("test"))
    with Matchers
    with AnyFunSpecLike
    with BeforeAndAfterAll
    with ImplicitSender {

  implicit val config =
    EmbeddedKafkaConfig(kafkaPort = 8092, zooKeeperPort = 3181)

  override def beforeAll() = {
    super.beforeAll()
    EmbeddedKafka.start()
    EmbeddedKafka.createCustomTopic("test-consumer1")
    EmbeddedKafka.createCustomTopic("test-consumer2")
  }

  override def afterAll() = {
    super.afterAll()
    EmbeddedKafka.stop()
    TestKit.shutdownActorSystem(system, verifySystemShutdown = true)
  }

  lazy val kafkaProxy = system.actorOf(Props[KafkaConsumerProxy])

  describe("When using KafkaConsumerProxy") {
    it("gets latest offsets for a topic") {
      kafkaProxy ! GetLatestOffsets("test-consumer1")
      expectMsg(
        10.seconds,
        LatestOffsetsResponse(
          "test-consumer1",
          Map(new TopicPartition("test-consumer1", 0) -> 0L)
        )
      )
    }

    it("lists topics") {
      kafkaProxy ! ListTopics
      expectMsgPF(10.seconds) {
        case ListTopicsResponse(topics) =>
          topics.keys should contain allOf ("test-consumer1", "test-consumer2")
      }
    }

    it("gets partition info") {
      kafkaProxy ! GetPartitionInfo("test-consumer2")
      expectMsgPF(10.seconds) {
        case PartitionInfoResponse(topic, response) =>
          topic shouldBe "test-consumer2"
          response.map(p => p.partition()) shouldBe Seq(0)
      }
    }

    it("handles errors") {
      kafkaProxy ! GetPartitionInfo("test-consumer-unknown")
      expectMsgPF(10.seconds) {
        case PartitionInfoResponse(topic, response) =>
          response(0).leader().idString shouldBe "0"
          topic should startWith("test-consumer-unknown")
      }
    }
  }
}
Example 9
Source File: KafkaMetricsSpec.scala From hydra with Apache License 2.0
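Note the customBrokerProperties here, which disable topic auto-creation and shrink the offsets-topic replication factor; the spec then verifies that record metadata is published as JSON to a metrics topic.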
package hydra.kafka.transport

import akka.actor.ActorSystem
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import hydra.core.transport.AckStrategy
import hydra.kafka.producer.KafkaRecordMetadata
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpecLike
import org.scalatest.BeforeAndAfterAll
import spray.json.DefaultJsonProtocol

class KafkaMetricsSpec
    extends TestKit(ActorSystem("hydra"))
    with Matchers
    with AnyFunSpecLike
    with BeforeAndAfterAll
    with DefaultJsonProtocol {

  import KafkaRecordMetadata._

  implicit val config = EmbeddedKafkaConfig(
    kafkaPort = 8092,
    zooKeeperPort = 3181,
    customBrokerProperties = Map(
      "auto.create.topics.enable" -> "false",
      "offsets.topic.replication.factor" -> "1"
    )
  )

  override def afterAll() = {
    super.afterAll()
    EmbeddedKafka.stop()
    TestKit.shutdownActorSystem(system, verifySystemShutdown = true)
  }

  override def beforeAll() = {
    super.beforeAll()
    EmbeddedKafka.start()
    EmbeddedKafka.createCustomTopic("metrics_topic")
  }

  describe("When using the KafkaMetrics object") {
    it("uses the NoOpMetrics") {
      KafkaMetrics(ConfigFactory.empty()) shouldBe NoOpMetrics
      KafkaMetrics(
        ConfigFactory.parseString("transports.kafka.metrics.enabled=false")
      ) shouldBe NoOpMetrics
    }

    it("uses the PublishMetrics") {
      import spray.json._
      val cfg = ConfigFactory.parseString(s"""
        | transports.kafka.metrics.topic = metrics_topic
        | transports.kafka.metrics.enabled=true""".stripMargin)
      val pm = KafkaMetrics(cfg)
      pm shouldBe a[PublishMetrics]
      val kmd = KafkaRecordMetadata(1, 1, "topic", 1, 1, AckStrategy.NoAck)
      pm.saveMetrics(kmd)
      EmbeddedKafka
        .consumeFirstStringMessageFrom("metrics_topic")
        .parseJson shouldBe kmd.toJson
    }
  }
}
Example 10
Source File: AuditLogProviderItTest.scala From rokku with Apache License 2.0
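An integration test that scopes the broker to a single test with withRunningKafka and asserts on the JSON audit event written to the audit_events topic.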
package com.ing.wbaa.rokku.proxy.provider

import java.net.InetAddress

import akka.actor.ActorSystem
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, RemoteAddress, StatusCodes}
import com.ing.wbaa.rokku.proxy.config.KafkaSettings
import com.ing.wbaa.rokku.proxy.data._
import com.ing.wbaa.rokku.proxy.handler.parsers.RequestParser.RequestTypeUnknown
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.ExecutionContext

class AuditLogProviderItTest extends AnyWordSpecLike with Diagrams with EmbeddedKafka with AuditLogProvider {

  implicit val testSystem: ActorSystem = ActorSystem("kafkaTest")

  private val testKafkaPort = 9093

  override def auditEnabled = true

  override implicit val kafkaSettings: KafkaSettings = new KafkaSettings(testSystem.settings.config) {
    override val bootstrapServers: String = s"localhost:$testKafkaPort"
  }

  override implicit val executionContext: ExecutionContext = testSystem.dispatcher

  implicit val requestId: RequestId = RequestId("test")

  val s3Request = S3Request(AwsRequestCredential(AwsAccessKey("a"), None), Some("demobucket"), Some("s3object"), Read())
    .copy(headerIPs = HeaderIPs(
      Some(RemoteAddress(InetAddress.getByName("127.0.0.1"))),
      Some(Seq(RemoteAddress(InetAddress.getByName("1.1.1.1")))),
      Some(RemoteAddress(InetAddress.getByName("2.2.2.2")))))

  "AuditLogProvider" should {
    "send audit" in {
      implicit val config = EmbeddedKafkaConfig(kafkaPort = testKafkaPort)

      withRunningKafka {
        Thread.sleep(3000)
        val createEventsTopic = "audit_events"
        createCustomTopic(createEventsTopic)
        auditLog(s3Request, HttpRequest(HttpMethods.PUT, "http://localhost", Nil), "testUser", RequestTypeUnknown(), StatusCodes.Processing)
        val result = consumeFirstStringMessageFrom(createEventsTopic)
        assert(result.contains("\"eventName\":\"PUT\""))
        assert(result.contains("\"sourceIPAddress\":\"ClientIp=unknown|X-Real-IP=127.0.0.1|X-Forwarded-For=1.1.1.1|Remote-Address=2.2.2.2\""))
        assert(result.contains("\"x-amz-request-id\":\"test\""))
        assert(result.contains("\"principalId\":\"testUser\""))
      }
    }
  }
}
Example 11
Source File: MessageProviderKafkaItTest.scala From rokku with Apache License 2.0
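Similar to the previous example, but each test opens its own withRunningKafka block on the same port to check that create and delete events land on their respective topics.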
package com.ing.wbaa.rokku.proxy.provider

import java.net.InetAddress

import akka.actor.ActorSystem
import akka.http.scaladsl.model.{HttpMethods, RemoteAddress}
import com.ing.wbaa.rokku.proxy.config.KafkaSettings
import com.ing.wbaa.rokku.proxy.data._
import com.ing.wbaa.rokku.proxy.handler.parsers.RequestParser.RequestTypeUnknown
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.scalatest.RecoverMethods._
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.ExecutionContext

class MessageProviderKafkaItTest extends AnyWordSpecLike with Diagrams with EmbeddedKafka with MessageProviderKafka {

  implicit val testSystem: ActorSystem = ActorSystem("kafkaTest")

  private val testKafkaPort = 9093

  override implicit val kafkaSettings: KafkaSettings = new KafkaSettings(testSystem.settings.config) {
    override val bootstrapServers: String = s"localhost:$testKafkaPort"
  }

  override implicit val executionContext: ExecutionContext = testSystem.dispatcher

  implicit val requestId: RequestId = RequestId("test")

  val s3Request = S3Request(AwsRequestCredential(AwsAccessKey("a"), None), Some("demobucket"), Some("s3object"), Read())
    .copy(clientIPAddress = RemoteAddress(InetAddress.getByName("127.0.0.1")))

  "KafkaMessageProvider" should {
    "Send message to correct topic with Put or Post" in {
      implicit val config = EmbeddedKafkaConfig(kafkaPort = testKafkaPort)

      withRunningKafka {
        Thread.sleep(3000)
        val createEventsTopic = "create_events"
        createCustomTopic(createEventsTopic)
        emitEvent(s3Request, HttpMethods.PUT, "testUser", RequestTypeUnknown())
        val result = consumeFirstStringMessageFrom(createEventsTopic)
        assert(result.contains("s3:ObjectCreated:PUT"))
      }
    }

    "Send message to correct topic with Delete" in {
      implicit val config = EmbeddedKafkaConfig(kafkaPort = testKafkaPort)

      withRunningKafka {
        Thread.sleep(3000)
        val deleteEventsTopic = "delete_events"
        createCustomTopic(deleteEventsTopic)
        emitEvent(s3Request, HttpMethods.DELETE, "testUser", RequestTypeUnknown())
        assert(consumeFirstStringMessageFrom(deleteEventsTopic).contains("s3:ObjectRemoved:DELETE"))
      }
    }

    "fail on incomplete data" in {
      recoverToSucceededIf[Exception](emitEvent(s3Request.copy(s3Object = None), HttpMethods.PUT, "testUser", RequestTypeUnknown()))
    }
  }
}
Example 12
Source File: package.scala From zio-kafka with Apache License 2.0
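This example wraps the embedded broker in a ZLayer: the managed resource starts Kafka with custom broker properties and guarantees stop() on release, with a second layer pointing at a locally running broker on 9092.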
package zio.kafka

import net.manub.embeddedkafka.{ EmbeddedK, EmbeddedKafka, EmbeddedKafkaConfig }
import zio._

package object embedded {
  type Kafka = Has[Kafka.Service]

  object Kafka {
    trait Service {
      def bootstrapServers: List[String]
      def stop(): UIO[Unit]
    }

    case class EmbeddedKafkaService(embeddedK: EmbeddedK) extends Service {
      override def bootstrapServers: List[String] = List(s"localhost:${embeddedK.config.kafkaPort}")
      override def stop(): UIO[Unit]              = ZIO.effectTotal(embeddedK.stop(true))
    }

    case object DefaultLocal extends Service {
      override def bootstrapServers: List[String] = List(s"localhost:9092")
      override def stop(): UIO[Unit]              = UIO.unit
    }

    val embedded: ZLayer[Any, Throwable, Kafka] = ZLayer.fromManaged {
      implicit val embeddedKafkaConfig = EmbeddedKafkaConfig(
        customBrokerProperties = Map("group.min.session.timeout.ms" -> "500", "group.initial.rebalance.delay.ms" -> "0")
      )
      ZManaged.make(ZIO.effect(EmbeddedKafkaService(EmbeddedKafka.start())))(_.stop())
    }

    val local: ZLayer[Any, Nothing, Kafka] = ZLayer.succeed(DefaultLocal)
  }
}
Example 13
Source File: AdminOps.scala From embedded-kafka with MIT License
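A helper from the library itself: it builds an AdminClient against the embedded broker, evaluates the body inside a Try, and always closes the client with a bounded timeout.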
package net.manub.embeddedkafka.ops

import net.manub.embeddedkafka.{EmbeddedKafkaConfig, duration2JavaDuration}
import org.apache.kafka.clients.admin.{
  AdminClient,
  AdminClientConfig,
  DeleteTopicsOptions,
  NewTopic
}

import scala.jdk.CollectionConverters._
import scala.concurrent.duration._
import scala.util.Try

// The enclosing trait declaration was dropped in the original extract; the
// method below lives in a trait parameterized on the config type, roughly as
// sketched here. zkSessionTimeoutMs, zkConnectionTimeoutMs and
// adminClientCloseTimeout are defined elsewhere in the library trait.
trait AdminOps[C <: EmbeddedKafkaConfig] {

  protected def withAdminClient[T](
      body: AdminClient => T
  )(implicit config: C): Try[T] = {
    val adminClient = AdminClient.create(
      Map[String, Object](
        AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG -> s"localhost:${config.kafkaPort}",
        AdminClientConfig.CLIENT_ID_CONFIG -> "embedded-kafka-admin-client",
        AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG -> zkSessionTimeoutMs.toString,
        AdminClientConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG -> zkConnectionTimeoutMs.toString
      ).asJava
    )

    val res = Try(body(adminClient))
    adminClient.close(duration2JavaDuration(adminClientCloseTimeout))

    res
  }
}
Example 14
Source File: ExampleKafkaStreamsSpec.scala From embedded-kafka with MIT License
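The same streams spec as Example 5, ported to the renamed embedded-kafka fork: the helpers are imported from the EmbeddedKafkaStreams companion and the consumer blocks now return Assertion.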
package net.manub.embeddedkafka.streams

import net.manub.embeddedkafka.Codecs._
import net.manub.embeddedkafka.ConsumerExtensions._
import net.manub.embeddedkafka.EmbeddedKafkaConfig
import net.manub.embeddedkafka.streams.EmbeddedKafkaStreams._
import org.apache.kafka.common.serialization.{Serde, Serdes}
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.{Consumed, KStream, Produced}
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ExampleKafkaStreamsSpec extends AnyWordSpec with Matchers {

  implicit val config: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = 7000, zooKeeperPort = 7001)

  val (inTopic, outTopic) = ("in", "out")

  val stringSerde: Serde[String] = Serdes.String()

  "A Kafka streams test" should {
    "be easy to run with streams and consumer lifecycle management" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        publishToKafka(inTopic, "baz", "yaz")
        withConsumer[String, String, Assertion] { consumer =>
          val consumedMessages =
            consumer.consumeLazily[(String, String)](outTopic)
          consumedMessages.take(2).toList should be(
            Seq("hello" -> "world", "foo" -> "bar")
          )
          val h :: _ = consumedMessages.drop(2).toList
          h should be("baz" -> "yaz")
        }
      }
    }

    "allow support creating custom consumers" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        withConsumer[String, String, Assertion] { consumer =>
          consumer.consumeLazily[(String, String)](outTopic).take(2) should be(
            Seq("hello" -> "world", "foo" -> "bar")
          )
        }
      }
    }

    "allow for easy string based testing" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build())(
        withConsumer[String, String, Assertion]({ consumer =>
          publishToKafka(inTopic, "hello", "world")
          val h :: _ = consumer.consumeLazily[(String, String)](outTopic).toList
          h should be("hello" -> "world")
        })
      )(config)
    }
  }
}
Example 15
Source File: SpecBase.scala From kafka-lag-exporter with Apache License 2.0
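An abstract base class for integration specs: Alpakka Kafka's testkit supplies the embedded cluster via createKafkaConfig, and the exporter application is started from generated HOCON before each test and stopped after it.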
package com.lightbend.kafkalagexporter.integration

import akka.actor.typed.ActorSystem
import akka.kafka.testkit.scaladsl.{EmbeddedKafkaLike, ScalatestKafkaSpec}
import com.lightbend.kafkalagexporter.MainApp
import com.lightbend.kafkalagexporter.KafkaClusterManager
import com.typesafe.config.{Config, ConfigFactory}
import net.manub.embeddedkafka.EmbeddedKafkaConfig
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.Await
import scala.concurrent.duration._

abstract class SpecBase(kafkaPort: Int, val exporterPort: Int)
    extends ScalatestKafkaSpec(kafkaPort)
    with WordSpecLike
    with BeforeAndAfterEach
    with EmbeddedKafkaLike
    with Matchers
    with ScalaFutures
    with Eventually
    with PrometheusUtils
    with LagSim {

  override def createKafkaConfig: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort, zooKeeperPort,
      Map(
        "offsets.topic.replication.factor" -> "1"
      ))

  var kafkaLagExporter: ActorSystem[KafkaClusterManager.Message] = _

  val clusterName = "default"

  val config: Config = ConfigFactory.parseString(s"""
      |kafka-lag-exporter {
      |  port: $exporterPort
      |  clusters = [
      |    {
      |      name: "$clusterName"
      |      bootstrap-brokers: "localhost:$kafkaPort"
      |    }
      |  ]
      |  poll-interval = 5 seconds
      |  lookup-table-size = 20
      |}""".stripMargin).withFallback(ConfigFactory.load())

  override def beforeEach(): Unit = {
    kafkaLagExporter = MainApp.start(config)
  }

  override def afterEach(): Unit = {
    kafkaLagExporter ! KafkaClusterManager.Stop
    Await.result(kafkaLagExporter.whenTerminated, 10 seconds)
  }
}
Example 16
Source File: KafkaExample.scala From cornichon with Apache License 2.0
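A cornichon feature using the Kafka DSL; the embedded broker is started on the DSL's bootstrap port in beforeFeature and stopped in afterFeature.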
package com.github.agourlay.kafka.kafka

import com.github.agourlay.cornichon.CornichonFeature
import com.github.agourlay.cornichon.kafka.KafkaDsl
import net.manub.embeddedkafka.{ EmbeddedKafka, EmbeddedKafkaConfig }

class KafkaExample extends CornichonFeature with KafkaDsl {

  override lazy val kafkaBootstrapServersHost = "localhost"
  override lazy val kafkaBootstrapServersPort = 9092

  def feature = Feature("Kafka DSL") {

    Scenario("write and read arbitrary Strings to/from topic") {
      Given I put_topic(
        topic = "cornichon",
        key = "success",
        message = "I am a plain string"
      )

      When I read_from_topic("cornichon")

      Then assert kafka("cornichon").topic_is("cornichon")
      Then assert kafka("cornichon").key_is("success")
      Then assert kafka("cornichon").message_value.is("I am a plain string")
    }

    Scenario("use cornichon jsonAssertions on the message value") {
      Given I put_topic(
        topic = "cornichon",
        key = "json",
        message = """{ "coffee": "black", "cornichon": "green" }"""
      )

      When I read_from_topic("cornichon")
      Then assert kafka("cornichon").key_is("json")
      Then assert kafka("cornichon").message_value.ignoring("coffee").is("""
        { "cornichon": "green" }
      """)
    }
  }

  beforeFeature {
    // start an embedded kafka for the tests
    EmbeddedKafka.start() {
      EmbeddedKafkaConfig(
        kafkaPort = kafkaBootstrapServersPort,
        customBrokerProperties = Map("group.initial.rebalance.delay.ms" -> "10")
      )
    }
    ()
  }

  afterFeature {
    EmbeddedKafka.stop()
  }
}