org.apache.kafka.clients.consumer.ConsumerRecords Scala Examples
The following examples show how to use org.apache.kafka.clients.consumer.ConsumerRecords, the batch type returned by KafkaConsumer.poll. Each example is taken from an open-source project; the source file and license are noted above each listing.
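For orientation before the project examples, here is a minimal, self-contained sketch of how a ConsumerRecords batch is typically obtained and iterated. The broker address, group id, and topic name are placeholder assumptions, not taken from the examples below.

import java.time.Duration
import java.util.Properties

import org.apache.kafka.clients.consumer.{ConsumerRecords, KafkaConsumer}

import scala.collection.JavaConverters._

object ConsumerRecordsExample extends App {
  val props = new Properties()
  props.put("bootstrap.servers", "localhost:9092")   // placeholder broker address
  props.put("group.id", "consumer-records-example")  // placeholder group id
  props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
  props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")

  val consumer = new KafkaConsumer[String, String](props)
  consumer.subscribe(List("example-topic").asJava)   // placeholder topic

  // poll() returns a ConsumerRecords batch: zero or more ConsumerRecord
  // entries grouped by topic-partition.
  val records: ConsumerRecords[String, String] = consumer.poll(Duration.ofSeconds(1))
  records.asScala.foreach { r =>
    println(s"${r.topic}-${r.partition}@${r.offset}: ${r.key} -> ${r.value}")
  }

  consumer.close()
}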
Example 1
Source File: SimpleKafkaConsumer.scala, from remora (MIT License)
import java.util.Properties

import com.fasterxml.jackson.databind.KeyDeserializer
import org.apache.kafka.clients.consumer.{ConsumerRecords, KafkaConsumer}
import org.apache.kafka.common.serialization.Deserializer
import net.manub.embeddedkafka.Codecs.stringDeserializer
import net.manub.embeddedkafka.ConsumerExtensions._

class SimpleKafkaConsumer[K, V](consumerProps: Properties,
                                topic: String,
                                keyDeserializer: Deserializer[K],
                                valueDeserializer: Deserializer[V],
                                function: ConsumerRecords[K, V] => Unit,
                                poll: Long = 2000) {

  private var running = false

  private val consumer = new KafkaConsumer[K, V](consumerProps, keyDeserializer, valueDeserializer)

  // Background thread that polls the topic and hands each batch of
  // ConsumerRecords to the supplied callback until stop() is called.
  private val thread = new Thread {
    import scala.collection.JavaConverters._

    override def run: Unit = {
      consumer.subscribe(List(topic).asJava)
      consumer.partitionsFor(topic)

      while (running) {
        val record: ConsumerRecords[K, V] = consumer.poll(poll)
        function(record)
      }
    }
  }

  def start(): Unit = {
    if (!running) {
      running = true
      thread.start()
    }
  }

  def stop(): Unit = {
    if (running) {
      running = false
      thread.join()
      consumer.close()
    }
  }
}
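One caveat worth noting: consumer.poll(poll) above uses the poll(long) overload, which Kafka clients 2.0 and later deprecate in favour of poll(java.time.Duration). A minimal sketch of the equivalent call inside the run loop, assuming the same consumer and poll values:

import java.time.Duration

// Equivalent call with the non-deprecated overload (Kafka clients 2.0+).
val record: ConsumerRecords[K, V] = consumer.poll(Duration.ofMillis(poll))

Note also that running is a plain var written from the caller's thread and read from the polling thread, so a stop request is not guaranteed to become visible promptly; marking the flag @volatile would make the hand-off safe.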
Example 2
Source File: KafkaConsumer.scala, from aecor (MIT License)
package aecor.kafkadistributedprocessing.internal

import java.time.Duration
import java.util.Properties
import java.util.concurrent.Executors

import cats.effect.{ Async, ContextShift, Resource }
import cats.~>
import org.apache.kafka.clients.consumer.{ Consumer, ConsumerRebalanceListener, ConsumerRecords }
import org.apache.kafka.common.PartitionInfo
import org.apache.kafka.common.serialization.Deserializer

import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration

// The Kafka consumer is not thread-safe, so every operation is funnelled
// through `withConsumer`, a natural transformation (kind-projector syntax)
// that runs the given function on the consumer's dedicated thread.
private[kafkadistributedprocessing] final class KafkaConsumer[F[_], K, V](
  withConsumer: (Consumer[K, V] => *) ~> F
) {
  def subscribe(topics: Set[String], listener: ConsumerRebalanceListener): F[Unit] =
    withConsumer(_.subscribe(topics.asJava, listener))

  def subscribe(topics: Set[String]): F[Unit] =
    withConsumer(_.subscribe(topics.asJava))

  val unsubscribe: F[Unit] =
    withConsumer(_.unsubscribe())

  def partitionsFor(topic: String): F[Set[PartitionInfo]] =
    withConsumer(_.partitionsFor(topic).asScala.toSet)

  def close: F[Unit] =
    withConsumer(_.close())

  def poll(timeout: FiniteDuration): F[ConsumerRecords[K, V]] =
    withConsumer(_.poll(Duration.ofNanos(timeout.toNanos)))
}

private[kafkadistributedprocessing] object KafkaConsumer {
  final class Create[F[_]] {
    def apply[K, V](
      config: Properties,
      keyDeserializer: Deserializer[K],
      valueDeserializer: Deserializer[V]
    )(implicit F: Async[F], contextShift: ContextShift[F]): Resource[F, KafkaConsumer[F, K, V]] = {
      val create = F.suspend {
        // A single-threaded executor pins all consumer access to one thread.
        val executor = Executors.newSingleThreadExecutor()

        def eval[A](a: => A): F[A] =
          contextShift.evalOn(ExecutionContext.fromExecutor(executor)) {
            F.async[A] { cb =>
              executor.execute(new Runnable {
                override def run(): Unit =
                  cb {
                    try Right(a)
                    catch {
                      case e: Throwable => Left(e)
                    }
                  }
              })
            }
          }

        eval {
          // Temporarily clear the context classloader while constructing the
          // consumer to sidestep classloader issues, then restore it.
          val original = Thread.currentThread.getContextClassLoader
          Thread.currentThread.setContextClassLoader(null)
          val consumer = new org.apache.kafka.clients.consumer.KafkaConsumer[K, V](
            config,
            keyDeserializer,
            valueDeserializer
          )
          Thread.currentThread.setContextClassLoader(original)
          val withConsumer = new ((Consumer[K, V] => *) ~> F) {
            def apply[A](f: Consumer[K, V] => A): F[A] =
              eval(f(consumer))
          }
          new KafkaConsumer[F, K, V](withConsumer)
        }
      }
      Resource.make(create)(_.close)
    }
  }

  def create[F[_]]: Create[F] = new Create[F]
}
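Since both the class and its companion are private[kafkadistributedprocessing], the builder is only usable inside that package. A hedged sketch of what a call site there could look like, with cats-effect 2's IO standing in for F; the broker address and topic are assumptions:

import java.util.Properties

import cats.effect.{ContextShift, IO}
import cats.implicits._
import org.apache.kafka.common.serialization.StringDeserializer

import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

implicit val cs: ContextShift[IO] = IO.contextShift(ExecutionContext.global)

val props = new Properties()
props.put("bootstrap.servers", "localhost:9092") // assumed broker address

// Resource.make in the builder guarantees close() runs after use.
val count: IO[Int] =
  KafkaConsumer
    .create[IO]
    .apply[String, String](props, new StringDeserializer, new StringDeserializer)
    .use { consumer =>
      consumer.subscribe(Set("example-topic")) *>
        consumer.poll(1.second).map(_.count())
    }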
Example 3
Source File: KafkaConsumerSpec.scala, from freestyle-kafka (Apache License 2.0)
package freestyle
package kafka

import freestyle.free._
import net.manub.embeddedkafka.EmbeddedKafka
import org.scalatest.WordSpec

import scala.concurrent.duration._
import cats.implicits._
import org.apache.kafka.clients.consumer.ConsumerRecords
import scala.collection.JavaConverters._

class KafkaConsumerSpec extends WordSpec with FSKafkaAlgebraSpec {

  "Consumer can be reused after closed" in {
    withConsumer[String].apply { consumer =>
      for {
        _ <- consumer.close()
        isClosed <- consumer.isClosed
        _ <- consumer.metrics
        isClosedAfterUsed <- consumer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Consumer can be reused after closed with a timeout" in {
    withConsumer[String].apply { consumer =>
      for {
        _ <- consumer.closeWaitingFor(5.seconds)
        isClosed <- consumer.isClosed
        _ <- consumer.metrics
        isClosedAfterUsed <- consumer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Consumer can subscribe to topics" in {
    val topics = "topicsubscription" :: Nil
    createCustomTopic(topics.head)
    withConsumer[String].apply { consumer =>
      for {
        _ <- consumer.subscribe(topics)
        topics <- consumer.subscription
      } yield topics
    } shouldBe Right(topics)
  }

  "Consumer can read a message from a topic" in {
    val topic = "mytopic"
    val key = "key"
    val message = "mymessage"
    withProducerAndConsumer[String].apply { (producer, consumer) =>
      for {
        _ <- producer.sendToTopic(topic, (key, message))
        _ <- producer.flush()
        _ <- consumer.subscribe(topic :: Nil)
        _ <- consumer.commitSync()
        records <- consumer.poll(10.seconds)
        message = records.records(topic).asScala.toList.headOption.map(_.value)
      } yield message
    } shouldBe Right(Some("mymessage"))
  }

  "Consumer can obtain metrics" in {
    withProducer[String].apply { _.metrics }.isRight shouldBe true
  }
}
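The read test extracts the payload with records.records(topic). For clarity, the same access pattern as a standalone helper; the names are illustrative, not from the spec:

import org.apache.kafka.clients.consumer.ConsumerRecords

import scala.collection.JavaConverters._

// records(topic) narrows the polled batch to a single topic, whereas
// iterating the ConsumerRecords value itself walks every topic-partition.
def firstValue[K, V](records: ConsumerRecords[K, V], topic: String): Option[V] =
  records.records(topic).asScala.headOption.map(_.value)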
Example 4
Source File: IdempotentProducerSpec.scala, from scala-kafka-client (MIT License)
package cakesolutions.kafka

import org.apache.kafka.clients.consumer.ConsumerRecords
import org.apache.kafka.common.KafkaException
import org.apache.kafka.common.requests.IsolationLevel
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.util.Random

class IdempotentProducerSpec extends KafkaIntSpec {

  private val log = LoggerFactory.getLogger(getClass)

  private def randomString: String = Random.alphanumeric.take(5).mkString("")

  val idempotentProducerConfig: KafkaProducer.Conf[String, String] =
    KafkaProducer.Conf(new StringSerializer(), new StringSerializer(),
      bootstrapServers = s"localhost:$kafkaPort",
      enableIdempotence = true)

  val transactionalProducerConfig: KafkaProducer.Conf[String, String] =
    KafkaProducer.Conf(new StringSerializer(), new StringSerializer(),
      bootstrapServers = s"localhost:$kafkaPort",
      transactionalId = Some("t1"),
      enableIdempotence = true)

  val consumerConfig: KafkaConsumer.Conf[String, String] =
    KafkaConsumer.Conf(new StringDeserializer(), new StringDeserializer(),
      bootstrapServers = s"localhost:$kafkaPort",
      groupId = randomString,
      enableAutoCommit = false)

  val transactionConsumerConfig: KafkaConsumer.Conf[String, String] =
    KafkaConsumer.Conf(new StringDeserializer(), new StringDeserializer(),
      bootstrapServers = s"localhost:$kafkaPort",
      groupId = randomString,
      enableAutoCommit = false,
      isolationLevel = IsolationLevel.READ_COMMITTED)

  "Producer with idempotent config" should "deliver batch" in {
    val topic = randomString
    log.info(s"Using topic [$topic] and kafka port [$kafkaPort]")

    val producer = KafkaProducer(idempotentProducerConfig)
    val consumer = KafkaConsumer(consumerConfig)

    consumer.subscribe(List(topic).asJava)

    val records1 = consumer.poll(1000)
    records1.count() shouldEqual 0

    log.info("Kafka producer connecting on port: [{}]", kafkaPort)
    producer.send(KafkaProducerRecord(topic, Some("key"), "value"))
    producer.flush()

    val records2: ConsumerRecords[String, String] = consumer.poll(1000)
    records2.count() shouldEqual 1

    producer.close()
    consumer.close()
  }

  "Producer with transaction" should "deliver batch" in {
    val topic = randomString
    log.info(s"Using topic [$topic] and kafka port [$kafkaPort]")

    val producer = KafkaProducer(transactionalProducerConfig)
    val consumer = KafkaConsumer(transactionConsumerConfig)

    consumer.subscribe(List(topic).asJava)

    val records1 = consumer.poll(1000)
    records1.count() shouldEqual 0

    log.info("Kafka producer connecting on port: [{}]", kafkaPort)

    producer.initTransactions()
    try {
      producer.beginTransaction()
      producer.send(KafkaProducerRecord(topic, Some("key"), "value"))
      producer.commitTransaction()
    } catch {
      case ex: KafkaException =>
        log.error(ex.getMessage, ex)
        producer.abortTransaction()
    }

    val records2: ConsumerRecords[String, String] = consumer.poll(1000)
    records2.count() shouldEqual 1

    producer.close()
    consumer.close()
  }
}
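The transactional test relies on the second consumer reading with READ_COMMITTED isolation, so records from an open or aborted transaction stay invisible to it. With the plain Kafka client the same setting is a single consumer property; a sketch, not part of the listing above:

import java.util.Properties

val props = new Properties()
// Hide records from transactions that are still open or were aborted.
props.put("isolation.level", "read_committed")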
Example 5
Source File: KafkaConsumerSpec.scala, from scala-kafka-client (MIT License)
package cakesolutions.kafka

import org.apache.kafka.clients.consumer.ConsumerRecords
import org.scalatest.concurrent.Waiters.Waiter

import scala.concurrent.ExecutionContext.Implicits.global
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.util.{Failure, Random, Success}

class KafkaConsumerSpec extends KafkaIntSpec {

  private def randomString: String = Random.alphanumeric.take(5).mkString("")

  private val log = LoggerFactory.getLogger(getClass)

  private val serializer = (msg: String) => msg.getBytes
  private val deserializer = (bytes: Array[Byte]) => new String(bytes)

  val consumerConfig: KafkaConsumer.Conf[String, String] = {
    KafkaConsumer.Conf(KafkaDeserializer(deserializer), KafkaDeserializer(deserializer),
      bootstrapServers = s"localhost:$kafkaPort",
      groupId = randomString,
      enableAutoCommit = false)
  }

  val producerConfig: KafkaProducer.Conf[String, String] = {
    KafkaProducer.Conf(KafkaSerializer(serializer), KafkaSerializer(serializer),
      bootstrapServers = s"localhost:$kafkaPort")
  }

  "KafkaConsumer and KafkaProducer with Function serializers" should "deliver and consume a message" in {
    val topic = randomString
    log.info(s"Using topic [$topic] and kafka port [$kafkaPort]")

    val producer = KafkaProducer(producerConfig)
    val consumer = KafkaConsumer(consumerConfig)

    consumer.subscribe(List(topic).asJava)
    val records1 = consumer.poll(1000)
    records1.count() shouldEqual 0

    log.info("Kafka producer connecting on port: [{}]", kafkaPort)
    producer.send(KafkaProducerRecord(topic, Some("key"), "value"))
    producer.flush()

    val records2: ConsumerRecords[String, String] = consumer.poll(1000)
    records2.count() shouldEqual 1

    producer.close()
    consumer.close()
  }

  "Kafka producer with bad serializer" should "return a failed future" in {
    val w = new Waiter
    val topic = randomString
    log.info(s"Using topic [$topic] and kafka port [$kafkaPort]")

    val badSerializer = (msg: String) => {
      throw new Exception("Serialization failed")
    }

    val producerConfig = KafkaProducer.Conf(
      KafkaSerializer(serializer),
      KafkaSerializer(badSerializer),
      bootstrapServers = s"localhost:$kafkaPort"
    )

    val producer = KafkaProducer(producerConfig)

    log.info("Kafka producer connecting on port: [{}]", kafkaPort)
    val future = producer.send(KafkaProducerRecord(topic, Some("key"), "value"))

    future.onComplete {
      case Success(_) =>
      case Failure(_) => w.dismiss()
    }
    w.await()

    producer.close()
  }
}
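As the second test shows, scala-kafka-client's send returns a scala.concurrent.Future, so a failing serializer surfaces asynchronously rather than as a thrown exception. A minimal sketch of handling that result outside a test, reusing producer and topic from the listing; the println reporting is illustrative:

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

producer.send(KafkaProducerRecord(topic, Some("key"), "value")).onComplete {
  case Success(meta) => println(s"written to ${meta.topic}-${meta.partition}@${meta.offset}")
  case Failure(err)  => println(s"send failed: ${err.getMessage}")
}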