org.apache.kafka.common.serialization.Serializer Scala Examples
The following examples show how to use org.apache.kafka.common.serialization.Serializer.
Each example is taken from an open-source project; the source file, project, and license are noted above each listing.
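Before the project examples, here is a minimal, hypothetical sketch of the Serializer interface in use: a custom serializer for a simple case class, passed directly to a KafkaProducer. The Person type, topic name, and broker address are illustrative assumptions and do not come from any of the projects below.

import java.util.Properties
import java.nio.charset.StandardCharsets
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.{Serializer, StringSerializer}

// Hypothetical example type; not part of any project shown below.
case class Person(name: String, age: Int)

// A Serializer[T] only has to turn T into bytes; configure/close are often no-ops.
class PersonSerializer extends Serializer[Person] {
  override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def serialize(topic: String, data: Person): Array[Byte] =
    if (data == null) null
    else s"${data.name},${data.age}".getBytes(StandardCharsets.UTF_8) // naive CSV encoding
}

object ProducerSketch extends App {
  val props = new Properties()
  props.put("bootstrap.servers", "localhost:9092") // assumed local broker
  // Serializer instances can be passed directly instead of via *.serializer config keys.
  val producer = new KafkaProducer[String, Person](props, new StringSerializer, new PersonSerializer)
  producer.send(new ProducerRecord("people", "key-1", Person("Ada", 36)))
  producer.close()
}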
Example 1
Source File: KafkaClient.scala From incubator-retired-gearpump with Apache License 2.0 | 6 votes |
package org.apache.gearpump.streaming.kafka.lib.util

import kafka.admin.AdminUtils
import kafka.cluster.Broker
import kafka.common.TopicAndPartition
import kafka.consumer.SimpleConsumer
import kafka.utils.{ZKStringSerializer, ZkUtils}
import org.I0Itec.zkclient.ZkClient
import org.apache.gearpump.streaming.kafka.lib.source.consumer.KafkaConsumer
import org.apache.gearpump.streaming.kafka.util.KafkaConfig
import org.apache.gearpump.util.LogUtil
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.common.serialization.Serializer

object KafkaClient {
  private val LOG = LogUtil.getLogger(classOf[KafkaClient])

  val factory = new KafkaClientFactory

  class KafkaClientFactory extends java.io.Serializable {
    def getKafkaClient(config: KafkaConfig): KafkaClient = {
      val consumerConfig = config.getConsumerConfig
      val zkClient = new ZkClient(consumerConfig.zkConnect, consumerConfig.zkSessionTimeoutMs,
        consumerConfig.zkConnectionTimeoutMs, ZKStringSerializer)
      new KafkaClient(config, zkClient)
    }
  }
}

class KafkaClient(config: KafkaConfig, zkClient: ZkClient) {
  import org.apache.gearpump.streaming.kafka.lib.util.KafkaClient._

  private val consumerConfig = config.getConsumerConfig

  def getTopicAndPartitions(consumerTopics: List[String]): Array[TopicAndPartition] = {
    try {
      ZkUtils.getPartitionsForTopics(zkClient, consumerTopics).flatMap {
        case (topic, partitions) => partitions.map(TopicAndPartition(topic, _))
      }.toArray
    } catch {
      case e: Exception =>
        LOG.error(e.getMessage)
        throw e
    }
  }

  def getBroker(topic: String, partition: Int): Broker = {
    try {
      val leader = ZkUtils.getLeaderForPartition(zkClient, topic, partition)
        .getOrElse(throw new RuntimeException(
          s"leader not available for TopicAndPartition($topic, $partition)"))
      ZkUtils.getBrokerInfo(zkClient, leader)
        .getOrElse(throw new RuntimeException(s"broker info not found for leader $leader"))
    } catch {
      case e: Exception =>
        LOG.error(e.getMessage)
        throw e
    }
  }

  def createConsumer(topic: String, partition: Int, startOffsetTime: Long): KafkaConsumer = {
    val broker = getBroker(topic, partition)
    val soTimeout = consumerConfig.socketTimeoutMs
    val soBufferSize = consumerConfig.socketReceiveBufferBytes
    val clientId = consumerConfig.clientId
    val fetchSize = consumerConfig.fetchMessageMaxBytes
    val consumer = new SimpleConsumer(broker.host, broker.port, soTimeout, soBufferSize, clientId)
    KafkaConsumer(topic, partition, startOffsetTime, fetchSize, consumer)
  }

  def createProducer[K, V](keySerializer: Serializer[K],
      valueSerializer: Serializer[V]): KafkaProducer[K, V] = {
    new KafkaProducer[K, V](config.getProducerConfig, keySerializer, valueSerializer)
  }

  def createTopic(topic: String, partitions: Int, replicas: Int): Boolean = {
    try {
      if (AdminUtils.topicExists(zkClient, topic)) {
        LOG.info(s"topic $topic exists")
        true
      } else {
        AdminUtils.createTopic(zkClient, topic, partitions, replicas)
        LOG.info(s"created topic $topic")
        false
      }
    } catch {
      case e: Exception =>
        LOG.error(e.getMessage)
        throw e
    }
  }

  def close(): Unit = {
    zkClient.close()
  }
}
Example 2
Source File: ModelStateSerde.scala From kafka-with-akka-streams-kafka-streams-tutorial with Apache License 2.0 | 5 votes |
package com.lightbend.scala.kafkastreams.store.store

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
import java.util

import com.lightbend.model.modeldescriptor.ModelDescriptor
import com.lightbend.scala.modelServer.model.PMML.PMMLModel
import com.lightbend.scala.modelServer.model.tensorflow.TensorFlowModel
import com.lightbend.scala.modelServer.model.{ModelToServeStats, ModelWithDescriptor}
import com.lightbend.scala.kafkastreams.store.StoreState
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

class ModelStateSerde extends Serde[StoreState] {

  private val mserializer = new ModelStateSerializer()
  private val mdeserializer = new ModelStateDeserializer()

  override def deserializer() = mdeserializer

  override def serializer() = mserializer

  override def configure(configs: util.Map[String, _], isKey: Boolean) = {}

  override def close() = {}
}

object ModelStateDeserializer {
  val factories = Map(
    ModelDescriptor.ModelType.PMML.index -> PMMLModel,
    ModelDescriptor.ModelType.TENSORFLOW.index -> TensorFlowModel
  )
}

class ModelStateDeserializer extends Deserializer[StoreState] {

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}

  override def deserialize(topic: String, data: Array[Byte]): StoreState = {
    if (data != null) {
      val input = new DataInputStream(new ByteArrayInputStream(data))
      new StoreState(ModelWithDescriptor.readModel(input), ModelWithDescriptor.readModel(input),
        ModelToServeStats.readServingInfo(input), ModelToServeStats.readServingInfo(input))
    } else new StoreState()
  }

  override def close(): Unit = {}
}

class ModelStateSerializer extends Serializer[StoreState] {

  private val bos = new ByteArrayOutputStream()

  override def serialize(topic: String, state: StoreState): Array[Byte] = {
    bos.reset()
    val output = new DataOutputStream(bos)
    ModelWithDescriptor.writeModel(output, state.currentModel.orNull)
    ModelWithDescriptor.writeModel(output, state.newModel.orNull)
    ModelToServeStats.writeServingInfo(output, state.currentState.orNull)
    ModelToServeStats.writeServingInfo(output, state.newState.orNull)
    try {
      output.flush()
      output.close()
    } catch {
      case t: Throwable =>
    }
    bos.toByteArray
  }

  override def close(): Unit = {}

  override def configure(configs: util.Map[String, _], isKey: Boolean) = {}
}
Example 3
Source File: Avro4sJsonSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdeavro4s

import java.io.ByteArrayOutputStream
import java.util

import com.sksamuel.avro4s.{ AvroJsonInputStream, AvroOutputStream, FromRecord, SchemaFor, ToRecord }
import org.apache.avro.file.SeekableByteArrayInput
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.control.NonFatal
import scala.util.{ Failure, Success }

trait Avro4sJsonSupport {
  implicit def toSerializer[T >: Null](implicit schemaFor: SchemaFor[T],
                                       toRecord: ToRecord[T]): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else {
          val baos = new ByteArrayOutputStream()
          try {
            val output = AvroOutputStream.json[T](baos)
            try {
              output.write(data)
            } finally {
              output.close()
            }
            baos.toByteArray
          } catch {
            case NonFatal(e) => throw new SerializationException(e)
          } finally {
            baos.close()
          }
        }
    }

  implicit def toDeserializer[T >: Null](
      implicit schemaFor: SchemaFor[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          new AvroJsonInputStream[T](new SeekableByteArrayInput(data),
                                     schemas.writerSchema,
                                     schemas.readerSchema).singleEntity match {
            case Success(json)  => json
            case Failure(error) => throw new SerializationException(error)
          }
    }

  implicit def toSerde[T >: Null](
      implicit schemaFor: SchemaFor[T],
      toRecord: ToRecord[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object Avro4sJsonSupport extends Avro4sJsonSupport
Example 4
Source File: Avro4sDataSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdeavro4s

import java.io.ByteArrayOutputStream
import java.util

import com.sksamuel.avro4s.{ AvroDataInputStream, AvroDataOutputStream, FromRecord, SchemaFor, ToRecord }
import org.apache.avro.file.{ CodecFactory, SeekableByteArrayInput }
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.control.NonFatal
import scala.util.{ Failure, Success }

trait Avro4sDataSupport {
  implicit def toSerializer[T >: Null](
      implicit schemaFor: SchemaFor[T],
      toRecord: ToRecord[T],
      codec: CodecFactory = CodecFactory.nullCodec()
  ): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else {
          val baos = new ByteArrayOutputStream()
          try {
            val output = AvroDataOutputStream[T](baos, codec)
            try {
              output.write(data)
            } finally {
              output.close()
            }
            baos.toByteArray
          } catch {
            case NonFatal(e) => throw new SerializationException(e)
          } finally {
            baos.close()
          }
        }
    }

  implicit def toDeserializer[T >: Null](
      implicit schemaFor: SchemaFor[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else {
          val it = new AvroDataInputStream[T](new SeekableByteArrayInput(data),
                                              schemas.writerSchema,
                                              schemas.readerSchema).tryIterator
          if (it.hasNext) {
            it.next() match {
              case Success(record) => record
              case Failure(err)    => throw new SerializationException(err)
            }
          } else {
            throw new SerializationException("Empty avro4s data iterator")
          }
        }
    }

  implicit def toSerde[T >: Null](implicit schemaFor: SchemaFor[T],
                                  toRecord: ToRecord[T],
                                  fromRecord: FromRecord[T],
                                  codec: CodecFactory = CodecFactory.nullCodec()): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object Avro4sDataSupport extends Avro4sDataSupport
Example 5
Source File: Avro4sBinarySupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdeavro4s

import java.io.ByteArrayOutputStream
import java.util

import com.sksamuel.avro4s.{ AvroBinaryInputStream, AvroOutputStream, FromRecord, SchemaFor, ToRecord }
import org.apache.avro.file.SeekableByteArrayInput
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.{ Failure, Success }
import scala.util.control.NonFatal

trait Avro4sBinarySupport {
  implicit def toSerializer[T >: Null](implicit schemaFor: SchemaFor[T],
                                       toRecord: ToRecord[T]): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else {
          val baos = new ByteArrayOutputStream()
          try {
            val output = AvroOutputStream.binary[T](baos)
            try {
              output.write(data)
            } finally {
              output.close()
            }
            baos.toByteArray
          } catch {
            case NonFatal(e) => throw new SerializationException(e)
          } finally {
            baos.close()
          }
        }
    }

  implicit def toDeserializer[T >: Null](
      implicit schemaFor: SchemaFor[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else {
          val it = new AvroBinaryInputStream[T](new SeekableByteArrayInput(data),
                                                schemas.writerSchema,
                                                schemas.readerSchema).tryIterator
          if (it.hasNext) {
            it.next() match {
              case Success(record) => record
              case Failure(err)    => throw new SerializationException(err)
            }
          } else {
            throw new SerializationException("Empty avro4s binary iterator")
          }
        }
    }

  implicit def toSerde[T >: Null](
      implicit schemaFor: SchemaFor[T],
      toRecord: ToRecord[T],
      fromRecord: FromRecord[T],
      schemas: WriterReaderSchemas = WriterReaderSchemas()
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object Avro4sBinarySupport extends Avro4sBinarySupport
Example 6
Source File: PlayJsonSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdeplayjson

import java.nio.charset.StandardCharsets.UTF_8
import java.util

import io.github.azhur.kafkaserdeplayjson.PlayJsonSupport.PlayJsonError
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }
import play.api.libs.json.{ JsError, JsValue, Json, Reads, Writes }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait PlayJsonSupport {
  implicit def toSerializer[T <: AnyRef](
      implicit writes: Writes[T],
      printer: JsValue => String = Json.stringify
  ): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try printer(writes.writes(data)).getBytes(UTF_8)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null <: AnyRef: Manifest](
      implicit reads: Reads[T]
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          reads
            .reads(Json.parse(new String(data, UTF_8)))
            .recoverTotal { e =>
              throw new SerializationException(PlayJsonError(e))
            }
    }

  implicit def toSerde[T >: Null <: AnyRef: Manifest](
      implicit writes: Writes[T],
      reads: Reads[T],
      printer: JsValue => String = Json.stringify
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object PlayJsonSupport extends PlayJsonSupport {
  final case class PlayJsonError(error: JsError) extends RuntimeException {
    override def getMessage: String =
      JsError.toJson(error).toString()
  }
}
Example 7
Source File: JacksonFormatSchemaSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdejackson

import java.util

import scala.reflect.runtime.universe._
import com.fasterxml.jackson.core.FormatSchema
import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }
import Jackson.typeReference

import scala.language.implicitConversions
import scala.reflect.ClassTag
import scala.util.control.NonFatal

trait JacksonFormatSchemaSupport {
  implicit def toSerializer[T <: AnyRef](implicit mapper: ObjectMapper,
                                         schema: FormatSchema): Serializer[T] =
    new Serializer[T] {
      private val writer = mapper.writer(schema)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try writer.writeValueAsBytes(data)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null <: AnyRef](
      implicit mapper: ObjectMapper,
      schema: FormatSchema,
      tt: TypeTag[T]
  ): Deserializer[T] =
    new Deserializer[T] {
      private val reader = mapper.readerFor(typeReference[T]).`with`(schema)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try reader.readValue[T](data)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null <: AnyRef](
      implicit mapper: ObjectMapper,
      schema: FormatSchema,
      ct: TypeTag[T]
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object JacksonFormatSchemaSupport extends JacksonFormatSchemaSupport
Example 8
Source File: JacksonJsonSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdejackson

import java.util

import com.fasterxml.jackson.databind.ObjectMapper
import Jackson.typeReference
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.reflect.runtime.universe._
import scala.util.control.NonFatal

trait JacksonJsonSupport {
  implicit def toSerializer[T <: AnyRef](implicit mapper: ObjectMapper): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try mapper.writeValueAsBytes(data)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null <: AnyRef](
      implicit mapper: ObjectMapper,
      tt: TypeTag[T]
  ): Deserializer[T] =
    new Deserializer[T] {
      private val tr = typeReference[T]
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try mapper.readValue[T](data, tr)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null <: AnyRef](
      implicit mapper: ObjectMapper,
      tt: TypeTag[T]
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object JacksonJsonSupport extends JacksonJsonSupport
Example 9
Source File: Customer.scala From fusion-data with Apache License 2.0 | 5 votes |
package kafkasample.demo

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.util

import helloscala.common.util.StringUtils
import org.apache.kafka.common.serialization.Serializer

case class Customer(customerId: Int, customerName: String) {}

class CustomerSerializer extends Serializer[Customer] {
  private val EMPTY_NAME = Array[Byte]()

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}

  override def serialize(topic: String, data: Customer): Array[Byte] =
    if (data eq null) {
      null
    } else {
      var nameLen = 0
      var nameBytes = EMPTY_NAME
      if (StringUtils.isNoneBlank(data.customerName)) {
        nameLen = data.customerName.length
        nameBytes = data.customerName.getBytes(StandardCharsets.UTF_8)
      }
      val buf = ByteBuffer.allocate(4 + 4 + nameLen)
      buf.putInt(data.customerId)
      buf.putInt(nameLen)
      buf.put(nameBytes)
      buf.array()
    }

  override def close(): Unit = ???
}
Example 10
Source File: package.scala From Waves with MIT License | 5 votes |
package com.wavesplatform.events

import java.util

import com.wavesplatform.events.protobuf.PBEvents
import com.wavesplatform.events.settings.BlockchainUpdatesSettings
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.config.SaslConfigs
import org.apache.kafka.common.serialization.{IntegerSerializer, Serializer}

package object kafka {
  private object BlockchainUpdatedSerializer extends Serializer[BlockchainUpdated] {
    override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
    override def close(): Unit = {}
    override def serialize(topic: String, data: BlockchainUpdated): Array[Byte] =
      PBEvents.protobuf(data).toByteArray
  }

  private object IntSerializer extends Serializer[Int] {
    val integerSerializer = new IntegerSerializer

    override def configure(configs: util.Map[String, _], isKey: Boolean): Unit =
      integerSerializer.configure(configs, isKey)
    override def close(): Unit = integerSerializer.close()
    override def serialize(topic: String, data: Int): Array[Byte] =
      integerSerializer.serialize(topic, data)
  }

  def createProperties(settings: BlockchainUpdatesSettings): util.Properties = {
    val props = new util.Properties()
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, settings.bootstrapServers)
    props.put(ProducerConfig.CLIENT_ID_CONFIG, settings.clientId)
    // props.put(ProducerConfig.RETRIES_CONFIG, "0")

    // SASL_SSL
    if (settings.ssl.enabled) {
      props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL")
      props.put(SaslConfigs.SASL_MECHANISM, "PLAIN")
      props.put(
        SaslConfigs.SASL_JAAS_CONFIG,
        s"org.apache.kafka.common.security.plain.PlainLoginModule required username = '${settings.ssl.username}' password = '${settings.ssl.password}';"
      )
    }
    props
  }

  def createProducerProperties(settings: BlockchainUpdatesSettings): util.Properties = {
    val props = createProperties(settings)
    props.put(ProducerConfig.ACKS_CONFIG, "all")
    props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "10485760") // 10MB
    props
  }

  def createProducer(settings: BlockchainUpdatesSettings): KafkaProducer[Int, BlockchainUpdated] =
    new KafkaProducer[Int, BlockchainUpdated](createProducerProperties(settings), IntSerializer, BlockchainUpdatedSerializer)

  def createProducerRecord(topic: String, event: BlockchainUpdated): ProducerRecord[Int, BlockchainUpdated] = {
    val h = event match {
      case ap: BlockAppended                      => ap.toHeight
      case MicroBlockAppended(_, height, _, _, _) => height
      case RollbackCompleted(_, height)           => height
      case MicroBlockRollbackCompleted(_, height) => height
    }
    new ProducerRecord[Int, BlockchainUpdated](topic, h, event)
  }
}
Example 11
Source File: JsoniterScalaSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdejsoniterscala

import java.util

import com.github.plokhotnyuk.jsoniter_scala.core._
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait JsoniterScalaSupport {
  implicit def toSerializer[T >: Null](
      implicit codec: JsonValueCodec[T],
      writerConfig: WriterConfig = WriterConfig()
  ): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try writeToArray(data, writerConfig)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null](
      implicit codec: JsonValueCodec[T],
      readerConfig: ReaderConfig = ReaderConfig()
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try readFromArray(data, readerConfig)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null](
      implicit codec: JsonValueCodec[T],
      writerConfig: WriterConfig = WriterConfig(),
      readerConfig: ReaderConfig = ReaderConfig()
  ): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object JsoniterScalaSupport extends JsoniterScalaSupport
Example 12
Source File: EmbeddedKafkaTest.scala From apache-spark-test with Apache License 2.0 | 5 votes |
package com.github.dnvriend.spark.kafka

import com.github.dnvriend.TestSpec
import net.manub.embeddedkafka.EmbeddedKafka
import org.apache.kafka.common.serialization.{ Deserializer, Serializer }

class EmbeddedKafkaTest extends TestSpec with EmbeddedKafka {
  final val TopicName = "MyTopic"

  def publish[T: Serializer](msg: T): Unit =
    publishToKafka(TopicName, msg)

  def consume[T: Deserializer]: T =
    consumeFirstMessageFrom(TopicName)

  import net.manub.embeddedkafka.Codecs._

  it should "setup and embedded kafka, create a topic, send a message and receive a message from the same topic" in withRunningKafka {
    publish("foo")
    consume[String] shouldBe "foo"

    publish("bar".getBytes)
    consume[Array[Byte]] shouldBe "bar".getBytes()
  }
}
Example 13
Source File: KafkaProducerConfig.scala From freestyle-kafka with Apache License 2.0 | 5 votes |
package freestyle
package kafka

import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.common.serialization.Serializer

import collection.JavaConverters._

case class KafkaProducerConfig[K, V](
    configs: Map[String, Any],
    keyValueSerializers: Option[(Serializer[K], Serializer[V])])
    extends UnderlyingKafkaProducer[K, V] {
  override def producer: KafkaProducer[K, V] = KafkaProducerConfig.producerFromConfig(this)
}

object KafkaProducerConfig {

  private def toAnyRefMap(m: Map[String, Any]): java.util.Map[String, AnyRef] =
    m.asInstanceOf[Map[String, AnyRef]].asJava

  def producerFromConfig[K, V](config: KafkaProducerConfig[K, V]): KafkaProducer[K, V] =
    config.keyValueSerializers.fold(new KafkaProducer[K, V](toAnyRefMap(config.configs))) {
      case (ks, vs) => new KafkaProducer[K, V](toAnyRefMap(config.configs), ks, vs)
    }
}
Example 14
Source File: Config.scala From event-sourcing-kafka-streams with MIT License | 5 votes |
package org.amitayh.invoices.common

import org.amitayh.invoices.common.serde.{AvroSerde, UuidSerde}
import org.apache.kafka.clients.admin.NewTopic
import org.apache.kafka.common.config.TopicConfig
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

import scala.collection.JavaConverters._
import scala.concurrent.duration._

object Config {
  val BootstrapServers = sys.env("BOOTSTRAP_SERVERS")

  object Stores {
    val Snapshots = "invoices.store.snapshots"
  }

  object Topics {
    sealed trait CleanupPolicy
    object CleanupPolicy {
      case object Compact extends CleanupPolicy
    }

    case class Topic[K, V](name: String,
                           keySerde: Serde[K],
                           valueSerde: Serde[V],
                           numPartitions: Int = 4,
                           replicationFactor: Short = 1,
                           retention: Option[Duration] = None,
                           cleanupPolicy: Option[CleanupPolicy] = None) {

      val keySerializer: Serializer[K] = keySerde.serializer
      val keyDeserializer: Deserializer[K] = keySerde.deserializer
      val valueSerializer: Serializer[V] = valueSerde.serializer
      val valueDeserializer: Deserializer[V] = valueSerde.deserializer

      def toNewTopic: NewTopic = {
        val emptyConfigs = Map.empty[String, String]
        val withRetention = retentionConfig.foldLeft(emptyConfigs)(_ + _)
        val withCleanupPolicy = cleanupPolicyConfig.foldLeft(withRetention)(_ + _)
        new NewTopic(name, numPartitions, replicationFactor)
          .configs(withCleanupPolicy.asJava)
      }

      private def retentionConfig: Option[(String, String)] = retention.map { retention =>
        val millis = if (retention.isFinite) retention.toMillis else -1
        TopicConfig.RETENTION_MS_CONFIG -> millis.toString
      }

      private def cleanupPolicyConfig: Option[(String, String)] = cleanupPolicy.map {
        case CleanupPolicy.Compact =>
          TopicConfig.CLEANUP_POLICY_CONFIG -> TopicConfig.CLEANUP_POLICY_COMPACT
      }
    }

    val Events = Topic(
      "invoices.topic.events",
      UuidSerde,
      AvroSerde.EventSerde,
      retention = Some(Duration.Inf))

    val Commands = Topic(
      "invoices.topic.commands",
      UuidSerde,
      AvroSerde.CommandSerde,
      retention = Some(5.minutes))

    val CommandResults = Topic(
      "invoices.topic.command-results",
      UuidSerde,
      AvroSerde.CommandResultSerde,
      retention = Some(5.minutes))

    val Snapshots = Topic(
      "invoices.topic.snapshots",
      UuidSerde,
      AvroSerde.SnapshotSerde,
      cleanupPolicy = Some(CleanupPolicy.Compact))

    val All = Set(Events, Commands, CommandResults, Snapshots)
  }
}
Example 15
Source File: UuidSerde.scala From event-sourcing-kafka-streams with MIT License | 5 votes |
package org.amitayh.invoices.common.serde

import java.util
import java.util.UUID

import org.amitayh.invoices.common.serde.UuidConverters.{fromBytes, toBytes}
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

object UuidSerializer extends Serializer[UUID] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def serialize(topic: String, uuid: UUID): Array[Byte] = toBytes(uuid)
  override def close(): Unit = ()
}

object UuidDeserializer extends Deserializer[UUID] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def deserialize(topic: String, data: Array[Byte]): UUID = fromBytes(data)
  override def close(): Unit = ()
}

object UuidSerde extends Serde[UUID] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override val serializer: Serializer[UUID] = UuidSerializer
  override val deserializer: Deserializer[UUID] = UuidDeserializer
  override def close(): Unit = ()
}
Example 16
Source File: AvroSerde.scala From event-sourcing-kafka-streams with MIT License | 5 votes |
package org.amitayh.invoices.common.serde

import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
import java.time.Instant
import java.util
import java.util.UUID

import com.sksamuel.avro4s._
import org.amitayh.invoices.common.domain._
import org.amitayh.invoices.common.serde.UuidConverters.{fromByteBuffer, toByteBuffer}
import org.apache.avro.Schema
import org.apache.avro.Schema.Field
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

object AvroSerde {
  implicit val instantToSchema: ToSchema[Instant] = new ToSchema[Instant] {
    override val schema: Schema = Schema.create(Schema.Type.STRING)
  }

  implicit val instantToValue: ToValue[Instant] = new ToValue[Instant] {
    override def apply(value: Instant): String = value.toString
  }

  implicit val instantFromValue: FromValue[Instant] = new FromValue[Instant] {
    override def apply(value: Any, field: Field): Instant = Instant.parse(value.toString)
  }

  implicit val uuidToSchema: ToSchema[UUID] = new ToSchema[UUID] {
    override val schema: Schema = Schema.create(Schema.Type.BYTES)
  }

  implicit val uuidToValue: ToValue[UUID] = new ToValue[UUID] {
    override def apply(value: UUID): ByteBuffer = toByteBuffer(value)
  }

  implicit val uuidFromValue: FromValue[UUID] = new FromValue[UUID] {
    override def apply(value: Any, field: Field): UUID =
      fromByteBuffer(value.asInstanceOf[ByteBuffer])
  }

  val CommandSerde: Serde[Command] = serdeFor[Command]
  val CommandResultSerde: Serde[CommandResult] = serdeFor[CommandResult]
  val SnapshotSerde: Serde[InvoiceSnapshot] = serdeFor[InvoiceSnapshot]
  val EventSerde: Serde[Event] = serdeFor[Event]

  def toBytes[T: SchemaFor: ToRecord](data: T): Array[Byte] = {
    val baos = new ByteArrayOutputStream
    val output = AvroOutputStream.binary[T](baos)
    output.write(data)
    output.close()
    baos.toByteArray
  }

  def fromBytes[T: SchemaFor: FromRecord](data: Array[Byte]): T = {
    val input = AvroInputStream.binary[T](data)
    input.iterator.next()
  }

  private def serdeFor[T: SchemaFor: ToRecord: FromRecord]: Serde[T] = new Serde[T] {
    override val serializer: Serializer[T] = new Serializer[T] {
      override def serialize(topic: String, data: T): Array[Byte] = toBytes(data)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def close(): Unit = ()
    }
    override val deserializer: Deserializer[T] = new Deserializer[T] {
      override def deserialize(topic: String, data: Array[Byte]): T = fromBytes(data)
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def close(): Unit = ()
    }
    override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
    override def close(): Unit = ()
  }
}
Example 17
Source File: ConfigureSerializationSpec.scala From scala-kafka-client with MIT License | 5 votes |
package cakesolutions.kafka

import java.util

import com.typesafe.config.ConfigFactory
import org.apache.kafka.common.serialization.{Deserializer, Serializer}

class ConfigureSerializationSpec extends KafkaIntSpec {

  private class MockDeserializer() extends Deserializer[String] {
    var configuration: String = _
    var isKeyDeserializer: Boolean = _

    override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
      configuration = configs.get("mock.config").toString
      isKeyDeserializer = isKey
    }

    override def close(): Unit = {}

    override def deserialize(topic: String, data: Array[Byte]): String = new String(data)
  }

  private class MockSerializer() extends Serializer[String] {
    var configuration: String = _
    var isKeySerializer: Boolean = _

    override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
      configuration = configs.get("mock.config").toString
      isKeySerializer = isKey
    }

    override def serialize(topic: String, data: String): Array[Byte] = data.getBytes

    override def close(): Unit = {}
  }

  "Producer" should "configure the serializers" in {
    val keySerializer = new MockSerializer
    val valueSerializer = new MockSerializer

    val conf = KafkaProducer.Conf(
      ConfigFactory.parseString(
        s"""
           | bootstrap.servers = "localhost:$kafkaPort",
           | mock.config = "mock_value"
        """.stripMargin
      ), keySerializer, valueSerializer)

    val producer = KafkaProducer(conf)
    producer.close

    keySerializer.configuration shouldEqual "mock_value"
    keySerializer.isKeySerializer shouldEqual true
    valueSerializer.configuration shouldEqual "mock_value"
    valueSerializer.isKeySerializer shouldEqual false
  }

  "Consumer" should "configure the deserializers" in {
    val keyDeserializer = new MockDeserializer
    val valueDeserializer = new MockDeserializer

    val conf = KafkaConsumer.Conf(
      ConfigFactory.parseString(
        s"""
           | bootstrap.servers = "localhost:$kafkaPort",
           | mock.config = "mock_value"
        """.stripMargin
      ), keyDeserializer, valueDeserializer)

    val consumer = KafkaConsumer(conf)
    consumer.close

    keyDeserializer.configuration shouldEqual "mock_value"
    keyDeserializer.isKeyDeserializer shouldEqual true
    valueDeserializer.configuration shouldEqual "mock_value"
    valueDeserializer.isKeyDeserializer shouldEqual false
  }
}
Example 18
Source File: UpickleSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdeupickle

import java.nio.charset.StandardCharsets.UTF_8
import java.util

import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }
import upickle.default.{ Reader, Writer, read, write }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait UpickleSupport {
  implicit def toSerializer[T >: Null](implicit writer: Writer[T]): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try write(data).getBytes(UTF_8)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null](implicit reader: Reader[T]): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try read(new String(data, UTF_8))
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null](implicit reader: Reader[T], writer: Writer[T]): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object UpickleSupport extends UpickleSupport
Example 19
Source File: avroMarshallers.scala From scalatest-embedded-kafka with MIT License | 5 votes |
package net.manub.embeddedkafka.avro

import java.io.ByteArrayOutputStream

import kafka.utils.VerifiableProperties
import org.apache.avro.Schema
import org.apache.avro.io._
import org.apache.avro.specific.{ SpecificDatumReader, SpecificDatumWriter, SpecificRecord }
import org.apache.kafka.common.serialization.{Deserializer, Serializer}

class KafkaAvroDeserializer[T <: SpecificRecord](schema: Schema)
    extends Deserializer[T]
    with NoOpConfiguration
    with NoOpClose {

  private val reader = new SpecificDatumReader[T](schema)

  override def deserialize(topic: String, data: Array[Byte]): T = {
    val decoder = DecoderFactory.get().binaryDecoder(data, null)
    reader.read(null.asInstanceOf[T], decoder)
  }
}

class KafkaAvroSerializer[T <: SpecificRecord]()
    extends Serializer[T]
    with NoOpConfiguration
    with NoOpClose {

  private def toBytes(nullableData: T): Array[Byte] =
    Option(nullableData).fold[Array[Byte]](null) { data =>
      val writer: DatumWriter[T] = new SpecificDatumWriter[T](data.getSchema)
      val out = new ByteArrayOutputStream()
      val encoder = EncoderFactory.get.binaryEncoder(out, null)

      writer.write(data, encoder)
      encoder.flush()
      out.close()
      out.toByteArray
    }

  override def serialize(topic: String, data: T): Array[Byte] = toBytes(data)
}

sealed trait NoOpConfiguration {
  def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()
}

sealed trait NoOpClose {
  def close(): Unit = ()
}
Example 20
Source File: Json4sSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdejson4s

import java.nio.charset.StandardCharsets.UTF_8
import java.util

import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }
import org.json4s.{ Formats, Serialization }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait Json4sSupport {
  implicit def toSerializer[T <: AnyRef](implicit serialization: Serialization,
                                         formats: Formats): Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try serialization.write[T](data).getBytes(UTF_8)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null <: AnyRef: Manifest](
      implicit serialization: Serialization,
      formats: Formats
  ): Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          try serialization.read[T](new String(data, UTF_8))
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toSerde[T >: Null <: AnyRef: Manifest](implicit serialization: Serialization,
                                                      formats: Formats): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object Json4sSupport extends Json4sSupport
Example 21
Source File: CirceSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdecirce

import java.nio.charset.StandardCharsets.UTF_8
import java.util

import io.circe.{ Decoder, Encoder, Printer }
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer }

import scala.language.implicitConversions
import scala.util.control.NonFatal

trait CirceSupport {
  implicit def toSerializer[T >: Null](implicit encoder: Encoder[T],
                                       printer: Printer = Printer.noSpaces): Serializer[T] =
    new Serializer[T] {
      import io.circe.syntax._

      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serialize(topic: String, data: T): Array[Byte] =
        if (data == null) null
        else
          try printer.pretty(data.asJson).getBytes(UTF_8)
          catch {
            case NonFatal(e) => throw new SerializationException(e)
          }
    }

  implicit def toDeserializer[T >: Null](implicit decoder: Decoder[T]): Deserializer[T] =
    new Deserializer[T] {
      import io.circe._
      import cats.syntax.either._

      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def deserialize(topic: String, data: Array[Byte]): T =
        if (data == null) null
        else
          parser
            .parse(new String(data, UTF_8))
            .valueOr(e => throw new SerializationException(e))
            .as[T]
            .valueOr(e => throw new SerializationException(e))
    }

  implicit def toSerde[T >: Null](implicit encoder: Encoder[T],
                                  printer: Printer = Printer.noSpaces,
                                  decoder: Decoder[T]): Serde[T] =
    new Serde[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
      override def close(): Unit = {}
      override def serializer(): Serializer[T] = toSerializer[T]
      override def deserializer(): Deserializer[T] = toDeserializer[T]
    }
}

object CirceSupport extends CirceSupport
Example 22
Source File: GenericSerde.scala From avro4s with Apache License 2.0 | 5 votes |
package com.sksamuel.avro4s.kafka

import java.io.ByteArrayOutputStream

import com.sksamuel.avro4s.{AvroFormat, AvroInputStream, AvroOutputStream, AvroSchema, BinaryFormat, DataFormat, Decoder, Encoder, JsonFormat, SchemaFor}
import org.apache.avro.Schema
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

class GenericSerde[T >: Null : SchemaFor : Encoder : Decoder](avroFormat: AvroFormat = BinaryFormat)
  extends Serde[T]
    with Deserializer[T]
    with Serializer[T]
    with Serializable {

  val schema: Schema = AvroSchema[T]

  override def serializer(): Serializer[T] = this

  override def deserializer(): Deserializer[T] = this

  override def deserialize(topic: String, data: Array[Byte]): T = {
    if (data == null) null
    else {
      val avroInputStream = avroFormat match {
        case BinaryFormat => AvroInputStream.binary[T]
        case JsonFormat   => AvroInputStream.json[T]
        case DataFormat   => AvroInputStream.data[T]
      }

      val input = avroInputStream.from(data).build(schema)
      val result = input.iterator.next()
      input.close()
      result
    }
  }

  override def close(): Unit = ()

  override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()

  override def serialize(topic: String, data: T): Array[Byte] = {
    val baos = new ByteArrayOutputStream()

    val avroOutputStream = avroFormat match {
      case BinaryFormat => AvroOutputStream.binary[T]
      case JsonFormat   => AvroOutputStream.json[T]
      case DataFormat   => AvroOutputStream.data[T]
    }

    val output = avroOutputStream.to(baos).build()
    output.write(data)
    output.close()
    baos.toByteArray
  }
}
Example 23
Source File: TestSerdes.scala From haystack-traces with Apache License 2.0 | 5 votes |
package com.expedia.www.haystack.trace.indexer.integration.serdes

import java.util

import com.expedia.open.tracing.Span
import com.expedia.open.tracing.buffer.SpanBuffer
import com.expedia.www.haystack.trace.commons.packer.Unpacker
import org.apache.kafka.common.serialization.{Deserializer, Serializer}

class SpanProtoSerializer extends Serializer[Span] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()

  override def serialize(topic: String, data: Span): Array[Byte] = {
    data.toByteArray
  }

  override def close(): Unit = ()
}

class SnappyCompressedSpanBufferProtoDeserializer extends Deserializer[SpanBuffer] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()

  override def deserialize(topic: String, data: Array[Byte]): SpanBuffer = {
    if (data == null) {
      null
    } else {
      Unpacker.readSpanBuffer(data)
    }
  }

  override def close(): Unit = ()
}
Example 24
Source File: InternalKafkaAvroSerde.scala From affinity with Apache License 2.0 | 5 votes |
package io.amient.affinity.kafka

import io.amient.affinity.avro.record.AvroRecord
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}

import scala.reflect.runtime.universe._

class InternalKafkaAvroSerde[T: TypeTag] extends Serde[T] {

  val schema = AvroRecord.inferSchema[T]

  override def configure(configs: java.util.Map[String, _], isKey: Boolean) = ()

  override def close() = ()

  override def deserializer() = new Deserializer[T] {
    override def configure(configs: java.util.Map[String, _], isKey: Boolean) = ()
    override def close() = ()
    override def deserialize(topic: String, data: Array[Byte]) = AvroRecord.read(data, schema)
  }

  override def serializer() = new Serializer[T] {
    override def configure(configs: java.util.Map[String, _], isKey: Boolean) = ()
    override def close() = ()
    override def serialize(topic: String, data: T) = AvroRecord.write(data, schema)
  }
}
Example 25
Source File: KafkaAvroSerializer.scala From affinity with Apache License 2.0 | 5 votes |
package io.amient.affinity.kafka

import java.util

import com.typesafe.config.ConfigFactory
import io.amient.affinity.avro.record.AvroSerde
import org.apache.kafka.common.serialization.Serializer

class KafkaAvroSerializer extends Serializer[Any] {

  var isKey: Boolean = false
  var serde: AvroSerde = null

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
    val config = ConfigFactory.parseMap(configs)
      .getConfig("schema").atKey("schema").atPath(AvroSerde.AbsConf.Avro.path)
    this.serde = AvroSerde.create(config)
    this.isKey = isKey
  }

  override def serialize(topic: String, data: Any): Array[Byte] = {
    require(serde != null, "AvroSerde not configured")
    val subject = s"$topic-${if (isKey) "key" else "value"}"
    val (schemaId, objSchema) = serde.from(data, subject)
    serde.write(data, objSchema, schemaId)
  }

  override def close(): Unit = if (serde != null) serde.close()
}
Example 26
Source File: KafkaJsonSerializer.scala From ticket-booking-aecor with Apache License 2.0 | 5 votes |
package ru.pavkin.payment.kafka

import java.nio.charset.StandardCharsets
import java.util

import io.circe.parser._
import io.circe.Encoder
import org.apache.kafka.common.serialization.{ Deserializer, Serializer, StringSerializer }
import ru.pavkin.payment.event.PaymentReceived

class PaymentReceivedEventSerializer extends Serializer[PaymentReceived] {
  private val stringSerializer = new StringSerializer

  def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()

  def serialize(topic: String, data: PaymentReceived): Array[Byte] =
    stringSerializer.serialize(topic, Encoder[PaymentReceived].apply(data).noSpaces)

  def close(): Unit = ()
}

class PaymentReceivedEventDeserializer extends Deserializer[PaymentReceived] {
  def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()

  def close(): Unit = ()

  def deserialize(topic: String, data: Array[Byte]): PaymentReceived =
    if (data ne null)
      decode[PaymentReceived](new String(data, StandardCharsets.UTF_8)).fold(throw _, identity)
    else null
}
Example 27
Source File: WindowedMetricSerde.scala From haystack-trends with Apache License 2.0 | 5 votes |
package com.expedia.www.haystack.trends.kstream.serde

import java.util

import com.expedia.www.haystack.commons.entities.Interval
import com.expedia.www.haystack.commons.metrics.MetricsSupport
import com.expedia.www.haystack.trends.aggregation.metrics.{AggregationType, CountMetricFactory, HistogramMetricFactory, Metric}
import com.expedia.www.haystack.trends.aggregation.{TrendMetric, WindowedMetric}
import com.expedia.www.haystack.trends.aggregation.entities.TimeWindow
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serializer}
import org.msgpack.core.MessagePack
import org.msgpack.value.ValueFactory

import scala.collection.JavaConverters._
import scala.collection.mutable

object WindowedMetricSerde extends Serde[WindowedMetric] with MetricsSupport {

  private val SERIALIZED_METRIC_KEY = "serializedMetric"
  private val START_TIME_KEY = "startTime"
  private val END_TIME_KEY = "endTime"

  private val aggregationTypeKey = "aggregationType"
  private val metricsKey = "metrics"

  override def close(): Unit = ()

  override def deserializer(): Deserializer[WindowedMetric] = {
    new Deserializer[WindowedMetric] {
      override def configure(map: util.Map[String, _], b: Boolean): Unit = ()

      override def close(): Unit = ()

      override def serialize(topic: String, windowedMetric: WindowedMetric): Array[Byte] = {
        val packer = MessagePack.newDefaultBufferPacker()

        val serializedMetrics = windowedMetric.windowedMetricsMap.map {
          case (timeWindow, metric) =>
            ValueFactory.newMap(Map(
              ValueFactory.newString(START_TIME_KEY) -> ValueFactory.newInteger(timeWindow.startTime),
              ValueFactory.newString(END_TIME_KEY) -> ValueFactory.newInteger(timeWindow.endTime),
              ValueFactory.newString(SERIALIZED_METRIC_KEY) ->
                ValueFactory.newBinary(windowedMetric.getMetricFactory.getMetricSerde.serialize(metric))
            ).asJava)
        }
        val windowedMetricMessagePack = Map(
          ValueFactory.newString(metricsKey) -> ValueFactory.newArray(serializedMetrics.toList.asJava),
          ValueFactory.newString(aggregationTypeKey) ->
            ValueFactory.newString(windowedMetric.getMetricFactory.getAggregationType.toString)
        )
        packer.packValue(ValueFactory.newMap(windowedMetricMessagePack.asJava))
        val data = packer.toByteArray
        data
      }

      override def close(): Unit = ()
    }
  }

  override def configure(map: util.Map[String, _], b: Boolean): Unit = ()
}
Example 28
Source File: KafkaService.scala From ws_to_kafka with MIT License | 5 votes |
package com.pkinsky

import akka.actor.ActorSystem
import akka.stream.scaladsl.{Source, Flow, Sink}
import com.softwaremill.react.kafka.{ConsumerProperties, ProducerProperties, ProducerMessage, ReactiveKafka}
import org.apache.kafka.common.serialization.{Deserializer, Serializer}
import play.api.libs.json.{Json, Reads, Writes}

case class KafkaServiceConf(bootstrapServers: String)

class KafkaService(kafkaClient: ReactiveKafka, conf: KafkaServiceConf) {
  def consume[T](topic: String, groupId: String)(implicit writes: Reads[T], actorSystem: ActorSystem): Source[T, Unit] =
    Source.fromPublisher(kafkaClient.consume(
      ConsumerProperties(
        bootstrapServers = conf.bootstrapServers, // IP and port of local Kafka instance
        topic = topic, // topic to consume messages from
        groupId = groupId, // consumer group
        valueDeserializer = KafkaService.deserializer[T]
      )
    )).map(_.value())
}

object KafkaService {
  def serializer[T: Writes] = new Serializer[T] {
    override def serialize(topic: String, data: T): Array[Byte] = {
      val js = Json.toJson(data)
      js.toString().getBytes("UTF-8")
    }

    override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()

    override def close(): Unit = ()
  }

  def deserializer[T: Reads] = new Deserializer[T] {
    override def deserialize(topic: String, data: Array[Byte]): T = {
      val s = new String(data, "UTF-8")
      Json.fromJson(Json.parse(s)).get //throw exception on error ¯\_(ツ)_/¯ (consider returning JsResult[T])
    }

    override def configure(configs: java.util.Map[String, _], isKey: Boolean): Unit = ()

    override def close(): Unit = ()
  }
}
Example 29
Source File: EventAggregationSpec.scala From spark-summit-2018 with GNU General Public License v3.0 | 5 votes |
package com.twilio.open.streaming.trend.discovery

import java.util

import com.twilio.open.protocol.Calls.CallEvent
import com.twilio.open.protocol.Metrics
import com.twilio.open.streaming.trend.discovery.streams.EventAggregation
import org.apache.kafka.common.serialization.{Deserializer, Serializer, StringDeserializer, StringSerializer}
import org.apache.spark.sql.streaming.{OutputMode, Trigger}
import org.apache.spark.sql._
import org.apache.spark.sql.kafka010.KafkaTestUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.{Logger, LoggerFactory}

class EventAggregationSpec extends KafkaBackedTest[String, CallEvent] {
  override val testUtils = new KafkaTestUtils[String, CallEvent] {
    override val keySerializer: Serializer[String] = new StringSerializer
    override val keyDeserializer: Deserializer[String] = new StringDeserializer
    override val valueSerializer: Serializer[CallEvent] = new CallEventSerializer
    override val valueDeserializer: Deserializer[CallEvent] = new CallEventDeserializer
  }
  override protected val kafkaTopic = "spark.summit.call.events"
  override protected val partitions = 8

  private val pathToTestScenarios = "src/test/resources/scenarios"

  val log: Logger = LoggerFactory.getLogger(classOf[EventAggregation])

  lazy val session: SparkSession = sparkSql

  override def conf: SparkConf = {
    new SparkConf()
      .setMaster("local[*]")
      .setAppName("aggregation-test-app")
      .set("spark.ui.enabled", "false")
      .set("spark.app.id", appID)
      .set("spark.driver.host", "localhost")
      .set("spark.sql.shuffle.partitions", "32")
      .set("spark.executor.cores", "4")
      .set("spark.executor.memory", "1g")
      .set("spark.ui.enabled", "false")
      .setJars(SparkContext.jarOfClass(classOf[EventAggregation]).toList)
  }

  test("Should aggregate call events") {
    import session.implicits._
    val appConfig = appConfigForTest()
    val scenario = TestHelper.loadScenario[CallEvent](s"$pathToTestScenarios/pdd_events.json")
    val scenarioIter = scenario.toIterator
    scenario.nonEmpty shouldBe true

    testUtils.createTopic(kafkaTopic, partitions, overwrite = true)
    sendNextMessages(scenarioIter, 30, _.getEventId, _.getLoggedEventTime)

    val trendDiscoveryApp = new TrendDiscoveryApp(appConfigForTest(), session)
    val eventAggregation = EventAggregation(appConfig)

    eventAggregation.process(trendDiscoveryApp.readKafkaStream())(session)
      .writeStream
      .queryName("calleventaggs")
      .format("memory")
      .outputMode(eventAggregation.outputMode)
      .start()
      .processAllAvailable()

    val df = session.sql("select * from calleventaggs")
    df.printSchema()
    df.show

    val res = session
      .sql("select avg(stats.p99) from calleventaggs")
      .collect()
      .map { r => r.getAs[Double](0) }
      .head

    DiscoveryUtils.round(res) shouldEqual 7.13
  }
}

class CallEventSerializer extends Serializer[CallEvent] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
  override def serialize(topic: String, data: CallEvent): Array[Byte] = data.toByteArray
  override def close(): Unit = {}
}

class CallEventDeserializer extends Deserializer[CallEvent] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {}
  override def deserialize(topic: String, data: Array[Byte]): CallEvent = CallEvent.parseFrom(data)
  override def close(): Unit = {}
}
Example 30
Source File: Producer.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.broker.kafka

import java.net.URI

import akka.actor.ActorSystem
import akka.persistence.query.{ Offset => AkkaOffset }
import akka.stream.Materializer
import akka.stream.scaladsl._
import com.lightbend.lagom.internal.projection.ProjectionRegistry
import com.lightbend.lagom.internal.projection.ProjectionRegistryActor.WorkerCoordinates
import com.lightbend.lagom.spi.persistence.OffsetStore
import org.apache.kafka.common.serialization.Serializer

import scala.collection.immutable
import scala.concurrent.ExecutionContext
import scala.concurrent.Future

private[lagom] object Producer {
  def startTaggedOffsetProducer[Message](
      system: ActorSystem,
      tags: immutable.Seq[String],
      kafkaConfig: KafkaConfig,
      locateService: String => Future[Seq[URI]],
      topicId: String,
      eventStreamFactory: (String, AkkaOffset) => Source[(Message, AkkaOffset), _],
      partitionKeyStrategy: Option[Message => String],
      serializer: Serializer[Message],
      offsetStore: OffsetStore,
      projectionRegistry: ProjectionRegistry
  )(implicit mat: Materializer, ec: ExecutionContext): Unit = {
    val projectionName = s"kafkaProducer-$topicId"

    val producerConfig = ProducerConfig(system.settings.config)
    val topicProducerProps = (coordinates: WorkerCoordinates) =>
      TopicProducerActor.props(
        coordinates,
        kafkaConfig,
        producerConfig,
        locateService,
        topicId,
        eventStreamFactory,
        partitionKeyStrategy,
        serializer,
        offsetStore
      )

    val entityIds = tags.toSet

    projectionRegistry.registerProjection(
      projectionName,
      entityIds,
      topicProducerProps,
      producerConfig.role
    )
  }
}
Example 31
Source File: TestJsonSerializer.scala From embedded-kafka with MIT License | 5 votes |
package net.manub.embeddedkafka.serializers

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.Serializer

class TestJsonSerializer[T] extends Serializer[T] {
  private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)

  override def serialize(topic: String, data: T): Array[Byte] =
    Option(data).map { _ =>
      try mapper.writeValueAsBytes(data)
      catch {
        case e: Exception =>
          throw new SerializationException("Error serializing JSON message", e)
      }
    }.orNull
}
Example 32
Source File: CirceSerdes.scala From kafka-streams-circe with Apache License 2.0 | 5 votes |
package com.goyeau.kafka.streams.circe

import java.nio.charset.StandardCharsets
import java.util

import io.circe.parser._
import io.circe.{Decoder, Encoder}
import org.apache.kafka.common.errors.SerializationException
import org.apache.kafka.common.serialization.{Deserializer, Serde, Serdes, Serializer}

object CirceSerdes {

  implicit def serializer[T: Encoder]: Serializer[T] =
    new Serializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def serialize(topic: String, caseClass: T): Array[Byte] =
        Encoder[T].apply(caseClass).noSpaces.getBytes(StandardCharsets.UTF_8)
      override def close(): Unit = ()
    }

  implicit def deserializer[T: Decoder]: Deserializer[T] =
    new Deserializer[T] {
      override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
      override def deserialize(topic: String, data: Array[Byte]): T =
        Option(data).fold(null.asInstanceOf[T]) { data =>
          decode[T](new String(data, StandardCharsets.UTF_8))
            .fold(error => throw new SerializationException(error), identity)
        }
      override def close(): Unit = ()
    }

  implicit def serde[CC: Encoder: Decoder]: Serde[CC] = Serdes.serdeFrom(serializer, deserializer)
}
Example 33
Source File: KafkaSinkTest.scala From eel-sdk with Apache License 2.0 | 5 votes |
package io.eels.component.kafka

import java.util
import java.util.{Properties, UUID}

import io.eels.Row
import io.eels.datastream.DataStream
import io.eels.schema.{Field, StringType, StructType}
import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.common.serialization.{Deserializer, Serializer}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.collection.JavaConverters._
import scala.util.Try

class KafkaSinkTest extends FlatSpec with Matchers with BeforeAndAfterAll {

  implicit val kafkaConfig = EmbeddedKafkaConfig(
    kafkaPort = 6001,
    zooKeeperPort = 6000
  )
  Try {
    EmbeddedKafka.start()
  }

  val schema = StructType(
    Field("name", StringType, nullable = true),
    Field("location", StringType, nullable = true)
  )

  val ds = DataStream.fromValues(
    schema,
    Seq(
      Vector("clint eastwood", UUID.randomUUID().toString),
      Vector("elton john", UUID.randomUUID().toString)
    )
  )

  "KafkaSink" should "support default implicits" ignore {

    val topic = "mytopic-" + System.currentTimeMillis()

    val properties = new Properties()
    properties.put("bootstrap.servers", s"localhost:${kafkaConfig.kafkaPort}")
    properties.put("group.id", "test")
    properties.put("auto.offset.reset", "earliest")

    val producer = new KafkaProducer[String, Row](properties, StringSerializer, RowSerializer)
    val sink = KafkaSink(topic, producer)

    val consumer = new KafkaConsumer[String, String](properties, StringDeserializer, StringDeserializer)
    consumer.subscribe(util.Arrays.asList(topic))

    ds.to(sink)
    producer.close()

    val records = consumer.poll(4000)
    records.iterator().asScala.map(_.value).toList shouldBe ds.collect.map {
      case Row(_, values) => values.mkString(",")
    }.toList
  }
}

object RowSerializer extends Serializer[Row] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def serialize(topic: String, data: Row): Array[Byte] = data.values.mkString(",").getBytes
  override def close(): Unit = ()
}

object StringSerializer extends Serializer[String] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def serialize(topic: String, data: String): Array[Byte] = data.getBytes
}

object StringDeserializer extends Deserializer[String] {
  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = ()
  override def close(): Unit = ()
  override def deserialize(topic: String, data: Array[Byte]): String = new String(data)
}