kafka.producer.ProducerConfig Scala Examples
The following examples show how to use kafka.producer.ProducerConfig.
You can vote examples up or down, and follow the links above each example to visit the original project or source file.
Example 1
Source File: WithKafka.scala From incubator-s2graph with Apache License 2.0 | 5 votes |
package org.apache.s2graph.spark.spark import java.util.Properties import kafka.producer.{Producer, ProducerConfig} trait WithKafka { def kafkaConf(brokerList: String) = { val props = new Properties() props.put("metadata.broker.list", brokerList) props.put("request.required.acks", "0") props.put("producer.type", "async") props.put("serializer.class", "kafka.serializer.StringEncoder") props.put("compression.codec", "1") props.put("message.send.max.retries", "3") props.put("batch.num.messages", "1000") new ProducerConfig(props) } def producerConfig(brokerList: String, requireAcks: String = "1", producerType: String = "sync") = { val props = new Properties() props.setProperty("metadata.broker.list", brokerList) props.setProperty("request.required.acks", requireAcks) props.setProperty("producer.type", producerType) props.setProperty("serializer.class", "kafka.serializer.StringEncoder") props.setProperty("compression.codec", "snappy") props.setProperty("message.send.max.retries", "1") new ProducerConfig(props) } def getProducer[K, V](config: ProducerConfig): Producer[K, V] = { new Producer[K, V](config) } def getProducer[K, V](brokers: String): Producer[K, V] = { getProducer(producerConfig(brokers)) } def getPartKey(k: Any, n: Int): Int = { kafka.utils.Utils.abs(k.hashCode()) % n } def makeKafkaGroupId(topic: String, ext: String): String = { val phase = System.getProperty("phase") var groupId = s"${topic}_$ext" groupId += { System.getProperty("spark.master") match { case x if x.startsWith("local") => "_local" case _ => "" } } groupId += { phase match { case "alpha" => "_alpha" case _ => "" }} groupId } }
Example 2
Source File: KafkaSpanHandler.scala From money with Apache License 2.0 | 5 votes |
package com.comcast.money.kafka import java.util.Properties import com.comcast.money.api.SpanInfo import com.comcast.money.core.handlers.ConfigurableHandler import com.comcast.money.wire.AvroConversions import com.typesafe.config.Config import kafka.producer.{ ProducerConfig, KeyedMessage, Producer } // We use the producer maker so that we can mock this out trait ProducerMaker { def makeProducer(conf: Config): Producer[Array[Byte], Array[Byte]] } trait ConfigDrivenProducerMaker extends ProducerMaker { def makeProducer(conf: Config): Producer[Array[Byte], Array[Byte]] = { val props = new Properties() props.put("compression.codec", conf.getString("compression.codec")) props.put("producer.type", conf.getString("producer.type")) props.put("batch.num.messages", conf.getString("batch.num.messages")) props.put("message.send.max.retries", conf.getString("message.send.max.retries")) props.put("metadata.broker.list", conf.getString("metadata.broker.list")) new Producer[Array[Byte], Array[Byte]](new ProducerConfig(props)) } } class KafkaSpanHandler extends ConfigurableHandler with ConfigDrivenProducerMaker { import AvroConversions._ private[kafka] var topic: String = _ private[kafka] var producer: Producer[Array[Byte], Array[Byte]] = _ def configure(config: Config): Unit = { producer = makeProducer(config) topic = config.getString("topic") } def handle(span: SpanInfo): Unit = { producer.send(new KeyedMessage(topic, span.convertTo[Array[Byte]])) } }
Example 3
Source File: KafkaProducer.scala From sparta with Apache License 2.0 | 5 votes |
package com.stratio.kafka.benchmark.generator.kafka import java.util.Properties import com.typesafe.config.Config import kafka.producer.{KeyedMessage, Producer, ProducerConfig} object KafkaProducer { def getInstance(config: Config): Producer[String, String] = { val props: Properties = new Properties() props.put("metadata.broker.list", config.getString("brokerList")) props.put("serializer.class", "kafka.serializer.StringEncoder") props.put("request.required.acks", "1") val producerConfig = new ProducerConfig(props) new Producer[String, String](producerConfig) } def send(producer: Producer[String, String], topic: String, message: String): Unit = { val keyedMessage: KeyedMessage[String, String] = new KeyedMessage[String, String](topic, message) producer.send(keyedMessage) } }
Example 4
Source File: Actors.scala From embedded-kafka with Apache License 2.0 | 5 votes |
package com.tuplejump.embedded.kafka import scala.reflect.ClassTag import scala.util.Try import akka.actor.Actor import akka.actor.Props import kafka.producer.{ProducerConfig, KeyedMessage, Producer} // only string so far //TODO error handling class KafkaPublisher[K,V : ClassTag](producer: Producer[K,V]) extends Actor { override def postStop(): Unit = Try(producer.close()) def receive: Actor.Receive = { case e: Events.PublishTo[V] => publish(e) } private def publish(e: Events.PublishTo[V]): Unit = producer.send(e.data.toArray.map { new KeyedMessage[K,V](e.topic, _) }: _*) } object KafkaPublisher { def props(producerConfig: ProducerConfig): Props = { val producer = new Producer[String,String](producerConfig) Props(new KafkaPublisher(producer)) } }
Example 5
Source File: KafkaProducer.scala From spark-ref-architecture with Apache License 2.0 | 5 votes |
package com.stc.spark.streaming.kafka import java.util.{Date, Properties} import kafka.producer.{KeyedMessage, Producer, ProducerConfig} import scala.util.Random object KafkaProducer extends App { val events = args(0).toInt val topic = args(1) val brokers = args(2) val rnd = new Random() val producer = new Producer[String, String](KafkaConfig.config) val t = System.currentTimeMillis() for (nEvents <- Range(0, events)) { val runtime = new Date().getTime(); val ip = "192.168.2." + rnd.nextInt(255); val msg = runtime + "," + nEvents + ",www.example.com," + ip; val data = new KeyedMessage[String, String](topic, ip, msg); producer.send(data); } System.out.println("sent per second: " + events * 1000 / (System.currentTimeMillis() - t)); producer.close(); }
Example 6
Source File: UserBehaviorMsgProducer.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.examples.streaming.IBMKafkaStream

import scala.util.Random
import java.util.Properties
import kafka.producer.KeyedMessage
import kafka.producer.ProducerConfig
import kafka.producer.Producer

/**
 * Runnable that endlessly publishes synthetic user-behavior messages to Kafka.
 *
 * Message format: "page<id>|<clicks>|<stayMinutes>|<likeFlag>", e.g.
 * "page001|2|7.123|1" — page id, click count since entering the site,
 * stay time in minutes, and a like flag (1 = like, -1 = dislike, 0 = neutral).
 */
class UserBehaviorMsgProducer(brokers: String, topic: String) extends Runnable {
  private val brokerList = brokers
  private val targetTopic = topic
  private val props = new Properties()
  props.put("metadata.broker.list", this.brokerList)
  props.put("serializer.class", "kafka.serializer.StringEncoder")
  props.put("producer.type", "async")
  private val config = new ProducerConfig(this.props)
  private val producer = new Producer[String, String](this.config)

  private val PAGE_NUM = 100
  private val MAX_MSG_NUM = 3
  private val MAX_CLICK_TIME = 5
  private val MAX_STAY_TIME = 10 // NOTE(review): declared but unused below
  // Like = 1; dislike = -1; neutral = 0
  private val LIKE_OR_NOT = Array[Int](1, 0, -1)

  def run(): Unit = {
    val rand = new Random()
    while (true) {
      // Random batch size; the inclusive 0-to-msgNum loop below emits msgNum+1 messages.
      val msgNum = rand.nextInt(MAX_MSG_NUM) + 1
      try {
        // Build each message as pageId | clicks | stay time | like flag.
        for (i <- 0 to msgNum) {
          val fields = Seq(
            "page" + (rand.nextInt(PAGE_NUM) + 1),
            (rand.nextInt(MAX_CLICK_TIME) + 1).toString,
            (rand.nextInt(MAX_CLICK_TIME) + rand.nextFloat()).toString,
            LIKE_OR_NOT(rand.nextInt(3)).toString)
          val line = fields.mkString("|")
          println(line)
          // send the generated message to the broker
          sendMessage(line)
        }
        println("%d user behavior messages produced.".format(msgNum + 1))
      } catch {
        case e: Exception => println(e)
      }
      try {
        // Pause 5 seconds between micro-batches of messages.
        Thread.sleep(5000)
      } catch {
        case e: Exception => println(e)
      }
    }
  }

  /** Sends one message to the topic, printing (not propagating) any failure. */
  def sendMessage(message: String) = {
    try {
      val data = new KeyedMessage[String, String](this.topic, message)
      producer.send(data)
    } catch {
      case e: Exception => println(e)
    }
  }
}

object UserBehaviorMsgProducerClient {
  def main(args: Array[String]) {
    // Broker list and topic are hard-coded for the example; args are ignored.
    val Array(brokerList, targetTopic) = Array("192.168.200.80:9092", "topic") //args
    // start the message producer thread
    new Thread(new UserBehaviorMsgProducer(brokerList, targetTopic)).start()
  }
}
Example 7
Source File: KafkaJsonProducer.scala From coral with Apache License 2.0 | 5 votes |
package io.coral.lib import java.util.Properties import io.coral.lib.KafkaJsonProducer.KafkaEncoder import kafka.producer.{KeyedMessage, ProducerConfig, Producer} import kafka.serializer.Encoder import kafka.utils.VerifiableProperties import org.json4s.JsonAST.{JObject, JValue} import org.json4s.jackson.JsonMethods._ object KafkaJsonProducer { type KafkaEncoder = Encoder[JValue] def apply() = new KafkaJsonProducer(classOf[JsonEncoder]) def apply[T <: KafkaEncoder](encoder: Class[T]) = new KafkaJsonProducer(encoder) } class KafkaJsonProducer[T <: KafkaEncoder](encoderClass: Class[T]) { def createSender(topic: String, properties: Properties): KafkaSender = { val props = properties.clone.asInstanceOf[Properties] props.put("serializer.class", encoderClass.getName) val producer = createProducer(props) new KafkaSender(topic, producer) } def createProducer(props: Properties): Producer[String, JValue] = { new Producer[String, JValue](new ProducerConfig(props)) } } class KafkaSender(topic: String, producer: Producer[String, JValue]) { def send(key: Option[String], message: JObject) = { val keyedMessage: KeyedMessage[String, JValue] = key match { case Some(key) => new KeyedMessage(topic, key, message) case None => new KeyedMessage(topic, message) } producer.send(keyedMessage) } } class JsonEncoder(verifiableProperties: VerifiableProperties) extends KafkaEncoder { override def toBytes(value: JValue): Array[Byte] = { compact(value).getBytes("UTF-8") } }
Example 8
Source File: KafkaJsonProducerSpec.scala From coral with Apache License 2.0 | 5 votes |
package io.coral.lib

import java.util.Properties
import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import kafka.utils.VerifiableProperties
import org.json4s.JsonAST.{JObject, JValue}
import org.scalatest.{Matchers, WordSpec}
import org.json4s.jackson.JsonMethods._
import kafka.producer.{ProducerConfig, KeyedMessage, Producer}
import org.mockito.{Mockito, ArgumentCaptor}
import org.mockito.Mockito._
import scala.collection.mutable

/** Unit tests for KafkaJsonProducer, KafkaSender and JsonEncoder. */
class KafkaJsonProducerSpec extends WordSpec with Matchers {
  "A KafkaJsonProducer" should {
    "create a KafkaJsonProducer with the JsonEncoder" in {
      val producer = KafkaJsonProducer()
      assert(producer.getClass == classOf[KafkaJsonProducer[JsonEncoder]])
    }
    "create a KafkaJsonProducer with the specified Encoder" in {
      val producer = KafkaJsonProducer(classOf[MyEncoder])
      assert(producer.getClass == classOf[KafkaJsonProducer[MyEncoder]])
    }
    "create a sender" in {
      // MyKafkaJsonProducer records the Properties passed to createProducer,
      // letting us verify the serializer.class wiring without a real broker.
      val producer = new MyKafkaJsonProducer
      producer.createSender("topic", new Properties)
      val serializer = producer.receivedProperties.get("serializer.class")
      assert(serializer == classOf[MyEncoder].getName)
    }
  }
  "A KafkaSender" should {
    "send the JSON provided without a key to Kafka" in {
      val messageJson = """{"key1": "value1", "key2": "value2"}"""
      val keyedMessage = sendMessage(None, messageJson)
      assert(keyedMessage.topic == "test")
      assert(keyedMessage.hasKey == false)
      assert(keyedMessage.message == parse(messageJson))
    }
    "send the JSON provided with a key to Kafka" in {
      val messageJson = """{"key3": "value3", "key4": "value4"}"""
      val keyedMessage = sendMessage(Some("key"), messageJson)
      assert(keyedMessage.key == "key")
      assert(keyedMessage.topic == "test")
      assert(keyedMessage.message == parse(messageJson))
    }
  }
  "A JsonEncoder" should {
    "encode the provided json" in {
      // Round-trip: encode to bytes, decode, and compare parsed JSON trees.
      val json = """{"key1": "value1"}"""
      val encoder = new JsonEncoder(new VerifiableProperties)
      val result = encoder.toBytes(parse(json))
      assert(parse(new String(result, "UTF-8")) == parse(json))
    }
  }

  // Sends messageJson through a KafkaSender backed by a mocked producer and
  // returns the single KeyedMessage captured from the mock's send(...) call.
  private def sendMessage(key: Option[String], messageJson: String): KeyedMessage[String, JValue] = {
    val producer = Mockito.mock(classOf[Producer[String, JValue]])
    val sender = new KafkaSender("test", producer)
    sender.send(key, parse(messageJson).asInstanceOf[JObject])
    val argumentCaptor = ArgumentCaptor.forClass(classOf[KeyedMessage[String, JValue]])
    verify(producer).send(argumentCaptor.capture())
    val keyedMessages = argumentCaptor.getAllValues
    assert(keyedMessages.size == 1)
    // The following construction is necessary because capturing of varargs
    // parameters with Mockito, Scala type inference, and multiple arguments
    // don't work together without explicit casts.
    keyedMessages.get(0).asInstanceOf[mutable.WrappedArray.ofRef[KeyedMessage[String, JValue]]](0)
  }
}

/** Test encoder that discards the value and returns an empty byte array. */
class MyEncoder(verifiableProperties: VerifiableProperties) extends KafkaEncoder {
  override def toBytes(value: JValue): Array[Byte] = {
    Array()
  }
}

/** Producer subclass that captures createProducer's Properties and mocks the producer. */
class MyKafkaJsonProducer extends KafkaJsonProducer(classOf[MyEncoder]) {
  // Properties received by the last createProducer call, for assertions.
  var receivedProperties: Properties = _
  override def createProducer(props: Properties): Producer[String, JValue] = {
    receivedProperties = props
    Mockito.mock(classOf[Producer[String, JValue]])
  }
}