kafka.producer.KeyedMessage Java Examples
The following examples show how to use
kafka.producer.KeyedMessage.
You can go to the original project or source file by following the reference above each example.
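Before the project examples, here is a minimal, self-contained sketch of how KeyedMessage fits into the legacy Scala producer API (kafka.javaapi.producer.Producer). The broker address, topic name, key, and payload below are assumptions chosen for illustration, not taken from any of the projects listed.

import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class KeyedMessageSketch {
    public static void main(String[] args) {
        // Minimal configuration for the legacy (pre-0.9) producer client.
        Properties props = new Properties();
        props.put("metadata.broker.list", "localhost:9092"); // assumed broker address
        props.put("serializer.class", "kafka.serializer.StringEncoder");

        Producer<String, String> producer = new Producer<>(new ProducerConfig(props));

        // A KeyedMessage carries the topic, an optional key (used to pick a partition), and the payload.
        producer.send(new KeyedMessage<>("example-topic", "example-key", "hello"));
        // The key may be omitted; the producer then chooses a partition itself.
        producer.send(new KeyedMessage<>("example-topic", "hello again"));

        producer.close();
    }
}

Note that KeyedMessage belongs to the old producer API, which was removed in Kafka 2.0; new code should use org.apache.kafka.clients.producer.ProducerRecord from the Java client instead.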
Example #1
Source File: MockRealTimeData.java From BigDataPlatform with GNU General Public License v3.0 | 6 votes |
public void run() {
    while (true) {
        String province = provinces[random.nextInt(5)];
        String city = provinceCityMap.get(province)[random.nextInt(2)];
        String log = new Date().getTime() + " " + province + " " + city + " "
                + random.nextInt(1000) + " " + random.nextInt(10);
        producer.send(new KeyedMessage<Integer, String>("AdRealTimeLog", log));
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
Example #2
Source File: AbstractExactlyOnceKafkaOutputOperator.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
@Override
public void process(T tuple)
{
    Pair<K, V> keyValue = tupleToKeyValue(tuple);
    int pid = 0;
    if (partitioner != null) {
        pid = partitioner.partition(keyValue.first, partitionNum);
    }
    Pair<byte[], byte[]> lastMsg = lastMsgs.get(pid);
    if (lastMsg == null || compareToLastMsg(keyValue, lastMsg) > 0) {
        getProducer().send(new KeyedMessage<K, V>(getTopic(), keyValue.first, keyValue.second));
        sendCount++;
    } else {
        // ignore the tuple because Kafka already has it
        logger.debug("Ignore tuple " + tuple);
        return;
    }
}
Example #3
Source File: KafkaFunctionOp.java From PoseidonX with Apache License 2.0 | 6 votes |
/**
 * {@inheritDoc}
 */
@Override
public void execute(String streamName, TupleEvent event) throws StreamingException {
    String result = null;
    try {
        result = (String) serde.serialize(BaseSerDe.changeEventsToList(event));
    } catch (StreamSerDeException e) {
        LOG.warn("Ignore a serde exception.", e);
    }
    if (result == null) {
        LOG.warn("Ignore a null result in output.");
        return;
    }
    LOG.debug("The output result is {}.", result);
    producer.send(new KeyedMessage<Integer, String>(topic, result));
    LOG.debug("Kafka send success.");
}
Example #4
Source File: KafkaStreamProxyProducerImpl.java From eagle with Apache License 2.0 | 6 votes |
@Override
public void send(List<StreamRecord> events) throws IOException {
    List<KeyedMessage> messages = new ArrayList<>(events.size());
    for (StreamRecord record : events) {
        String output = new ObjectMapper().writeValueAsString(record);
        messages.add(new KeyedMessage(this.config.getTopicId(), output));
    }
    try {
        // a partition key may cause data skew
        // producer.send(new KeyedMessage(this.topicId, key, output));
        producer.send(messages);
    } catch (Exception ex) {
        LOGGER.error(ex.getMessage(), ex);
        throw ex;
    }
}
Example #5
Source File: AvroOneM2MDataPublish.java From SDA with BSD 2-Clause "Simplified" License | 6 votes |
/**
 * Send data
 * @param event
 * @throws Exception
 * @return void
 */
public void send(COL_ONEM2M event) throws Exception {
    EncoderFactory avroEncoderFactory = EncoderFactory.get();
    SpecificDatumWriter<COL_ONEM2M> avroEventWriter =
            new SpecificDatumWriter<COL_ONEM2M>(COL_ONEM2M.SCHEMA$);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream, null);
    try {
        avroEventWriter.write(event, binaryEncoder);
        binaryEncoder.flush();
    } catch (IOException e) {
        e.printStackTrace();
        throw e;
    }
    IOUtils.closeQuietly(stream);
    KeyedMessage<String, byte[]> data =
            new KeyedMessage<String, byte[]>(TOPIC, stream.toByteArray());
    producer.send(data);
}
Example #6
Source File: AvroLWM2MDataPublish.java From SDA with BSD 2-Clause "Simplified" License | 6 votes |
/**
 * Send data
 * @param event
 * @throws Exception
 * @return void
 */
public void send(COL_LWM2M event) throws Exception {
    EncoderFactory avroEncoderFactory = EncoderFactory.get();
    SpecificDatumWriter<COL_LWM2M> avroEventWriter =
            new SpecificDatumWriter<COL_LWM2M>(COL_LWM2M.SCHEMA$);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    BinaryEncoder binaryEncoder = avroEncoderFactory.binaryEncoder(stream, null);
    try {
        avroEventWriter.write(event, binaryEncoder);
        binaryEncoder.flush();
    } catch (IOException e) {
        e.printStackTrace();
        throw e;
    }
    IOUtils.closeQuietly(stream);
    KeyedMessage<String, byte[]> data =
            new KeyedMessage<String, byte[]>(TOPIC, stream.toByteArray());
    producer.send(data);
}
Example #7
Source File: KafkaPluginSink.java From ffwd with Apache License 2.0 | 6 votes |
final <T> Iterator<KeyedMessage<Integer, byte[]>> iteratorFor(
    Iterable<? extends T> iterable, final Converter<T> converter
) {
    final Iterator<? extends T> iterator = iterable.iterator();

    return new Iterator<KeyedMessage<Integer, byte[]>>() {
        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        @Override
        public KeyedMessage<Integer, byte[]> next() {
            try {
                return converter.toMessage(iterator.next());
            } catch (final Exception e) {
                throw new RuntimeException("Failed to produce next element", e);
            }
        }

        @Override
        public void remove() {
        }
    };
}
Example #8
Source File: StageToKafkaDriver.java From geowave with Apache License 2.0 | 6 votes |
@Override
protected void processFile(
    final URL file,
    final String typeName,
    final GeoWaveAvroFormatPlugin<?, ?> plugin,
    final StageKafkaData<?> runData) {
  try {
    final Producer<String, Object> producer =
        (Producer<String, Object>) runData.getProducer(typeName, plugin);
    try (final CloseableIterator<?> avroRecords = plugin.toAvroObjects(file)) {
      while (avroRecords.hasNext()) {
        final Object avroRecord = avroRecords.next();
        final KeyedMessage<String, Object> data = new KeyedMessage<>(typeName, avroRecord);
        producer.send(data);
      }
    }
  } catch (final Exception e) {
    LOGGER.info(
        "Unable to send file [" + file.getPath() + "] to Kafka topic: " + e.getMessage(),
        e);
  }
}
Example #9
Source File: NativeProducer.java From spring-kafka-demo with Apache License 2.0 | 6 votes |
public static void main(String[] args) {
    String topic = "test";
    long events = 100;
    Random rand = new Random();

    Properties props = new Properties();
    props.put("metadata.broker.list", "localhost:9092");
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    props.put("request.required.acks", "1");

    ProducerConfig config = new ProducerConfig(props);
    Producer<String, String> producer = new Producer<String, String>(config);

    for (long nEvents = 0; nEvents < events; nEvents++) {
        String msg = "NativeMessage-" + rand.nextInt();
        KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, nEvents + "", msg);
        producer.send(data);
    }
    producer.close();
}
Example #10
Source File: KafkaPluginSink.java From ffwd with Apache License 2.0 | 6 votes |
private AsyncFuture<Void> send(final Iterator<List<KeyedMessage<Integer, byte[]>>> batches) {
    final UUID id = UUID.randomUUID();

    return async.call(() -> {
        final List<Long> times = new ArrayList<>();

        log.info("{}: Start sending of batch", id);

        while (batches.hasNext()) {
            final Stopwatch watch = Stopwatch.createStarted();
            producer.send(batches.next());
            times.add(watch.elapsed(TimeUnit.MILLISECONDS));
        }

        log.info("{}: Done sending batch (timings in ms: {})", id, times);
        return null;
    }, executorService);
}
Example #11
Source File: CBKafkaProducer.java From couchbasekafka with Apache License 2.0 | 6 votes |
/**
 * Publish a message to the Kafka queue.
 * @param key - Key of the Couchbase document
 * @param message - Body of the Couchbase document
 * @throws IOException
 */
public static void publishMessage(final String key, final String message) throws IOException {
    String msg = null;
    try {
        // Apply a transformation to the message if one is configured.
        if (Boolean.parseBoolean(ConfigLoader.getProp(Constants.ENABLETRANSFORMATION))) {
            msg = CBMessageTransformerFactory.INSTANCE.createCBMessageConverter().convert(key, message);
        } else {
            msg = message;
        }
    } catch (Exception e) {
        // On any exception, perform no conversion.
    }
    if (msg != null && msg.trim().length() > 0) {
        // Wrap KEY/VALUE in JSON format: {"KEY":"<CBKEY>","VALUE":<CBVALUE>}
        String cbmessage = Constants.KAFKA_MESSAGE.replace("[CBKEY]", key);
        cbmessage = cbmessage.replace("[CBVALUE]", msg);
        KeyedMessage<String, String> data = new KeyedMessage<String, String>(
                ConfigLoader.getKafkaConfigProps().getProperty(Constants.TOPIC_NAME), key, cbmessage);
        // The producer.type property controls async/sync sending.
        if (data != null) {
            producer.send(data);
        }
    }
}
Example #12
Source File: KafkaConnPoolUtilsTest.java From yuzhouwan with Apache License 2.0 | 6 votes |
@Ignore
@Test
public void getConnTest() throws Exception {
    PropUtils p = PropUtils.getInstance();
    int kafkaConnPoolSize = Integer.parseInt(p.getProperty("kafka.conn.pool.size"));
    Producer<String, byte[]> conn = KafkaConnPoolUtils.getInstance().getConn();
    String topic = p.getProperty("kafka.topic");
    for (int i = 0, max = 1000000000; i < max; i++) {
        System.out.println(String.format("Sending %s/%s ...", i, max));
        Thread.sleep(1000);
        conn.send(new KeyedMessage<>(topic,
                ("{\"appId\":1,\"attemptId\":\"2\",\"callId\":\"" + i + "\",\"description\":\"yuzhouwan\"}")
                        .getBytes()));
    }
    for (int i = 1; i < 2 * kafkaConnPoolSize; i++) {
        KafkaConnPoolUtils.getInstance().getConn();
    }
}
Example #13
Source File: SensorGridSimulation.java From Decision with Apache License 2.0 | 5 votes |
@Override
public void run() {
    logger.debug(name + index + " is ON... generating " + dataToGenerate + " measures");
    StratioStreamingMessage message = new StratioStreamingMessage();
    message.setOperation(STREAM_OPERATIONS.MANIPULATION.INSERT);
    message.setStreamName(sensorDataStream);
    message.setTimestamp(System.currentTimeMillis());
    message.setSession_id("" + streamSessionId);

    for (int i = 0; i < dataToGenerate; i++) {
        List<ColumnNameTypeValue> sensorData = Lists.newArrayList();
        sensorData.add(new ColumnNameTypeValue("name", null, name));
        sensorData.add(new ColumnNameTypeValue("ind", null, "" + index));
        sensorData.add(new ColumnNameTypeValue("data", null,
                (random.nextInt((dataRangeHigh - dataRangeLow) + 1) + dataRangeLow)));

        message.setRequest_id("" + System.currentTimeMillis());
        message.setColumns(sensorData);
        message.setRequest("dummy request");

        KeyedMessage<String, String> busMessage = new KeyedMessage<String, String>(
                InternalTopic.TOPIC_DATA.getTopicName(), STREAM_OPERATIONS.MANIPULATION.INSERT,
                gson.toJson(message));
        producer.send(busMessage);
        globalMessagesSent.getAndIncrement();
    }
    shutdownLatch.countDown();
    producer.close();
}
Example #14
Source File: KafkaStreamSink.java From eagle with Apache License 2.0 | 5 votes |
@Override
protected void execute(Object key, Map event, OutputCollector collector) throws Exception {
    try {
        String output = new ObjectMapper().writeValueAsString(event);
        // a partition key may cause data skew
        // producer.send(new KeyedMessage(this.topicId, key, output));
        producer.send(new KeyedMessage(this.topicId, output));
    } catch (Exception ex) {
        LOG.error(ex.getMessage(), ex);
        throw ex;
    }
}
Example #15
Source File: KafkaSink.java From cep with GNU Affero General Public License v3.0 | 5 votes |
/**
 * Sends a given message, with a given key, to a given Kafka topic.
 *
 * @param topic The topic where the message will be sent
 * @param key The key of the message
 * @param message The message to send
 */
public void send(String topic, String key, Map<String, Object> message) {
    try {
        String messageStr = objectMapper.writeValueAsString(message);
        KeyedMessage<String, String> keyedMessage = new KeyedMessage<>(topic, key, messageStr);
        producer.send(keyedMessage);
    } catch (IOException e) {
        log.error("Error converting map to json: {}", message);
    }
}
Example #16
Source File: ApplicationTest.java From examples with Apache License 2.0 | 5 votes |
private void writeToTopic() {
    KafkaUnit ku = kafkaUnitRule.getKafkaUnit();
    ku.createTopic(TOPIC);
    for (String line : lines) {
        KeyedMessage<String, String> kMsg = new KeyedMessage<>(TOPIC, line);
        ku.sendMessages(kMsg);
    }
    LOG.debug("Sent messages to topic {}", TOPIC);
}
Example #17
Source File: StreamToActionBusCallback.java From Decision with Apache License 2.0 | 5 votes |
@Override
public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) {
    if (log.isDebugEnabled()) {
        log.debug("Receiving {} events from stream {}", inEvents.length, streamName);
    }

    String topicAction = InternalTopic.TOPIC_ACTION.getTopicName();
    if (groupId != null) {
        topicAction = topicAction.concat("_").concat(groupId);
    }

    List<KeyedMessage<String, byte[]>> messages = new ArrayList<>();
    for (Event event : inEvents) {
        StratioStreamingMessage messageObject = javaToSiddhiSerializer.deserialize(event);
        messageObject.setStreamName(streamName);
        messageObject.setActiveActions(this.activeActions);
        messages.add(new KeyedMessage<String, byte[]>(topicAction,
                javaToAvroSerializer.serialize(messageObject)));
    }

    if (messages.size() != 0) {
        avroProducer.send(messages);
    }
}
Example #18
Source File: KafkaTransport.java From incubator-retired-htrace with Apache License 2.0 | 5 votes |
@Override
public void send(List<byte[]> spans) throws IOException {
    List<KeyedMessage<byte[], byte[]>> entries = new ArrayList<>(spans.size());
    for (byte[] span : spans) {
        entries.add(new KeyedMessage<byte[], byte[]>(topic, span));
    }
    if (LOG.isTraceEnabled()) {
        LOG.trace("sending " + entries.size() + " entries");
    }
    producer.send(entries);
}
Example #19
Source File: SendToKafkaActionExecutionFunction.java From Decision with Apache License 2.0 | 5 votes |
@Override
public void process(Iterable<StratioStreamingMessage> messages) throws Exception {
    List<KeyedMessage<String, String>> kafkaMessages = new ArrayList<>();
    for (StratioStreamingMessage message : messages) {
        kafkaMessages.add(new KeyedMessage<String, String>(message.getStreamName(),
                getSerializer().deserialize(message)));
    }
    getProducer().send(kafkaMessages);
}
Example #20
Source File: BenchmarkPartitionableKafkaOutputOperator.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
@Override
public void run() {
    logger.info("Start producing data ...");
    Properties props = new Properties();
    props.setProperty("serializer.class", "kafka.serializer.StringEncoder");
    props.setProperty("key.serializer.class", "kafka.serializer.StringEncoder");
    props.put("metadata.broker.list", brokerList);
    // props.put("metadata.broker.list", "localhost:9092");
    props.setProperty("partitioner.class", KafkaTestPartitioner.class.getCanonicalName());
    props.setProperty("producer.type", "async");
    // props.setProperty("send.buffer.bytes", "1048576");
    props.setProperty("topic.metadata.refresh.interval.ms", "10000");
    if (producer == null) {
        producer = new Producer<String, String>(new ProducerConfig(props));
    }
    long k = 0;
    while (k < msgsSecThread || !controlThroughput) {
        long key = (stickyKey >= 0 ? stickyKey : k);
        k++;
        producer.send(new KeyedMessage<String, String>(topic, "" + key, new String(constantMsg)));
        if (k == Long.MAX_VALUE) {
            k = 0;
        }
    }
}
Example #21
Source File: KafkaPluginSink.java From ffwd with Apache License 2.0 | 5 votes |
private KeyedMessage<Integer, byte[]> convertBatchMetric(
    final Batch batch, final Batch.Point point
) throws Exception {
    final Map<String, String> allTags = new HashMap<>(batch.getCommonTags());
    allTags.putAll(point.getTags());

    final Map<String, String> allResource = new HashMap<>(batch.getCommonResource());
    allResource.putAll(point.getResource());

    // TODO: support serialization of batches more... immediately.
    return metricConverter.toMessage(
        new Metric(point.getKey(), point.getValue(), new Date(point.getTimestamp()),
            ImmutableSet.of(), allTags, allResource, null));
}
Example #22
Source File: KafkaLoader.java From sqoop-on-spark with Apache License 2.0 | 5 votes |
private void sendToKafka(List<KeyedMessage<String, String>> messageList) {
    try {
        producer.send(messageList);
        messageList.clear();
    } catch (Exception ex) {
        throw new SqoopException(KafkaConnectorErrors.KAFKA_CONNECTOR_0001);
    }
}
Example #23
Source File: KafkaLoader.java From sqoop-on-spark with Apache License 2.0 | 5 votes |
@Override
public void load(LoaderContext context, LinkConfiguration linkConfiguration,
        ToJobConfiguration jobConfiguration) throws Exception {
    producer = getProducer(linkConfiguration);
    System.out.println("got producer");
    String topic = jobConfiguration.toJobConfig.topic;
    System.out.println("topic is:" + topic);
    String batchUUID = UUID.randomUUID().toString();
    String record;

    while ((record = context.getDataReader().readTextRecord()) != null) {
        // create a message and add it to the buffer
        KeyedMessage<String, String> data =
                new KeyedMessage<String, String>(topic, null, batchUUID, record);
        messageList.add(data);
        // if we have enough messages, send the batch to Kafka
        if (messageList.size() >= KafkaConstants.DEFAULT_BATCH_SIZE) {
            sendToKafka(messageList);
        }
        rowsWritten++;
    }
    if (messageList.size() > 0) {
        sendToKafka(messageList);
    }
    producer.close();
}
Example #24
Source File: KafkaTestProducer.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
@Override
public void run() {
    if (messages == null) {
        generateMessages();
    } else {
        for (String msg : messages) {
            producer.send(new KeyedMessage<String, String>(topic, "", msg));
        }
    }
}
Example #25
Source File: SplunkInputFromForwarder.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
public void writeToKafka(String line) {
    T message = null;
    if (line != null) {
        message = getMessage(line);
    }
    if (message != null) {
        producer.send(new KeyedMessage<String, T>(getTopic(), message));
    }
}
Example #26
Source File: ApplicationTest.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
private void writeToTopic() {
    KafkaUnit ku = kafkaUnitRule.getKafkaUnit();
    ku.createTopic(TOPIC);
    for (String line : lines) {
        KeyedMessage<String, String> kMsg = new KeyedMessage<>(TOPIC, line);
        ku.sendMessages(kMsg);
    }
    LOG.debug("Sent messages to topic {}", TOPIC);
}
Example #27
Source File: ProducerExample.java From pulsar with Apache License 2.0 | 5 votes |
private static void publishMessage(Arguments arguments) {
    // (2) Create producer
    Properties properties2 = new Properties();
    properties2.put(BROKER_URL, arguments.serviceUrl);
    properties2.put(PRODUCER_TYPE, "sync");
    properties2.put(SERIALIZER_CLASS, TestEncoder.class.getName());
    properties2.put(KEY_SERIALIZER_CLASS, StringEncoder.class.getName());
    properties2.put(PARTITIONER_CLASS, TestPartitioner.class.getName());
    properties2.put(COMPRESSION_CODEC, "gzip"); // compression: ZLIB
    properties2.put(QUEUE_ENQUEUE_TIMEOUT_MS, "-1"); // block queue if full => -1 = true
    properties2.put(QUEUE_BUFFERING_MAX_MESSAGES, "6000"); // queue max message
    properties2.put(QUEUE_BUFFERING_MAX_MS, "100"); // batch delay
    properties2.put(BATCH_NUM_MESSAGES, "500"); // batch msg
    properties2.put(CLIENT_ID, "test");
    ProducerConfig config = new ProducerConfig(properties2);
    Producer<String, Tweet> producer = new Producer<>(config);

    String name = "user";
    String msg = arguments.messageValue;
    for (int i = 0; i < arguments.totalMessages; i++) {
        String sendMessage = msg + i;
        Tweet tweet = new Tweet(name, sendMessage);
        KeyedMessage<String, Tweet> message = new KeyedMessage<>(arguments.topicName, name, tweet);
        producer.send(message);
    }
    producer.close();
    log.info("Successfully published messages {}", arguments.totalMessages);
}
Example #28
Source File: KafkaPluginSink.java From ffwd with Apache License 2.0 | 5 votes |
@Override
public KeyedMessage<Integer, byte[]> toMessage(final Metric metric) throws Exception {
    final String topic = router.route(metric);
    final int partition = partitioner.partition(metric, host);
    final byte[] payload = serializer.serialize(metric);
    return new KeyedMessage<>(topic, partition, payload);
}