Java Code Examples for org.apache.kafka.clients.producer.KafkaProducer#flush()
The following examples show how to use org.apache.kafka.clients.producer.KafkaProducer#flush().
Follow the links above each example to visit the original project or source file.
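For orientation before the examples: KafkaProducer#send() is asynchronous and only appends the record to the producer's in-memory buffer, while flush() blocks until every previously sent record has completed. Below is a minimal, self-contained sketch of the pattern most of the examples share; the broker address and topic name are placeholder assumptions, not values from any project listed here.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class FlushSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // placeholder broker address
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 10; i++) {
                // send() is asynchronous: it only enqueues the record in the producer's buffer
                producer.send(new ProducerRecord<>("demo-topic", "key-" + i, "value-" + i));
            }
            // flush() blocks until all buffered records have been sent and acknowledged
            producer.flush();
        } // close() also flushes, but an explicit flush() marks the batch boundary clearly
    }
}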
Example 1
Source File: KafkaUtil.java From flink-learning with Apache License 2.0
public static void writeToKafka() throws InterruptedException {
    Properties props = new Properties();
    props.put("bootstrap.servers", broker_list);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    for (int i = 1; i <= 100; i++) {
        Student student = new Student(i, "zhisheng" + i, "password" + i, 18 + i);
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, null, GsonUtil.toJson(student));
        producer.send(record);
        System.out.println("Sending data: " + GsonUtil.toJson(student));
        Thread.sleep(10 * 1000); // sleep 10s after each record, i.e. 6 records per minute
    }
    producer.flush();
}
Example 2
Source File: ProductUtil.java From flink-learning with Apache License 2.0
public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", broker_list);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    for (int i = 1; i <= 10000; i++) {
        ProductEvent product = ProductEvent.builder()
                .id((long) i)                               // product id
                .name("product" + i)                        // product name
                .price(random.nextLong() / 10000000000000L) // product price (in cents)
                .code("code" + i)                           // product code
                .build();
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, null, GsonUtil.toJson(product));
        producer.send(record);
        System.out.println("Sending data: " + GsonUtil.toJson(product));
    }
    producer.flush();
}
Example 3
Source File: UnionListStateUtil.java From flink-learning with Apache License 2.0
public static void writeToKafka() throws InterruptedException {
    Properties props = new Properties();
    props.put("bootstrap.servers", broker_list);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    // generate random numbers in 0~9 as appIds
    for (int i = 0; i < 5; i++) {
        String value = "" + new Random().nextInt(10);
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, null, value);
        producer.send(record);
    }
    System.out.println("Data sent");
    producer.flush();
}
Example 4
Source File: WebKafkaConsumerTest.java From kafka-webview with MIT License
public void publishDummyDataNumbers() {
    final String topic = "NumbersTopic";

    // Create publisher
    final Map<String, Object> config = new HashMap<>();
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
    config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

    final KafkaProducer<Integer, Integer> producer = new KafkaProducer<>(config);
    for (int value = 0; value < 10000; value++) {
        producer.send(new ProducerRecord<>(topic, value, value));
    }
    producer.flush();
    producer.close();
}
Example 5
Source File: PvStatExactlyOnceKafkaUtil.java From flink-learning with Apache License 2.0
private static void writeToKafka() {
    Properties props = new Properties();
    props.put("bootstrap.servers", broker_list);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    // generate a random number in 0~1 as the appId
    String value = "" + new Random().nextInt(2);
    ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, null, value);
    producer.send(record);
    System.out.println("Sending data: " + value);

    Long pv = producerMap.get(value);
    if (null == pv) {
        producerMap.put(value, 1L);
    } else {
        producerMap.put(value, pv + 1);
    }
    System.out.println("Produced so far:");
    for (Map.Entry<String, Long> appIdPv : producerMap.entrySet()) {
        System.out.println("appId:" + appIdPv.getKey() + " pv:" + appIdPv.getValue());
    }
    producer.flush();
}
Example 6
Source File: BuildLogEventDataUtil.java From flink-learning with Apache License 2.0
public static void writeDataToKafka() {
    Properties props = new Properties();
    props.put("bootstrap.servers", BROKER_LIST);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    for (int i = 0; i < 100; i++) {
        LogEvent logEvent = LogEvent.builder()
                .type("app")
                .timestamp(System.currentTimeMillis())
                .level(logLevel())
                .message(message(i + 1))
                .tags(mapData())
                .build();
        // System.out.println(logEvent);
        ProducerRecord<String, String> record = new ProducerRecord<>(LOG_TOPIC, null, null, GsonUtil.toJson(logEvent));
        producer.send(record);
    }
    producer.flush();
}
Example 7
Source File: TestUtils.java From uReplicator with Apache License 2.0
public static void produceMessages(String bootstrapServer, String topicname, int messageCount, int numOfPartitions) {
    KafkaProducer producer = createProducer(bootstrapServer);
    for (int i = 0; i < messageCount; i++) {
        ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(topicname, i % numOfPartitions, null,
                String.format("Test Value - %d", i).getBytes());
        producer.send(record);
    }
    producer.flush();
    producer.close();
}
Example 8
Source File: DeduplicationExampleUtil.java From flink-learning with Apache License 2.0
public static void writeToKafka() throws InterruptedException {
    Properties props = new Properties();
    props.put("bootstrap.servers", broker_list);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    // generate 10 random page-visit events
    for (int i = 0; i < 10; i++) {
        String yyyyMMdd = new DateTime(System.currentTimeMillis()).toString("yyyyMMdd");
        int pageId = random.nextInt(10);  // random page id
        int userId = random.nextInt(100); // random user id

        UserVisitWebEvent userVisitWebEvent = UserVisitWebEvent.builder()
                .id(UUID.randomUUID().toString()) // unique log id
                .date(yyyyMMdd)                   // date
                .pageId(pageId)                   // page id
                .userId(Integer.toString(userId)) // user id
                .url("url/" + pageId)             // page url
                .build();
        // serialize the object to JSON and send it to Kafka
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, null, GsonUtil.toJson(userVisitWebEvent));
        producer.send(record);
        System.out.println("Sending data: " + GsonUtil.toJson(userVisitWebEvent));
    }
    producer.flush();
}
Example 9
Source File: TestUtils.java From uReplicator with Apache License 2.0
public static void produceMessages(String bootstrapServer, String topicname, int messageCount) {
    KafkaProducer producer = createProducer(bootstrapServer);
    for (int i = 0; i < messageCount; i++) {
        ProducerRecord<byte[], byte[]> record = new ProducerRecord<>(topicname, null,
                String.format("Test Value - %d", i).getBytes());
        producer.send(record);
    }
    producer.flush();
    producer.close();
}
Example 10
Source File: KafkaUtil.java From flink-learning with Apache License 2.0
public static void writeToKafka() throws InterruptedException {
    Properties props = new Properties();
    props.put("bootstrap.servers", broker_list);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    for (int i = 1; i <= 100; i++) {
        Student student = new Student(i, "zhisheng" + i, "password" + i, 18 + i);
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, null, GsonUtil.toJson(student));
        producer.send(record);
        System.out.println("Sending data: " + GsonUtil.toJson(student));
    }
    producer.flush();
}
Example 11
Source File: UvExampleUtil.java From flink-learning with Apache License 2.0
public static void writeToKafka() throws InterruptedException {
    Properties props = new Properties();
    props.put("bootstrap.servers", broker_list);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    // generate 10 random page-visit events
    for (int i = 0; i < 10; i++) {
        String yyyyMMdd = new DateTime(System.currentTimeMillis()).toString("yyyyMMdd");
        int pageId = random.nextInt(10);  // random page id
        int userId = random.nextInt(100); // random user id

        UserVisitWebEvent userVisitWebEvent = UserVisitWebEvent.builder()
                .id(UUID.randomUUID().toString()) // unique log id
                .date(yyyyMMdd)                   // date
                .pageId(pageId)                   // page id
                .userId(Integer.toString(userId)) // user id
                .url("url/" + pageId)             // page url
                .build();
        // serialize the object to JSON and send it to Kafka
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, null, GsonUtil.toJson(userVisitWebEvent));
        producer.send(record);
        System.out.println("Sending data: " + GsonUtil.toJson(userVisitWebEvent));
    }
    producer.flush();
}
Example 12
Source File: BaseService.java From whirlpool with Apache License 2.0
@Override
public String call() throws Exception {
    // set up the producer
    KafkaProducer<String, String> producer;
    try (InputStream props = Resources.getResource("producer.props").openStream()) {
        Properties properties = new Properties();
        properties.load(props);
        producer = new KafkaProducer<>(properties);
    }

    String message;
    try {
        while (keepRunning.get()) {
            while ((message = responseQueue.poll()) != null) {
                logger.debug(String.format("Sending message: '%s' to topic: '%s'", message, topic));
                producer.send(new ProducerRecord<>(topic, message), (metadata, e) -> {
                    if (e != null) {
                        logger.error(e.getMessage(), e);
                    } else {
                        // metadata is null when the send fails, so only log it on success
                        logger.trace(String.format("The offset of the record we just sent is: %d", metadata.offset()));
                    }
                });
            }
            producer.flush();

            // Don't busy wait
            Thread.sleep(20L);
        }
    } catch (Throwable throwable) {
        logger.error(throwable.getMessage(), throwable);
    } finally {
        producer.close();
    }

    return "done";
}
Example 13
Source File: KafkaUtil.java From flink-learning with Apache License 2.0
public static void writeToKafka() throws InterruptedException {
    Properties props = new Properties();
    props.put("bootstrap.servers", broker_list);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");   // key serializer
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); // value serializer
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);

    MetricEvent metric = new MetricEvent();
    metric.setTimestamp(System.currentTimeMillis());
    metric.setName("mem");

    Map<String, String> tags = new HashMap<>();
    Map<String, Object> fields = new HashMap<>();

    tags.put("cluster", "zhisheng");
    tags.put("host_ip", "101.147.022.106");

    fields.put("used_percent", 90d);
    fields.put("max", 27244873d);
    fields.put("used", 17244873d);
    fields.put("init", 27244873d);

    metric.setTags(tags);
    metric.setFields(fields);

    ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, null, GsonUtil.toJson(metric));
    producer.send(record);
    System.out.println("Sending data: " + GsonUtil.toJson(metric));

    producer.flush();
}
Example 14
Source File: Producer.java From kafka-sample-programs with Apache License 2.0
public static void main(String[] args) throws IOException {
    // set up the producer
    KafkaProducer<String, String> producer;
    try (InputStream props = Resources.getResource("producer.props").openStream()) {
        Properties properties = new Properties();
        properties.load(props);
        producer = new KafkaProducer<>(properties);
    }

    try {
        for (int i = 0; i < 1000000; i++) {
            // send lots of messages
            producer.send(new ProducerRecord<>(
                    "fast-messages",
                    String.format(Locale.US, "{\"type\":\"test\", \"t\":%.3f, \"k\":%d}", System.nanoTime() * 1e-9, i)));

            // every so often send to a different topic
            if (i % 1000 == 0) {
                producer.send(new ProducerRecord<>(
                        "fast-messages",
                        String.format(Locale.US, "{\"type\":\"marker\", \"t\":%.3f, \"k\":%d}", System.nanoTime() * 1e-9, i)));
                producer.send(new ProducerRecord<>(
                        "summary-markers",
                        String.format(Locale.US, "{\"type\":\"other\", \"t\":%.3f, \"k\":%d}", System.nanoTime() * 1e-9, i)));
                producer.flush();
                System.out.println("Sent msg number " + i);
            }
        }
    } catch (Throwable throwable) {
        throwable.printStackTrace();
    } finally {
        producer.close();
    }
}
Example 15
Source File: KafkaAdapter.java From mdw with Apache License 2.0
public void closeProducer(KafkaProducer<Object, Object> producer) {
    producer.flush();
    producer.close();
}
Example 16
Source File: WhirlpoolMessageHandler.java From whirlpool with Apache License 2.0
@Override
public String call() throws Exception {
    // set up the producer
    KafkaProducer<String, String> producer;
    try (InputStream props = Resources.getResource("producer.props").openStream()) {
        Properties properties = new Properties();
        properties.load(props);
        producer = new KafkaProducer<>(properties);
    }

    try {
        String request;
        while (keepRunning.get()) {
            while ((request = requestQueue.poll()) != null) {
                // simple class containing only the type
                Message message = gson.fromJson(request, Message.class);

                String topic = null;
                switch (message.getType()) {
                    case "TickerCommand":
                        topic = "stock-ticker-cmd";
                        break;
                    case "UpDownCommand":
                        topic = "updown-cmd";
                        break;
                    case "WeatherCommand":
                        topic = "weather-cmd";
                        break;
                }

                if (topic != null) {
                    producer.send(new ProducerRecord<>(topic, request), (metadata, e) -> {
                        if (e != null) {
                            logger.error(e.getMessage(), e);
                        } else {
                            // metadata is null when the send fails, so only log it on success
                            logger.debug("The offset of the record we just sent is: " + metadata.offset());
                        }
                    });
                } else {
                    logger.info(String.format("Ignoring message with unknown type %s", message.getType()));
                }
            }

            producer.flush();
            Thread.sleep(20L);
        }
    } catch (Throwable throwable) {
        logger.error(throwable.getMessage(), throwable);
    } finally {
        producer.close();
    }

    return "done";
}
Example 17
Source File: KafkaHelper.java From kafka-junit with Apache License 2.0
/**
 * Produce data to the specified topic
 *
 * @param topic    Topic to produce to
 * @param producer Producer to use
 * @param data     Data to produce
 * @param <K>      Type of key
 * @param <V>      Type of value
 */
public <K, V> void produce(String topic, KafkaProducer<K, V> producer, Map<K, V> data) {
    data.forEach((k, v) -> producer.send(new ProducerRecord<>(topic, k, v)));
    producer.flush();
}
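A possible call site for this helper, as a sketch only: the KafkaHelper parameter, broker address, and topic name are assumptions for illustration, and the producer is built directly so the snippet stays self-contained.

public void produceExample(KafkaHelper kafkaHelper) {
    // Illustrative usage of the produce() helper above; broker and topic are placeholders.
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

    try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
        Map<String, String> data = new HashMap<>();
        data.put("user-1", "{\"action\":\"login\"}");
        data.put("user-2", "{\"action\":\"logout\"}");
        kafkaHelper.produce("events-topic", producer, data); // sends each entry, then flushes
    }
}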