Java Code Examples for org.apache.kafka.clients.consumer.KafkaConsumer#subscribe()
The following examples show how to use org.apache.kafka.clients.consumer.KafkaConsumer#subscribe().
The examples are drawn from open-source projects; the original project, source file, and license are noted above each example.
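Before the project examples, a quick orientation: subscribe() is overloaded. It accepts a collection of topic names, a java.util.regex.Pattern matching topic names, and either form paired with a ConsumerRebalanceListener (Example 7 below uses the pattern-plus-listener variant). The following is a minimal, self-contained sketch of the two common variants; the broker address, group id, and topic names are placeholders, not values taken from any of the projects below.

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.regex.Pattern;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class SubscribeSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "sketch-group");            // placeholder group id
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        // subscribe() replaces any previous subscription; partitions of the
        // subscribed topics are balanced across all consumers sharing group.id.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Variant 1: an explicit list of topic names
            consumer.subscribe(Collections.singletonList("example-topic")); // placeholder topic
            // Variant 2 (Kafka 2.0+): every topic matching a regex
            // consumer.subscribe(Pattern.compile("example-.*"));
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, value = %s%n", record.offset(), record.value());
            }
        }
    }
}

The examples that follow show the same call as it appears in real projects, each with its own surrounding configuration.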
Example 1
Source File: EventMessagingConsumer.java From cqrs-es-kafka with MIT License
private KafkaConsumer<String, String> getKafkaEventConsumer() {
    try {
        Properties properties = new Properties();
        properties.load(new FileReader(this.consumerProperties));
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Arrays.asList(this.topic));
        return consumer;
    } catch (IOException exception) {
        log.error("Error loading Kafka consumer properties", exception);
    }
    return null;
}
Example 2
Source File: SimpleComsumer.java From jeesuite-libs with Apache License 2.0
public static void main(String[] args) { Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); props.put("group.id", "simpleComsumer2"); props.put("enable.auto.commit", "true"); props.put("auto.commit.interval.ms", "1000"); props.put("session.timeout.ms", "30000"); props.put("auto.offset.reset", "earliest"); props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); props.put("value.deserializer", "org.apache.kafka.common.serialization.LongDeserializer"); KafkaConsumer<String, Long> consumer = new KafkaConsumer<>(props); /* 消费者订阅的topic, 可同时订阅多个 */ consumer.subscribe(Arrays.asList("streams-wordcount-output")); /* 读取数据,读取超时时间为100ms */ while (true) { ConsumerRecords<String, Long> records = consumer.poll(1000); for (ConsumerRecord<String, Long> record : records) System.out.printf("offset = %d, key = %s, value = %s \n", record.offset(), record.key(), record.value()); } }
Example 3
Source File: ConsumerFastStart.java From kafka_book_demo with Apache License 2.0
public static void main(String[] args) { Properties properties = new Properties(); properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); properties.put("bootstrap.servers", brokerList); properties.put("group.id", groupId); KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties); consumer.subscribe(Collections.singletonList(topic)); while (true) { ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000)); for (ConsumerRecord<String, String> record : records) { System.out.println(record.value()); } } }
Example 4
Source File: KafkaMessageReader.java From java-course-ee with MIT License
public static void main(String[] args) throws Exception { Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); props.put("group.id", "test"); props.put("enable.auto.commit", "true"); props.put("auto.commit.interval.ms", "1000"); props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props); consumer.subscribe(Arrays.asList("test.topic")); consumer.poll(Duration.ofMillis(1)); while (true) { ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100)); for (ConsumerRecord<String, String> record : records) System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value()); } }
Example 5
Source File: ConsumerTTL.java From kafka_book_demo with Apache License 2.0
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    props.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, ConsumerInterceptorTTL.class.getName());
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList(topic));
    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
        for (ConsumerRecord<String, String> record : records) {
            System.out.println(record.partition() + ":" + record.offset() + ":" + record.value());
        }
    }
}
Example 6
Source File: KafkaAvroTest.java From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, Pet> createConsumer() {
    String registry = System.getProperty("schema.url");
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-avro");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, registry);
    // Without this, you get GenericData.Record instead of `Pet`
    props.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
    KafkaConsumer<Integer, Pet> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("test-avro-producer"));
    return consumer;
}
Example 7
Source File: ConsumerThread.java From kafka-topic-exporter with Apache License 2.0
@Override
public void run() {
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props.getProperties());
    consumer.subscribe(props.getTopicsPattern(), new KteConsumerRebalanceListener(consumer));
    try {
        while (!Thread.interrupted()) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            if (records.count() > 0) {
                LOG.info("Got records count: " + String.valueOf(records.count()));
            }
            for (ConsumerRecord<String, String> record : records) {
                String topic = record.topic();
                if (props.get(PropertyConfig.Constants.KAKFA_CONSUMER_REMOVEPREFIX.key, null) != null) {
                    topic = topic.replaceFirst("^" + props.get(PropertyConfig.Constants.KAKFA_CONSUMER_REMOVEPREFIX.key), "");
                }
                collector.add(topic, record.value());
            }
        }
    } finally {
        LOG.info("Shutting down consumer");
        consumer.close();
    }
}
Example 8
Source File: CompositeTransactionManagerKafkaImpl.java From microservices-transactions-tcc with Apache License 2.0
@SuppressWarnings("unchecked") @Override public List<EntityCommand<?>> fetch(String txId) { List<EntityCommand<?>> transactionOperations = new ArrayList<EntityCommand<?>>(); Map<String, Object> consumerConfigs = (Map<String, Object>)configuration.get("kafkaConsumerConfiguration"); consumerConfigs.put(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString()); KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<String, String>(consumerConfigs); kafkaConsumer.subscribe(Arrays.asList(txId)); ConsumerRecords<String, String> records = kafkaConsumer.poll(kafkaConsumerPollTimeout); for (ConsumerRecord<String, String> record : records){ LOG.info("offset = {}, key = {}, value = {}", record.offset(), record.key(), record.value()); try { transactionOperations.add(serializer.readFromString(record.value())); } catch (SerializationFailedException e) { LOG.error("Unable to deserialize [{}] because of: {}", record.value(), e.getMessage()); } } kafkaConsumer.close(); return transactionOperations; }
Example 9
Source File: TestJGroupMetadataManager.java From kafka-eagle with Apache License 2.0
public static void main(String[] args) { Properties prop = new Properties(); prop.put("group.id", "kafka.eagle.system.group"); prop.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092"); prop.put("exclude.internal.topics", "false"); prop.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getCanonicalName()); prop.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getCanonicalName()); KafkaConsumer<String, String> consumer = new KafkaConsumer<>(prop); consumer.subscribe(Arrays.asList("__consumer_offsets")); boolean flag = true; while (flag) { try { ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100)); for (ConsumerRecord<String, String> record : records) { ByteBuffer buffer = ByteBuffer.wrap(record.value().getBytes()); // OffsetAndMetadata meta = GroupMetadataManager.readOffsetMessageValue(); // BaseKey key = readMessageKey(buffer); OffsetAndMetadata gm = readOffsetMessageValue(buffer); System.out.println(gm); } }catch (Exception e) { // TODO: handle exception e.printStackTrace(); } } }
Example 10
Source File: BrokerStatsReader.java From doctorkafka with Apache License 2.0
public static void main(String[] args) throws Exception {
    CommandLine commandLine = parseCommandLine(args);
    String zkUrl = commandLine.getOptionValue(ZOOKEEPER);
    String statsTopic = commandLine.getOptionValue(STATS_TOPIC);
    String bootstrapBrokers = OperatorUtil.getBrokers(zkUrl, SecurityProtocol.PLAINTEXT);

    Properties props = new Properties();
    props.put(KafkaUtils.BOOTSTRAP_SERVERS, bootstrapBrokers);
    props.put("group.id", "broker_statsreader_group");
    props.put("enable.auto.commit", "false");
    props.put("auto.commit.interval.ms", "1000");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");

    Schema schema = BrokerStats.getClassSchema();
    KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(statsTopic));
    while (true) {
        ConsumerRecords<byte[], byte[]> records = consumer.poll(100);
        for (ConsumerRecord<byte[], byte[]> record : records) {
            System.out.printf("offset = %d, key.size = %d, value.size = %s%n",
                    record.offset(), record.key().length, record.value().length);
            try {
                BinaryDecoder binaryDecoder = avroDecoderFactory.binaryDecoder(record.value(), null);
                SpecificDatumReader<BrokerStats> reader = new SpecificDatumReader<>(schema);
                BrokerStats result = new BrokerStats();
                reader.read(result, binaryDecoder);
                System.out.println(result);
            } catch (Exception e) {
                LOG.error("Failed to decode a message", e);
            }
        }
    }
}
Example 11
Source File: KafkaConsumerExample.java From client-examples with Apache License 2.0
public static void main(String[] args) {
    KafkaConsumerConfig config = KafkaConsumerConfig.fromEnv();
    Properties props = KafkaConsumerConfig.createProperties(config);
    int receivedMsgs = 0;
    if (System.getenv("JAEGER_SERVICE_NAME") != null) {
        Tracer tracer = Configuration.fromEnv().getTracer();
        GlobalTracer.registerIfAbsent(tracer);
        props.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, TracingConsumerInterceptor.class.getName());
    }
    boolean commit = !Boolean.parseBoolean(config.getEnableAutoCommit());
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList(config.getTopic()));
    while (receivedMsgs < config.getMessageCount()) {
        ConsumerRecords<String, String> records = consumer.poll(Long.MAX_VALUE);
        for (ConsumerRecord<String, String> record : records) {
            log.info("Received message:");
            log.info("\tpartition: {}", record.partition());
            log.info("\toffset: {}", record.offset());
            log.info("\tvalue: {}", record.value());
            receivedMsgs++;
        }
        if (commit) {
            consumer.commitSync();
        }
    }
}
Example 12
Source File: SslKafkaEndpoint.java From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, String> createConsumer() { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19093"); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer"); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); addSSL(props); KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("test-ssl-consumer")); return consumer; }
Example 13
Source File: KafkaStreamsTest.java From quarkus with Apache License 2.0
private static KafkaConsumer<Integer, EnrichedCustomer> createConsumer() {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "streams-test-consumer");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, EnrichedCustomerDeserializer.class.getName());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    addSSL(props);
    KafkaConsumer<Integer, EnrichedCustomer> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("streams-test-customers-processed"));
    return consumer;
}
Example 14
Source File: SaslKafkaEndpoint.java From quarkus with Apache License 2.0
public static KafkaConsumer<Integer, String> createConsumer() { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19094"); props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer"); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); addJAAS(props); KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("test-sasl-consumer")); return consumer; }
Example 15
Source File: AvroConsumer.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
/**
 * Application entry point
 *
 * @param args topicName and groupName
 */
@SuppressWarnings("InfiniteLoopStatement")
public static void main(String[] args) {
    if (args.length < 2) {
        System.out.println("Please provide command line arguments: topicName groupName");
        System.exit(-1);
    }
    String topicName = args[0];
    String groupName = args[1];

    Properties properties = new Properties();
    try {
        InputStream kafkaConfigStream = ClassLoader.class.getResourceAsStream(kafkaConfig);
        properties.load(kafkaConfigStream);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupName);
        // Set autocommit to false so you can execute it again for the same set of messages
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
        properties.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
        properties.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
    } catch (IOException e) {
        logger.error(e.getMessage());
        throw new RuntimeException(e);
    }

    final KafkaConsumer<String, StockData> consumer = new KafkaConsumer<>(properties);
    consumer.subscribe(Collections.singletonList(topicName));
    while (true) {
        ConsumerRecords<String, StockData> records = consumer.poll(Duration.ofMillis(100));
        for (ConsumerRecord<String, StockData> record : records) {
            System.out.println(record.value());
        }
    }
}
Example 16
Source File: KafkaDestinationProcessorTest.java From incubator-samoa with Apache License 2.0
@Test
public void testSendingData() throws InterruptedException, ExecutionException, TimeoutException {
    final Logger logger = Logger.getLogger(KafkaDestinationProcessorTest.class.getName());
    Properties props = TestUtilsForKafka.getProducerProperties(BROKERHOST, BROKERPORT);
    props.setProperty("auto.offset.reset", "earliest");
    KafkaDestinationProcessor kdp = new KafkaDestinationProcessor(props, TOPIC, new OosTestSerializer());
    kdp.onCreate(1);

    final int[] i = {0};
    // prepare new thread for data receiving
    Thread th = new Thread(new Runnable() {
        @Override
        public void run() {
            KafkaConsumer<String, byte[]> consumer =
                    new KafkaConsumer<>(TestUtilsForKafka.getConsumerProperties(BROKERHOST, BROKERPORT));
            consumer.subscribe(Arrays.asList(TOPIC));
            while (i[0] < NUM_INSTANCES) {
                try {
                    ConsumerRecords<String, byte[]> cr = consumer.poll(CONSUMER_TIMEOUT);
                    Iterator<ConsumerRecord<String, byte[]>> it = cr.iterator();
                    while (it.hasNext()) {
                        ConsumerRecord<String, byte[]> record = it.next();
                        i[0]++;
                    }
                } catch (Exception ex) {
                    Logger.getLogger(KafkaDestinationProcessorTest.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
            consumer.close();
        }
    });
    th.start();

    int z = 0;
    Random r = new Random();
    InstancesHeader header = TestUtilsForKafka.generateHeader(10);
    for (z = 0; z < NUM_INSTANCES; z++) {
        InstanceContentEvent event = TestUtilsForKafka.getData(r, 10, header);
        kdp.process(event);
        // logger.log(Level.INFO, "{0} {1}", new Object[]{"Sent item with id: ", z});
    }

    // wait for all instances to be read
    Thread.sleep(2 * CONSUMER_TIMEOUT);
    assertEquals("Number of sent and received instances", z, i[0]);
}
Example 17
Source File: KafkaRangerAuthorizerGSSTest.java From ranger with Apache License 2.0
@Test
public void testAuthorizedRead() {
    // Create the Producer
    Properties producerProps = new Properties();
    producerProps.put("bootstrap.servers", "localhost:" + port);
    producerProps.put("acks", "all");
    producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
    producerProps.put("sasl.mechanism", "GSSAPI");
    producerProps.put("sasl.kerberos.service.name", "kafka");

    final Producer<String, String> producer = new KafkaProducer<>(producerProps);

    // Create the Consumer
    Properties consumerProps = new Properties();
    consumerProps.put("bootstrap.servers", "localhost:" + port);
    consumerProps.put("group.id", "consumerTestGroup");
    consumerProps.put("enable.auto.commit", "true");
    consumerProps.put("auto.offset.reset", "earliest");
    consumerProps.put("auto.commit.interval.ms", "1000");
    consumerProps.put("session.timeout.ms", "30000");
    consumerProps.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    consumerProps.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    consumerProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
    consumerProps.put("sasl.mechanism", "GSSAPI");
    consumerProps.put("sasl.kerberos.service.name", "kafka");

    final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProps);
    checkTopicExists(consumer);

    LOG.info("Subscribing to 'test'");
    consumer.subscribe(Arrays.asList("test"));

    sendMessage(producer);

    // Poll until we consume it
    ConsumerRecord<String, String> record = null;
    for (int i = 0; i < 1000; i++) {
        LOG.info("Waiting for messages, try {}", i);
        ConsumerRecords<String, String> records = consumer.poll(100);
        if (records.count() > 0) {
            LOG.info("Found {} messages", records.count());
            record = records.iterator().next();
            break;
        }
        sleep();
    }

    Assert.assertNotNull(record);
    Assert.assertEquals("somevalue", record.value());

    producer.close();
    consumer.close();
}
Example 18
Source File: KafkaOperationsTest.java From kafka-webview with MIT License
/**
 * Helper method to consume records from a topic.
 * @param topics topics to consume from.
 * @param consumerId Consumer's consumerId
 * @param consumerPrefix Any consumer Id prefix.
 */
private KafkaConsumer<String, String> consumeFromTopics(final Collection<String> topics, final String consumerId, final String consumerPrefix) {
    // Create cluster config.
    final ClusterConfig clusterConfig = ClusterConfig.newBuilder()
        .withBrokerHosts(sharedKafkaTestResource.getKafkaConnectString())
        .build();

    // Create Deserializer Config
    final DeserializerConfig deserializerConfig = DeserializerConfig.newBuilder()
        .withKeyDeserializerClass(KafkaConsumerFactoryTest.TestDeserializer.class)
        .withKeyDeserializerOption("key.option", "key.value")
        .withKeyDeserializerOption("key.option2", "key.value2")
        // Attempt to override a real setting, it should get filtered
        .withKeyDeserializerOption(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "MadeUpValue")
        .withValueDeserializerClass(KafkaConsumerFactoryTest.TestDeserializer.class)
        .withValueDeserializerOption("value.option", "value.value")
        .withValueDeserializerOption("value.option2", "value.value2")
        // Attempt to override a real setting, it should get filtered
        .withValueDeserializerOption(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "MadeUpValue")
        .build();

    // Create Topic Config
    final String topic = topics.iterator().next();
    final org.sourcelab.kafka.webview.ui.manager.kafka.config.TopicConfig topicConfig =
        new org.sourcelab.kafka.webview.ui.manager.kafka.config.TopicConfig(clusterConfig, deserializerConfig, topic);

    // Create FilterConfig
    final FilterConfig filterConfig = FilterConfig.withNoFilters();

    // Create ClientConfig, instructing to start from tail.
    final ClientConfig clientConfig = ClientConfig.newBuilder()
        .withConsumerId(consumerId)
        .withFilterConfig(filterConfig)
        .withAllPartitions()
        .withStartingPosition(StartingPosition.newHeadPosition())
        .withMaxResultsPerPartition(100)
        .withTopicConfig(topicConfig)
        .build();

    // Create consumer and consume the entries, storing state in Kafka.
    final KafkaConsumerFactory kafkaConsumerFactory = new KafkaConsumerFactory(new KafkaClientConfigUtil("not/used", consumerPrefix));
    final KafkaConsumer<String, String> consumer = kafkaConsumerFactory.createConsumerAndSubscribe(clientConfig);

    // subscribe to all topics.
    consumer.unsubscribe();
    consumer.subscribe(topics);

    // consume and commit offsets.
    // Wait for assignment to complete.
    for (int attempts = 0; attempts < 10; attempts++) {
        consumer.poll(Duration.ofMillis(1000L));
        final Set<TopicPartition> assignmentSet = consumer.assignment();
        if (!assignmentSet.isEmpty()) {
            break;
        }
    }

    // Commit offsets.
    consumer.commitSync();
    return consumer;
}
Example 19
Source File: BasicConsumerExample.java From kafka-examples with Apache License 2.0
public static void main(String[] args) {
    ArgumentParser parser = argParser();
    try {
        Namespace res = parser.parseArgs(args);

        /* parse args */
        String brokerList = res.getString("bootstrap.servers");
        String topic = res.getString("topic");
        String serializer = res.getString("serializer");

        Properties consumerConfig = new Properties();
        consumerConfig.put("group.id", "my-group");
        consumerConfig.put("bootstrap.servers", brokerList);
        consumerConfig.put("auto.offset.reset", "earliest");
        consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer");

        KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(consumerConfig);
        consumer.subscribe(Collections.singletonList(topic));

        while (true) {
            ConsumerRecords<byte[], byte[]> records = consumer.poll(1000);
            for (ConsumerRecord<byte[], byte[]> record : records) {
                System.out.printf("Received Message topic =%s, partition =%s, offset = %d, key = %s, value = %s\n",
                        record.topic(), record.partition(), record.offset(), deserialize(record.key()), deserialize(record.value()));
            }
            consumer.commitSync();
        }
    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            System.exit(0);
        } else {
            parser.handleError(e);
            System.exit(1);
        }
    }
}
Example 20
Source File: Consumer.java From mapr-streams-sample-programs with Apache License 2.0
public static void main(String[] args) throws IOException {
    // set up house-keeping
    ObjectMapper mapper = new ObjectMapper();
    Histogram stats = new Histogram(1, 10000000, 2);
    Histogram global = new Histogram(1, 10000000, 2);

    final String TOPIC_FAST_MESSAGES = "/sample-stream:fast-messages";
    final String TOPIC_SUMMARY_MARKERS = "/sample-stream:summary-markers";

    // and the consumer
    KafkaConsumer<String, String> consumer;
    try (InputStream props = Resources.getResource("consumer.props").openStream()) {
        Properties properties = new Properties();
        properties.load(props);
        if (properties.getProperty("group.id") == null) {
            properties.setProperty("group.id", "group-" + new Random().nextInt(100000));
        }
        consumer = new KafkaConsumer<>(properties);
    }
    consumer.subscribe(Arrays.asList(TOPIC_FAST_MESSAGES, TOPIC_SUMMARY_MARKERS));

    int timeouts = 0;
    //noinspection InfiniteLoopStatement
    while (true) {
        // read records with a short timeout. If we time out, we don't really care.
        ConsumerRecords<String, String> records = consumer.poll(200);
        if (records.count() == 0) {
            timeouts++;
        } else {
            System.out.printf("Got %d records after %d timeouts\n", records.count(), timeouts);
            timeouts = 0;
        }
        for (ConsumerRecord<String, String> record : records) {
            switch (record.topic()) {
                case TOPIC_FAST_MESSAGES:
                    // the send time is encoded inside the message
                    JsonNode msg = mapper.readTree(record.value());
                    switch (msg.get("type").asText()) {
                        case "test":
                            long latency = (long) ((System.nanoTime() * 1e-9 - msg.get("t").asDouble()) * 1000);
                            stats.recordValue(latency);
                            global.recordValue(latency);
                            break;
                        case "marker":
                            // whenever we get a marker message, we should dump out the stats
                            // note that the number of fast messages won't necessarily be quite constant
                            System.out.printf("%d messages received in period, latency(min, max, avg, 99%%) = %d, %d, %.1f, %d (ms)\n",
                                    stats.getTotalCount(), stats.getValueAtPercentile(0), stats.getValueAtPercentile(100),
                                    stats.getMean(), stats.getValueAtPercentile(99));
                            System.out.printf("%d messages received overall, latency(min, max, avg, 99%%) = %d, %d, %.1f, %d (ms)\n",
                                    global.getTotalCount(), global.getValueAtPercentile(0), global.getValueAtPercentile(100),
                                    global.getMean(), global.getValueAtPercentile(99));
                            stats.reset();
                            break;
                        default:
                            throw new IllegalArgumentException("Illegal message type: " + msg.get("type"));
                    }
                    break;
                case TOPIC_SUMMARY_MARKERS:
                    break;
                default:
                    throw new IllegalStateException("Shouldn't be possible to get message on topic " + record.topic());
            }
        }
    }
}