Java Code Examples for kafka.javaapi.consumer.ConsumerConnector#commitOffsets()
The following examples show how to use kafka.javaapi.consumer.ConsumerConnector#commitOffsets().
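Before the project examples, here is a minimal, self-contained sketch of the usual pattern: create a connector with auto-commit disabled, consume messages from a stream, and call commitOffsets() after processing. This sketch is illustrative and not taken from any of the projects below; the ZooKeeper address, topic, and group names are placeholders.

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;

public class CommitOffsetsSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("zookeeper.connect", "localhost:2181"); // placeholder address
        props.put("group.id", "demo-group");              // placeholder group id
        props.put("auto.commit.enable", "false");         // commit manually instead

        ConsumerConnector connector =
                Consumer.createJavaConsumerConnector(new ConsumerConfig(props));

        // One stream for one placeholder topic
        Map<String, List<KafkaStream<byte[], byte[]>>> streams =
                connector.createMessageStreams(Collections.singletonMap("demo-topic", 1));

        // Blocks indefinitely waiting for messages unless consumer.timeout.ms is set
        for (MessageAndMetadata<byte[], byte[]> record : streams.get("demo-topic").get(0)) {
            process(record.message());  // application-specific handling
            connector.commitOffsets();  // persist the consumed offsets
        }
    }

    private static void process(byte[] payload) {
        System.out.println(new String(payload));
    }
}

Note that commitOffsets() is connector-wide: it commits the current position of every partition the connector owns, not just the partition of the last message. Without auto.commit.enable=false, the connector would also commit on a timer in the background, making the explicit calls largely redundant.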
Example 1
Source File: Kafka08Fetcher.java From indexr with Apache License 2.0
@Override
public synchronized void close() throws IOException {
    logger.debug("Stop kafka fetcher. [topic: {}]", topics);

    ConsumerConnector connector = this.connector;
    this.connector = null;
    if (connector != null) {
        connector.commitOffsets();
        connector.shutdown();
    }

    IOUtil.closeQuietly(eventItr);

    // Some events could still exist in the buffer; try to save them.
    List<byte[]> remaining = new ArrayList<>();
    try {
        while (eventItr.hasNext()) {
            remaining.add(eventItr.next());
        }
    } catch (Exception e) {
        // Ignore
    }
    eventItr = null;

    if (!remaining.isEmpty()) {
        this.remaining = remaining;
    }
}
Example 2
Source File: KafkaDistributed.java From jlogstash-input-plugin with Apache License 2.0
@Override
public void release() {
    try {
        for (ConsumerConnector consumer : consumerConnMap.values()) {
            consumer.commitOffsets(true);
            consumer.shutdown();
        }
        for (ExecutorService executor : executorMap.values()) {
            executor.shutdownNow();
        }
        if (scheduleExecutor != null) {
            scheduleExecutor.shutdownNow();
        }
        this.zkDistributed.realse();
    } catch (Exception e) {
        logger.error(ExceptionUtil.getErrorMessage(e));
    }
}
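Note the one-argument call: alongside the no-argument form, later 0.8.x versions of the Java ConsumerConnector expose a commitOffsets(boolean) overload; as I read the Scala source, the flag (retryOnFailure) asks the connector to retry a failed commit, a reasonable precaution immediately before shutdown.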
Example 3
Source File: KafkaDistributed.java From jlogstash-input-plugin with Apache License 2.0
public void reconnConsumer(String topicName) {
    // Stop the connector associated with this topic
    ConsumerConnector consumerConn = consumerConnMap.get(topicName);
    consumerConn.commitOffsets(true);
    consumerConn.shutdown();
    consumerConnMap.remove(topicName);

    // Stop the stream-consuming threads for this topic
    ExecutorService es = executorMap.get(topicName);
    es.shutdownNow();
    executorMap.remove(topicName);

    // Recreate the consumer and resubscribe
    Properties prop = geneConsumerProp();
    ConsumerConnector newConsumerConn = kafka.consumer.Consumer
            .createJavaConsumerConnector(new ConsumerConfig(prop));
    consumerConnMap.put(topicName, newConsumerConn);

    addNewConsumer(topicName, topic.get(topicName));
}
Example 4
Source File: Kafka.java From jlogstash-input-plugin with Apache License 2.0
public void reconnConsumer(String topicName) {
    // Stop the connector associated with this topic
    ConsumerConnector consumerConn = consumerConnMap.get(topicName);
    consumerConn.commitOffsets(true);
    consumerConn.shutdown();
    consumerConnMap.remove(topicName);

    // Stop the stream-consuming threads for this topic
    ExecutorService es = executorMap.get(topicName);
    es.shutdownNow();
    executorMap.remove(topicName);

    // Recreate the consumer and resubscribe
    Properties prop = geneConsumerProp();
    ConsumerConnector newConsumerConn = kafka.consumer.Consumer
            .createJavaConsumerConnector(new ConsumerConfig(prop));
    consumerConnMap.put(topicName, newConsumerConn);

    addNewConsumer(topicName, topic.get(topicName));
}
Example 5
Source File: Kafka08Fetcher.java From indexr with Apache License 2.0
@Override
public void commit() {
    ConsumerConnector connector = this.connector;
    if (connector != null) {
        connector.commitOffsets();
    }
}
Example 6
Source File: DemoHighLevelConsumer.java From KafkaExample with Apache License 2.0
public static void main(String[] args) {
    if (args == null || args.length != 4) {
        // Fall back to demo defaults when no arguments are supplied
        args = new String[] { "zookeeper0:2181/kafka", "topic1", "group2", "consumer1" };
    }
    String zk = args[0];
    String topic = args[1];
    String groupid = args[2];
    String consumerid = args[3];

    Properties props = new Properties();
    props.put("zookeeper.connect", zk);
    props.put("group.id", groupid);
    props.put("client.id", "test");
    props.put("consumer.id", consumerid);
    props.put("auto.offset.reset", "largest");
    // Disable auto-commit: offsets are committed manually below
    props.put("auto.commit.enable", "false");
    props.put("auto.commit.interval.ms", "60000");

    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);

    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
            consumerConnector.createMessageStreams(topicCountMap);
    KafkaStream<byte[], byte[]> stream1 = consumerMap.get(topic).get(0);
    ConsumerIterator<byte[], byte[]> iterator = stream1.iterator();
    while (iterator.hasNext()) {
        MessageAndMetadata<byte[], byte[]> messageAndMetadata = iterator.next();
        String message = String.format(
                "Topic:%s, GroupID:%s, Consumer ID:%s, PartitionID:%s, Offset:%s, Message Key:%s, Message Payload: %s",
                messageAndMetadata.topic(), groupid, consumerid,
                messageAndMetadata.partition(), messageAndMetadata.offset(),
                new String(messageAndMetadata.key()), new String(messageAndMetadata.message()));
        System.out.println(message);
        consumerConnector.commitOffsets();
    }
}
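With auto.commit.enable set to false, the auto.commit.interval.ms setting has no effect, and the consumer commits explicitly after printing each message. Committing per message minimizes duplicates after a restart at the price of one offset write per message; committing every N messages is a common compromise.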
Example 7
Source File: Kafka.java From jlogstash-input-plugin with Apache License 2.0
@Override
public void release() {
    for (ConsumerConnector consumer : consumerConnMap.values()) {
        consumer.commitOffsets(true);
        consumer.shutdown();
    }
    for (ExecutorService executor : executorMap.values()) {
        executor.shutdownNow();
    }
    scheduleExecutor.shutdownNow();
}
Example 8
Source File: HighlevelKafkaConsumer.java From attic-apex-malhar with Apache License 2.0
@Override
protected void commitOffset() {
    // Commit the offsets at checkpoint so that the high-level consumer
    // doesn't receive too many duplicate messages on recovery
    if (standardConsumer != null && standardConsumer.values() != null) {
        for (ConsumerConnector consumerConnector : standardConsumer.values()) {
            consumerConnector.commitOffsets();
        }
    }
}
Example 9
Source File: MessageResource.java From dropwizard-kafka-http with Apache License 2.0
@GET
@Timed
public Response consume(
        @QueryParam("topic") String topic,
        @QueryParam("timeout") Integer timeout
) {
    if (Strings.isNullOrEmpty(topic))
        return Response.status(400)
                .entity(new String[]{"Undefined topic"})
                .build();

    Properties props = (Properties) consumerCfg.clone();
    if (timeout != null) props.put("consumer.timeout.ms", "" + timeout);

    ConsumerConfig config = new ConsumerConfig(props);
    ConsumerConnector connector = Consumer.createJavaConsumerConnector(config);

    Map<String, Integer> streamCounts = Collections.singletonMap(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> streams = connector.createMessageStreams(streamCounts);
    KafkaStream<byte[], byte[]> stream = streams.get(topic).get(0);

    List<Message> messages = new ArrayList<>();
    try {
        for (MessageAndMetadata<byte[], byte[]> messageAndMetadata : stream)
            messages.add(new Message(messageAndMetadata));
    } catch (ConsumerTimeoutException ignore) {
    } finally {
        connector.commitOffsets();
        connector.shutdown();
    }
    return Response.ok(messages).build();
}
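Here the commit sits in a finally block, so whether the read loop ends by ConsumerTimeoutException when consumer.timeout.ms expires or by an unexpected error, the offsets of all messages already added to the response are committed and the short-lived connector is always shut down.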