Java Code Examples for org.apache.kafka.clients.consumer.KafkaConsumer#offsetsForTimes()
The following examples show how to use org.apache.kafka.clients.consumer.KafkaConsumer#offsetsForTimes(). Each example is taken from an open-source project; the original source file and license are noted above the code.
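At its core, offsetsForTimes() takes a map from TopicPartition to a target timestamp in epoch milliseconds and returns, for each partition, the earliest offset whose record timestamp is greater than or equal to that timestamp (or null when no such record exists). The minimal sketch below shows that basic lookup-and-seek pattern; the class name, broker address, group id, and topic name ("events") are placeholders for illustration and are not taken from any of the projects listed below.

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

public class OffsetsForTimesSketch {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");  // placeholder broker
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "offsets-for-times-demo");   // placeholder group id
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Target timestamp: one hour ago, applied to every partition of the topic.
            long timestamp = System.currentTimeMillis() - TimeUnit.HOURS.toMillis(1);

            Map<TopicPartition, Long> query = new HashMap<>();
            for (PartitionInfo p : consumer.partitionsFor("events")) {          // placeholder topic
                query.put(new TopicPartition(p.topic(), p.partition()), timestamp);
            }

            Map<TopicPartition, OffsetAndTimestamp> offsets = consumer.offsetsForTimes(query);

            // Assign the partitions and seek each one to the returned offset.
            // A null value means the partition has no record at or after the timestamp.
            consumer.assign(query.keySet());
            offsets.forEach((tp, oat) -> {
                if (oat != null) {
                    consumer.seek(tp, oat.offset());
                }
            });
        }
    }
}

The examples below apply the same pattern in different contexts: computing start offsets, seeding Flink consumers, rewinding a consumer, and committing offsets derived from a timestamp.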
Example 1
Source File: ReplicaStatsUtil.java From doctorkafka with Apache License 2.0
public static Map<TopicPartition, Long> getProcessingStartOffsets(KafkaConsumer<?, ?> kafkaConsumer,
                                                                  String brokerStatsTopic,
                                                                  long startTimestampInMillis) {
    List<TopicPartition> tpList = kafkaConsumer.partitionsFor(brokerStatsTopic).stream()
        .map(p -> new TopicPartition(p.topic(), p.partition()))
        .collect(Collectors.toList());

    // Query every partition of the topic with the same start timestamp.
    Map<TopicPartition, Long> partitionMap = new HashMap<>();
    for (TopicPartition topicPartition : tpList) {
        partitionMap.put(topicPartition, startTimestampInMillis);
    }

    Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes = kafkaConsumer.offsetsForTimes(partitionMap);

    // Replace each timestamp with the earliest offset whose record timestamp is >= that timestamp.
    for (Map.Entry<TopicPartition, OffsetAndTimestamp> entry : offsetsForTimes.entrySet()) {
        partitionMap.put(entry.getKey(), entry.getValue().offset());
    }
    return partitionMap;
}
Example 2
Source File: KafkaConfigUtil.java From flink-learning with Apache License 2.0
private static Map<KafkaTopicPartition, Long> buildOffsetByTime(Properties props, ParameterTool parameterTool, Long time) {
    props.setProperty("group.id", "query_time_" + time);

    KafkaConsumer consumer = new KafkaConsumer(props);
    List<PartitionInfo> partitionsFor = consumer.partitionsFor(parameterTool.getRequired(PropertiesConstants.METRICS_TOPIC));

    // Ask for the offset at the given timestamp on every partition of the topic.
    Map<TopicPartition, Long> partitionInfoLongMap = new HashMap<>();
    for (PartitionInfo partitionInfo : partitionsFor) {
        partitionInfoLongMap.put(new TopicPartition(partitionInfo.topic(), partitionInfo.partition()), time);
    }

    Map<TopicPartition, OffsetAndTimestamp> offsetResult = consumer.offsetsForTimes(partitionInfoLongMap);

    // Convert the result to Flink's KafkaTopicPartition representation.
    Map<KafkaTopicPartition, Long> partitionOffset = new HashMap<>();
    offsetResult.forEach((key, value) ->
        partitionOffset.put(new KafkaTopicPartition(key.topic(), key.partition()), value.offset()));

    consumer.close();
    return partitionOffset;
}
Example 3
Source File: KafkaConsumerFromTime.java From post-kafka-rewind-consumer-offset with MIT License
public static void main(String[] args) {
    KafkaConsumer<String, String> consumer = createConsumer();
    consumer.subscribe(Arrays.asList(TOPIC));

    boolean flag = true;
    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(100);

        if (flag) {
            // Partitions are assigned only after the first poll(), so the rewind happens here.
            Set<TopicPartition> assignments = consumer.assignment();
            Map<TopicPartition, Long> query = new HashMap<>();
            for (TopicPartition topicPartition : assignments) {
                query.put(topicPartition, Instant.now().minus(10, MINUTES).toEpochMilli());
            }

            Map<TopicPartition, OffsetAndTimestamp> result = consumer.offsetsForTimes(query);

            // Seek each partition to the resolved offset, falling back to 0
            // when no offset exists at or after the timestamp.
            result.entrySet()
                .stream()
                .forEach(entry -> consumer.seek(
                    entry.getKey(),
                    Optional.ofNullable(entry.getValue())
                        .map(OffsetAndTimestamp::offset)
                        .orElse(0L)));
            flag = false;
        }

        for (ConsumerRecord<String, String> record : records) {
            System.out.printf("offset = %d, key = %s, value = %s%n",
                record.offset(), record.key(), record.value());
        }
    }
}
Example 4
Source File: Kafka0_10ConsumerLoader.java From datacollector with Apache License 2.0
private void setOffsetsByTimestamp(String topic, KafkaConsumer kafkaAuxiliaryConsumer) {
    // Build a map of topic partitions and the timestamp to use when searching the offset for each
    // partition (the same timestamp is used for all partitions).
    List<PartitionInfo> partitionInfoList = kafkaAuxiliaryConsumer.partitionsFor(topic);

    if (partitionInfoList != null) {
        Map<TopicPartition, Long> partitionsAndTimestampMap = partitionInfoList.stream()
            .map(e -> new TopicPartition(topic, e.partition()))
            .collect(Collectors.toMap(e -> e, e -> timestampToSearchOffsets));

        // Get offsets by timestamp using the previously built map and commit them to the
        // corresponding partitions.
        if (!partitionsAndTimestampMap.isEmpty()) {
            Map<TopicPartition, OffsetAndTimestamp> partitionsOffsets =
                kafkaAuxiliaryConsumer.offsetsForTimes(partitionsAndTimestampMap);

            if (partitionsOffsets != null && !partitionsOffsets.isEmpty()) {
                Map<TopicPartition, OffsetAndMetadata> offsetsToCommit = partitionsOffsets.entrySet().stream()
                    .filter(entry -> entry.getKey() != null && entry.getValue() != null)
                    .collect(Collectors.toMap(
                        entry -> entry.getKey(),
                        entry -> new OffsetAndMetadata(entry.getValue().offset())));

                if (!offsetsToCommit.isEmpty()) {
                    kafkaAuxiliaryConsumer.commitSync(offsetsToCommit);
                }
            }
        }
    }
}