org.springframework.kafka.core.ConsumerFactory Java Examples
The following examples show how to use
org.springframework.kafka.core.ConsumerFactory.
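Before the individual examples, here is a minimal self-contained sketch of the pattern most of them follow: build a ConsumerFactory from explicit consumer properties and hand it to a listener container factory. The broker address, group id, and names such as DemoKafkaConsumerConfig, demoConsumerFactory, and demo-group are illustrative assumptions, not taken from any of the projects below.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@Configuration
public class DemoKafkaConsumerConfig {

    // Assumed local broker and group id; adjust for your environment.
    @Bean
    public ConsumerFactory<String, String> demoConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumption
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "demo-group");              // assumption
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    // The factory is typically handed to a listener container factory,
    // which @KafkaListener methods then use to create their consumers.
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> demoListenerContainerFactory(
            ConsumerFactory<String, String> demoConsumerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(demoConsumerFactory);
        return factory;
    }
}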
Example #1
Source File: KafkaConfig.java From Mastering-Distributed-Tracing with MIT License
private ConsumerFactory<String, Message> consumerFactory() throws Exception {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId());
    props.put(ConsumerConfig.GROUP_ID_CONFIG, app.name);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 1000);
    props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
    props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
    return new TracingConsumerFactory<>(
            new DefaultKafkaConsumerFactory<String, Message>(
                    props,
                    new StringDeserializer(),
                    new JsonDeserializer<>(Message.class)));
}
Example #2
Source File: SynapseKafkaAutoConfiguration.java From synapse with Apache License 2.0
@Bean
@ConditionalOnMissingBean(name = "kafkaMessageLogReceiverEndpointFactory")
public MessageLogReceiverEndpointFactory kafkaMessageLogReceiverEndpointFactory(final KafkaProperties kafkaProperties,
                                                                                final MessageInterceptorRegistry interceptorRegistry,
                                                                                final ApplicationEventPublisher eventPublisher,
                                                                                final ConsumerFactory<String, String> kafkaConsumerFactory) {
    LOG.info("Auto-configuring Kafka MessageLogReceiverEndpointFactory");
    final ExecutorService executorService = newCachedThreadPool(
            new ThreadFactoryBuilder().setNameFormat("kafka-message-log-%d").build()
    );
    final KafkaConsumer<String, String> kafkaConsumer = (KafkaConsumer<String, String>) kafkaConsumerFactory.createConsumer();
    return new KafkaMessageLogReceiverEndpointFactory(
            interceptorRegistry,
            kafkaConsumer,
            executorService,
            eventPublisher);
}
Example #3
Source File: KafkaConsumerConfig.java From SpringAll with MIT License
@Bean
public ConsumerFactory<String, Message> consumerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(
            ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
            bootstrapServers);
    props.put(
            ConsumerConfig.GROUP_ID_CONFIG,
            consumerGroupId);
    props.put(
            ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
            autoOffsetReset);
    // props.put(
    //         ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
    //         StringDeserializer.class);
    // props.put(
    //         ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
    //         StringDeserializer.class);
    return new DefaultKafkaConsumerFactory<>(
            props,
            new StringDeserializer(),
            new JsonDeserializer<>(Message.class));
}
Example #4
Source File: KafkaBinderAutoConfigurationPropertiesTest.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testKafkaHealthIndicatorProperties() {
    assertThat(this.kafkaBinderHealthIndicator).isNotNull();
    Field consumerFactoryField = ReflectionUtils.findField(
            KafkaBinderHealthIndicator.class, "consumerFactory",
            ConsumerFactory.class);
    ReflectionUtils.makeAccessible(consumerFactoryField);
    DefaultKafkaConsumerFactory consumerFactory = (DefaultKafkaConsumerFactory) ReflectionUtils
            .getField(consumerFactoryField, this.kafkaBinderHealthIndicator);
    Field configField = ReflectionUtils.findField(DefaultKafkaConsumerFactory.class,
            "configs", Map.class);
    ReflectionUtils.makeAccessible(configField);
    Map<String, Object> configs = (Map<String, Object>) ReflectionUtils
            .getField(configField, consumerFactory);
    assertThat(configs.containsKey("bootstrap.servers")).isTrue();
    List<String> bootstrapServers = new ArrayList<>();
    bootstrapServers.add("10.98.09.199:9092");
    bootstrapServers.add("10.98.09.196:9092");
    assertThat(((List<String>) configs.get("bootstrap.servers"))
            .containsAll(bootstrapServers)).isTrue();
}
Example #5
Source File: EventApisFactory.java From eventapis with Apache License 2.0
@Bean({"eventsKafkaListenerContainerFactory", "kafkaListenerContainerFactory"})
public ConcurrentKafkaListenerContainerFactory<String, PublishedEventWrapper> eventsKafkaListenerContainerFactory(
        EventMessageConverter eventMessageConverter, ConsumerFactory<String, PublishedEventWrapper> consumerFactory) {
    ConcurrentKafkaListenerContainerFactory<String, PublishedEventWrapper> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory);
    factory.setConcurrency(eventApisConfiguration.getEventBus().getConsumer().getEventConcurrency());
    factory.setMessageConverter(eventMessageConverter);
    factory.getContainerProperties().setPollTimeout(3000);
    ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
    scheduler.setPoolSize(eventApisConfiguration.getEventBus().getConsumer().getEventSchedulerPoolSize());
    scheduler.setBeanName("EventsFactory-Scheduler");
    scheduler.initialize();
    factory.getContainerProperties().setScheduler(scheduler);
    factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.RECORD);
    return factory;
}
Example #6
Source File: KafkaBinderHealthIndicatorConfiguration.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Bean
KafkaBinderHealthIndicator kafkaBinderHealthIndicator(
        KafkaMessageChannelBinder kafkaMessageChannelBinder,
        KafkaBinderConfigurationProperties configurationProperties) {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
            ByteArrayDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
            ByteArrayDeserializer.class);
    Map<String, Object> mergedConfig = configurationProperties
            .mergedConsumerConfiguration();
    if (!ObjectUtils.isEmpty(mergedConfig)) {
        props.putAll(mergedConfig);
    }
    if (!props.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                configurationProperties.getKafkaConnectionString());
    }
    ConsumerFactory<?, ?> consumerFactory = new DefaultKafkaConsumerFactory<>(props);
    KafkaBinderHealthIndicator indicator = new KafkaBinderHealthIndicator(
            kafkaMessageChannelBinder, consumerFactory);
    indicator.setTimeout(configurationProperties.getHealthTimeout());
    return indicator;
}
Example #7
Source File: KafkaConsumerConfig.java From springboot_cwenao with MIT License
public ConsumerFactory<String, String> consumerFactory() {
    Map<String, Object> properties = new HashMap<String, Object>();
    properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
    properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
    properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
    properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    properties.put(ConsumerConfig.GROUP_ID_CONFIG, group);
    properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
    return new DefaultKafkaConsumerFactory<String, String>(properties);
}
Example #8
Source File: KafkaConsumerConfig.java From stateful-functions with Apache License 2.0
@Bean
public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory(
        ConsumerFactory<String, String> consumerFactory) {
    ConcurrentKafkaListenerContainerFactory<String, String> factory =
            new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory);
    return factory;
}
Example #9
Source File: StubRunnerKafkaConfiguration.java From spring-cloud-contract with Apache License 2.0
private void registerContainers(ConfigurableListableBeanFactory beanFactory,
        List<Contract> matchingContracts, String flowName,
        StubRunnerKafkaRouter listener) {
    // listener's container
    ConsumerFactory consumerFactory = beanFactory.getBean(ConsumerFactory.class);
    for (Contract matchingContract : matchingContracts) {
        if (matchingContract.getInput() == null) {
            continue;
        }
        String destination = MapConverter.getStubSideValuesForNonBody(
                matchingContract.getInput().getMessageFrom()).toString();
        ContainerProperties containerProperties = new ContainerProperties(
                destination);
        KafkaMessageListenerContainer container = listenerContainer(consumerFactory,
                containerProperties, listener);
        String containerName = flowName + ".container";
        Object initializedContainer = beanFactory.initializeBean(container,
                containerName);
        beanFactory.registerSingleton(containerName, initializedContainer);
        if (log.isDebugEnabled()) {
            log.debug("Initialized kafka message container with name [" + containerName
                    + "] listening to destination [" + destination + "]");
        }
    }
}
Example #10
Source File: KafkaBinderConfiguration.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Bean
@ConditionalOnMissingBean(name = "binderClientFactoryCustomizer")
public ClientFactoryCustomizer binderClientFactoryCustomizer(MeterRegistry meterRegistry) {

    return new ClientFactoryCustomizer() {

        @Override
        public void configure(ProducerFactory<?, ?> pf) {
            if (pf instanceof DefaultKafkaProducerFactory) {
                ((DefaultKafkaProducerFactory<?, ?>) pf)
                        .addListener(new MicrometerProducerListener<>(meterRegistry));
            }
        }

        @Override
        public void configure(ConsumerFactory<?, ?> cf) {
            if (cf instanceof DefaultKafkaConsumerFactory) {
                ((DefaultKafkaConsumerFactory<?, ?>) cf)
                        .addListener(new MicrometerConsumerListener<>(meterRegistry));
            }
        }

    };
}
Example #11
Source File: KafkaBinderMetrics.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private synchronized ConsumerFactory<?, ?> createConsumerFactory() {
    if (this.defaultConsumerFactory == null) {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                ByteArrayDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                ByteArrayDeserializer.class);
        Map<String, Object> mergedConfig = this.binderConfigurationProperties
                .mergedConsumerConfiguration();
        if (!ObjectUtils.isEmpty(mergedConfig)) {
            props.putAll(mergedConfig);
        }
        if (!props.containsKey(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG)) {
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                    this.binderConfigurationProperties.getKafkaConnectionString());
        }
        this.defaultConsumerFactory = new DefaultKafkaConsumerFactory<>(props);
    }
    return this.defaultConsumerFactory;
}
Example #12
Source File: EventApisFactory.java From eventapis with Apache License 2.0
@Bean("operationsKafkaListenerContainerFactory")
public ConcurrentKafkaListenerContainerFactory<String, Operation> operationsKafkaListenerContainerFactory(
        ConsumerFactory<String, Operation> consumerFactory) {
    ConcurrentKafkaListenerContainerFactory<String, Operation> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory);
    RetryTemplate retryTemplate = new RetryTemplate();
    factory.setRetryTemplate(retryTemplate);
    factory.setConcurrency(eventApisConfiguration.getEventBus().getConsumer().getOperationSchedulerPoolSize());

    ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
    scheduler.setPoolSize(eventApisConfiguration.getEventBus().getConsumer().getOperationSchedulerPoolSize());
    scheduler.setBeanName("OperationsFactory-Scheduler");
    scheduler.initialize();
    factory.getContainerProperties().setScheduler(scheduler);

    ThreadPoolTaskScheduler consumerScheduler = new ThreadPoolTaskScheduler();
    consumerScheduler.setPoolSize(eventApisConfiguration.getEventBus().getConsumer().getOperationSchedulerPoolSize());
    consumerScheduler.setBeanName("OperationsFactory-ConsumerScheduler");
    consumerScheduler.initialize();

    factory.getContainerProperties().setPollTimeout(3000L);
    factory.getContainerProperties().setAckOnError(false);
    factory.getContainerProperties().setConsumerTaskExecutor(consumerScheduler);
    factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.RECORD);
    /*
     * Fix for Spring Kafka versions prior to 2.0, which do not have a ConsumerAwareErrorHandler.
     * When the listener hits an error, it retries the snapshot operation.
     * See https://github.com/kloiasoft/eventapis/issues/44
     */
    factory.getContainerProperties().setTransactionManager(new EmptyTransactionManager());
    // factory.getContainerProperties().setTransactionManager(platformTransactionManager);
    return factory;
}
Example #13
Source File: S1pKafkaApplication.java From grussell-spring-kafka with Apache License 2.0
@Bean
public KafkaMessageListenerContainer<String, String> container(
        ConsumerFactory<String, String> consumerFactory, ConfigProperties config) {
    ContainerProperties containerProperties = new ContainerProperties(config.getTopic());
    containerProperties.setMessageListener(listener());
    containerProperties.setAckMode(AckMode.MANUAL_IMMEDIATE);
    return new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
}
Example #14
Source File: S1pKafkaApplication.java From grussell-spring-kafka with Apache License 2.0
@Bean
public KafkaMessageListenerContainer<String, String> container(
        ConsumerFactory<String, String> consumerFactory, ConfigProperties config) {
    ContainerProperties containerProperties = new ContainerProperties(config.getTopic());
    containerProperties.setMessageListener(listener());
    return new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
}
Example #15
Source File: KafkaBinderTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private ConsumerFactory<byte[], byte[]> consumerFactory() {
    Map<String, Object> props = new HashMap<>();
    KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
            configurationProperties.getKafkaConnectionString());
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "TEST-CONSUMER-GROUP");
    Deserializer<byte[]> valueDecoder = new ByteArrayDeserializer();
    Deserializer<byte[]> keyDecoder = new ByteArrayDeserializer();
    return new DefaultKafkaConsumerFactory<>(props, keyDecoder, valueDecoder);
}
Example #16
Source File: TracingConsumerFactory.java From java-kafka-client with Apache License 2.0
public TracingConsumerFactory(ConsumerFactory<K, V> consumerFactory, Tracer tracer,
        Collection<SpanDecorator> spanDecorators,
        BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider) {
    this.tracer = tracer;
    this.consumerFactory = consumerFactory;
    this.spanDecorators = (spanDecorators == null)
            ? Collections.singletonList(STANDARD_TAGS)
            : spanDecorators;
    this.consumerSpanNameProvider = (consumerSpanNameProvider == null)
            ? ClientSpanNameProvider.CONSUMER_OPERATION_NAME
            : consumerSpanNameProvider;
}
Example #17
Source File: EnodeTestKafkaConfig.java From enode with MIT License
@Bean
public KafkaMessageListenerContainer<String, String> commandListenerContainer(
        KafkaCommandListener commandListener, ConsumerFactory<String, String> consumerFactory) {
    ContainerProperties properties = new ContainerProperties(commandTopic);
    properties.setGroupId(Constants.DEFAULT_CONSUMER_GROUP);
    properties.setMessageListener(commandListener);
    properties.setMissingTopicsFatal(false);
    return new KafkaMessageListenerContainer<>(consumerFactory, properties);
}
Example #18
Source File: TestConfiguration.java From java-kafka-client with Apache License 2.0
@Bean
public ConsumerFactory<Integer, String> consumerFactory() {
    final Map<String, Object> consumerProps = KafkaTestUtils
            .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka());
    consumerProps.put("auto.offset.reset", "earliest");
    return new TracingConsumerFactory<>(new DefaultKafkaConsumerFactory<>(consumerProps), tracer());
}
Example #19
Source File: KafkaServiceImpl.java From metron with Apache License 2.0
/**
 * @param zkUtils A utility class used to interact with ZooKeeper.
 * @param kafkaConsumerFactory A class used to create {@link KafkaConsumer} in order to interact with Kafka.
 * @param kafkaProducer A class used to produce messages to Kafka.
 * @param adminUtils A utility class used to do administration operations on Kafka.
 */
@Autowired
public KafkaServiceImpl(final ZkUtils zkUtils,
        final ConsumerFactory<String, String> kafkaConsumerFactory,
        final KafkaProducer<String, String> kafkaProducer,
        final AdminUtils$ adminUtils) {
    this.zkUtils = zkUtils;
    this.kafkaConsumerFactory = kafkaConsumerFactory;
    this.kafkaProducer = kafkaProducer;
    this.adminUtils = adminUtils;
}
Example #20
Source File: EventApisFactory.java From eventapis with Apache License 2.0
@Bean
public ConsumerFactory<String, PublishedEventWrapper> kafkaConsumerFactory() {
    KafkaProperties properties = eventApisConfiguration.getEventBus().clone();
    properties.getConsumer().setEnableAutoCommit(false);
    return new DefaultKafkaConsumerFactory<>(properties.buildConsumerProperties(),
            new StringDeserializer(), new JsonDeserializer<>(PublishedEventWrapper.class, objectMapper));
}
Example #21
Source File: EventApisFactory.java From eventapis with Apache License 2.0
@Bean
public ConsumerFactory<String, Operation> kafkaOperationsFactory() {
    KafkaProperties properties = eventApisConfiguration.getEventBus().clone();
    properties.getConsumer().setEnableAutoCommit(false);
    return new DefaultKafkaConsumerFactory<>(properties.buildConsumerProperties(),
            new StringDeserializer(), new JsonDeserializer<>(Operation.class, objectMapper));
}
Example #22
Source File: KafkaConfiguration.java From spring-examples with GNU General Public License v3.0
@Bean
public ConsumerFactory<String, KMessage> consumerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaAddress);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, KMessage.class);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    return new DefaultKafkaConsumerFactory<>(props);
}
Example #23
Source File: KafkaAutoConfigurationTest.java From java-spring-cloud with Apache License 2.0
@Test
public void loadTracingConsumerFactory() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    context.register(TracerConfig.class, FakeKafkaConfig.class, KafkaAutoConfiguration.class);
    context.refresh();

    ConsumerFactory tracingConsumerFactory = context.getBean(ConsumerFactory.class);
    assertTrue(tracingConsumerFactory instanceof TracingConsumerFactory);
}
Example #24
Source File: KafkaAutoConfigurationTest.java From java-spring-cloud with Apache License 2.0
@Test
public void loadNormalConsumerFactoryWhenDisabled() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    context.register(TracerConfig.class, FakeKafkaConfig.class, KafkaAutoConfiguration.class);
    TestPropertyValues.of("opentracing.spring.cloud.kafka.enabled:false").applyTo(context);
    context.refresh();

    ConsumerFactory consumerFactory = context.getBean(ConsumerFactory.class);
    assertFalse(consumerFactory instanceof TracingConsumerFactory);
}
Example #25
Source File: KafkaAutoConfigurationTest.java From java-spring-cloud with Apache License 2.0
@Test
public void loadNormalConsumerFactoryWhenTracerNotPresent() {
    AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
    context.register(FakeKafkaConfig.class, KafkaAutoConfiguration.class);
    context.refresh();

    ConsumerFactory consumerFactory = context.getBean(ConsumerFactory.class);
    assertFalse(consumerFactory instanceof TracingConsumerFactory);
}
Example #26
Source File: LocKafkaAutoConfiguration.java From loc-framework with MIT License
@Bean(name = "kafkaListenerContainerFactory")
public ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
        ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
        ConsumerFactory<Object, Object> kafkaConsumerFactory) {
    ConcurrentKafkaListenerContainerFactory<Object, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
    ContainerProperties containerProperties = factory.getContainerProperties();
    factory.setRecordFilterStrategy(locMessageFilterStrategy());
    factory.setErrorHandler(new LocKafkaConsumerErrorHandler());
    factory.setMessageConverter(recordMessageConverter());
    configurer.configure(factory, kafkaConsumerFactory);
    return factory;
}
Example #27
Source File: KafkaBinderConfiguration.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Bean
@ConditionalOnMissingBean(name = "binderClientFactoryCustomizer")
public ClientFactoryCustomizer binderClientFactoryCustomizer(ConfigurableApplicationContext context) {

    return new ClientFactoryCustomizer() {

        MeterRegistry meterRegistry = context.getBean("outerContext", ApplicationContext.class)
                .getBean(MeterRegistry.class);

        @Override
        public void configure(ProducerFactory<?, ?> pf) {
            if (pf instanceof DefaultKafkaProducerFactory) {
                ((DefaultKafkaProducerFactory<?, ?>) pf)
                        .addListener(new MicrometerProducerListener<>(this.meterRegistry));
            }
        }

        @Override
        public void configure(ConsumerFactory<?, ?> cf) {
            if (cf instanceof DefaultKafkaConsumerFactory) {
                ((DefaultKafkaConsumerFactory<?, ?>) cf)
                        .addListener(new MicrometerConsumerListener<>(this.meterRegistry));
            }
        }

    };
}
Example #28
Source File: SynapseKafkaAutoConfiguration.java From synapse with Apache License 2.0
@Bean
@ConditionalOnMissingBean(name = "kafkaConsumerFactory")
public ConsumerFactory<String, String> kafkaConsumerFactory(KafkaProperties kafkaProperties) {
    return new DefaultKafkaConsumerFactory<>(
            kafkaProperties.buildConsumerProperties(),
            new StringDeserializer(),
            new StringDeserializer());
}
Example #29
Source File: KafkaMessageChannelBinder.java From spring-cloud-stream-binder-kafka with Apache License 2.0
public Collection<PartitionInfo> processTopic(final String group,
        final ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties,
        final ConsumerFactory<?, ?> consumerFactory, int partitionCount,
        boolean usingPatterns, boolean groupManagement, String topic) {
    Collection<PartitionInfo> listenedPartitions;
    Collection<PartitionInfo> allPartitions = usingPatterns ? Collections.emptyList()
            : getPartitionInfo(topic, extendedConsumerProperties, consumerFactory,
                    partitionCount);

    if (groupManagement || extendedConsumerProperties.getInstanceCount() == 1) {
        listenedPartitions = allPartitions;
    }
    else {
        listenedPartitions = new ArrayList<>();
        for (PartitionInfo partition : allPartitions) {
            // divide partitions across modules
            if ((partition.partition() % extendedConsumerProperties
                    .getInstanceCount()) == extendedConsumerProperties
                            .getInstanceIndex()) {
                listenedPartitions.add(partition);
            }
        }
    }
    this.topicsInUse.put(topic,
            new TopicInformation(group, listenedPartitions, usingPatterns));
    return listenedPartitions;
}
Example #30
Source File: KafkaMessageChannelBinder.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private Collection<PartitionInfo> getPartitionInfo(String topic,
        final ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties,
        final ConsumerFactory<?, ?> consumerFactory, int partitionCount) {
    return provisioningProvider.getPartitionsForTopic(partitionCount,
            extendedConsumerProperties.getExtension().isAutoRebalanceEnabled(),
            () -> {
                try (Consumer<?, ?> consumer = consumerFactory.createConsumer()) {
                    return consumer.partitionsFor(topic);
                }
            }, topic);
}