org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor Java Examples
The following examples show how to use org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor. Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
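A KafkaTopicsDescriptor pairs a fixed topic list with a topic pattern, exactly one of which is non-null, as the constructor calls in the examples below show. The following is a minimal orientation sketch; the accessor names isFixedTopics, isTopicPattern, and isMatchingTopic match the Flink versions these examples come from, but verify them against the version you use:

import java.util.Arrays;
import java.util.regex.Pattern;

import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor;

public class KafkaTopicsDescriptorSketch {

    public static void main(String[] args) {
        // Fixed-topic subscription: the pattern argument must be null.
        KafkaTopicsDescriptor fixed =
                new KafkaTopicsDescriptor(Arrays.asList("orders", "payments"), null);

        // Pattern subscription: the topic list must be null instead.
        KafkaTopicsDescriptor byPattern =
                new KafkaTopicsDescriptor(null, Pattern.compile("orders-.*"));

        System.out.println(fixed.isFixedTopics());                     // true
        System.out.println(byPattern.isTopicPattern());                // true
        System.out.println(byPattern.isMatchingTopic("orders-2020"));  // true
    }
}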
Example #1
Source File: FlinkKafkaConsumerBase.java From flink with Apache License 2.0

/**
 * Base constructor.
 *
 * @param topics fixed list of topics to subscribe to (null, if using topic pattern)
 * @param topicPattern the topic pattern to subscribe to (null, if using fixed topics)
 * @param deserializer The deserializer to turn raw byte messages into Java/Scala objects.
 * @param discoveryIntervalMillis the topic / partition discovery interval, in
 *                                milliseconds (0 if discovery is disabled).
 */
public FlinkKafkaConsumerBase(
        List<String> topics,
        Pattern topicPattern,
        KafkaDeserializationSchema<T> deserializer,
        long discoveryIntervalMillis,
        boolean useMetrics) {
    this.topicsDescriptor = new KafkaTopicsDescriptor(topics, topicPattern);
    this.deserializer = checkNotNull(deserializer, "valueDeserializer");

    checkArgument(
        discoveryIntervalMillis == PARTITION_DISCOVERY_DISABLED || discoveryIntervalMillis >= 0,
        "Cannot define a negative value for the topic / partition discovery interval.");
    this.discoveryIntervalMillis = discoveryIntervalMillis;

    this.useMetrics = useMetrics;
}
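For orientation, here is a hedged sketch of how user code typically reaches this constructor through the public FlinkKafkaConsumer API with a topic pattern. The property key flink.partition-discovery.interval-millis is the switch for enabling discovery in these Flink versions, but check it against the version you run; the topic pattern, broker address, and group id are invented:

import java.util.Properties;
import java.util.regex.Pattern;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

public class PatternSubscriptionSketch {

    public static FlinkKafkaConsumer<String> buildConsumer() {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "localhost:9092");
        props.setProperty("group.id", "example-group");
        // A non-negative interval enables the discovery loop; the value ends up
        // as discoveryIntervalMillis in the base constructor above.
        props.setProperty("flink.partition-discovery.interval-millis", "10000");

        // Pattern subscription: internally this becomes a KafkaTopicsDescriptor
        // with a null topic list and a non-null pattern.
        return new FlinkKafkaConsumer<>(
                Pattern.compile("orders-.*"),
                new SimpleStringSchema(),
                props);
    }
}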
Example #2
Source File: FlinkKafkaConsumerBaseMigrationTest.java From flink with Apache License 2.0

@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks) {
    AbstractPartitionDiscoverer mockPartitionDiscoverer = mock(AbstractPartitionDiscoverer.class);

    // Mockito stub: every discovery call reports the test's fixed partition list.
    // discoverPartitions() declares checked exceptions, so the stubbing itself
    // has to sit inside a try/catch even though the mock never throws.
    try {
        when(mockPartitionDiscoverer.discoverPartitions()).thenReturn(partitions);
    } catch (Exception e) {
        // ignore
    }
    when(mockPartitionDiscoverer.setAndCheckDiscoveredPartition(any(KafkaTopicPartition.class)))
        .thenReturn(true);

    return mockPartitionDiscoverer;
}
Example #3
Source File: FlinkKafkaConsumerBase.java From Flink-CEPplus with Apache License 2.0

/**
 * Base constructor.
 *
 * @param topics fixed list of topics to subscribe to (null, if using topic pattern)
 * @param topicPattern the topic pattern to subscribe to (null, if using fixed topics)
 * @param deserializer The deserializer to turn raw byte messages into Java/Scala objects.
 * @param discoveryIntervalMillis the topic / partition discovery interval, in
 *                                milliseconds (0 if discovery is disabled).
 */
public FlinkKafkaConsumerBase(
        List<String> topics,
        Pattern topicPattern,
        KafkaDeserializationSchema<T> deserializer,
        long discoveryIntervalMillis,
        boolean useMetrics) {
    this.topicsDescriptor = new KafkaTopicsDescriptor(topics, topicPattern);
    this.deserializer = checkNotNull(deserializer, "valueDeserializer");

    checkArgument(
        discoveryIntervalMillis == PARTITION_DISCOVERY_DISABLED || discoveryIntervalMillis >= 0,
        "Cannot define a negative value for the topic / partition discovery interval.");
    this.discoveryIntervalMillis = discoveryIntervalMillis;

    this.useMetrics = useMetrics;
}
Example #4
Source File: FlinkKafkaConsumerBaseMigrationTest.java From Flink-CEPplus with Apache License 2.0

@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks) {
    AbstractPartitionDiscoverer mockPartitionDiscoverer = mock(AbstractPartitionDiscoverer.class);

    try {
        when(mockPartitionDiscoverer.discoverPartitions()).thenReturn(partitions);
    } catch (Exception e) {
        // ignore
    }
    when(mockPartitionDiscoverer.setAndCheckDiscoveredPartition(any(KafkaTopicPartition.class)))
        .thenReturn(true);

    return mockPartitionDiscoverer;
}
Example #5
Source File: Kafka010PartitionDiscoverer.java From flink with Apache License 2.0

public Kafka010PartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks,
        Properties kafkaProperties) {
    super(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks);
    this.kafkaProperties = checkNotNull(kafkaProperties);
}
Example #6
Source File: Kafka010PartitionDiscoverer.java From flink with Apache License 2.0

public Kafka010PartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks,
        Properties kafkaProperties) {
    super(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, kafkaProperties);
}
Example #7
Source File: Kafka09PartitionDiscoverer.java From Flink-CEPplus with Apache License 2.0

public Kafka09PartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks,
        Properties kafkaProperties) {
    super(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks);
    this.kafkaProperties = checkNotNull(kafkaProperties);
}
Example #8
Source File: FlinkKafkaConsumerBaseTest.java From flink with Apache License 2.0

private void checkFilterRestoredPartitionsWithDisovered(
        List<String> restoredKafkaTopics,
        List<String> initKafkaTopics,
        List<String> expectedSubscribedPartitions,
        Boolean disableFiltering) throws Exception {
    // Discoverer that always reports one partition per initially subscribed topic.
    final AbstractPartitionDiscoverer discoverer = new TestPartitionDiscoverer(
        new KafkaTopicsDescriptor(initKafkaTopics, null),
        0,
        1,
        TestPartitionDiscoverer.createMockGetAllTopicsSequenceFromFixedReturn(initKafkaTopics),
        TestPartitionDiscoverer.createMockGetAllPartitionsFromTopicsSequenceFromFixedReturn(
            initKafkaTopics.stream()
                .map(topic -> new KafkaTopicPartition(topic, 0))
                .collect(Collectors.toList())));
    final FlinkKafkaConsumerBase<String> consumer = new DummyFlinkKafkaConsumer<>(initKafkaTopics, discoverer);
    if (disableFiltering) {
        consumer.disableFilterRestoredPartitionsWithSubscribedTopics();
    }

    // Simulate restored checkpoint state: one partition with a dummy offset per restored topic.
    final TestingListState<Tuple2<KafkaTopicPartition, Long>> listState = new TestingListState<>();

    for (int i = 0; i < restoredKafkaTopics.size(); i++) {
        listState.add(new Tuple2<>(new KafkaTopicPartition(restoredKafkaTopics.get(i), 0), 12345L));
    }

    setupConsumer(consumer, true, listState, true, 0, 1);

    // After restore, the consumer should be subscribed to exactly the expected topics.
    Map<KafkaTopicPartition, Long> subscribedPartitionsToStartOffsets =
        consumer.getSubscribedPartitionsToStartOffsets();

    assertEquals(new HashSet<>(expectedSubscribedPartitions),
        subscribedPartitionsToStartOffsets
            .keySet()
            .stream()
            .map(partition -> partition.getTopic())
            .collect(Collectors.toSet()));
}
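For illustration, a hypothetical call to this helper (topic names invented): state was checkpointed for two topics, the consumer restarts subscribed to only one of them, and with filtering left enabled the stale partition should be dropped:

// Restored state covers kafka_topic_1 and kafka_topic_2, but the consumer is
// restarted with only kafka_topic_1; with filtering enabled, only kafka_topic_1 survives.
checkFilterRestoredPartitionsWithDisovered(
        Arrays.asList("kafka_topic_1", "kafka_topic_2"),  // restoredKafkaTopics
        Arrays.asList("kafka_topic_1"),                   // initKafkaTopics
        Arrays.asList("kafka_topic_1"),                   // expectedSubscribedPartitions
        false);                                           // disableFiltering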
Example #9
Source File: FlinkKafkaConsumerBaseTest.java From flink with Apache License 2.0

public FailingPartitionDiscoverer(RuntimeException failureCause) {
    super(
        new KafkaTopicsDescriptor(Arrays.asList("foo"), null),
        0,
        1);
    this.failureCause = failureCause;
}
Example #10
Source File: FlinkKafkaConsumerBaseTest.java From flink with Apache License 2.0

@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks) {
    return this.testPartitionDiscoverer;
}
Example #11
Source File: TestPartitionDiscoverer.java From flink with Apache License 2.0

public TestPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks,
        List<List<String>> mockGetAllTopicsReturnSequence,
        List<List<KafkaTopicPartition>> mockGetAllPartitionsForTopicsReturnSequence) {
    super(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks);
    this.topicsDescriptor = topicsDescriptor;
    this.mockGetAllTopicsReturnSequence = mockGetAllTopicsReturnSequence;
    this.mockGetAllPartitionsForTopicsReturnSequence = mockGetAllPartitionsForTopicsReturnSequence;
}
Example #12
Source File: FlinkKafkaConsumer.java From flink with Apache License 2.0

@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks) {
    return new KafkaPartitionDiscoverer(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, properties);
}
Example #13
Source File: KafkaPartitionDiscoverer.java From flink with Apache License 2.0

public KafkaPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks,
        Properties kafkaProperties) {
    super(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks);
    this.kafkaProperties = checkNotNull(kafkaProperties);
}
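A hedged sketch of driving a discoverer directly: open(), discoverPartitions(), and close() are the AbstractPartitionDiscoverer entry points the consumer's discovery loop uses in these Flink versions (the broker address and topic name are invented):

import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.flink.streaming.connectors.kafka.internals.KafkaPartitionDiscoverer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor;

public class DiscovererDriverSketch {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "localhost:9092");

        KafkaPartitionDiscoverer discoverer = new KafkaPartitionDiscoverer(
                new KafkaTopicsDescriptor(Arrays.asList("orders"), null),
                0,   // indexOfThisSubtask
                1,   // numParallelSubtasks
                props);

        discoverer.open();  // initializes the underlying Kafka connection
        try {
            // Returns the partitions newly discovered for this subtask since the last call.
            List<KafkaTopicPartition> partitions = discoverer.discoverPartitions();
            for (KafkaTopicPartition p : partitions) {
                System.out.println(p.getTopic() + "-" + p.getPartition());
            }
        } finally {
            discoverer.close();
        }
    }
}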
Example #14
Source File: FlinkKafkaConsumer010.java From flink with Apache License 2.0

@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks) {
    return new Kafka010PartitionDiscoverer(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, properties);
}
Example #15
Source File: FlinkKafkaConsumerBaseTest.java From Flink-CEPplus with Apache License 2.0

public FailingPartitionDiscoverer(RuntimeException failureCause) {
    super(
        new KafkaTopicsDescriptor(Arrays.asList("foo"), null),
        0,
        1);
    this.failureCause = failureCause;
}
Example #16
Source File: FlinkKafkaConsumer09.java From Flink-CEPplus with Apache License 2.0

@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks) {
    return new Kafka09PartitionDiscoverer(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, properties);
}
Example #17
Source File: FlinkKafkaConsumer08.java From Flink-CEPplus with Apache License 2.0

@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks) {
    return new Kafka08PartitionDiscoverer(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, kafkaProperties);
}
Example #18
Source File: FlinkKafkaConsumer.java From Flink-CEPplus with Apache License 2.0

@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks) {
    return new KafkaPartitionDiscoverer(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, properties);
}
Example #19
Source File: KafkaPartitionDiscoverer.java From Flink-CEPplus with Apache License 2.0

public KafkaPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks,
        Properties kafkaProperties) {
    super(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks);
    this.kafkaProperties = checkNotNull(kafkaProperties);
}
Example #20
Source File: Kafka010PartitionDiscoverer.java From Flink-CEPplus with Apache License 2.0

public Kafka010PartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks,
        Properties kafkaProperties) {
    super(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, kafkaProperties);
}
Example #21
Source File: FlinkKafkaConsumer010.java From Flink-CEPplus with Apache License 2.0

@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
        KafkaTopicsDescriptor topicsDescriptor,
        int indexOfThisSubtask,
        int numParallelSubtasks) {
    return new Kafka010PartitionDiscoverer(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, properties);
}
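Every createPartitionDiscoverer override above returns a version-specific subclass of AbstractPartitionDiscoverer. As a rough sketch of the contract such a subclass fulfills, here is a minimal implementation backed by an invented in-memory cluster view instead of a real Kafka client; the hook names match the Flink versions above, but treat them as assumptions for other versions:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.flink.streaming.connectors.kafka.internals.AbstractPartitionDiscoverer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor;

public class InMemoryPartitionDiscoverer extends AbstractPartitionDiscoverer {

    public InMemoryPartitionDiscoverer(
            KafkaTopicsDescriptor topicsDescriptor,
            int indexOfThisSubtask,
            int numParallelSubtasks) {
        super(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks);
    }

    @Override
    protected void initializeConnections() {
        // A real discoverer creates its Kafka client here.
    }

    @Override
    protected List<String> getAllTopics() {
        // Fixed cluster view, for illustration only.
        return Arrays.asList("orders", "payments");
    }

    @Override
    protected List<KafkaTopicPartition> getAllPartitionsForTopics(List<String> topics) {
        // One partition per topic, for illustration only.
        List<KafkaTopicPartition> partitions = new ArrayList<>();
        for (String topic : topics) {
            partitions.add(new KafkaTopicPartition(topic, 0));
        }
        return partitions;
    }

    @Override
    protected void wakeupConnections() {
        // No blocking calls to interrupt in this sketch.
    }

    @Override
    protected void closeConnections() {
        // Nothing to release.
    }
}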
Example #22
Source File: FlinkKafkaConsumerBaseTest.java From Flink-CEPplus with Apache License 2.0

private void checkFilterRestoredPartitionsWithDisovered(
        List<String> restoredKafkaTopics,
        List<String> initKafkaTopics,
        List<String> expectedSubscribedPartitions,
        Boolean disableFiltering) throws Exception {
    final AbstractPartitionDiscoverer discoverer = new TestPartitionDiscoverer(
        new KafkaTopicsDescriptor(initKafkaTopics, null),
        0,
        1,
        TestPartitionDiscoverer.createMockGetAllTopicsSequenceFromFixedReturn(initKafkaTopics),
        TestPartitionDiscoverer.createMockGetAllPartitionsFromTopicsSequenceFromFixedReturn(
            initKafkaTopics.stream()
                .map(topic -> new KafkaTopicPartition(topic, 0))
                .collect(Collectors.toList())));
    final FlinkKafkaConsumerBase<String> consumer = new DummyFlinkKafkaConsumer<>(initKafkaTopics, discoverer);
    if (disableFiltering) {
        consumer.disableFilterRestoredPartitionsWithSubscribedTopics();
    }

    final TestingListState<Tuple2<KafkaTopicPartition, Long>> listState = new TestingListState<>();

    for (int i = 0; i < restoredKafkaTopics.size(); i++) {
        listState.add(new Tuple2<>(new KafkaTopicPartition(restoredKafkaTopics.get(i), 0), 12345L));
    }

    setupConsumer(consumer, true, listState, true, 0, 1);

    Map<KafkaTopicPartition, Long> subscribedPartitionsToStartOffsets =
        consumer.getSubscribedPartitionsToStartOffsets();

    assertEquals(new HashSet<>(expectedSubscribedPartitions),
        subscribedPartitionsToStartOffsets
            .keySet()
            .stream()
            .map(partition -> partition.getTopic())
            .collect(Collectors.toSet()));
}