org.apache.kafka.streams.TopologyTestDriver Java Examples
The following examples show how to use org.apache.kafka.streams.TopologyTestDriver. Each example is drawn from an open-source project; the attribution line above each snippet names the source file, project, and license.
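Before the examples, here is a minimal, self-contained sketch of the driver's typical lifecycle: build a topology, create the driver with test properties (no running broker is needed; the bootstrap address is a dummy), pipe records in, read records out, and close. This sketch assumes Kafka Streams 2.4+, where the TestInputTopic/TestOutputTopic API also used in Example #11 is available; the topic names and the uppercase mapping are illustrative only.

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.TopologyTestDriver;

public class UppercaseTopologySketch {

    public static void main(String[] args) {
        // Build a trivial topology: copy "input" to "output", uppercased.
        StreamsBuilder builder = new StreamsBuilder();
        builder.<String, String>stream("input")
                .mapValues(v -> v.toUpperCase())
                .to("output");
        Topology topology = builder.build();

        // Default serdes are needed because the source/sink above don't name any.
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "test");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

        // The driver processes each piped record synchronously.
        try (TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
            TestInputTopic<String, String> in = driver.createInputTopic(
                    "input", new StringSerializer(), new StringSerializer());
            TestOutputTopic<String, String> out = driver.createOutputTopic(
                    "output", new StringDeserializer(), new StringDeserializer());
            in.pipeInput("k", "hello");
            System.out.println(out.readValue()); // prints HELLO
        }
    }
}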
Example #1
Source File: TestObjectMother.java From adaptive-alerting with Apache License 2.0
public static TopologyTestDriver topologyTestDriver(
        Topology topology,
        Class<?> valueSerdeClass,
        boolean continueOnDeserException) {
    val props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "test");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, valueSerdeClass.getName());
    if (continueOnDeserException) {
        props.put(
                StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
                LogAndContinueExceptionHandler.class.getName());
    }
    return new TopologyTestDriver(topology, props);
}
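A hedged sketch of how a test might call this factory; the topology supplier and serde class below are hypothetical stand-ins, and try-with-resources ensures the driver's state stores are released:

// Hypothetical usage; buildTopologyUnderTest() and MySerde are illustrative names.
Topology topology = buildTopologyUnderTest();
try (TopologyTestDriver driver =
         TestObjectMother.topologyTestDriver(topology, MySerde.class, true)) {
    // pipe test records in and assert on the output here
}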
Example #2
Source File: QueryTranslationTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@SuppressWarnings("unchecked")
void verifyOutput(final TopologyTestDriver testDriver) {
    for (final Record expectedOutput : expectedOutputs) {
        try {
            OutputVerifier.compareKeyValueTimestamp(
                    testDriver.readOutput(expectedOutput.topic,
                            expectedOutput.keyDeserializer(),
                            Serdes.String().deserializer()),
                    expectedOutput.key(),
                    expectedOutput.value,
                    expectedOutput.timestamp);
        } catch (AssertionError assertionError) {
            throw new AssertionError("Query name: " + name
                    + " in file: " + testPath
                    + " failed due to: " + assertionError.getMessage());
        }
    }
    // check for no more records
}
Example #3
Source File: DependencyStorageTopologyTest.java From zipkin-storage-kafka with Apache License 2.0
@Test
void should_doNothing_whenDisabled() {
    // Given: configs
    Duration dependenciesRetentionPeriod = Duration.ofMinutes(1);
    Duration dependenciesWindowSize = Duration.ofMillis(100);
    // When: topology created
    Topology topology = new DependencyStorageTopology(
            dependencyTopic,
            dependenciesRetentionPeriod,
            dependenciesWindowSize,
            false).get();
    TopologyDescription description = topology.describe();
    // Then: topology with no subtopologies
    assertThat(description.subtopologies()).hasSize(0);
    // Given: streams configuration
    TopologyTestDriver testDriver = new TopologyTestDriver(topology, props);
    testDriver.close();
}
Example #4
Source File: TraceStorageTopologyTest.java From zipkin-storage-kafka with Apache License 2.0
@Test
void should_doNothing_whenAllDisabled() {
    // Given: configs
    Duration traceTtl = Duration.ofMillis(5);
    Duration traceTtlCheckInterval = Duration.ofMinutes(1);
    List<String> autocompleteKeys = Collections.singletonList("environment");
    // When: topology provided
    Topology topology = new TraceStorageTopology(
            spansTopic,
            autocompleteKeys,
            traceTtl,
            traceTtlCheckInterval,
            0,
            false,
            false).get();
    TopologyDescription description = topology.describe();
    // Then:
    assertThat(description.subtopologies()).hasSize(0);
    // Given: streams config
    TopologyTestDriver testDriver = new TopologyTestDriver(topology, props);
    testDriver.close();
}
Example #5
Source File: TransformStreamTest.java From kafka-tutorials with Apache License 2.0
private List<Movie> readOutputTopic(TopologyTestDriver testDriver,
                                    String topic,
                                    Deserializer<String> keyDeserializer,
                                    SpecificAvroDeserializer<Movie> valueDeserializer) {
    List<Movie> results = new ArrayList<>();
    while (true) {
        ProducerRecord<String, Movie> record =
                testDriver.readOutput(topic, keyDeserializer, valueDeserializer);
        if (record != null) {
            results.add(record.value());
        } else {
            break;
        }
    }
    return results;
}
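This drain loop recurs in several examples below: readOutput returns null once the topic has no more buffered records, which ends the loop. On Kafka Streams 2.4+, the same drain collapses to a single call via the TestOutputTopic API; a sketch under the same assumed types:

// Sketch (Kafka Streams 2.4+): equivalent drain with TestOutputTopic.
TestOutputTopic<String, Movie> output =
        testDriver.createOutputTopic(topic, keyDeserializer, valueDeserializer);
List<Movie> results = output.readValuesToList();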
Example #6
Source File: AppTopologyTest.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
@BeforeAll
static void setUp() {
    Properties properties = new Properties();
    properties.put(StreamsConfig.APPLICATION_ID_CONFIG, AppConfigs.applicationID);
    properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, AppConfigs.bootstrapServers);
    properties.put(StreamsConfig.STATE_DIR_CONFIG, AppConfigs.stateStoreLocationUT);
    StreamsBuilder builder = new StreamsBuilder();
    AppTopology.withBuilder(builder);
    Topology topology = builder.build();
    topologyTestDriver = new TopologyTestDriver(topology, properties);
}
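A @BeforeAll setup like this is normally paired with a teardown that closes the driver and frees its state directory. The source is not shown on this page, but the matching method would plausibly look like:

@AfterAll
static void tearDown() {
    // Closes state stores and releases the driver's local state directory.
    topologyTestDriver.close();
}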
Example #7
Source File: TestTopologyReceiver.java From simplesource with Apache License 2.0
TestTopologyReceiver(BiConsumer<K, V> updateTarget,
                     TopologyTestDriver driver,
                     ReceiverSpec<K, V> spec) {
    getDriverOutput = () -> {
        int count = 0;
        while (true) {
            ProducerRecord<String, V> record = driver.readOutput(
                    spec.topicName,
                    Serdes.String().deserializer(),
                    spec.valueSerde.deserializer());
            if (record == null) {
                break;
            }
            count++;
            K key = spec.keyConverter.apply(record.key());
            updateTarget.accept(key, record.value());
        }
        return count;
    };
}
Example #8
Source File: TumblingWindowTest.java From kafka-tutorials with Apache License 2.0
private List<RatingCount> readOutputTopic(TopologyTestDriver testDriver,
                                          String outputTopic,
                                          Deserializer<String> keyDeserializer,
                                          Deserializer<String> valueDeserializer) {
    List<RatingCount> results = new ArrayList<>();
    while (true) {
        ProducerRecord<String, String> record =
                testDriver.readOutput(outputTopic, keyDeserializer, valueDeserializer);
        if (record != null) {
            results.add(new RatingCount(record.key().toString(), record.value()));
        } else {
            break;
        }
    }
    return results;
}
Example #9
Source File: TestInput.java From fluent-kafka-streams-tests with MIT License
/**
 * <p>Constructor for the test input topic.</p>
 *
 * @param testDriver Kafka's {@link TopologyTestDriver} used in this test.
 * @param topic Name of input topic.
 * @param keySerde Serde for key type in topic.
 * @param valueSerde Serde for value type in topic.
 */
protected TestInput(final TopologyTestDriver testDriver, final String topic,
        final Serde<K> keySerde, final Serde<V> valueSerde) {
    this.testDriver = testDriver;
    this.topic = topic;
    this.keySerde = keySerde;
    this.valueSerde = valueSerde;

    this.consumerFactory = new ConsumerRecordFactory<>(topic,
            keySerde == null ? new UnspecifiedSerializer<K>() : keySerde.serializer(),
            valueSerde == null ? new UnspecifiedSerializer<V>() : valueSerde.serializer()) {
        @Override
        public ConsumerRecord<byte[], byte[]> create(final String topicName, final K key,
                final V value, final Headers headers, final long timestampMs) {
            final ConsumerRecord<byte[], byte[]> record =
                    super.create(topicName, key, value, headers, timestampMs);
            testDriver.pipeInput(record);
            return record;
        }
    };
}
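The design choice worth noting here: the anonymous ConsumerRecordFactory subclass overrides create so that every record it builds is immediately piped into the driver. Callers of this fluent test API therefore only ever invoke add-style methods on TestInput and never touch ConsumerRecord or pipeInput directly.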
Example #10
Source File: JoinStreamToTableTest.java From kafka-tutorials with Apache License 2.0
private List<RatedMovie> readOutputTopic(TopologyTestDriver testDriver,
                                         String topic,
                                         Deserializer<String> keyDeserializer,
                                         SpecificAvroDeserializer<RatedMovie> makeRatedMovieDeserializer) {
    List<RatedMovie> results = new ArrayList<>();
    while (true) {
        ProducerRecord<String, RatedMovie> record =
                testDriver.readOutput(topic, keyDeserializer, makeRatedMovieDeserializer);
        if (record != null) {
            results.add(record.value());
        } else {
            break;
        }
    }
    return results;
}
Example #11
Source File: SerializationTutorialTest.java From kafka-tutorials with Apache License 2.0
@Test
public void shouldChangeSerializationFormat() throws IOException {
    SerializationTutorial tutorial = new SerializationTutorial();
    final Properties envProps = tutorial.loadEnvProperties(TEST_CONFIG_FILE);
    final Properties streamsProps = tutorial.buildStreamsProperties(envProps);

    String inputTopicName = envProps.getProperty("input.avro.movies.topic.name");
    String outputTopicName = envProps.getProperty("output.proto.movies.topic.name");

    final SpecificAvroSerde<Movie> avroSerde = tutorial.movieAvroSerde(envProps);
    final KafkaProtobufSerde<MovieProtos.Movie> protobufSerde = tutorial.movieProtobufSerde(envProps);
    Topology topology = tutorial.buildTopology(envProps, avroSerde, protobufSerde);

    TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamsProps);

    testDriver
            .createInputTopic(inputTopicName, Serdes.Long().serializer(), avroSerde.serializer())
            .pipeValueList(this.prepareInputFixture());

    final List<MovieProtos.Movie> moviesProto = testDriver
            .createOutputTopic(outputTopicName, Serdes.Long().deserializer(), protobufSerde.deserializer())
            .readValuesToList();

    assertThat(moviesProto, equalTo(expectedMovies()));
}
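Unlike most examples on this page, this test uses createInputTopic/createOutputTopic with pipeValueList and readValuesToList. That TestInputTopic/TestOutputTopic API arrived in Kafka Streams 2.4 and replaces the deprecated pipeInput/readOutput calls seen in the other examples.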
Example #12
Source File: SplitStreamTest.java From kafka-tutorials with Apache License 2.0
private List<ActingEvent> readOutputTopic(TopologyTestDriver testDriver,
                                          String topic,
                                          Deserializer<String> keyDeserializer,
                                          SpecificAvroDeserializer<ActingEvent> valueDeserializer) {
    List<ActingEvent> results = new ArrayList<>();
    while (true) {
        ProducerRecord<String, ActingEvent> record =
                testDriver.readOutput(topic, keyDeserializer, valueDeserializer);
        if (record != null) {
            results.add(record.value());
        } else {
            break;
        }
    }
    return results;
}
Example #13
Source File: MegabusRefResolverTest.java From emodb with Apache License 2.0
@BeforeSuite
public void setUp() {
    Properties config = new Properties();
    config.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "refResolverGroup");
    config.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaEndpoint.toString());
    config.setProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    testDriver = new TopologyTestDriver(refResolver.topology(), config);
}
Example #14
Source File: QueryTranslationTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
void processInput(final TopologyTestDriver testDriver,
                  final ConsumerRecordFactory<String, String> recordFactory) {
    inputs.forEach(record -> testDriver.pipeInput(
            recordFactory.create(record.topic, record.key, record.value, record.timestamp)));
}
Example #15
Source File: QueryTranslationTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
private TopologyTestDriver buildStreamsTopology(final Query query) throws Exception {
    final List<QueryMetadata> queries = ksqlEngine.buildMultipleQueries(
            query.statements(), Collections.emptyMap());
    return new TopologyTestDriver(
            queries.get(queries.size() - 1).getTopology(), streamsProperties, 0);
}
Example #16
Source File: TransformStreamTest.java From kafka-tutorials with Apache License 2.0
@Test
public void testTransformStream() throws IOException {
    TransformStream ts = new TransformStream();
    Properties envProps = ts.loadEnvProperties(TEST_CONFIG_FILE);
    Properties streamProps = ts.buildStreamsProperties(envProps);

    String inputTopic = envProps.getProperty("input.topic.name");
    String outputTopic = envProps.getProperty("output.topic.name");

    Topology topology = ts.buildTopology(envProps);
    TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamProps);

    Serializer<String> keySerializer = Serdes.String().serializer();
    SpecificAvroSerializer<RawMovie> valueSerializer = makeSerializer(envProps);
    Deserializer<String> keyDeserializer = Serdes.String().deserializer();
    SpecificAvroDeserializer<Movie> valueDeserializer = makeDeserializer(envProps);

    ConsumerRecordFactory<String, RawMovie> inputFactory =
            new ConsumerRecordFactory<>(keySerializer, valueSerializer);

    List<RawMovie> input = new ArrayList<>();
    input.add(RawMovie.newBuilder().setId(294).setTitle("Die Hard::1988").setGenre("action").build());
    input.add(RawMovie.newBuilder().setId(354).setTitle("Tree of Life::2011").setGenre("drama").build());
    input.add(RawMovie.newBuilder().setId(782).setTitle("A Walk in the Clouds::1995").setGenre("romance").build());
    input.add(RawMovie.newBuilder().setId(128).setTitle("The Big Lebowski::1998").setGenre("comedy").build());

    List<Movie> expectedOutput = new ArrayList<>();
    expectedOutput.add(Movie.newBuilder().setTitle("Die Hard").setId(294).setReleaseYear(1988).setGenre("action").build());
    expectedOutput.add(Movie.newBuilder().setTitle("Tree of Life").setId(354).setReleaseYear(2011).setGenre("drama").build());
    expectedOutput.add(Movie.newBuilder().setTitle("A Walk in the Clouds").setId(782).setReleaseYear(1995).setGenre("romance").build());
    expectedOutput.add(Movie.newBuilder().setTitle("The Big Lebowski").setId(128).setReleaseYear(1998).setGenre("comedy").build());

    for (RawMovie rawMovie : input) {
        testDriver.pipeInput(inputFactory.create(inputTopic, rawMovie.getTitle(), rawMovie));
    }

    List<Movie> actualOutput = readOutputTopic(testDriver, outputTopic, keyDeserializer, valueDeserializer);

    assertEquals(expectedOutput, actualOutput);
}
Example #17
Source File: TestTopology.java From fluent-kafka-streams-tests with MIT License
public void start() {
    this.schemaRegistry.start();
    this.properties.setProperty(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            this.getSchemaRegistryUrl());
    try {
        this.stateDirectory = Files.createTempDirectory("fluent-kafka-streams");
    } catch (final IOException e) {
        throw new UncheckedIOException("Cannot create temporary state directory", e);
    }
    this.properties.setProperty(StreamsConfig.STATE_DIR_CONFIG,
            this.stateDirectory.toAbsolutePath().toString());
    final Topology topology = this.topologyFactory.apply(this.properties);
    this.testDriver = new TopologyTestDriver(topology, this.properties);

    this.inputTopics.clear();
    this.outputTopics.clear();
    for (final TopologyDescription.Subtopology subtopology : topology.describe().subtopologies()) {
        for (final TopologyDescription.Node node : subtopology.nodes()) {
            if (node instanceof TopologyDescription.Source) {
                for (final String topic : ((Source) node).topicSet()) {
                    addExternalTopics(this.inputTopics, topic);
                }
            } else if (node instanceof TopologyDescription.Sink) {
                addExternalTopics(this.outputTopics, ((TopologyDescription.Sink) node).topic());
            }
        }
    }

    for (final GlobalStore store : topology.describe().globalStores()) {
        store.source().topicSet().forEach(name -> addExternalTopics(this.inputTopics, name));
    }
}
Example #18
Source File: SpanAggregationTopologyTest.java From zipkin-storage-kafka with Apache License 2.0
@Test
void should_doNothing_whenAggregationDisabled() {
    Duration traceTimeout = Duration.ofSeconds(1);
    Topology topology = new SpanAggregationTopology(
            spansTopic,
            traceTopic,
            dependencyTopic,
            traceTimeout,
            false).get();
    TopologyDescription description = topology.describe();
    // Then: no subtopologies are created
    assertThat(description.subtopologies()).hasSize(0);
    TopologyTestDriver testDriver = new TopologyTestDriver(topology, props);
    testDriver.close();
}
Example #19
Source File: TopologyTestDriverUpdateEventGenerator.java From spring-cloud-stream-samples with Apache License 2.0
public TopologyTestDriverUpdateEventGenerator(TopologyTestDriver topologyTestDriver,
                                              String inputTopic,
                                              Serializer<ProductKey> keySerializer,
                                              Serializer<InventoryUpdateEvent> valueSerializer) {
    this.topologyTestDriver = topologyTestDriver;
    this.recordFactory = new ConsumerRecordFactory<>(inputTopic, keySerializer, valueSerializer);
}
Example #20
Source File: WordCountProcessorApplicationTests.java From spring-cloud-stream-samples with Apache License 2.0
/**
 * Setup the stream topology:
 * add a KStream based on the @StreamListener annotation,
 * and add to(topic) based on the @SendTo annotation.
 */
@Before
public void setup() {
    final StreamsBuilder builder = new StreamsBuilder();
    KStream<Bytes, String> input = builder.stream(INPUT_TOPIC, Consumed.with(nullSerde, stringSerde));
    KafkaStreamsWordCountApplication.WordCountProcessorApplication app =
            new KafkaStreamsWordCountApplication.WordCountProcessorApplication();
    final Function<KStream<Bytes, String>, KStream<Bytes, KafkaStreamsWordCountApplication.WordCount>> process =
            app.process();
    final KStream<Bytes, KafkaStreamsWordCountApplication.WordCount> output = process.apply(input);
    output.to(OUTPUT_TOPIC, Produced.with(nullSerde, countSerde));
    testDriver = new TopologyTestDriver(builder.build(), getStreamsConfiguration());
}
Example #21
Source File: TestDriverInitializer.java From simplesource with Apache License 2.0
TopologyTestDriver build(Consumer<StreamsBuilder> builderConsumer) {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "test");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
    props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 0);
    props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    builderConsumer.accept(streamsBuilder);
    Topology topology = streamsBuilder.build();
    return new TopologyTestDriver(topology, props);
}
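Setting COMMIT_INTERVAL_MS_CONFIG and CACHE_MAX_BYTES_BUFFERING_CONFIG to 0 disables record caching, so every aggregation update is forwarded downstream immediately instead of being buffered. Without this, tests that assert on intermediate updates can observe fewer output records than expected.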
Example #22
Source File: Kafka_Streams_MachineLearning_H2O_GBM_ExampleTest.java From kafka-streams-machine-learning-examples with Apache License 2.0
@Before
public void setup() throws IllegalAccessException, ClassNotFoundException, InstantiationException {
    testDriver = new TopologyTestDriver(
            Kafka_Streams_MachineLearning_H2O_GBM_Example.getStreamTopology(
                    Kafka_Streams_MachineLearning_H2O_GBM_Example.modelClassName),
            Kafka_Streams_MachineLearning_H2O_GBM_Example.getStreamConfiguration("localhost:9092",
                    Kafka_Streams_MachineLearning_H2O_GBM_Example.APPLICATION_ID));
}
Example #23
Source File: Kafka_Streams_MachineLearning_H2O_DeepLearning_ExampleTest.java From kafka-streams-machine-learning-examples with Apache License 2.0
@Before
public void setup() throws IllegalAccessException, ClassNotFoundException, InstantiationException {
    testDriver = new TopologyTestDriver(
            Kafka_Streams_MachineLearning_H2O_DeepLearning_Example.getStreamTopology(
                    Kafka_Streams_MachineLearning_H2O_DeepLearning_Example.modelClassName),
            Kafka_Streams_MachineLearning_H2O_DeepLearning_Example.getStreamConfiguration(
                    "localhost:9092",
                    Kafka_Streams_MachineLearning_H2O_DeepLearning_Example.APPLICATION_ID));
}
Example #24
Source File: QueryTranslationTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldBuildAndExecuteQuery() throws Exception {
    final TopologyTestDriver testDriver = buildStreamsTopology(query);
    query.processInput(testDriver, recordFactory);
    query.verifyOutput(testDriver);
}
Example #25
Source File: KafkaAnomalyDetectorMapperTest.java From adaptive-alerting with Apache License 2.0
private void nullOnDeserException(TopologyTestDriver driver) {
    driver.pipeInput(stringFactory.create(INPUT_TOPIC, KAFKA_KEY, INVALID_INPUT_VALUE));
    val record = driver.readOutput(DEFAULT_OUTPUT_TOPIC, stringDeser, mmdDeser);
    Assert.assertNull(record);
}
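This pairs naturally with the factory in Example #1, from the same project: when LogAndContinueExceptionHandler is configured as the deserialization exception handler, a malformed record is logged and skipped rather than crashing the topology, so readOutput finds nothing and the test asserts null.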
Example #26
Source File: RunningAverageTest.java From kafka-tutorials with Apache License 2.0
@Before
public void setUp() throws IOException, RestClientException {
    final Properties mockProps = new Properties();
    mockProps.put("application.id", "kafka-movies-test");
    mockProps.put("bootstrap.servers", "DUMMY_KAFKA_CONFLUENT_CLOUD_9092");
    mockProps.put("schema.registry.url", "DUMMY_SR_CONFLUENT_CLOUD_8080");
    mockProps.put("default.topic.replication.factor", "1");
    mockProps.put("offset.reset.policy", "latest");
    mockProps.put("specific.avro.reader", true);

    final RunningAverage streamsApp = new RunningAverage();
    final Properties streamsConfig = streamsApp.buildStreamsProperties(mockProps);

    StreamsBuilder builder = new StreamsBuilder();

    // workaround https://stackoverflow.com/a/50933452/27563
    final String tempDirectory = Files.createTempDirectory("kafka-streams")
            .toAbsolutePath()
            .toString();
    streamsConfig.setProperty(StreamsConfig.STATE_DIR_CONFIG, tempDirectory);

    final Map<String, String> mockSerdeConfig = RunningAverage.getSerdeConfig(streamsConfig);

    SpecificAvroSerde<CountAndSum> countAndSumSerde = new SpecificAvroSerde<>(new MockSchemaRegistryClient());
    countAndSumSerde.configure(mockSerdeConfig, false);

    // MockSchemaRegistryClient doesn't require connection to Schema Registry, which is perfect for unit tests
    final MockSchemaRegistryClient client = new MockSchemaRegistryClient();
    ratingSpecificAvroSerde = new SpecificAvroSerde<>(client);
    client.register(RATINGS_TOPIC_NAME + "-value", Rating.SCHEMA$);
    ratingSpecificAvroSerde.configure(mockSerdeConfig, false);

    KStream<Long, Rating> ratingStream = builder.stream(RATINGS_TOPIC_NAME,
            Consumed.with(Serdes.Long(), ratingSpecificAvroSerde));
    final KTable<Long, Double> ratingAverageTable = RunningAverage.getRatingAverageTable(
            ratingStream, AVERAGE_RATINGS_TOPIC_NAME, countAndSumSerde);

    final Topology topology = builder.build();
    testDriver = new TopologyTestDriver(topology, streamsConfig);
}
Example #27
Source File: TopolologyTestDriverKafkaStreamsInventoryCountTests.java From spring-cloud-stream-samples with Apache License 2.0
@BeforeEach
void setup() {
    configureDeserializer(countEventSerde.deserializer(), ProductKey.class, InventoryCountEvent.class, false);
    configureDeserializer(keySerde.deserializer(), ProductKey.class, null, true);

    final StreamsBuilder builder = new StreamsBuilder();
    KStream<ProductKey, InventoryUpdateEvent> input =
            builder.stream(INPUT_TOPIC, Consumed.with(keySerde, updateEventSerde));
    KafkaStreamsInventoryAggregator inventoryAggregator =
            new KafkaStreamsInventoryAggregator(Stores.inMemoryKeyValueStore(STORE_NAME));
    KStream<ProductKey, InventoryCountEvent> output = inventoryAggregator.process().apply(input);
    output.to(OUTPUT_TOPIC);

    Topology topology = builder.build();
    testDriver = new TopologyTestDriver(topology, getStreamsConfiguration());
    logger.debug(topology.describe().toString());

    setEventGenerator(new TopologyTestDriverUpdateEventGenerator(testDriver, INPUT_TOPIC,
            keySerde.serializer(), updateEventSerde.serializer()));
}
Example #28
Source File: Kafka_Streams_TensorFlow_Image_Recognition_ExampleTest.java From kafka-streams-machine-learning-examples with Apache License 2.0
@Before
public void setup() throws IOException {
    testDriver = new TopologyTestDriver(
            Kafka_Streams_TensorFlow_Image_Recognition_Example.getStreamTopology(),
            Kafka_Streams_TensorFlow_Image_Recognition_Example.getStreamConfiguration("localhost:9092"));
}
Example #29
Source File: StreamsTester.java From football-events with MIT License
public void setUp(Topology topology) {
    testDriver = new TopologyTestDriver(topology, streamsProps);
}
Example #30
Source File: TestDriverPublisher.java From simplesource with Apache License 2.0
TestDriverPublisher(final TopologyTestDriver driver, final Serde<K> keySerde, final Serde<V> valueSerde) {
    this.driver = driver;
    factory = new ConsumerRecordFactory<>(keySerde.serializer(), valueSerde.serializer());
}