io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde Java Examples
The following examples show how to use
io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde.
Each example is taken from an open-source project; the source file and license are noted above each snippet.
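Most of the examples below follow the same pattern: create a SpecificAvroSerde, configure it with the Schema Registry URL, and hand it to Consumed.with(...) / Produced.with(...) in a Kafka Streams topology. As a quick orientation, here is a minimal sketch of that pattern; the Order record class and the "orders-in"/"orders-out" topic names are hypothetical placeholders, and the Schema Registry URL would come from your own configuration.

import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

import java.util.Collections;
import java.util.Map;

public class SpecificAvroSerdeSketch {

  // "Order" stands in for any Avro-generated SpecificRecord class; it is a hypothetical
  // placeholder used only for illustration.
  static Topology buildTopology(String schemaRegistryUrl) {
    // Configure the serde against the Schema Registry; the boolean flag marks it as a value serde.
    final SpecificAvroSerde<Order> orderSerde = new SpecificAvroSerde<>();
    final Map<String, String> serdeConfig = Collections.singletonMap(
        AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
    orderSerde.configure(serdeConfig, false);

    // Wire the serde into a topology for both reading and writing Avro values.
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream("orders-in", Consumed.with(Serdes.String(), orderSerde))
        .to("orders-out", Produced.with(Serdes.String(), orderSerde));
    return builder.build();
  }
}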
Example #1
Source File: AggregatingCountTest.java From kafka-tutorials with Apache License 2.0 | 7 votes |
private SpecificAvroSerde<TicketSale> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = TicketSale.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<TicketSale> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
Example #2
Source File: FindDistinctEventsTest.java From kafka-tutorials with Apache License 2.0 | 7 votes |
private static SpecificAvroSerde<Click> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = Click.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<Click> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
Example #3
Source File: AggregatingSumTest.java From kafka-tutorials with Apache License 2.0 | 7 votes |
private SpecificAvroSerde<TicketSale> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = TicketSale.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<TicketSale> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
Example #4
Source File: SerializationTutorial.java From kafka-tutorials with Apache License 2.0 | 6 votes |
protected Topology buildTopology(Properties envProps,
                                 final SpecificAvroSerde<Movie> movieSpecificAvroSerde,
                                 final KafkaProtobufSerde<MovieProtos.Movie> movieProtoSerde) {
  final String inputAvroTopicName = envProps.getProperty("input.avro.movies.topic.name");
  final String outProtoTopicName = envProps.getProperty("output.proto.movies.topic.name");

  final StreamsBuilder builder = new StreamsBuilder();

  // topic contains values in avro format
  final KStream<Long, Movie> avroMovieStream =
      builder.stream(inputAvroTopicName, Consumed.with(Long(), movieSpecificAvroSerde));

  // convert and write movie data in protobuf format
  avroMovieStream
      .map((key, avroMovie) -> new KeyValue<>(key, MovieProtos.Movie.newBuilder()
          .setMovieId(avroMovie.getMovieId())
          .setTitle(avroMovie.getTitle())
          .setReleaseYear(avroMovie.getReleaseYear())
          .build()))
      .to(outProtoTopicName, Produced.with(Long(), movieProtoSerde));

  return builder.build();
}
Example #5
Source File: StreamsIngestTest.java From kafka-tutorials with Apache License 2.0 | 6 votes |
private SpecificAvroSerde<City> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = City.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<City> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
Example #6
Source File: FilterEventsTest.java From kafka-tutorials with Apache License 2.0 | 6 votes |
private SpecificAvroSerde<Publication> makeSerializer(Properties envProps)
    throws IOException, RestClientException {

  final MockSchemaRegistryClient client = new MockSchemaRegistryClient();

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final Schema schema = Publication.SCHEMA$;
  client.register(inputTopic + "-value", schema);
  client.register(outputTopic + "-value", schema);

  SpecificAvroSerde<Publication> serde = new SpecificAvroSerde<>(client);

  Map<String, String> config = new HashMap<>();
  config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);

  return serde;
}
Example #7
Source File: RunningAverage.java From kafka-tutorials with Apache License 2.0 | 6 votes |
protected static KTable<Long, Double> getRatingAverageTable(KStream<Long, Rating> ratings,
                                                            String avgRatingsTopicName,
                                                            SpecificAvroSerde<CountAndSum> countAndSumSerde) {

  // Grouping Ratings
  KGroupedStream<Long, Double> ratingsById = ratings
      .map((key, rating) -> new KeyValue<>(rating.getMovieId(), rating.getRating()))
      .groupByKey(with(Long(), Double()));

  final KTable<Long, CountAndSum> ratingCountAndSum =
      ratingsById.aggregate(() -> new CountAndSum(0L, 0.0),
                            (key, value, aggregate) -> {
                              aggregate.setCount(aggregate.getCount() + 1);
                              aggregate.setSum(aggregate.getSum() + value);
                              return aggregate;
                            },
                            Materialized.with(Long(), countAndSumSerde));

  final KTable<Long, Double> ratingAverage =
      ratingCountAndSum.mapValues(value -> value.getSum() / value.getCount(),
                                  Materialized.as("average-ratings"));

  // persist the result in topic
  ratingAverage.toStream().to(avgRatingsTopicName);

  return ratingAverage;
}
Example #8
Source File: AggregatingCount.java From kafka-tutorials with Apache License 2.0 | 6 votes |
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<TicketSale> ticketSaleSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), ticketSaleSerde))
      // Set key to title and value to ticket value
      .map((k, v) -> new KeyValue<>((String) v.getTitle(), (Integer) v.getTicketTotalValue()))
      // Group by title
      .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
      // Apply COUNT method
      .count()
      // Write to stream specified by outputTopic
      .toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

  return builder.build();
}
Example #9
Source File: AggregatingSum.java From kafka-tutorials with Apache License 2.0 | 6 votes |
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<TicketSale> ticketSaleSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), ticketSaleSerde))
      // Set key to title and value to ticket value
      .map((k, v) -> new KeyValue<>((String) v.getTitle(), (Integer) v.getTicketTotalValue()))
      // Group by title
      .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
      // Apply SUM aggregation
      .reduce(Integer::sum)
      // Write to stream specified by outputTopic
      .toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Integer()));

  return builder.build();
}
Example #10
Source File: TumblingWindow.java From kafka-tutorials with Apache License 2.0 | 6 votes |
public Properties buildStreamsProperties(Properties envProps) {
  Properties props = new Properties();

  props.put(StreamsConfig.APPLICATION_ID_CONFIG, envProps.getProperty("application.id"));
  props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, envProps.getProperty("bootstrap.servers"));
  props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
  props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
  props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url"));
  props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, RatingTimestampExtractor.class.getName());
  props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);

  try {
    props.put(StreamsConfig.STATE_DIR_CONFIG,
              Files.createTempDirectory("tumbling-windows").toAbsolutePath().toString());
  } catch (IOException e) {
    // If we can't have our own temporary directory, we can leave it with the default. We create a custom
    // one because running the app outside of Docker multiple times in quick succession will find the
    // previous state still hanging around in /tmp somewhere, which is not the expected result.
  }

  return props;
}
Example #11
Source File: SerializationTutorialTest.java From kafka-tutorials with Apache License 2.0 | 6 votes |
@Test
public void shouldChangeSerializationFormat() throws IOException {
  SerializationTutorial tutorial = new SerializationTutorial();
  final Properties envProps = tutorial.loadEnvProperties(TEST_CONFIG_FILE);
  final Properties streamsProps = tutorial.buildStreamsProperties(envProps);

  String inputTopicName = envProps.getProperty("input.avro.movies.topic.name");
  String outputTopicName = envProps.getProperty("output.proto.movies.topic.name");

  final SpecificAvroSerde<Movie> avroSerde = tutorial.movieAvroSerde(envProps);
  final KafkaProtobufSerde<MovieProtos.Movie> protobufSerde = tutorial.movieProtobufSerde(envProps);

  Topology topology = tutorial.buildTopology(envProps, avroSerde, protobufSerde);
  TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamsProps);

  testDriver
      .createInputTopic(inputTopicName, Long().serializer(), avroSerde.serializer())
      .pipeValueList(this.prepareInputFixture());

  final List<MovieProtos.Movie> moviesProto =
      testDriver.createOutputTopic(outputTopicName, Long().deserializer(), protobufSerde.deserializer())
          .readValuesToList();

  assertThat(moviesProto, equalTo(expectedMovies()));
}
Example #12
Source File: AggregatingMinMax.java From kafka-tutorials with Apache License 2.0 | 6 votes |
public static Topology buildTopology(final StreamsBuilder builder,
                                     final Properties envProps,
                                     final SpecificAvroSerde<MovieTicketSales> ticketSaleSerde,
                                     final SpecificAvroSerde<YearlyMovieFigures> movieFiguresSerde) {
  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), ticketSaleSerde))
      .groupBy(
          (k, v) -> v.getReleaseYear(),
          Grouped.with(Serdes.Integer(), ticketSaleSerde))
      .aggregate(
          () -> new YearlyMovieFigures(0, Integer.MAX_VALUE, Integer.MIN_VALUE),
          ((key, value, aggregate) ->
              new YearlyMovieFigures(key,
                  Math.min(value.getTotalSales(), aggregate.getMinTotalSales()),
                  Math.max(value.getTotalSales(), aggregate.getMaxTotalSales()))),
          Materialized.with(Serdes.Integer(), movieFiguresSerde))
      .toStream()
      .to(outputTopic, Produced.with(Serdes.Integer(), movieFiguresSerde));

  return builder.build();
}
Example #13
Source File: FindDistinctEvents.java From kafka-tutorials with Apache License 2.0 | 5 votes |
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<Click> clicksSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  // How long we "remember" an event. During this time, any incoming duplicates of the event
  // will be, well, dropped, thereby de-duplicating the input data.
  //
  // The actual value depends on your use case. To reduce memory and disk usage, you could
  // decrease the size to purge old windows more frequently at the cost of potentially missing out
  // on de-duplicating late-arriving records.
  final Duration windowSize = Duration.ofMinutes(2);

  // retention period must be at least window size -- for this use case, we don't need a longer retention period
  // and thus just use the window size as retention time
  final Duration retentionPeriod = windowSize;

  final StoreBuilder<WindowStore<String, Long>> dedupStoreBuilder = Stores.windowStoreBuilder(
      Stores.persistentWindowStore(storeName,
                                   retentionPeriod,
                                   windowSize,
                                   false),
      Serdes.String(),
      Serdes.Long());

  builder.addStateStore(dedupStoreBuilder);

  builder
      .stream(inputTopic, Consumed.with(Serdes.String(), clicksSerde))
      .transformValues(() -> new DeduplicationTransformer<>(windowSize.toMillis(), (key, value) -> value.getIp()), storeName)
      .filter((k, v) -> v != null)
      .to(outputTopic, Produced.with(Serdes.String(), clicksSerde));

  return builder.build();
}
Example #14
Source File: CogroupingStreams.java From kafka-tutorials with Apache License 2.0 | 5 votes |
static <T extends SpecificRecord> SpecificAvroSerde<T> getSpecificAvroSerde(final Properties envProps) {
  final SpecificAvroSerde<T> specificAvroSerde = new SpecificAvroSerde<>();

  final HashMap<String, String> serdeConfig = new HashMap<>();
  serdeConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
                  envProps.getProperty("schema.registry.url"));

  specificAvroSerde.configure(serdeConfig, false);
  return specificAvroSerde;
}
Example #15
Source File: FilterEvents.java From kafka-tutorials with Apache License 2.0 | 5 votes |
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<Publication> publicationSerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  builder.stream(inputTopic, Consumed.with(Serdes.String(), publicationSerde))
      .filter((name, publication) -> "George R. R. Martin".equals(publication.getName()))
      .to(outputTopic, Produced.with(Serdes.String(), publicationSerde));

  return builder.build();
}
Example #16
Source File: StreamsIngest.java From kafka-tutorials with Apache License 2.0 | 5 votes |
public Topology buildTopology(Properties envProps, final SpecificAvroSerde<City> citySerde) {
  final StreamsBuilder builder = new StreamsBuilder();

  final String inputTopic = envProps.getProperty("input.topic.name");
  final String outputTopic = envProps.getProperty("output.topic.name");

  KStream<String, City> citiesNoKey = builder.stream(inputTopic, Consumed.with(Serdes.String(), citySerde));
  final KStream<Long, City> citiesKeyed = citiesNoKey.map((k, v) -> new KeyValue<>(v.getCityId(), v));
  citiesKeyed.to(outputTopic, Produced.with(Serdes.Long(), citySerde));

  return builder.build();
}
Example #17
Source File: StreamsIngest.java From kafka-tutorials with Apache License 2.0 | 5 votes |
private SpecificAvroSerde<City> citySerde(final Properties envProps) {
  final SpecificAvroSerde<City> serde = new SpecificAvroSerde<>();
  Map<String, String> config = new HashMap<>();
  config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
             envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);
  return serde;
}
Example #18
Source File: MergeStreams.java From kafka-tutorials with Apache License 2.0 | 5 votes |
public Properties buildStreamsProperties(Properties envProps) {
  Properties props = new Properties();

  props.put(StreamsConfig.APPLICATION_ID_CONFIG, envProps.getProperty("application.id"));
  props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, envProps.getProperty("bootstrap.servers"));
  props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
  props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
  props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url"));

  return props;
}
Example #19
Source File: FilterEvents.java From kafka-tutorials with Apache License 2.0 | 5 votes |
private SpecificAvroSerde<Publication> publicationSerde(final Properties envProps) {
  final SpecificAvroSerde<Publication> serde = new SpecificAvroSerde<>();
  Map<String, String> config = new HashMap<>();
  config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
             envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);
  return serde;
}
Example #20
Source File: DynamicOutputTopic.java From kafka-tutorials with Apache License 2.0 | 5 votes |
static <T extends SpecificRecord> SpecificAvroSerde<T> getSpecificAvroSerde(final Properties envProps) {
  final SpecificAvroSerde<T> specificAvroSerde = new SpecificAvroSerde<>();

  final HashMap<String, String> serdeConfig = new HashMap<>();
  serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
                  envProps.getProperty("schema.registry.url"));

  specificAvroSerde.configure(serdeConfig, false);
  return specificAvroSerde;
}
Example #21
Source File: SplitStream.java From kafka-tutorials with Apache License 2.0 | 5 votes |
public Properties buildStreamsProperties(Properties envProps) {
  Properties props = new Properties();

  props.put(StreamsConfig.APPLICATION_ID_CONFIG, envProps.getProperty("application.id"));
  props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, envProps.getProperty("bootstrap.servers"));
  props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
  props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
  props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url"));

  return props;
}
Example #22
Source File: TransformStream.java From kafka-tutorials with Apache License 2.0 | 5 votes |
public Properties buildStreamsProperties(Properties envProps) {
  Properties props = new Properties();

  props.put(StreamsConfig.APPLICATION_ID_CONFIG, envProps.getProperty("application.id"));
  props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, envProps.getProperty("bootstrap.servers"));
  props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
  props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
  props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url"));

  return props;
}
Example #23
Source File: FindDistinctEvents.java From kafka-tutorials with Apache License 2.0 | 5 votes |
private SpecificAvroSerde<Click> buildClicksSerde(final Properties envProps) {
  final SpecificAvroSerde<Click> serde = new SpecificAvroSerde<>();
  Map<String, String> config = new HashMap<>();
  config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
             envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);
  return serde;
}
Example #24
Source File: AggregatingCount.java From kafka-tutorials with Apache License 2.0 | 5 votes |
private SpecificAvroSerde<TicketSale> ticketSaleSerde(final Properties envProps) {
  final SpecificAvroSerde<TicketSale> serde = new SpecificAvroSerde<>();
  Map<String, String> config = new HashMap<>();
  config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
             envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);
  return serde;
}
Example #25
Source File: FkJoinTableToTable.java From kafka-tutorials with Apache License 2.0 | 5 votes |
static <T extends SpecificRecord> SpecificAvroSerde<T> getSpecificAvroSerde(final Properties envProps) {
  final SpecificAvroSerde<T> specificAvroSerde = new SpecificAvroSerde<>();

  final HashMap<String, String> serdeConfig = new HashMap<>();
  serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
                  envProps.getProperty("schema.registry.url"));

  specificAvroSerde.configure(serdeConfig, false);
  return specificAvroSerde;
}
Example #26
Source File: CountInhabitantsWithAvro.java From fluent-kafka-streams-tests with MIT License | 5 votes |
public Properties getKafkaProperties() {
  final String brokers = "localhost:9092";
  final Properties kafkaConfig = new Properties();

  kafkaConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "inhabitants-per-city");
  kafkaConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
  kafkaConfig.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
  kafkaConfig.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
  kafkaConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, this.schemaRegistryUrl);

  return kafkaConfig;
}
Example #27
Source File: AggregatingMinMax.java From kafka-tutorials with Apache License 2.0 | 5 votes |
public static SpecificAvroSerde<MovieTicketSales> ticketSaleSerde(final Properties envProps) {
  final SpecificAvroSerde<MovieTicketSales> serde = new SpecificAvroSerde<>();
  serde.configure(Collections.singletonMap(
      AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
      envProps.getProperty("schema.registry.url")), false);
  return serde;
}
Example #28
Source File: AggregatingSum.java From kafka-tutorials with Apache License 2.0 | 5 votes |
private SpecificAvroSerde<TicketSale> ticketSaleSerde(final Properties envProps) {
  final SpecificAvroSerde<TicketSale> serde = new SpecificAvroSerde<>();
  Map<String, String> config = new HashMap<>();
  config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
             envProps.getProperty("schema.registry.url"));
  serde.configure(config, false);
  return serde;
}
Example #29
Source File: TumblingWindow.java From kafka-tutorials with Apache License 2.0 | 5 votes |
private SpecificAvroSerde<Rating> ratedMovieAvroSerde(Properties envProps) {
  SpecificAvroSerde<Rating> movieAvroSerde = new SpecificAvroSerde<>();

  final HashMap<String, String> serdeConfig = new HashMap<>();
  serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
                  envProps.getProperty("schema.registry.url"));

  movieAvroSerde.configure(serdeConfig, false);
  return movieAvroSerde;
}
Example #30
Source File: AggregatingMinMax.java From kafka-tutorials with Apache License 2.0 | 5 votes |
public static SpecificAvroSerde<YearlyMovieFigures> movieFiguresSerde(final Properties envProps) {
  final SpecificAvroSerde<YearlyMovieFigures> serde = new SpecificAvroSerde<>();
  serde.configure(Collections.singletonMap(
      AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
      envProps.getProperty("schema.registry.url")), false);
  return serde;
}