Java Code Examples for org.apache.kafka.streams.state.Stores#inMemoryKeyValueStore()
The following examples show how to use org.apache.kafka.streams.state.Stores#inMemoryKeyValueStore().
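All of the examples share the same basic pattern: Stores.inMemoryKeyValueStore(name) returns a KeyValueBytesStoreSupplier for a non-persistent store, which is then either wrapped in a StoreBuilder via Stores.keyValueStoreBuilder(...) and attached to a topology, or handed to Materialized.as(...) for a DSL aggregation. The minimal sketch below illustrates the StoreBuilder variant; the class name, the store name "example-store", and the String/Long serdes are placeholders chosen for illustration, not taken from any of the projects referenced later.

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;

public class InMemoryStoreSketch {

    public static void main(String[] args) {
        // Supplier for an in-memory (non-RocksDB) key-value store; "example-store" is a placeholder name
        KeyValueBytesStoreSupplier supplier = Stores.inMemoryKeyValueStore("example-store");

        // Wrap the supplier in a StoreBuilder together with the key/value serdes;
        // the builder is what gets attached to a Topology or StreamsBuilder (see the examples below)
        StoreBuilder<KeyValueStore<String, Long>> storeBuilder =
                Stores.keyValueStoreBuilder(supplier, Serdes.String(), Serdes.Long());

        System.out.println("Configured state store: " + storeBuilder.name());
    }
}

Example 3 below uses the supplier-only form and exposes it as a Spring bean, while Examples 1, 2, and 5 attach the store to a topology or materialize an aggregation with it.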
Example 1
Source File: StockPerformanceApplication.java From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Deserializer<String> stringDeserializer = Serdes.String().deserializer();
    Serializer<String> stringSerializer = Serdes.String().serializer();
    Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
    Serializer<StockPerformance> stockPerformanceSerializer = stockPerformanceSerde.serializer();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();
    Deserializer<StockTransaction> stockTransactionDeserializer = stockTransactionSerde.deserializer();

    Topology topology = new Topology();
    String stocksStateStore = "stock-performance-store";
    double differentialThreshold = 0.02;

    // In-memory key-value store that backs the custom StockPerformanceProcessor
    KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore(stocksStateStore);
    StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder =
            Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

    topology.addSource("stocks-source", stringDeserializer, stockTransactionDeserializer, "stock-transactions")
            .addProcessor("stocks-processor", () -> new StockPerformanceProcessor(stocksStateStore, differentialThreshold), "stocks-source")
            .addStateStore(storeBuilder, "stocks-processor")
            .addSink("stocks-sink", "stock-performance", stringSerializer, stockPerformanceSerializer, "stocks-processor");

    topology.addProcessor("stocks-printer", new KStreamPrinter("StockPerformance"), "stocks-processor");

    KafkaStreams kafkaStreams = new KafkaStreams(topology, streamsConfig);
    MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
    System.out.println("Stock Analysis App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(70000);
    System.out.println("Shutting down the Stock Analysis App now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
Example 2
Source File: KafkaStreamsPipeline.java From quarkus with Apache License 2.0
@Produces
public Topology buildTopology() {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<Category> categorySerde = new ObjectMapperSerde<>(Category.class);
    ObjectMapperSerde<Customer> customerSerde = new ObjectMapperSerde<>(Customer.class);
    ObjectMapperSerde<EnrichedCustomer> enrichedCustomerSerde = new ObjectMapperSerde<>(EnrichedCustomer.class);

    KTable<Integer, Category> categories = builder.table(
            "streams-test-categories",
            Consumed.with(Serdes.Integer(), categorySerde));

    KStream<Integer, EnrichedCustomer> customers = builder
            .stream("streams-test-customers", Consumed.with(Serdes.Integer(), customerSerde))
            .selectKey((id, customer) -> customer.category)
            .join(
                    categories,
                    (customer, category) -> new EnrichedCustomer(customer.id, customer.name, category),
                    Joined.with(Serdes.Integer(), customerSerde, categorySerde));

    // Materialize the per-key count in an in-memory key-value store named "countstore"
    KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore("countstore");
    customers.groupByKey()
            .count(Materialized.<Integer, Long>as(storeSupplier));

    customers.selectKey((categoryId, customer) -> customer.id)
            .to("streams-test-customers-processed", Produced.with(Serdes.Integer(), enrichedCustomerSerde));

    return builder.build();
}
Example 3
Source File: KafkaStreamsInventoryCountApplication.java From spring-cloud-stream-samples with Apache License 2.0
@Bean
public KeyValueBytesStoreSupplier storeSupplier() {
    return Stores.inMemoryKeyValueStore(STORE_NAME);
}
Example 4
Source File: TopolologyTestDriverKafkaStreamsInventoryCountTests.java From spring-cloud-stream-samples with Apache License 2.0
@BeforeEach
void setup() {
    configureDeserializer(countEventSerde.deserializer(), ProductKey.class, InventoryCountEvent.class, false);
    configureDeserializer(keySerde.deserializer(), ProductKey.class, null, true);

    final StreamsBuilder builder = new StreamsBuilder();

    KStream<ProductKey, InventoryUpdateEvent> input =
            builder.stream(INPUT_TOPIC, Consumed.with(keySerde, updateEventSerde));

    // Back the aggregator with an in-memory key-value store named STORE_NAME
    KafkaStreamsInventoryAggregator inventoryAggregator =
            new KafkaStreamsInventoryAggregator(Stores.inMemoryKeyValueStore(STORE_NAME));

    KStream<ProductKey, InventoryCountEvent> output = inventoryAggregator.process().apply(input);
    output.to(OUTPUT_TOPIC);

    Topology topology = builder.build();
    testDriver = new TopologyTestDriver(topology, getStreamsConfiguration());
    logger.debug(topology.describe().toString());

    setEventGenerator(new TopologyTestDriverUpdateEventGenerator(testDriver, INPUT_TOPIC,
            keySerde.serializer(), updateEventSerde.serializer()));
}
Example 5
Source File: ZMartKafkaStreamsAddStateApp.java From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
    Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
    Serde<String> stringSerde = Serdes.String();

    StreamsBuilder builder = new StreamsBuilder();

    KStream<String, Purchase> purchaseKStream = builder
            .stream("transactions", Consumed.with(stringSerde, purchaseSerde))
            .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    KStream<String, PurchasePattern> patternKStream =
            purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

    patternKStream.print(Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
    patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));

    // adding State to processor
    String rewardsStateStoreName = "rewardsPointsStore";
    RewardsStreamPartitioner streamPartitioner = new RewardsStreamPartitioner();

    KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore(rewardsStateStoreName);
    StoreBuilder<KeyValueStore<String, Integer>> storeBuilder =
            Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), Serdes.Integer());

    builder.addStateStore(storeBuilder);

    KStream<String, Purchase> transByCustomerStream = purchaseKStream
            .through("customer_transactions", Produced.with(stringSerde, purchaseSerde, streamPartitioner));

    KStream<String, RewardAccumulator> statefulRewardAccumulator = transByCustomerStream
            .transformValues(() -> new PurchaseRewardTransformer(rewardsStateStoreName), rewardsStateStoreName);

    statefulRewardAccumulator.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
    statefulRewardAccumulator.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));

    // used only to produce data for this application, not typical usage
    MockDataProducer.producePurchaseData();

    LOG.info("Starting Adding State Example");
    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    LOG.info("ZMart Adding State Application Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(65000);
    LOG.info("Shutting down the Add State Application now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}