org.apache.kafka.streams.KeyValue Java Examples
The following examples show how to use
org.apache.kafka.streams.KeyValue.
The original project and source file for each example are noted in the header above it.
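As a quick orientation before the project examples, the sketch below shows the two ways KeyValue instances are typically created (the constructor and the static KeyValue.pair factory) and how the public key and value fields are read, plus the common pattern of returning a new KeyValue from map() to re-key a stream. The topic names and serdes are placeholders for illustration, not taken from any project above.

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Produced;

public class KeyValueBasics {

    public static void main(String[] args) {
        // KeyValue is an immutable pair; key and value are public fields.
        KeyValue<String, Long> pair = KeyValue.pair("word", 1L);      // static factory
        KeyValue<String, Long> same = new KeyValue<>("word", 1L);     // constructor
        System.out.println(pair.key + " -> " + pair.value + ", equal: " + pair.equals(same));

        // Typical topology use: return a new KeyValue from map() to re-key a stream.
        StreamsBuilder builder = new StreamsBuilder();
        KStream<String, String> input = builder.stream("input-topic");   // placeholder topic
        input.map((key, value) -> KeyValue.pair(value, value))           // the value becomes the new key
             .to("output-topic", Produced.with(Serdes.String(), Serdes.String())); // placeholder topic
    }
}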
Example #1
Source File: UserClicksPerMinute.java From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, ClickEvent> clickEvents = builder.stream(this.inputTopic);

    final KTable<Windowed<Integer>, Long> counts = clickEvents
            .groupByKey()
            .windowedBy(TimeWindows.of(Duration.ofMinutes(1)))
            .count();

    counts.toStream()
            .map((key, value) -> KeyValue.pair(
                    key.key(),
                    new ClickOutput(key.key(), value, key.window().start())))
            .to(this.outputTopic, Produced.with(Serdes.Integer(), new JsonSerde<>(ClickOutput.class)));

    return builder.build();
}
Example #2
Source File: KeyValueStoreGrpcImplLocalDispatcher.java From apicurio-registry with Apache License 2.0
@SuppressWarnings({"rawtypes", "unchecked"})
@Override
public void filter(FilterReq request, StreamObserver<io.apicurio.registry.streams.distore.proto.KeyValue> responseObserver) {
    boolean ok = false;
    try (
        Stream stream = keyValueStore(request.getStoreName()).filter(request.getFilter(), request.getOver())
    ) {
        drainToKeyValue(request.getStoreName(), stream, responseObserver);
        ok = true;
    } catch (Throwable e) {
        responseObserver.onError(e);
    }
    if (ok) {
        responseObserver.onCompleted();
    }
}
Example #3
Source File: KafkaStreamsBinderMultipleInputTopicsTest.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener
@SendTo("output")
public KStream<?, WordCount> process(
        @Input("input") KStream<Object, String> input) {

    input.map((k, v) -> {
        System.out.println(k);
        System.out.println(v);
        return new KeyValue<>(k, v);
    });
    return input
            .flatMapValues(
                    value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .map((key, value) -> new KeyValue<>(value, value))
            .groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
            .count(Materialized.as("WordCounts")).toStream()
            .map((key, value) -> new KeyValue<>(null, new WordCount(key, value)));
}
Example #4
Source File: MetricsResource.java From kafka-streams-example with Apache License 2.0
/**
 * Query local state store to extract metrics
 *
 * @return local Metrics
 */
private Metrics getLocalMetrics() {
    HostInfo thisInstance = GlobalAppState.getInstance().getHostPortInfo();
    KafkaStreams ks = GlobalAppState.getInstance().getKafkaStreams();

    String source = thisInstance.host() + ":" + thisInstance.port();
    Metrics localMetrics = new Metrics();

    ReadOnlyKeyValueStore<String, Double> averageStore = ks
            .store(storeName,
                    QueryableStoreTypes.<String, Double>keyValueStore());

    LOGGER.log(Level.INFO, "Entries in store {0}", averageStore.approximateNumEntries());
    KeyValueIterator<String, Double> storeIterator = averageStore.all();

    while (storeIterator.hasNext()) {
        KeyValue<String, Double> kv = storeIterator.next();
        localMetrics.add(source, kv.key, String.valueOf(kv.value));
    }
    LOGGER.log(Level.INFO, "Local store state {0}", localMetrics);
    return localMetrics;
}
Example #5
Source File: StreamToTableJoinIntegrationTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener
@SendTo("output")
public KStream<String, Long> process(
        @Input("input") KStream<String, Long> userClicksStream,
        @Input("input-x") KTable<String, String> userRegionsTable) {

    return userClicksStream
            .leftJoin(userRegionsTable,
                    (clicks, region) -> new RegionWithClicks(
                            region == null ? "UNKNOWN" : region, clicks),
                    Joined.with(Serdes.String(), Serdes.Long(), null))
            .map((user, regionWithClicks) -> new KeyValue<>(
                    regionWithClicks.getRegion(), regionWithClicks.getClicks()))
            .groupByKey(Serialized.with(Serdes.String(), Serdes.Long()))
            .reduce(Long::sum)
            .toStream();
}
Example #6
Source File: StockPerformanceMultipleValuesTransformer.java From kafka-streams-in-action with Apache License 2.0
@Override
@SuppressWarnings("deprecation")
public KeyValue<String, List<KeyValue<String, StockPerformance>>> punctuate(long timestamp) {
    List<KeyValue<String, StockPerformance>> stockPerformanceList = new ArrayList<>();
    KeyValueIterator<String, StockPerformance> performanceIterator = keyValueStore.all();

    while (performanceIterator.hasNext()) {
        KeyValue<String, StockPerformance> keyValue = performanceIterator.next();
        StockPerformance stockPerformance = keyValue.value;

        if (stockPerformance != null) {
            if (stockPerformance.priceDifferential() >= differentialThreshold ||
                    stockPerformance.volumeDifferential() >= differentialThreshold) {
                stockPerformanceList.add(keyValue);
            }
        }
    }

    return stockPerformanceList.isEmpty() ? null : KeyValue.pair(null, stockPerformanceList);
}
Example #7
Source File: Kafka_Streams_MachineLearning_H2O_GBM_ExampleTest.java From kafka-streams-machine-learning-examples with Apache License 2.0
/**
 * Test based on
 * Kafka_Streams_TensorFlow_Image_Recognition_Example_IntegrationTest
 */
@Test
public void testList() {
    // Flight data (two flights) --> We want to predict whether each will be delayed or not
    List<String> inputValues = Arrays.asList(
            "1987,10,14,3,741,730,912,849,PS,1451,NA,91,79,NA,23,11,SAN,SFO,447,NA,NA,0,NA,0,NA,NA,NA,NA,NA,YES,YES",
            "1999,10,14,3,741,730,912,849,PS,1451,NA,91,79,NA,23,11,SAN,SFO,447,NA,NA,0,NA,0,NA,NA,NA,NA,NA,YES,YES");
    List<KeyValue<String, String>> records = inputValues.stream()
            .map(v -> new KeyValue<String, String>(null, v)).collect(Collectors.toList());

    testDriver.pipeInput(recordFactory.create(Kafka_Streams_MachineLearning_H2O_GBM_Example.INPUT_TOPIC,
            records, 1L, 100L));

    // One prediction per input record, read in order
    assertThat(getOutput()).isEqualTo("Prediction: Is Airline delayed? => YES");
    assertThat(getOutput()).isEqualTo("Prediction: Is Airline delayed? => NO");
}
Example #8
Source File: StreamUtils.java From kafka-graphs with Apache License 2.0
public static <K, V> KTable<K, V> tableFromCollection(
    StreamsBuilder builder, Properties props,
    String topic, int numPartitions, short replicationFactor,
    Serde<K> keySerde, Serde<V> valueSerde,
    Collection<KeyValue<K, V>> values) {

    ClientUtils.createTopic(topic, numPartitions, replicationFactor, props);
    try (Producer<K, V> producer = new KafkaProducer<>(props, keySerde.serializer(), valueSerde.serializer())) {
        for (KeyValue<K, V> value : values) {
            ProducerRecord<K, V> producerRecord = new ProducerRecord<>(topic, value.key, value.value);
            producer.send(producerRecord);
        }
        producer.flush();
    }
    return builder.table(topic, Consumed.with(keySerde, valueSerde), Materialized.with(keySerde, valueSerde));
}
Example #9
Source File: AggregatingCount.java From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps,
                              final SpecificAvroSerde<TicketSale> ticketSaleSerde) {
    final StreamsBuilder builder = new StreamsBuilder();

    final String inputTopic = envProps.getProperty("input.topic.name");
    final String outputTopic = envProps.getProperty("output.topic.name");

    builder.stream(inputTopic, Consumed.with(Serdes.String(), ticketSaleSerde))
            // Set key to title and value to ticket value
            .map((k, v) -> new KeyValue<>((String) v.getTitle(), (Integer) v.getTicketTotalValue()))
            // Group by title
            .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
            // Apply COUNT method
            .count()
            // Write to stream specified by outputTopic
            .toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

    return builder.build();
}
Example #10
Source File: WordCountProcessorApplicationTests.java From spring-cloud-stream-samples with Apache License 2.0
/**
 * Test Word count of sentence list.
 */
@Test
public void shouldCountWords() {
    final List<String> inputLines = Arrays.asList(
            "Kafka Streams Examples",
            "Spring Cloud Stream Sample",
            "Using Kafka Streams Test Utils"
    );
    final List<KeyValue<String, String>> inputRecords = inputLines.stream()
            .map(v -> new KeyValue<String, String>(null, v)).collect(Collectors.toList());

    final Map<String, Long> expectedWordCounts = new HashMap<>();
    expectedWordCounts.put("spring", 1L);
    expectedWordCounts.put("cloud", 1L);
    expectedWordCounts.put("examples", 1L);
    expectedWordCounts.put("sample", 1L);
    expectedWordCounts.put("streams", 2L);
    expectedWordCounts.put("stream", 1L);
    expectedWordCounts.put("test", 1L);
    expectedWordCounts.put("utils", 1L);
    expectedWordCounts.put("kafka", 2L);
    expectedWordCounts.put("using", 1L);

    testDriver.pipeInput(recordFactory.create(INPUT_TOPIC, inputRecords, 1L, 1000L)); // All feed in same 30s time window

    final Map<String, Long> actualWordCounts = getOutputList();

    assertThat(actualWordCounts).containsAllEntriesOf(expectedWordCounts).hasSameSizeAs(expectedWordCounts);
}
Example #11
Source File: ReadOnlyKeyValueStoreGrpcClient.java From apicurio-registry with Apache License 2.0
@Override
public KeyValueIterator<K, V> range(K from, K to) {
    ByteString fromBytes = ByteString.copyFrom(keyValueSerde.serializeKey(from));
    ByteString toBytes = ByteString.copyFrom(keyValueSerde.serializeKey(to));
    StreamObserverSpliterator<io.apicurio.registry.streams.distore.proto.KeyValue> observer = new StreamObserverSpliterator<>();
    stub.range(
        KeyFromKeyToReq
            .newBuilder()
            .setKeyFrom(fromBytes)
            .setKeyTo(toBytes)
            .setStoreName(storeName)
            .build(),
        observer
    );
    return keyValueIterator(observer.stream());
}
Example #12
Source File: EndToEndIntegrationTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldSelectAllFromUsers() throws Exception {
    final QueuedQueryMetadata queryMetadata = executeQuery(
        "SELECT * from %s;", userTable);

    BlockingQueue<KeyValue<String, GenericRow>> rowQueue = queryMetadata.getRowQueue();

    Set<String> actualUsers = new HashSet<>();
    Set<String> expectedUsers = Utils.mkSet("USER_0", "USER_1", "USER_2", "USER_3", "USER_4");
    while (actualUsers.size() < expectedUsers.size()) {
        KeyValue<String, GenericRow> nextRow = rowQueue.poll();
        if (nextRow != null) {
            List<Object> columns = nextRow.value.getColumns();
            assertEquals(6, columns.size());
            actualUsers.add((String) columns.get(1));
        }
    }
    assertEquals(expectedUsers, actualUsers);
}
Example #13
Source File: CogroupingPunctuator.java From kafka-streams-in-action with Apache License 2.0
@Override
public void punctuate(long timestamp) {
    KeyValueIterator<String, Tuple<List<ClickEvent>, List<StockTransaction>>> iterator = tupleStore.all();
    while (iterator.hasNext()) {
        KeyValue<String, Tuple<List<ClickEvent>, List<StockTransaction>>> cogrouped = iterator.next();
        // if either list contains values forward results
        if (cogrouped.value != null && (!cogrouped.value._1.isEmpty() || !cogrouped.value._2.isEmpty())) {
            List<ClickEvent> clickEvents = new ArrayList<>(cogrouped.value._1);
            List<StockTransaction> stockTransactions = new ArrayList<>(cogrouped.value._2);

            context.forward(cogrouped.key, Tuple.of(clickEvents, stockTransactions));
            // empty out the current cogrouped results
            cogrouped.value._1.clear();
            cogrouped.value._2.clear();
            tupleStore.put(cogrouped.key, cogrouped.value);
        }
    }
    iterator.close();
}
Example #14
Source File: CogroupingMethodHandleProcessor.java From kafka-streams-in-action with Apache License 2.0
public void cogroup(long timestamp) {
    KeyValueIterator<String, Tuple<List<ClickEvent>, List<StockTransaction>>> iterator = tupleStore.all();
    while (iterator.hasNext()) {
        KeyValue<String, Tuple<List<ClickEvent>, List<StockTransaction>>> cogrouping = iterator.next();
        if (cogrouping.value != null && (!cogrouping.value._1.isEmpty() || !cogrouping.value._2.isEmpty())) {
            List<ClickEvent> clickEvents = new ArrayList<>(cogrouping.value._1);
            List<StockTransaction> stockTransactions = new ArrayList<>(cogrouping.value._2);

            context().forward(cogrouping.key, Tuple.of(clickEvents, stockTransactions));
            cogrouping.value._1.clear();
            cogrouping.value._2.clear();
            tupleStore.put(cogrouping.key, cogrouping.value);
        }
    }
    iterator.close();
}
Example #15
Source File: SpannerTest.java From kafka-graphs with Apache License 2.0
static List<KeyValue<Edge<Long>, Void>> getEdges() {
    List<KeyValue<Edge<Long>, Void>> edges = new ArrayList<>();
    edges.add(new KeyValue<>(new Edge<>(1L, 4L), null));
    edges.add(new KeyValue<>(new Edge<>(4L, 7L), null));
    edges.add(new KeyValue<>(new Edge<>(7L, 8L), null));
    edges.add(new KeyValue<>(new Edge<>(4L, 8L), null));
    edges.add(new KeyValue<>(new Edge<>(4L, 5L), null));
    edges.add(new KeyValue<>(new Edge<>(5L, 6L), null));
    edges.add(new KeyValue<>(new Edge<>(2L, 3L), null));
    edges.add(new KeyValue<>(new Edge<>(3L, 4L), null));
    edges.add(new KeyValue<>(new Edge<>(3L, 6L), null));
    edges.add(new KeyValue<>(new Edge<>(8L, 9L), null));
    edges.add(new KeyValue<>(new Edge<>(6L, 8L), null));
    edges.add(new KeyValue<>(new Edge<>(5L, 9L), null));
    return edges;
}
Example #16
Source File: SessionWindowQueryVerticle.java From kiqr with Apache License 2.0
@Override
public void start() throws Exception {
    execute(Config.SESSION_QUERY_ADDRESS_PREFIX, (abstractQuery, keySerde, valueSerde) -> {
        KeyBasedQuery query = (KeyBasedQuery) abstractQuery;
        ReadOnlySessionStore<Object, Object> store = streams.store(query.getStoreName(), QueryableStoreTypes.sessionStore());
        try (KeyValueIterator<Windowed<Object>, Object> result = store.fetch(deserializeObject(keySerde, query.getKey()))) {
            if (result.hasNext()) {
                List<Window> results = new ArrayList<>();
                while (result.hasNext()) {
                    KeyValue<Windowed<Object>, Object> windowedEntry = result.next();
                    results.add(new Window(windowedEntry.key.window().start(),
                            windowedEntry.key.window().end(),
                            base64Encode(valueSerde, windowedEntry.value)));
                }
                return new SessionQueryResponse(results);
            } else {
                return new SessionQueryResponse(Collections.emptyList());
            }
        }
    });
}
Example #17
Source File: StockPerformancePunctuator.java From kafka-streams-in-action with Apache License 2.0
@Override
public void punctuate(long timestamp) {
    KeyValueIterator<String, StockPerformance> performanceIterator = keyValueStore.all();

    while (performanceIterator.hasNext()) {
        KeyValue<String, StockPerformance> keyValue = performanceIterator.next();
        String key = keyValue.key;
        StockPerformance stockPerformance = keyValue.value;

        if (stockPerformance != null) {
            if (stockPerformance.priceDifferential() >= differentialThreshold ||
                    stockPerformance.volumeDifferential() >= differentialThreshold) {
                context.forward(key, stockPerformance);
            }
        }
    }
}
Example #18
Source File: WordCountMultipleBranchesIntegrationTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener("input")
@SendTo({ "output1", "output2", "output3" })
@SuppressWarnings("unchecked")
public KStream<?, WordCount>[] process(KStream<Object, String> input) {

    Predicate<Object, WordCount> isEnglish = (k, v) -> v.word.equals("english");
    Predicate<Object, WordCount> isFrench = (k, v) -> v.word.equals("french");
    Predicate<Object, WordCount> isSpanish = (k, v) -> v.word.equals("spanish");

    return input
            .flatMapValues(
                    value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .groupBy((key, value) -> value).windowedBy(TimeWindows.of(Duration.ofSeconds(5)))
            .count(Materialized.as("WordCounts-multi")).toStream()
            .map((key, value) -> new KeyValue<>(null,
                    new WordCount(key.key(), value, new Date(key.window().start()),
                            new Date(key.window().end()))))
            .branch(isEnglish, isFrench, isSpanish);
}
Example #19
Source File: KeyValueStoreGrpcImplLocalDispatcher.java From apicurio-registry with Apache License 2.0
@Override
public void all(VoidReq request, StreamObserver<io.apicurio.registry.streams.distore.proto.KeyValue> responseObserver) {
    boolean ok = false;
    try (
        KeyValueIterator<?, ?> iter = keyValueStore(request.getStoreName()).all()
    ) {
        drainToKeyValue(request.getStoreName(), iter, responseObserver);
        ok = true;
    } catch (Throwable e) {
        responseObserver.onError(e);
    }
    if (ok) {
        responseObserver.onCompleted();
    }
}
Example #20
Source File: StreamUtils.java From kafka-graphs with Apache License 2.0
public static <K, V> KTable<K, V> tableFromCollection(
    StreamsBuilder builder, Properties props,
    Serde<K> keySerde, Serde<V> valueSerde,
    Collection<KeyValue<K, V>> values) {

    return tableFromCollection(builder, props, "temp-" + UUID.randomUUID(), 50, (short) 1,
            keySerde, valueSerde, values);
}
Example #21
Source File: ConnectedComponentsTest.java From kafka-graphs with Apache License 2.0
static List<KeyValue<Edge<Long>, Void>> getEdges() {
    List<KeyValue<Edge<Long>, Void>> edges = new ArrayList<>();
    edges.add(new KeyValue<>(new Edge<>(1L, 2L), null));
    edges.add(new KeyValue<>(new Edge<>(1L, 3L), null));
    edges.add(new KeyValue<>(new Edge<>(2L, 3L), null));
    edges.add(new KeyValue<>(new Edge<>(1L, 5L), null));
    edges.add(new KeyValue<>(new Edge<>(6L, 7L), null));
    edges.add(new KeyValue<>(new Edge<>(8L, 9L), null));
    return edges;
}
Example #22
Source File: KafkaStreamsWordCountApplication.java From spring-cloud-stream-samples with Apache License 2.0
@Bean
public Function<KStream<Object, String>, KStream<?, WordCount>> process() {

    return input -> input
            .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .map((key, value) -> new KeyValue<>(value, value))
            .groupByKey(Grouped.with(Serdes.String(), Serdes.String()))
            .windowedBy(TimeWindows.of(Duration.ofSeconds(20)))
            .count(Materialized.as("WordCounts-1"))
            .toStream()
            .map((key, value) -> new KeyValue<>(null,
                    new WordCount(key.key(), value, new Date(key.window().start()),
                            new Date(key.window().end()))));
}
Example #23
Source File: InteractiveQueryServer.java From kafka-streams-in-action with Apache License 2.0
private String fetchFromSessionStore(Map<String, String> params) {
    String store = params.get(STORE_PARAM);
    String key = params.get(KEY_PARAM);

    HostInfo storeHostInfo = getHostInfo(store, key);

    if (storeHostInfo.host().equals("unknown")) {
        return STORES_NOT_ACCESSIBLE;
    }

    if (dataNotLocal(storeHostInfo)) {
        LOG.info("{} located in state store on another instance !!!!", key);
        return fetchRemote(storeHostInfo, "session", params);
    }

    ReadOnlySessionStore<String, CustomerTransactions> readOnlySessionStore =
            kafkaStreams.store(store, QueryableStoreTypes.sessionStore());

    List<String> results = new ArrayList<>();
    List<KeyValue<String, List<String>>> sessionResults = new ArrayList<>();
    try (KeyValueIterator<Windowed<String>, CustomerTransactions> iterator = readOnlySessionStore.fetch(key)) {
        while (iterator.hasNext()) {
            KeyValue<Windowed<String>, CustomerTransactions> windowed = iterator.next();
            CustomerTransactions transactions = windowed.value;
            LocalDateTime startSession = getLocalDateTime(windowed.key.window().start());
            LocalDateTime endSession = getLocalDateTime(windowed.key.window().end());
            transactions.setSessionInfo(String.format("Session Window{start=%s, end=%s}",
                    startSession.toLocalTime().toString(), endSession.toLocalTime().toString()));
            results.add(transactions.toString());
        }
        sessionResults.add(new KeyValue<>(key, results));
    }

    return gson.toJson(sessionResults);
}
Example #24
Source File: KafkastreamsBinderPojoInputStringOutputIntegrationTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener("input")
@SendTo("output")
public KStream<Integer, String> process(KStream<Object, Product> input) {

    return input.filter((key, product) -> product.getId() == 123)
            .map((key, value) -> new KeyValue<>(value, value))
            .groupByKey(Serialized.with(new JsonSerde<>(Product.class),
                    new JsonSerde<>(Product.class)))
            .windowedBy(TimeWindows.of(5000))
            .count(Materialized.as("id-count-store")).toStream()
            .map((key, value) -> new KeyValue<>(key.key().id,
                    "Count for product with ID 123: " + value));
}
Example #25
Source File: DeserializationErrorHandlerByKafkaTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@StreamListener("input")
@SendTo("output")
public KStream<?, String> process(KStream<Object, String> input) {

    return input
            .flatMapValues(
                    value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .map((key, value) -> new KeyValue<>(value, value))
            .groupByKey(Serialized.with(Serdes.String(), Serdes.String()))
            .windowedBy(TimeWindows.of(5000)).count(Materialized.as("foo-WordCounts-x"))
            .toStream().map((key, value) -> new KeyValue<>(null,
                    "Count for " + key.key() + " : " + value));
}
Example #26
Source File: KafkaStreamsMessageConversionDelegate.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private void convertAndSetMessage(Object o, Class<?> valueClass,
        MessageConverter messageConverter, Message<?> msg) {
    Object result = valueClass.isAssignableFrom(msg.getPayload().getClass())
            ? msg.getPayload()
            : messageConverter.fromMessage(msg, valueClass);
    Assert.notNull(result, "Failed to convert message " + msg);
    keyValueThreadLocal.set(new KeyValue<>(o, result));
}
Example #27
Source File: Kafka_Streams_TensorFlow_Image_Recognition_ExampleTest.java From kafka-streams-machine-learning-examples with Apache License 2.0
/**
 * Test based on Kafka_Streams_TensorFlow_Image_Recognition_Example_IntegrationTest
 */
@Test
public void testList() {
    // Images: Airliner, 'unknown' (devil), Butterfly
    List<String> inputValues = Arrays.asList(
            "src/main/resources/TensorFlow_Images/trained_airplane_2.jpg",
            "src/main/resources/TensorFlow_Images/devil.png",
            "src/main/resources/TensorFlow_Images/trained_butterfly.jpg");
    List<KeyValue<String, String>> records = inputValues.stream()
            .map(v -> new KeyValue<String, String>(null, v)).collect(Collectors.toList());

    testDriver.pipeInput(recordFactory.create(Kafka_Streams_TensorFlow_Image_Recognition_Example.imageInputTopic,
            records, 1L, 100L));

    // One classification result per input image, read in order
    assertThat(getOutput()).contains("What is the content of this picture? => airliner");
    assertThat(getOutput()).doesNotContain("What is the content of this picture? => airliner");
    assertThat(getOutput()).contains("What is the content of this picture? => cabbage butterfly");
}
Example #28
Source File: StreamUtils.java From kafka-graphs with Apache License 2.0
public static <K, V> List<KeyValue<K, V>> listFromStore(KafkaStreams streams, String storeName) {
    final ReadOnlyKeyValueStore<K, V> store = streams.store(
        storeName, QueryableStoreTypes.keyValueStore());
    try (final KeyValueIterator<K, V> all = store.all()) {
        List<KeyValue<K, V>> result = new ArrayList<>();
        while (all.hasNext()) {
            result.add(all.next());
        }
        return result;
    }
}
Example #29
Source File: VehicleStatusCountProcessor.java From microservice-patterns with Apache License 2.0
@Bean
public KStream<String, Long> statusCountStreamProcessor(StreamsBuilder streamsBuilder) {
    KStream<Integer, VehicleLocation> stream = streamsBuilder.stream("gpslocation",   // Read from topic
            Consumed.with(Serdes.Integer(), new JsonSerde<>(VehicleLocation.class))); // using Integer and JSON serde
    return stream.map((k, v) -> {
                // transform the key to Online/Offline based on status
                String online = v.isOnline() ? "Online" : "Offline";
                return new KeyValue<>(online, v);
            })
            .groupByKey(Serialized.with(           // Group by the newly mapped key in previous step
                    Serdes.String(), new JsonSerde<>(VehicleLocation.class)))
            .count(Materialized.as("statusCount")) // materialize this value to state store
            .toStream();
}
Example #30
Source File: KeyValueJoinStateStore.java From rya with Apache License 2.0
private static void printStateStoreKeyValueIterator(final KeyValueIterator<String, VisibilityBindingSet> rangeIt) {
    log.info("----------------");
    while (rangeIt.hasNext()) {
        final KeyValue<String, VisibilityBindingSet> keyValue = rangeIt.next();
        log.info(keyValue.key + " :::: " + keyValue.value);
    }
    log.info("----------------\n\n");
    if (rangeIt != null) {
        rangeIt.close();
    }
}