Java Code Examples for org.apache.kafka.streams.kstream.KStream#filter()
The following examples show how to use
org.apache.kafka.streams.kstream.KStream#filter().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: IPFraudKafkaStreamApp.java From Building-Data-Streaming-Applications-with-Apache-Kafka with MIT License | 6 votes |
/**
 * Entry point: builds and starts a Kafka Streams topology that reads IP records
 * from the configured input topic, keeps only the records flagged as fraudulent
 * by {@code isFraud}, and forwards them to the configured output topic.
 *
 * @param args command-line arguments (unused)
 * @throws Exception if reading the topic configuration fails
 */
public static void main(String[] args) throws Exception {
    // Streams client configuration: application id, endpoints, default serdes.
    Properties config = new Properties();
    config.put(StreamsConfig.APPLICATION_ID_CONFIG, "IP-Fraud-Detection");
    config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    config.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    config.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    config.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    Serde<String> recordSerde = Serdes.String();

    // Topology: input topic -> filter(isFraud) -> output topic.
    KStreamBuilder topology = new KStreamBuilder();
    KStream<String, String> ipStream =
            topology.stream(recordSerde, recordSerde, propertyReader.getPropertyValue("topic"));
    KStream<String, String> suspiciousStream = ipStream.filter((key, value) -> isFraud(value));
    suspiciousStream.to(propertyReader.getPropertyValue("output_topic"));

    KafkaStreams streams = new KafkaStreams(topology, config);
    streams.start();

    // Close the streams client cleanly when the JVM shuts down.
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
Example 2
Source File: KafkaStreamsBinderHealthIndicatorTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
/**
 * Pass-through processor used by the binder health-indicator test: records with
 * product id 123 flow through unchanged, while any other id throws and fails
 * the stream thread.
 *
 * @param input stream of product records keyed by an arbitrary object
 * @return the filtered (effectively pass-through) stream
 */
@StreamListener("input")
@SendTo("output")
public KStream<Object, Product> process(KStream<Object, Product> input) {
    return input.filter((recordKey, record) -> {
        // Anything other than the expected id is treated as a poison pill.
        if (record.getId() != 123) {
            throw new IllegalArgumentException();
        }
        return true;
    });
}
Example 3
Source File: KafkaStreamsBinderHealthIndicatorTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
/**
 * Health-indicator test processor: forwards every record whose product id is
 * 123 and deliberately crashes the stream on any other id.
 *
 * @param input incoming product stream
 * @return the same stream, records with id 123 only
 */
@StreamListener("input")
@SendTo("output")
public KStream<Object, Product> process(KStream<Object, Product> input) {
    return input.filter((k, product) -> {
        boolean expectedId = product.getId() == 123;
        if (!expectedId) {
            // Unexpected id: fail loudly so the health indicator flips.
            throw new IllegalArgumentException();
        }
        return true;
    });
}
Example 4
Source File: KafkaStreamsBinderHealthIndicatorTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0 | 5 votes |
/**
 * Second health-indicator test processor (bound to input2/output2): passes
 * records with product id 123 through and throws on any other id.
 *
 * @param input incoming product stream
 * @return the same stream, records with id 123 only
 */
@StreamListener("input2")
@SendTo("output2")
public KStream<Object, Product> process2(KStream<Object, Product> input) {
    return input.filter((streamKey, product) -> {
        if (product.getId() == 123) {
            return true;
        }
        // Any other id is a deliberate failure for the test.
        throw new IllegalArgumentException();
    });
}
Example 5
Source File: NamingChangelogAndRepartitionTopics.java From kafka-tutorials with Apache License 2.0 | 4 votes |
/**
 * Builds a Streams topology that re-keys input records, optionally filters them,
 * counts records per key, windows-joins the input with the running counts, and
 * writes the join result and the counts to output topics.
 *
 * The "add.names" flag toggles explicit naming of repartition topics and state
 * stores (the point of this tutorial example); "add.filter" toggles an extra
 * filter node, which shifts the auto-generated operator names downstream.
 *
 * @param envProps configuration holding the topic names and the
 *                 add.filter / add.names flags
 * @return the built Topology
 */
public Topology buildTopology(Properties envProps) {
    final StreamsBuilder builder = new StreamsBuilder();

    final String inputTopic = envProps.getProperty("input.topic.name");
    final String outputTopic = envProps.getProperty("output.topic.name");
    final String joinTopic = envProps.getProperty("join.topic.name");

    final Serde<Long> longSerde = Serdes.Long();
    final Serde<String> stringSerde = Serdes.String();

    final boolean addFilter = Boolean.parseBoolean(envProps.getProperty("add.filter"));
    final boolean addNames = Boolean.parseBoolean(envProps.getProperty("add.names"));

    // Re-key each record by the numeric first character of its value; changing
    // the key forces a repartition before the downstream aggregation and join.
    // NOTE(review): assumes every value starts with a digit — Long.parseLong
    // throws otherwise; confirm against the producer.
    KStream<Long, String> inputStream = builder.stream(inputTopic, Consumed.with(longSerde, stringSerde))
                                               .selectKey((k, v) -> Long.parseLong(v.substring(0, 1)));
    // Optional filter: adding/removing this node renumbers all auto-generated
    // downstream operator names when explicit names are not used.
    if (addFilter) {
      inputStream = inputStream.filter((k, v) -> k != 100L);
    }

    final KStream<Long, String> joinedStream;
    final KStream<Long, Long> countStream;

    if (!addNames) {
      // Anonymous variant: repartition topics and state stores get generated names.
      countStream = inputStream.groupByKey(Grouped.with(longSerde, stringSerde))
                               .count()
                               .toStream();

      joinedStream = inputStream.join(countStream, (v1, v2) -> v1 + v2.toString(),
                                      JoinWindows.of(Duration.ofMillis(100)),
                                      StreamJoined.with(longSerde, stringSerde, longSerde));
    } else {
      // Named variant: explicit names keep internal topic/store names stable
      // even when the upstream topology changes.
      countStream = inputStream.groupByKey(Grouped.with("count", longSerde, stringSerde))
                               .count(Materialized.as("the-counting-store"))
                               .toStream();

      joinedStream = inputStream.join(countStream, (v1, v2) -> v1 + v2.toString(),
                                      JoinWindows.of(Duration.ofMillis(100)),
                                      StreamJoined.with(longSerde, stringSerde, longSerde)
                                                  .withName("join").withStoreName("the-join-store"));
    }

    joinedStream.to(joinTopic, Produced.with(longSerde, stringSerde));
    // Counts are emitted as string key/value pairs on the output topic.
    countStream.map((k,v) -> KeyValue.pair(k.toString(), v.toString())).to(outputTopic, Produced.with(stringSerde, stringSerde));

    return builder.build();
}
Example 6
Source File: AlarmMessageLogger.java From phoebus with Eclipse Public License 1.0 | 4 votes |
@Override public void run() { logger.info("Starting the alarm messages stream consumer for " + topic); Properties props = new Properties(); props.putAll(PropertiesHelper.getProperties()); props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-"+topic+"-alarm-messages"); if (!props.containsKey(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG)) { props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); } final String indexDateSpanUnits = props.getProperty("date_span_units"); final Integer indexDateSpanValue = Integer.parseInt(props.getProperty("date_span_value")); try { stateIndexNameHelper = new IndexNameHelper(topic + STATE_INDEX_FORMAT, indexDateSpanUnits, indexDateSpanValue); configIndexNameHelper = new IndexNameHelper(topic + CONFIG_INDEX_FORMAT , indexDateSpanUnits, indexDateSpanValue); } catch (Exception ex) { logger.log(Level.SEVERE, "Time based index creation failed.", ex); } // Attach a message time stamp. StreamsBuilder builder = new StreamsBuilder(); KStream<String, AlarmMessage> alarms = builder.stream(topic, Consumed.with(Serdes.String(), alarmMessageSerde).withTimestampExtractor(new TimestampExtractor() { @Override public long extract(ConsumerRecord<Object, Object> record, long previousTimestamp) { return record.timestamp(); } })); alarms = alarms.filter((k, v) -> { return v != null; }); alarms = alarms.map((key, value) -> { logger.config("Processing alarm message with key : " + key != null ? key : "null" + " " + value != null ? 
value.toString() : "null"); value.setKey(key); return new KeyValue<String, AlarmMessage>(key, value); }); @SuppressWarnings("unchecked") KStream<String, AlarmMessage>[] alarmBranches = alarms.branch((k,v) -> k.startsWith("state"), (k,v) -> k.startsWith("config"), (k,v) -> false ); processAlarmStateStream(alarmBranches[0], props); processAlarmConfigurationStream(alarmBranches[1], props); final KafkaStreams streams = new KafkaStreams(builder.build(), props); final CountDownLatch latch = new CountDownLatch(1); // attach shutdown handler to catch control-c Runtime.getRuntime().addShutdownHook(new Thread("streams-"+topic+"-alarm-messages-shutdown-hook") { @Override public void run() { streams.close(10, TimeUnit.SECONDS); System.out.println("\nShutting streams Done."); latch.countDown(); } }); try { streams.start(); latch.await(); } catch (Throwable e) { System.exit(1); } System.exit(0); }
Example 7
Source File: ExclamationAdvancedKafkaStream.java From kafka-streams-ex with MIT License | 4 votes |
/** Connects the topic "console" to two topics, adds 2-4 exclamation points, * writing all messages to the "exclamated" topic and the messages with * four exclamation points to the "much-exclamated" topic. * * @param args Not used. */ public static void main(String[] args) { // Configuration stuff. Properties config = new Properties(); // For the cluster. Assumes everything is local. config.put(StreamsConfig.APPLICATION_ID_CONFIG, "exclamation-advanced-kafka-streams"); config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); config.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181"); // Serde. config.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.ByteArray().getClass().getName()); config.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); KStreamBuilder builder = new KStreamBuilder(); // Read the stream from the topic into a KStream. KStream<byte[], String> text = builder.stream("console"); // Apply the transformations. KStream<byte[], String> exclamation = text.mapValues(x -> x + getExclamations()) .mapValues(x -> x + getExclamations()); KStream<byte[], String> muchExclamation = exclamation.filter((k,v) -> v.endsWith("!!!!")); // Sink them both. exclamation.to("exclamated"); muchExclamation.to("much-exclamated"); // Build and run. KafkaStreams streams = new KafkaStreams(builder, config); streams.start(); }