Java Code Examples for org.apache.kafka.common.serialization.Serdes#String
The following examples show how to use
org.apache.kafka.common.serialization.Serdes#String.
Each example is taken from an open-source project; the source file, project, and license are noted above it.
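Before the examples, a minimal sketch of what Serdes.String() gives you may be useful: a Serde<String> that pairs a UTF-8 StringSerializer with the matching StringDeserializer. The class and topic names below are placeholders for illustration; string serialization ignores the topic name.

import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;

public class SerdesStringRoundTrip {
    public static void main(String[] args) {
        Serde<String> stringSerde = Serdes.String();

        // Serialize as Kafka would when writing to a topic; the topic name is a placeholder.
        byte[] bytes = stringSerde.serializer().serialize("some-topic", "hello, kafka");

        // Deserialize the same bytes back; Serdes.String() uses UTF-8 on both sides.
        String roundTripped = stringSerde.deserializer().deserialize("some-topic", bytes);

        System.out.println(roundTripped); // prints: hello, kafka
    }
}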
Example 1
Source File: StockPerformanceStreamsProcessorTopologyTest.java from kafka-streams-in-action with Apache License 2.0
@Test
@DisplayName("Checking State Store for Value")
public void shouldStorePerformanceObjectInStore() {
    Serde<String> stringSerde = Serdes.String();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();

    StockTransaction stockTransaction = DataGenerator.generateStockTransaction();

    topologyTestDriver.process("stock-transactions",
            stockTransaction.getSymbol(),
            stockTransaction,
            stringSerde.serializer(),
            stockTransactionSerde.serializer());

    KeyValueStore<String, StockPerformance> store = topologyTestDriver.getKeyValueStore("stock-performance-store");

    assertThat(store.get(stockTransaction.getSymbol()), notNullValue());

    StockPerformance stockPerformance = store.get(stockTransaction.getSymbol());
    assertThat(stockPerformance.getCurrentShareVolume(), equalTo(stockTransaction.getShares()));
    assertThat(stockPerformance.getCurrentPrice(), equalTo(stockTransaction.getSharePrice()));
}
Example 2
Source File: IPFraudKafkaStreamApp.java from Building-Data-Streaming-Applications-with-Apache-Kafka with MIT License
public static void main(String[] args) throws Exception {
    Properties kafkaStreamProperties = new Properties();
    kafkaStreamProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "IP-Fraud-Detection");
    kafkaStreamProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    kafkaStreamProperties.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    kafkaStreamProperties.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    kafkaStreamProperties.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    Serde<String> stringSerde = Serdes.String();

    KStreamBuilder fraudDetectionTopology = new KStreamBuilder();

    KStream<String, String> ipRecords =
            fraudDetectionTopology.stream(stringSerde, stringSerde, propertyReader.getPropertyValue("topic"));

    KStream<String, String> fraudIpRecords = ipRecords.filter((k, v) -> isFraud(v));

    fraudIpRecords.to(propertyReader.getPropertyValue("output_topic"));

    KafkaStreams streamManager = new KafkaStreams(fraudDetectionTopology, kafkaStreamProperties);
    streamManager.start();

    Runtime.getRuntime().addShutdownHook(new Thread(streamManager::close));
}
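Note that this example (like Examples 3, 6, and 17 below) targets the pre-1.0 Streams API: KStreamBuilder, ZOOKEEPER_CONNECT_CONFIG, and the KEY_SERDE_CLASS_CONFIG/VALUE_SERDE_CLASS_CONFIG keys were all removed in later releases. As a rough sketch, the same filter topology on the current StreamsBuilder API could look like the following; the topic names are placeholders standing in for the original propertyReader lookups, and isFraud is the same helper the example assumes.

Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "IP-Fraud-Detection");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

Serde<String> stringSerde = Serdes.String();
StreamsBuilder builder = new StreamsBuilder();

// Read the input topic, keep only records flagged as fraudulent, write them out.
builder.stream("ip-records", Consumed.with(stringSerde, stringSerde))    // placeholder input topic
        .filter((k, v) -> isFraud(v))
        .to("fraud-ip-records", Produced.with(stringSerde, stringSerde)); // placeholder output topic

KafkaStreams streamManager = new KafkaStreams(builder.build(), props);
streamManager.start();
Runtime.getRuntime().addShutdownHook(new Thread(streamManager::close));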
Example 3
Source File: KafkaStreamWordCount.java from Building-Data-Streaming-Applications-with-Apache-Kafka with MIT License
public static void main(String[] args) throws Exception {
    Properties kafkaStreamProperties = new Properties();
    kafkaStreamProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafka-stream-wordCount");
    kafkaStreamProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    kafkaStreamProperties.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    kafkaStreamProperties.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    kafkaStreamProperties.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    Serde<String> stringSerde = Serdes.String();
    Serde<Long> longSerde = Serdes.Long();

    KStreamBuilder streamTopology = new KStreamBuilder();

    KStream<String, String> topicRecords = streamTopology.stream(stringSerde, stringSerde, "input");

    KStream<String, Long> wordCounts = topicRecords
            .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .map((key, word) -> new KeyValue<>(word, word))
            .countByKey("Count")
            .toStream();

    wordCounts.to(stringSerde, longSerde, "wordCount");

    KafkaStreams streamManager = new KafkaStreams(streamTopology, kafkaStreamProperties);
    streamManager.start();

    Runtime.getRuntime().addShutdownHook(new Thread(streamManager::close));
}
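The same migration notes apply here; in addition, countByKey was removed in favor of groupBy(...).count(...). A sketch of just the counting section against the current API, assuming the same "input" and "wordCount" topics and the stringSerde/longSerde and StreamsBuilder from the sketch above:

KStream<String, String> topicRecords =
        builder.stream("input", Consumed.with(stringSerde, stringSerde));

topicRecords
        .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
        // Re-key each record by the word itself, then count occurrences per word.
        .groupBy((key, word) -> word, Grouped.with(stringSerde, stringSerde))
        .count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("Count"))
        .toStream()
        .to("wordCount", Produced.with(stringSerde, longSerde));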
Example 4
Source File: KafkaRocksDBCacheTest.java from kcache with Apache License 2.0
@Override
protected Cache<String, String> createAndInitKafkaCacheInstance(String bootstrapServers) {
    Cache<String, String> rocksDBCache =
            new RocksDBCache<>(topic, "/tmp", Serdes.String(), Serdes.String());

    Properties props = new Properties();
    props.put(KafkaCacheConfig.KAFKACACHE_BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    KafkaCacheConfig config = new KafkaCacheConfig(props);

    Cache<String, String> kafkaCache = Caches.concurrentCache(
            new KafkaCache<>(config, Serdes.String(), Serdes.String(), new StringUpdateHandler(), rocksDBCache));
    kafkaCache.init();
    return kafkaCache;
}
Example 5
Source File: StockPerformanceStreamsAndProcessorMultipleValuesApplication.java from kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<String> stringSerde = Serdes.String();
    Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();

    StreamsBuilder builder = new StreamsBuilder();

    String stocksStateStore = "stock-performance-store";
    double differentialThreshold = 0.05;

    TransformerSupplier<String, StockTransaction, KeyValue<String, List<KeyValue<String, StockPerformance>>>> transformerSupplier =
            () -> new StockPerformanceMultipleValuesTransformer(stocksStateStore, differentialThreshold);

    KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
    StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder =
            Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

    builder.addStateStore(storeBuilder);

    builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
            .transform(transformerSupplier, stocksStateStore)
            .flatMap((dummyKey, valueList) -> valueList)
            .print(Printed.<String, StockPerformance>toSysOut().withLabel("StockPerformance"));
            //.to(stringSerde, stockPerformanceSerde, "stock-performance");

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
    System.out.println("Stock Analysis KStream/Process API App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(70000);
    System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
Example 6
Source File: Stream.java from hdinsight-kafka-java-get-started with MIT License
public static void main(String[] args) {
    Properties streamsConfig = new Properties();
    // The name must be unique on the Kafka cluster
    streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-example");
    // Brokers
    streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, args[0]);
    // Zookeeper
    //streamsConfig.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, args[1]);
    // SerDes for key and values
    streamsConfig.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfig.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    // Serdes for the word and count
    Serde<String> stringSerde = Serdes.String();
    Serde<Long> longSerde = Serdes.Long();

    KStreamBuilder builder = new KStreamBuilder();

    KStream<String, String> sentences = builder.stream(stringSerde, stringSerde, "test");

    KStream<String, Long> wordCounts = sentences
            .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
            .map((key, word) -> new KeyValue<>(word, word))
            .countByKey("Counts")
            .toStream();

    wordCounts.to(stringSerde, longSerde, "wordcounts");

    KafkaStreams streams = new KafkaStreams(builder, streamsConfig);
    streams.start();

    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
Example 7
Source File: StockPerformanceStreamsAndProcessorApplication.java from kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<String> stringSerde = Serdes.String();
    Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();

    StreamsBuilder builder = new StreamsBuilder();

    String stocksStateStore = "stock-performance-store";
    double differentialThreshold = 0.02;

    KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
    StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder =
            Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

    builder.addStateStore(storeBuilder);

    builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
            .transform(() -> new StockPerformanceTransformer(stocksStateStore, differentialThreshold), stocksStateStore)
            .print(Printed.<String, StockPerformance>toSysOut().withLabel("StockPerformance"));
            //Uncomment this line and comment out the line above for writing to a topic
            //.to(stringSerde, stockPerformanceSerde, "stock-performance");

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
    System.out.println("Stock Analysis KStream/Process API App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(70000);
    System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
Example 8
Source File: SpecificClientIntegrationITCase.java from kiqr with Apache License 2.0
@Test
public void successfulAllQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client =
            new SpecificBlockingRestKiqrClientImpl<>("localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getAllKeyValues();

    assertThat(result.entrySet(), hasSize(4));
    assertThat(result, hasEntry("key1", 3L));
    assertThat(result, hasEntry("key2", 6L));
    assertThat(result, hasEntry("key3", 9L));
    assertThat(result, hasEntry("key4", 12L));
}
Example 9
Source File: StockPerformanceInteractiveQueryApplication.java from kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) {
    if (args.length < 2) {
        LOG.error("Need to specify host, port");
        System.exit(1);
    }

    String host = args[0];
    int port = Integer.parseInt(args[1]);
    final HostInfo hostInfo = new HostInfo(host, port);

    Properties properties = getProperties();
    properties.put(StreamsConfig.APPLICATION_SERVER_CONFIG, host + ":" + port);

    StreamsConfig streamsConfig = new StreamsConfig(properties);

    Serde<String> stringSerde = Serdes.String();
    Serde<Long> longSerde = Serdes.Long();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();
    WindowedSerializer<String> windowedSerializer = new WindowedSerializer<>(stringSerde.serializer());
    WindowedDeserializer<String> windowedDeserializer = new WindowedDeserializer<>(stringSerde.deserializer());
    Serde<Windowed<String>> windowedSerde = Serdes.serdeFrom(windowedSerializer, windowedDeserializer);
    Serde<CustomerTransactions> customerTransactionsSerde = StreamsSerdes.CustomerTransactionsSerde();

    Aggregator<String, StockTransaction, Integer> sharesAggregator = (k, v, i) -> v.getShares() + i;

    StreamsBuilder builder = new StreamsBuilder();

    // data is already coming in keyed
    KStream<String, StockTransaction> stockTransactionKStream =
            builder.stream(MockDataProducer.STOCK_TRANSACTIONS_TOPIC,
                    Consumed.with(stringSerde, stockTransactionSerde)
                            .withOffsetResetPolicy(Topology.AutoOffsetReset.LATEST));

    stockTransactionKStream.map((k, v) -> KeyValue.pair(v.getSector(), v))
            .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
            .count(Materialized.as("TransactionsBySector"))
            .toStream()
            .peek((k, v) -> LOG.info("Transaction count for {} {}", k, v))
            .to("sector-transaction-counts", Produced.with(stringSerde, longSerde));

    stockTransactionKStream.map((k, v) -> KeyValue.pair(v.getCustomerId(), v))
            .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
            .windowedBy(SessionWindows.with(TimeUnit.MINUTES.toMillis(60)).until(TimeUnit.MINUTES.toMillis(120)))
            .aggregate(CustomerTransactions::new,
                    (k, v, ct) -> ct.update(v),
                    (k, ct, other) -> ct.merge(other),
                    Materialized.<String, CustomerTransactions, SessionStore<Bytes, byte[]>>as("CustomerPurchaseSessions")
                            .withKeySerde(stringSerde).withValueSerde(customerTransactionsSerde))
            .toStream()
            .peek((k, v) -> LOG.info("Session info for {} {}", k, v))
            .to("session-transactions", Produced.with(windowedSerde, customerTransactionsSerde));

    stockTransactionKStream.groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
            .windowedBy(TimeWindows.of(10000))
            .aggregate(() -> 0, sharesAggregator,
                    Materialized.<String, Integer, WindowStore<Bytes, byte[]>>as("NumberSharesPerPeriod")
                            .withKeySerde(stringSerde)
                            .withValueSerde(Serdes.Integer()))
            .toStream()
            .peek((k, v) -> LOG.info("key is {} value is {}", k, v))
            .to("transaction-count", Produced.with(windowedSerde, Serdes.Integer()));

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    InteractiveQueryServer queryServer = new InteractiveQueryServer(kafkaStreams, hostInfo);
    StateRestoreHttpReporter restoreReporter = new StateRestoreHttpReporter(queryServer);

    queryServer.init();

    kafkaStreams.setGlobalStateRestoreListener(restoreReporter);

    kafkaStreams.setStateListener(((newState, oldState) -> {
        if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
            LOG.info("Setting the query server to ready");
            queryServer.setReady(true);
        } else if (newState != KafkaStreams.State.RUNNING) {
            LOG.info("State not RUNNING, disabling the query server");
            queryServer.setReady(false);
        }
    }));

    kafkaStreams.setUncaughtExceptionHandler((t, e) -> {
        LOG.error("Thread {} had a fatal error {}", t, e, e);
        shutdown(kafkaStreams, queryServer);
    });

    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        shutdown(kafkaStreams, queryServer);
    }));

    LOG.info("Stock Analysis KStream Interactive Query App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
}
Example 10
Source File: PopsHopsApplication.java from kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Deserializer<BeerPurchase> beerPurchaseDeserializer = new JsonDeserializer<>(BeerPurchase.class);
    Serde<String> stringSerde = Serdes.String();
    Deserializer<String> stringDeserializer = stringSerde.deserializer();
    Serializer<String> stringSerializer = stringSerde.serializer();
    Serializer<BeerPurchase> beerPurchaseSerializer = new JsonSerializer<>();

    Topology topology = new Topology();

    String domesticSalesSink = "domestic-beer-sales";
    String internationalSalesSink = "international-beer-sales";
    String purchaseSourceNodeName = "beer-purchase-source";
    String purchaseProcessor = "purchase-processor";

    BeerPurchaseProcessor beerProcessor = new BeerPurchaseProcessor(domesticSalesSink, internationalSalesSink);

    topology.addSource(LATEST,
                    purchaseSourceNodeName,
                    new UsePreviousTimeOnInvalidTimestamp(),
                    stringDeserializer,
                    beerPurchaseDeserializer,
                    Topics.POPS_HOPS_PURCHASES.topicName())
            .addProcessor(purchaseProcessor, () -> beerProcessor, purchaseSourceNodeName);

    //Uncomment these two lines and comment out the printer lines for writing to topics
    //.addSink(internationalSalesSink, "international-sales", stringSerializer, beerPurchaseSerializer, purchaseProcessor)
    //.addSink(domesticSalesSink, "domestic-sales", stringSerializer, beerPurchaseSerializer, purchaseProcessor);

    //You'll have to comment these lines out if you want to write to topics as they have the same node names
    topology.addProcessor(domesticSalesSink, new KStreamPrinter("domestic-sales"), purchaseProcessor);
    topology.addProcessor(internationalSalesSink, new KStreamPrinter("international-sales"), purchaseProcessor);

    KafkaStreams kafkaStreams = new KafkaStreams(topology, streamsConfig);
    MockDataProducer.produceBeerPurchases(5);
    System.out.println("Starting Pops-Hops Application now");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(70000);
    System.out.println("Shutting down Pops-Hops Application now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
Example 11
Source File: ZMartTopology.java from kafka-streams-in-action with Apache License 2.0
public static Topology build() {
    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
    Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
    Serde<String> stringSerde = Serdes.String();

    StreamsBuilder streamsBuilder = new StreamsBuilder();

    KStream<String, Purchase> purchaseKStream =
            streamsBuilder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
                    .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    KStream<String, PurchasePattern> patternKStream =
            purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());
    patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));

    KStream<String, RewardAccumulator> rewardsKStream =
            purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());
    rewardsKStream.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));

    purchaseKStream.to("purchases", Produced.with(Serdes.String(), purchaseSerde));

    return streamsBuilder.build();
}
Example 12
Source File: SpecificClientIntegrationITCase.java from kiqr with Apache License 2.0
@Test
public void successfulScalarQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client =
            new SpecificBlockingRestKiqrClientImpl<>("localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());

    Optional<Long> resultKey1 = client.getScalarKeyValue("key1");
    assertTrue(resultKey1.isPresent());
    assertThat(resultKey1.get(), is(equalTo(3L)));

    Optional<Long> resultKey2 = client.getScalarKeyValue("key3");
    assertTrue(resultKey2.isPresent());
    assertThat(resultKey2.get(), is(equalTo(9L)));
}
Example 13
Source File: KafkaStreamsITest.java from java-specialagent with Apache License 2.0
public static void main(final String[] args) throws Exception {
    final EmbeddedKafkaRule embeddedKafkaRule = TestUtil.retry(new Callable<EmbeddedKafkaRule>() {
        @Override
        public EmbeddedKafkaRule call() {
            final EmbeddedKafkaRule rule = new EmbeddedKafkaRule(1, true, 1, "stream-test");
            try {
                rule.before();
                return rule;
            }
            catch (final Exception e) {
                rule.after();
                throw e;
            }
        }
    }, 10);

    final Map<String,Object> senderProps = KafkaTestUtils.producerProps(embeddedKafkaRule.getEmbeddedKafka());
    try (final Producer<Integer,String> producer = new KafkaProducer<>(senderProps)) {
        final CountDownLatch latch = TestUtil.initExpectedSpanLatch(4);

        final Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-app");
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, senderProps.get("bootstrap.servers"));
        config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        final ProducerRecord<Integer,String> record = new ProducerRecord<>("stream-test", 1, "test");
        producer.send(record);

        final Serde<String> stringSerde = Serdes.String();
        final Serde<Integer> intSerde = Serdes.Integer();

        final StreamsBuilder builder = new StreamsBuilder();
        final KStream<Integer,String> kStream = builder.stream("stream-test");

        kStream.map(new KeyValueMapper<Integer,String,KeyValue<Integer,String>>() {
            @Override
            public KeyValue<Integer,String> apply(final Integer key, final String value) {
                TestUtil.checkActiveSpan();
                return new KeyValue<>(key, value + "map");
            }
        }).to("stream-out", Produced.with(intSerde, stringSerde));

        KafkaStreams streams = new KafkaStreams(builder.build(), config);
        streams.start();

        TestUtil.checkSpan(true, latch, new ComponentSpanCount("java-kafka", 3), new ComponentSpanCount("kafka-streams", 1));
        streams.close();
    }
    catch (final Throwable t) {
        t.printStackTrace(System.err);
        embeddedKafkaRule.after();
        System.exit(1);
    }
    finally {
        embeddedKafkaRule.after();
        System.exit(0);
    }
}
Example 14
Source File: KafkaStreamsJoinsApp.java from kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());
    StreamsBuilder builder = new StreamsBuilder();

    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<String> stringSerde = Serdes.String();

    KeyValueMapper<String, Purchase, KeyValue<String, Purchase>> custIdCCMasking = (k, v) -> {
        Purchase masked = Purchase.builder(v).maskCreditCard().build();
        return new KeyValue<>(masked.getCustomerId(), masked);
    };

    Predicate<String, Purchase> coffeePurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
    Predicate<String, Purchase> electronicPurchase = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

    int COFFEE_PURCHASE = 0;
    int ELECTRONICS_PURCHASE = 1;

    KStream<String, Purchase> transactionStream =
            builder.stream("transactions", Consumed.with(Serdes.String(), purchaseSerde)).map(custIdCCMasking);

    KStream<String, Purchase>[] branchesStream =
            transactionStream.selectKey((k, v) -> v.getCustomerId()).branch(coffeePurchase, electronicPurchase);

    KStream<String, Purchase> coffeeStream = branchesStream[COFFEE_PURCHASE];
    KStream<String, Purchase> electronicsStream = branchesStream[ELECTRONICS_PURCHASE];

    ValueJoiner<Purchase, Purchase, CorrelatedPurchase> purchaseJoiner = new PurchaseJoiner();
    JoinWindows twentyMinuteWindow = JoinWindows.of(60 * 1000 * 20);

    KStream<String, CorrelatedPurchase> joinedKStream = coffeeStream.join(electronicsStream,
            purchaseJoiner,
            twentyMinuteWindow,
            Joined.with(stringSerde, purchaseSerde, purchaseSerde));

    joinedKStream.print(Printed.<String, CorrelatedPurchase>toSysOut().withLabel("joined KStream"));

    // used only to produce data for this application, not typical usage
    MockDataProducer.producePurchaseData();

    LOG.info("Starting Join Examples");
    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    kafkaStreams.start();
    Thread.sleep(65000);
    LOG.info("Shutting down the Join Examples now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
Example 15
Source File: StockCountsStreamsConnectIntegrationApplication.java from kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<String> stringSerde = Serdes.String();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();
    Serde<Long> longSerde = Serdes.Long();

    StreamsBuilder builder = new StreamsBuilder();

    builder.stream("dbTxnTRANSACTIONS", Consumed.with(stringSerde, stockTransactionSerde))
            .peek((k, v) -> LOG.info("transactions from database key {} value {}", k, v))
            .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
            .aggregate(() -> 0L,
                    (symb, stockTxn, numShares) -> numShares + stockTxn.getShares(),
                    Materialized.with(stringSerde, longSerde))
            .toStream()
            .peek((k, v) -> LOG.info("Aggregated stock sales for {} {}", k, v))
            .to("stock-counts", Produced.with(stringSerde, longSerde));

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    CountDownLatch doneSignal = new CountDownLatch(1);

    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        doneSignal.countDown();
        LOG.info("Shutting down the Stock Analysis KStream Connect App now");
        kafkaStreams.close();
    }));

    LOG.info("Stock Analysis KStream Connect App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    doneSignal.await();
}
Example 16
Source File: SpecificClientIntegrationITCase.java from kiqr with Apache License 2.0
@Test(expected = QueryExecutionException.class)
public void invertedRangeQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client =
            new SpecificBlockingRestKiqrClientImpl<>("localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getRangeKeyValues("key3", "key1");

    assertThat(result.entrySet(), is(empty()));
}
Example 17
Source File: PurchaseKafkaStreamsDriver.java from kafka-streams with Apache License 2.0
public static void main(String[] args) {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    JsonDeserializer<Purchase> purchaseJsonDeserializer = new JsonDeserializer<>(Purchase.class);
    JsonSerializer<Purchase> purchaseJsonSerializer = new JsonSerializer<>();

    JsonSerializer<RewardAccumulator> rewardAccumulatorJsonSerializer = new JsonSerializer<>();
    JsonDeserializer<RewardAccumulator> rewardAccumulatorJsonDeserializer = new JsonDeserializer<>(RewardAccumulator.class);
    Serde<RewardAccumulator> rewardAccumulatorSerde = Serdes.serdeFrom(rewardAccumulatorJsonSerializer, rewardAccumulatorJsonDeserializer);

    JsonSerializer<PurchasePattern> purchasePatternJsonSerializer = new JsonSerializer<>();
    JsonDeserializer<PurchasePattern> purchasePatternJsonDeserializer = new JsonDeserializer<>(PurchasePattern.class);
    Serde<PurchasePattern> purchasePatternSerde = Serdes.serdeFrom(purchasePatternJsonSerializer, purchasePatternJsonDeserializer);

    Serde<Purchase> purchaseSerde = Serdes.serdeFrom(purchaseJsonSerializer, purchaseJsonDeserializer);
    Serde<String> stringSerde = Serdes.String();

    KStreamBuilder kStreamBuilder = new KStreamBuilder();

    KStream<String, Purchase> purchaseKStream = kStreamBuilder.stream(stringSerde, purchaseSerde, "src-topic")
            .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build()).to(stringSerde, purchasePatternSerde, "patterns");

    purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build()).to(stringSerde, rewardAccumulatorSerde, "rewards");

    purchaseKStream.to(stringSerde, purchaseSerde, "purchases");

    System.out.println("Starting PurchaseStreams Example");
    KafkaStreams kafkaStreams = new KafkaStreams(kStreamBuilder, streamsConfig);
    kafkaStreams.start();
    System.out.println("Now started PurchaseStreams Example");
}
Example 18
Source File: SpecificClientIntegrationITCase.java from kiqr with Apache License 2.0
@Test
public void emptyRangeQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client =
            new SpecificBlockingRestKiqrClientImpl<>("localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getRangeKeyValues("key6", "key7");

    assertThat(result.entrySet(), is(empty()));
}
Example 19
Source File: ZMartKafkaStreamsAdvancedReqsApp.java from kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
    Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
    Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
    Serde<String> stringSerde = Serdes.String();

    StreamsBuilder builder = new StreamsBuilder();

    // previous requirements
    KStream<String, Purchase> purchaseKStream =
            builder.stream("transactions", Consumed.with(stringSerde, purchaseSerde))
                    .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

    KStream<String, PurchasePattern> patternKStream =
            purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

    patternKStream.print(Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
    patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));

    KStream<String, RewardAccumulator> rewardsKStream =
            purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build());

    rewardsKStream.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
    rewardsKStream.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));

    // selecting a key for storage and filtering out low dollar purchases
    KeyValueMapper<String, Purchase, Long> purchaseDateAsKey = (key, purchase) -> purchase.getPurchaseDate().getTime();

    KStream<Long, Purchase> filteredKStream =
            purchaseKStream.filter((key, purchase) -> purchase.getPrice() > 5.00).selectKey(purchaseDateAsKey);

    filteredKStream.print(Printed.<Long, Purchase>toSysOut().withLabel("purchases"));
    filteredKStream.to("purchases", Produced.with(Serdes.Long(), purchaseSerde));

    // branching stream for separating out purchases in new departments to their own topics
    Predicate<String, Purchase> isCoffee = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("coffee");
    Predicate<String, Purchase> isElectronics = (key, purchase) -> purchase.getDepartment().equalsIgnoreCase("electronics");

    int coffee = 0;
    int electronics = 1;

    KStream<String, Purchase>[] kstreamByDept = purchaseKStream.branch(isCoffee, isElectronics);

    kstreamByDept[coffee].to("coffee", Produced.with(stringSerde, purchaseSerde));
    kstreamByDept[coffee].print(Printed.<String, Purchase>toSysOut().withLabel("coffee"));

    kstreamByDept[electronics].to("electronics", Produced.with(stringSerde, purchaseSerde));
    kstreamByDept[electronics].print(Printed.<String, Purchase>toSysOut().withLabel("electronics"));

    // security Requirements to record transactions for certain employee
    ForeachAction<String, Purchase> purchaseForeachAction = (key, purchase) ->
            SecurityDBService.saveRecord(purchase.getPurchaseDate(), purchase.getEmployeeId(), purchase.getItemPurchased());

    purchaseKStream.filter((key, purchase) -> purchase.getEmployeeId().equals("000000")).foreach(purchaseForeachAction);

    // used only to produce data for this application, not typical usage
    MockDataProducer.producePurchaseData();

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    LOG.info("ZMart Advanced Requirements Kafka Streams Application Started");
    kafkaStreams.start();
    Thread.sleep(65000);
    LOG.info("Shutting down the Kafka Streams Application now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
Example 20
Source File: GlobalKTableExample.java from kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<String> stringSerde = Serdes.String();
    Serde<StockTransaction> transactionSerde = StreamsSerdes.StockTransactionSerde();
    Serde<TransactionSummary> transactionSummarySerde = StreamsSerdes.TransactionSummarySerde();

    StreamsBuilder builder = new StreamsBuilder();
    long twentySeconds = 1000 * 20;

    KeyValueMapper<Windowed<TransactionSummary>, Long, KeyValue<String, TransactionSummary>> transactionMapper = (window, count) -> {
        TransactionSummary transactionSummary = window.key();
        String newKey = transactionSummary.getIndustry();
        transactionSummary.setSummaryCount(count);
        return KeyValue.pair(newKey, transactionSummary);
    };

    KStream<String, TransactionSummary> countStream =
            builder.stream(STOCK_TRANSACTIONS_TOPIC, Consumed.with(stringSerde, transactionSerde).withOffsetResetPolicy(LATEST))
                    .groupBy((noKey, transaction) -> TransactionSummary.from(transaction),
                            Serialized.with(transactionSummarySerde, transactionSerde))
                    .windowedBy(SessionWindows.with(twentySeconds)).count()
                    .toStream().map(transactionMapper);

    GlobalKTable<String, String> publicCompanies = builder.globalTable(COMPANIES.topicName());
    GlobalKTable<String, String> clients = builder.globalTable(CLIENTS.topicName());

    countStream.leftJoin(publicCompanies, (key, txn) -> txn.getStockTicker(), TransactionSummary::withCompanyName)
            .leftJoin(clients, (key, txn) -> txn.getCustomerId(), TransactionSummary::withCustomerName)
            .print(Printed.<String, TransactionSummary>toSysOut().withLabel("Resolved Transaction Summaries"));

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    kafkaStreams.cleanUp();

    kafkaStreams.setUncaughtExceptionHandler((t, e) -> {
        LOG.error("had exception ", e);
    });

    CustomDateGenerator dateGenerator = CustomDateGenerator.withTimestampsIncreasingBy(Duration.ofMillis(750));
    DataGenerator.setTimestampGenerator(dateGenerator::get);
    MockDataProducer.produceStockTransactions(2, 5, 3, true);

    LOG.info("Starting GlobalKTable Example");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(65000);
    LOG.info("Shutting down the GlobalKTable Example Application now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}