Java Code Examples for org.apache.kafka.common.serialization.Serdes#Long
The following examples show how to use org.apache.kafka.common.serialization.Serdes#Long.
Each example is drawn from an open-source project; the source file, project, and license are noted above the code.
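Serdes.Long() returns a Serde<Long> that pairs a LongSerializer with a LongDeserializer, encoding each value as 8 bytes in big-endian order. As a quick orientation before the examples, here is a minimal round-trip sketch (the topic name is arbitrary and the class name is ours, not from any of the projects below):

import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;

public class LongSerdeRoundTrip {
    public static void main(String[] args) {
        Serde<Long> longSerde = Serdes.Long();

        // Serialize: a Long becomes an 8-byte big-endian array.
        byte[] bytes = longSerde.serializer().serialize("any-topic", 42L);

        // Deserialize: the bytes come back as the original Long.
        Long roundTripped = longSerde.deserializer().deserialize("any-topic", bytes);

        System.out.println(bytes.length + " bytes -> " + roundTripped); // 8 bytes -> 42
    }
}

Most of the examples below never touch the serializer directly; they hand the Serde to the Streams DSL through Consumed.with, Produced.with, or Materialized, and the runtime performs the byte conversion.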
Example 1
Source File: StreamsRegistryConfiguration.java From apicurio-registry with Apache License 2.0
@Produces
@ApplicationScoped
public ReadOnlyKeyValueStore<Long, Str.TupleValue> globalIdKeyValueStore(
    KafkaStreams streams,
    HostInfo storageLocalHost,
    StreamsProperties properties
) {
    return new DistributedReadOnlyKeyValueStore<>(
        streams,
        storageLocalHost,
        properties.getGlobalIdStoreName(),
        Serdes.Long(),
        ProtoSerde.parsedWith(Str.TupleValue.parser()),
        new DefaultGrpcChannelProvider(),
        true,
        (filter, over, id, tuple) -> true
    );
}
Example 2
Source File: WordCount.java From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> textLines = builder.stream(this.inputTopic);

    final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
    final KTable<String, Long> wordCounts = textLines
        .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
        .groupBy((key, word) -> word)
        .count();

    wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde));
    return builder.build();
}
Example 3
Source File: KafkaStreamWordCount.java From Building-Data-Streaming-Applications-with-Apache-Kafka with MIT License
public static void main(String[] args) throws Exception {
    Properties kafkaStreamProperties = new Properties();
    kafkaStreamProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafka-stream-wordCount");
    kafkaStreamProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    kafkaStreamProperties.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    kafkaStreamProperties.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    kafkaStreamProperties.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    Serde<String> stringSerde = Serdes.String();
    Serde<Long> longSerde = Serdes.Long();

    KStreamBuilder streamTopology = new KStreamBuilder();
    KStream<String, String> topicRecords = streamTopology.stream(stringSerde, stringSerde, "input");
    KStream<String, Long> wordCounts = topicRecords
        .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
        .map((key, word) -> new KeyValue<>(word, word))
        .countByKey("Count")
        .toStream();

    wordCounts.to(stringSerde, longSerde, "wordCount");

    KafkaStreams streamManager = new KafkaStreams(streamTopology, kafkaStreamProperties);
    streamManager.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streamManager::close));
}
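This example (and Example 5 below) targets a pre-1.0 Kafka Streams release: KStreamBuilder, ZOOKEEPER_CONNECT_CONFIG, KEY_SERDE_CLASS_CONFIG/VALUE_SERDE_CLASS_CONFIG, and countByKey were all removed in later versions. For orientation, here is a minimal sketch of the same word count against the current API, assuming Kafka Streams 2.x or later (the class name is ours, not from the book):

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class ModernWordCount {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafka-stream-wordCount");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // No ZooKeeper setting anymore; Streams talks only to the brokers.
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

        StreamsBuilder builder = new StreamsBuilder();
        builder.stream("input", Consumed.with(Serdes.String(), Serdes.String()))
               .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
               .groupBy((key, word) -> word)   // re-key by word; uses the default String serdes
               .count()                        // KTable<String, Long>
               .toStream()
               .to("wordCount", Produced.with(Serdes.String(), Serdes.Long()));

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}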
Example 4
Source File: KafkaStreamsLiveTest.java From tutorials with MIT License
@Test
@Ignore("it needs to have kafka broker running on local")
public void shouldTestKafkaStreams() throws InterruptedException {
    // given
    String inputTopic = "inputTopic";
    Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-live-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1000);
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());

    // when
    KStreamBuilder builder = new KStreamBuilder();
    KStream<String, String> textLines = builder.stream(inputTopic);
    Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);

    KTable<String, Long> wordCounts = textLines
        .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
        .groupBy((key, word) -> word)
        .count();

    wordCounts.foreach((word, count) -> System.out.println("word: " + word + " -> " + count));

    String outputTopic = "outputTopic";
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();
    wordCounts.to(stringSerde, longSerde, outputTopic);

    KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    streams.start();

    // then
    Thread.sleep(30000);
    streams.close();
}
Example 5
Source File: Stream.java From hdinsight-kafka-java-get-started with MIT License
public static void main(String[] args) {
    Properties streamsConfig = new Properties();
    // The name must be unique on the Kafka cluster
    streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-example");
    // Brokers
    streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, args[0]);
    // Zookeeper
    //streamsConfig.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, args[1]);
    // SerDes for key and values
    streamsConfig.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfig.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    // Serdes for the word and count
    Serde<String> stringSerde = Serdes.String();
    Serde<Long> longSerde = Serdes.Long();

    KStreamBuilder builder = new KStreamBuilder();
    KStream<String, String> sentences = builder.stream(stringSerde, stringSerde, "test");
    KStream<String, Long> wordCounts = sentences
        .flatMapValues(value -> Arrays.asList(value.toLowerCase().split("\\W+")))
        .map((key, word) -> new KeyValue<>(word, word))
        .countByKey("Counts")
        .toStream();

    wordCounts.to(stringSerde, longSerde, "wordcounts");

    KafkaStreams streams = new KafkaStreams(builder, streamsConfig);
    streams.start();
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}
Example 6
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test
public void successfulAllQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getAllKeyValues();

    assertThat(result.entrySet(), hasSize(4));
    assertThat(result, hasEntry("key1", 3L));
    assertThat(result, hasEntry("key2", 6L));
    assertThat(result, hasEntry("key3", 9L));
    assertThat(result, hasEntry("key4", 12L));
}
Example 7
Source File: WaitForDataService.java From apicurio-registry with Apache License 2.0
@Override
public Serde<Long> reqSerde() {
    return Serdes.Long();
}
Example 8
Source File: StockPerformanceInteractiveQueryApplication.java From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) {
    if (args.length < 2) {
        LOG.error("Need to specify host, port");
        System.exit(1);
    }
    String host = args[0];
    int port = Integer.parseInt(args[1]);
    final HostInfo hostInfo = new HostInfo(host, port);

    Properties properties = getProperties();
    properties.put(StreamsConfig.APPLICATION_SERVER_CONFIG, host + ":" + port);
    StreamsConfig streamsConfig = new StreamsConfig(properties);

    Serde<String> stringSerde = Serdes.String();
    Serde<Long> longSerde = Serdes.Long();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();
    WindowedSerializer<String> windowedSerializer = new WindowedSerializer<>(stringSerde.serializer());
    WindowedDeserializer<String> windowedDeserializer = new WindowedDeserializer<>(stringSerde.deserializer());
    Serde<Windowed<String>> windowedSerde = Serdes.serdeFrom(windowedSerializer, windowedDeserializer);
    Serde<CustomerTransactions> customerTransactionsSerde = StreamsSerdes.CustomerTransactionsSerde();

    Aggregator<String, StockTransaction, Integer> sharesAggregator = (k, v, i) -> v.getShares() + i;

    StreamsBuilder builder = new StreamsBuilder();

    // data is already coming in keyed
    KStream<String, StockTransaction> stockTransactionKStream =
        builder.stream(MockDataProducer.STOCK_TRANSACTIONS_TOPIC,
            Consumed.with(stringSerde, stockTransactionSerde)
                .withOffsetResetPolicy(Topology.AutoOffsetReset.LATEST));

    stockTransactionKStream.map((k, v) -> KeyValue.pair(v.getSector(), v))
        .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
        .count(Materialized.as("TransactionsBySector"))
        .toStream()
        .peek((k, v) -> LOG.info("Transaction count for {} {}", k, v))
        .to("sector-transaction-counts", Produced.with(stringSerde, longSerde));

    stockTransactionKStream.map((k, v) -> KeyValue.pair(v.getCustomerId(), v))
        .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
        .windowedBy(SessionWindows.with(TimeUnit.MINUTES.toMillis(60)).until(TimeUnit.MINUTES.toMillis(120)))
        .aggregate(CustomerTransactions::new, (k, v, ct) -> ct.update(v), (k, ct, other) -> ct.merge(other),
            Materialized.<String, CustomerTransactions, SessionStore<Bytes, byte[]>>as("CustomerPurchaseSessions")
                .withKeySerde(stringSerde).withValueSerde(customerTransactionsSerde))
        .toStream()
        .peek((k, v) -> LOG.info("Session info for {} {}", k, v))
        .to("session-transactions", Produced.with(windowedSerde, customerTransactionsSerde));

    stockTransactionKStream.groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
        .windowedBy(TimeWindows.of(10000))
        .aggregate(() -> 0, sharesAggregator,
            Materialized.<String, Integer, WindowStore<Bytes, byte[]>>as("NumberSharesPerPeriod")
                .withKeySerde(stringSerde)
                .withValueSerde(Serdes.Integer()))
        .toStream().peek((k, v) -> LOG.info("key is {} value is {}", k, v))
        .to("transaction-count", Produced.with(windowedSerde, Serdes.Integer()));

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    InteractiveQueryServer queryServer = new InteractiveQueryServer(kafkaStreams, hostInfo);
    StateRestoreHttpReporter restoreReporter = new StateRestoreHttpReporter(queryServer);
    queryServer.init();

    kafkaStreams.setGlobalStateRestoreListener(restoreReporter);

    kafkaStreams.setStateListener(((newState, oldState) -> {
        if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
            LOG.info("Setting the query server to ready");
            queryServer.setReady(true);
        } else if (newState != KafkaStreams.State.RUNNING) {
            LOG.info("State not RUNNING, disabling the query server");
            queryServer.setReady(false);
        }
    }));

    kafkaStreams.setUncaughtExceptionHandler((t, e) -> {
        LOG.error("Thread {} had a fatal error {}", t, e, e);
        shutdown(kafkaStreams, queryServer);
    });

    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        shutdown(kafkaStreams, queryServer);
    }));

    LOG.info("Stock Analysis KStream Interactive Query App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
}
Example 9
Source File: NamingChangelogAndRepartitionTopics.java From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps) {
    final StreamsBuilder builder = new StreamsBuilder();
    final String inputTopic = envProps.getProperty("input.topic.name");
    final String outputTopic = envProps.getProperty("output.topic.name");
    final String joinTopic = envProps.getProperty("join.topic.name");

    final Serde<Long> longSerde = Serdes.Long();
    final Serde<String> stringSerde = Serdes.String();

    final boolean addFilter = Boolean.parseBoolean(envProps.getProperty("add.filter"));
    final boolean addNames = Boolean.parseBoolean(envProps.getProperty("add.names"));

    KStream<Long, String> inputStream = builder.stream(inputTopic, Consumed.with(longSerde, stringSerde))
        .selectKey((k, v) -> Long.parseLong(v.substring(0, 1)));
    if (addFilter) {
        inputStream = inputStream.filter((k, v) -> k != 100L);
    }

    final KStream<Long, String> joinedStream;
    final KStream<Long, Long> countStream;

    if (!addNames) {
        countStream = inputStream.groupByKey(Grouped.with(longSerde, stringSerde))
            .count()
            .toStream();

        joinedStream = inputStream.join(countStream,
            (v1, v2) -> v1 + v2.toString(),
            JoinWindows.of(Duration.ofMillis(100)),
            StreamJoined.with(longSerde, stringSerde, longSerde));
    } else {
        countStream = inputStream.groupByKey(Grouped.with("count", longSerde, stringSerde))
            .count(Materialized.as("the-counting-store"))
            .toStream();

        joinedStream = inputStream.join(countStream,
            (v1, v2) -> v1 + v2.toString(),
            JoinWindows.of(Duration.ofMillis(100)),
            StreamJoined.with(longSerde, stringSerde, longSerde)
                .withName("join").withStoreName("the-join-store"));
    }

    joinedStream.to(joinTopic, Produced.with(longSerde, stringSerde));
    countStream.map((k, v) -> KeyValue.pair(k.toString(), v.toString()))
        .to(outputTopic, Produced.with(stringSerde, stringSerde));

    return builder.build();
}
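A note on the design this example demonstrates: Grouped.with("count", ...), Materialized.as("the-counting-store"), and StreamJoined.withName("join").withStoreName("the-join-store") pin down the names of the internal repartition and changelog topics. Without explicit names, Kafka Streams derives them from auto-generated processor numbers, so toggling the optional add.filter step shifts the numbering and orphans existing state; with explicit names, the filter can be added to a deployed application without breaking it.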
Example 10
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test(expected = QueryExecutionException.class)
public void wrongStoreTypeRangeQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "window", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getRangeKeyValues("key1", "key2");
}
Example 11
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test
public void notFoundScalarQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());

    Optional<Long> resultKey1 = client.getScalarKeyValue("key5");
    assertFalse(resultKey1.isPresent());
}
Example 12
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test
public void noSuchStoreScalarQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "idontexist", String.class, Long.class, Serdes.String(), Serdes.Long());

    Optional<Long> resultKey1 = client.getScalarKeyValue("key1");
    assertFalse(resultKey1.isPresent());
}
Example 13
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test(expected = QueryExecutionException.class)
public void wrongStoreTypeScalarQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "window", String.class, Long.class, Serdes.String(), Serdes.Long());

    Optional<Long> resultKey1 = client.getScalarKeyValue("key1");
}
Example 14
Source File: StockCountsStreamsConnectIntegrationApplication.java From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());
    Serde<String> stringSerde = Serdes.String();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();
    Serde<Long> longSerde = Serdes.Long();

    StreamsBuilder builder = new StreamsBuilder();

    builder.stream("dbTxnTRANSACTIONS", Consumed.with(stringSerde, stockTransactionSerde))
        .peek((k, v) -> LOG.info("transactions from database key {} value {}", k, v))
        .groupByKey(Serialized.with(stringSerde, stockTransactionSerde))
        .aggregate(() -> 0L, (symb, stockTxn, numShares) -> numShares + stockTxn.getShares(),
            Materialized.with(stringSerde, longSerde)).toStream()
        .peek((k, v) -> LOG.info("Aggregated stock sales for {} {}", k, v))
        .to("stock-counts", Produced.with(stringSerde, longSerde));

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    CountDownLatch doneSignal = new CountDownLatch(1);

    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        doneSignal.countDown();
        LOG.info("Shutting down the Stock Analysis KStream Connect App now");
        kafkaStreams.close();
    }));

    LOG.info("Stock Analysis KStream Connect App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    doneSignal.await();
}
Example 15
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test
public void noSuchStoreAllQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "idontexist", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getAllKeyValues();
    assertTrue(result.isEmpty());
}
Example 16
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test(expected = QueryExecutionException.class)
public void wrongStoreTypeAllQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "window", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getAllKeyValues();
}
Example 17
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test
public void successfulRangeQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getRangeKeyValues("key1", "key2");

    assertThat(result.entrySet(), hasSize(2));
    assertThat(result, hasEntry("key1", 3L));
    assertThat(result, hasEntry("key2", 6L));
}
Example 18
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test
public void emptyRangeQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getRangeKeyValues("key6", "key7");
    assertThat(result.entrySet(), is(empty()));
}
Example 19
Source File: SpecificClientIntegrationITCase.java From kiqr with Apache License 2.0
@Test(expected = QueryExecutionException.class)
public void invertedRangeQuery() throws Exception {
    SpecificBlockingKiqrClient<String, Long> client = new SpecificBlockingRestKiqrClientImpl<>(
        "localhost", 44321, "kv", String.class, Long.class, Serdes.String(), Serdes.Long());

    Map<String, Long> result = client.getRangeKeyValues("key3", "key1");
    assertThat(result.entrySet(), is(empty()));
}