org.apache.kafka.common.serialization.Serde Java Examples
The following examples show how to use
org.apache.kafka.common.serialization.Serde.
The source file, project, and license for each snippet are noted above the example.
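As a quick orientation before the examples: a Serde simply pairs a Serializer and a Deserializer for one type, and most of the snippets below either obtain one from the built-in Serdes factory or assemble one with Serdes.serdeFrom. The following minimal sketch round-trips a value through the built-in String serde; the topic name is illustrative only, since the String serde ignores it.

import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;

public class SerdeRoundTrip {
    public static void main(String[] args) {
        // A Serde pairs a Serializer and a Deserializer for one type.
        // Serde extends Closeable, so try-with-resources works here.
        try (Serde<String> serde = Serdes.String()) {
            // The topic name is passed through to the serializer; the String serde ignores it.
            byte[] bytes = serde.serializer().serialize("any-topic", "hello");
            String value = serde.deserializer().deserialize("any-topic", bytes);
            System.out.println(value); // prints "hello"
        }
    }
}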
Example #1
Source File: KeyValueSerdeResolver.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private Serde<?> getKeySerde(String keySerdeString) {
    Serde<?> keySerde;
    try {
        if (StringUtils.hasText(keySerdeString)) {
            keySerde = Utils.newInstance(keySerdeString, Serde.class);
        }
        else {
            keySerde = getFallbackSerde("default.key.serde");
        }
        keySerde.configure(this.streamConfigGlobalProperties, true);
    }
    catch (ClassNotFoundException ex) {
        throw new IllegalStateException("Serde class not found: ", ex);
    }
    return keySerde;
}
Example #2
Source File: CompositeSerdeRegistry.java From micronaut-kafka with Apache License 2.0
@SuppressWarnings("unchecked") @Override @Nonnull public <T> Serde<T> getSerde(Class<T> type) { Serde serde = serdeMap.get(type); if (serde != null) { return serde; } else { type = ReflectionUtils.getWrapperType(type); try { return Serdes.serdeFrom(type); } catch (IllegalArgumentException e) { for (SerdeRegistry registry : registries) { serde = registry.getSerde(type); if (serde != null) { serdeMap.put(type, serde); return serde; } } } throw new SerializationException("No available serde for type: " + type); } }
Example #3
Source File: StreamUtils.java From kafka-graphs with Apache License 2.0
public static <K, V> KTable<K, V> tableFromCollection(
        StreamsBuilder builder,
        Properties props,
        String topic,
        int numPartitions,
        short replicationFactor,
        Serde<K> keySerde,
        Serde<V> valueSerde,
        Collection<KeyValue<K, V>> values) {

    ClientUtils.createTopic(topic, numPartitions, replicationFactor, props);
    try (Producer<K, V> producer = new KafkaProducer<>(props, keySerde.serializer(), valueSerde.serializer())) {
        for (KeyValue<K, V> value : values) {
            ProducerRecord<K, V> producerRecord = new ProducerRecord<>(topic, value.key, value.value);
            producer.send(producerRecord);
        }
        producer.flush();
    }
    return builder.table(topic, Consumed.with(keySerde, valueSerde), Materialized.with(keySerde, valueSerde));
}
Example #4
Source File: RocksDBCache.java From kcache with Apache License 2.0
public RocksDBCache(final String name,
                    final String parentDir,
                    final String rootDir,
                    Serde<K> keySerde,
                    Serde<V> valueSerde,
                    Comparator<K> comparator) {
    this.name = name;
    this.parentDir = parentDir;
    this.rootDir = rootDir;
    this.keySerde = keySerde;
    this.valueSerde = valueSerde;
    this.comparator = comparator != null ? comparator : (k1, k2) -> {
        byte[] b1 = keySerde.serializer().serialize(null, k1);
        byte[] b2 = keySerde.serializer().serialize(null, k2);
        return BYTES_COMPARATOR.compare(b1, b2);
    };
}
Example #5
Source File: TestInput.java From fluent-kafka-streams-tests with MIT License
/**
 * <p>Constructor for the test input topic.</p>
 *
 * @param testDriver Kafka's {@link TopologyTestDriver} used in this test.
 * @param topic Name of input topic.
 * @param keySerde Serde for key type in topic.
 * @param valueSerde Serde for value type in topic.
 */
protected TestInput(final TopologyTestDriver testDriver, final String topic,
                    final Serde<K> keySerde, final Serde<V> valueSerde) {
    this.testDriver = testDriver;
    this.topic = topic;
    this.keySerde = keySerde;
    this.valueSerde = valueSerde;

    this.consumerFactory = new ConsumerRecordFactory<>(topic,
            keySerde == null ? new UnspecifiedSerializer<K>() : keySerde.serializer(),
            valueSerde == null ? new UnspecifiedSerializer<V>() : valueSerde.serializer()) {
        @Override
        public ConsumerRecord<byte[], byte[]> create(final String topicName, final K key, final V value,
                final Headers headers, final long timestampMs) {
            final ConsumerRecord<byte[], byte[]> record =
                    super.create(topicName, key, value, headers, timestampMs);
            testDriver.pipeInput(record);
            return record;
        }
    };
}
Example #6
Source File: KafkaStreamsStreamListenerSetupMethodOrchestrator.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private KStream<?, ?> getkStream(String inboundName,
                                 KafkaStreamsStateStoreProperties storeSpec,
                                 BindingProperties bindingProperties,
                                 KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties,
                                 StreamsBuilder streamsBuilder,
                                 Serde<?> keySerde, Serde<?> valueSerde,
                                 Topology.AutoOffsetReset autoOffsetReset, boolean firstBuild) {
    if (storeSpec != null) {
        StoreBuilder storeBuilder = buildStateStore(storeSpec);
        streamsBuilder.addStateStore(storeBuilder);
        if (LOG.isInfoEnabled()) {
            LOG.info("state store " + storeBuilder.name() + " added to topology");
        }
    }
    return getKStream(inboundName, bindingProperties, kafkaStreamsConsumerProperties,
            streamsBuilder, keySerde, valueSerde, autoOffsetReset, firstBuild);
}
Example #7
Source File: VisibilityStatementSerdeTest.java From rya with Apache License 2.0
@Test
public void serializeAndDeserialize() {
    // Create the object that will be serialized.
    final ValueFactory vf = SimpleValueFactory.getInstance();

    final Statement statement = vf.createStatement(
            vf.createIRI("urn:person1"),
            vf.createIRI("urn:hasName"),
            vf.createLiteral("alice"),
            vf.createIRI("urn:testContext"));

    final VisibilityStatement original = new VisibilityStatement(statement, "a|b|c");

    // Serialize it.
    try (final Serde<VisibilityStatement> serde = new VisibilityStatementSerde()) {
        final byte[] bytes = serde.serializer().serialize("topic", original);

        // Deserialize it.
        final VisibilityStatement deserialized = serde.deserializer().deserialize("topic", bytes);

        // Show the deserialized value matches the original.
        assertEquals(original, deserialized);
    }
}
Example #8
Source File: GenericBlockingRestKiqrClientImpl.java From kiqr with Apache License 2.0
@Override
public <K, V> Map<K, V> getRangeKeyValues(String store, Class<K> keyClass, Class<V> valueClass,
                                          Serde<K> keySerde, Serde<V> valueSerde, K from, K to) {
    return execute(() -> getUriBuilder()
            .setPath(String.format("/api/v1/kv/%s", store))
            .addParameter("keySerde", keySerde.getClass().getName())
            .addParameter("valueSerde", valueSerde.getClass().getName())
            .addParameter("from", Base64.getEncoder().encodeToString(keySerde.serializer().serialize("", from)))
            .addParameter("to", Base64.getEncoder().encodeToString(keySerde.serializer().serialize("", to)))
            .build(), bytes -> {
        MultiValuedKeyValueQueryResponse resp = mapper.readValue(bytes, MultiValuedKeyValueQueryResponse.class);
        return resp.getResults().entrySet().stream()
                .map(entry -> new Pair<K, V>(
                        deserialize(keyClass, keySerde, entry.getKey()),
                        deserialize(valueClass, valueSerde, entry.getValue())))
                .collect(Collectors.toMap(Pair::getKey, pair -> pair.getValue()));
    }, () -> Collections.emptyMap());
}
Example #9
Source File: StreamUtils.java From kafka-graphs with Apache License 2.0
public static <K, V> KStream<K, V> streamFromCollection(
        StreamsBuilder builder,
        Properties props,
        String topic,
        int numPartitions,
        short replicationFactor,
        Serde<K> keySerde,
        Serde<V> valueSerde,
        Collection<KeyValue<K, V>> values) {

    ClientUtils.createTopic(topic, numPartitions, replicationFactor, props);
    try (Producer<K, V> producer = new KafkaProducer<>(props, keySerde.serializer(), valueSerde.serializer())) {
        for (KeyValue<K, V> value : values) {
            ProducerRecord<K, V> producerRecord = new ProducerRecord<>(topic, value.key, value.value);
            producer.send(producerRecord);
        }
        producer.flush();
    }
    return builder.stream(topic, Consumed.with(keySerde, valueSerde));
}
Example #10
Source File: IPFraudKafkaStreamApp.java From Building-Data-Streaming-Applications-with-Apache-Kafka with MIT License
public static void main(String[] args) throws Exception {
    Properties kafkaStreamProperties = new Properties();
    kafkaStreamProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "IP-Fraud-Detection");
    kafkaStreamProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    kafkaStreamProperties.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    kafkaStreamProperties.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    kafkaStreamProperties.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

    Serde<String> stringSerde = Serdes.String();

    KStreamBuilder fraudDetectionTopology = new KStreamBuilder();

    KStream<String, String> ipRecords =
            fraudDetectionTopology.stream(stringSerde, stringSerde, propertyReader.getPropertyValue("topic"));

    KStream<String, String> fraudIpRecords = ipRecords.filter((k, v) -> isFraud(v));

    fraudIpRecords.to(propertyReader.getPropertyValue("output_topic"));

    KafkaStreams streamManager = new KafkaStreams(fraudDetectionTopology, kafkaStreamProperties);
    streamManager.start();

    Runtime.getRuntime().addShutdownHook(new Thread(streamManager::close));
}
Example #11
Source File: WordCount.java From fluent-kafka-streams-tests with MIT License
public Topology getTopology() {
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();

    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> textLines = builder.stream(this.inputTopic);

    final Pattern pattern = Pattern.compile("\\W+", Pattern.UNICODE_CHARACTER_CLASS);
    final KTable<String, Long> wordCounts = textLines
            .flatMapValues(value -> Arrays.asList(pattern.split(value.toLowerCase())))
            .groupBy((key, word) -> word)
            .count();

    wordCounts.toStream().to(this.outputTopic, Produced.with(stringSerde, longSerde));
    return builder.build();
}
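Since this example comes from a testing-focused project, a companion sketch may be useful: driving the topology above with Kafka's TopologyTestDriver. This assumes kafka-streams-test-utils 2.4+ and a WordCount instance wired to topics named "input" and "output"; both names and the no-argument constructor are assumptions, since the class's wiring is not shown above.

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;

public class WordCountSmokeTest {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-test");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        // "input" and "output" are assumed topic names for this sketch.
        try (TopologyTestDriver driver = new TopologyTestDriver(new WordCount().getTopology(), props)) {
            TestInputTopic<String, String> input = driver.createInputTopic(
                    "input", Serdes.String().serializer(), Serdes.String().serializer());
            TestOutputTopic<String, Long> output = driver.createOutputTopic(
                    "output", Serdes.String().deserializer(), Serdes.Long().deserializer());

            input.pipeInput("hello hello world");
            // The test driver forwards each update eagerly, so the first record
            // should be the first count emitted, e.g. ("hello", 1L).
            KeyValue<String, Long> first = output.readKeyValue();
            System.out.println(first);
        }
    }
}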
Example #12
Source File: AppSerdes.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static Serde<PaymentConfirmation> PaymentConfirmation() {
    PaymentConfirmationSerde serde = new PaymentConfirmationSerde();
    Map<String, Object> serdeConfigs = new HashMap<>();
    serdeConfigs.put("specific.class.name", PaymentConfirmation.class);
    serde.configure(serdeConfigs, false);
    return serde;
}
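Factory methods like this are typically consumed when defining a topology. A hedged usage sketch follows; the String key serde and the topic name "payment-confirmations" are assumptions for illustration, not part of the original project.

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;

public class PaymentConfirmationTopology {
    public static void main(String[] args) {
        StreamsBuilder builder = new StreamsBuilder();
        // Topic name and String key serde are assumptions for this sketch.
        KStream<String, PaymentConfirmation> confirmations = builder.stream(
                "payment-confirmations",
                Consumed.with(Serdes.String(), AppSerdes.PaymentConfirmation()));
        confirmations.foreach((key, confirmation) -> System.out.println(confirmation));
        // A real application would now build and start a KafkaStreams instance.
    }
}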
Example #13
Source File: AvroGenericUtils.java From simplesource with Apache License 2.0
public static Serde<GenericRecord> genericAvroSerde(
        final String schemaRegistryUrl,
        final boolean useMockSchemaRegistry,
        final boolean isKey,
        final SchemaNameStrategy schemaNameStrategy) {
    final Map<String, Object> configMap = avroSchemaRegistryConfig(schemaRegistryUrl, schemaNameStrategy);
    final Serde<GenericRecord> serde = useMockSchemaRegistry
            ? new GenericAvroSerde(new MockSchemaRegistryClient())
            : new GenericAvroSerde();
    serde.configure(configMap, isKey);
    return serde;
}
Example #14
Source File: SimulateStreamService.java From SkaETL with Apache License 2.0
public void createStreamSystemOut(String topicToConsume) {
    StreamsBuilder builder = new StreamsBuilder();
    final Serde<SimulateData> simulateDataSerde =
            Serdes.serdeFrom(new SimulateDataSerializer(), new SimulateDataDeserializer());
    builder.stream(topicToConsume, Consumed.with(Serdes.String(), simulateDataSerde))
            .process(() -> new LoggingProcessor<>());
    KafkaStreams streams = new KafkaStreams(builder.build(),
            createKStreamProperties(SYSOUT_PROCESS, getBootstrapServer()));
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    streams.start();
}
Example #15
Source File: StockPerformanceStreamsAndProcessorApplication.java From kafka-streams-in-action with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    Serde<String> stringSerde = Serdes.String();
    Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
    Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();

    StreamsBuilder builder = new StreamsBuilder();

    String stocksStateStore = "stock-performance-store";
    double differentialThreshold = 0.02;

    KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
    StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder =
            Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

    builder.addStateStore(storeBuilder);

    builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
            .transform(() -> new StockPerformanceTransformer(stocksStateStore, differentialThreshold), stocksStateStore)
            .print(Printed.<String, StockPerformance>toSysOut().withLabel("StockPerformance"));

    //Uncomment this line and comment out the line above for writing to a topic
    //.to(stringSerde, stockPerformanceSerde, "stock-performance");

    KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
    MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
    System.out.println("Stock Analysis KStream/Process API App Started");
    kafkaStreams.cleanUp();
    kafkaStreams.start();
    Thread.sleep(70000);
    System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
    kafkaStreams.close();
    MockDataProducer.shutdown();
}
Example #16
Source File: AppSerdes.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static Serde<AdImpression> AdImpression() {
    AdImpressionSerde serde = new AdImpressionSerde();
    Map<String, Object> serdeConfigs = new HashMap<>();
    serdeConfigs.put("specific.class.name", AdImpression.class);
    serde.configure(serdeConfigs, false);
    return serde;
}
Example #17
Source File: KsqlDelimitedTopicSerDe.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Override
public Serde<GenericRow> getGenericRowSerde(Schema schema, KsqlConfig ksqlConfig,
                                            boolean isInternal, SchemaRegistryClient schemaRegistryClient) {
    Map<String, Object> serdeProps = new HashMap<>();

    final Serializer<GenericRow> genericRowSerializer = new KsqlDelimitedSerializer(schema);
    genericRowSerializer.configure(serdeProps, false);

    final Deserializer<GenericRow> genericRowDeserializer = new KsqlDelimitedDeserializer(schema);
    genericRowDeserializer.configure(serdeProps, false);

    return Serdes.serdeFrom(genericRowSerializer, genericRowDeserializer);
}
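Serdes.serdeFrom(serializer, deserializer), used above, is the standard factory for assembling a custom Serde from an existing Serializer/Deserializer pair. As a self-contained illustration of the same pattern, here is a sketch of a Serde for java.time.Instant encoded as epoch milliseconds; it assumes a kafka-clients version (2.0+) where Serializer and Deserializer provide default configure/close methods.

import java.nio.ByteBuffer;
import java.time.Instant;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;

public class InstantSerde {
    public static Serde<Instant> instant() {
        Serializer<Instant> serializer = new Serializer<Instant>() {
            @Override
            public byte[] serialize(String topic, Instant data) {
                if (data == null) {
                    return null;
                }
                // Encode the instant as an 8-byte epoch-millis value.
                return ByteBuffer.allocate(Long.BYTES).putLong(data.toEpochMilli()).array();
            }
        };
        Deserializer<Instant> deserializer = new Deserializer<Instant>() {
            @Override
            public Instant deserialize(String topic, byte[] data) {
                if (data == null) {
                    return null;
                }
                return Instant.ofEpochMilli(ByteBuffer.wrap(data).getLong());
            }
        };
        return Serdes.serdeFrom(serializer, deserializer);
    }
}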
Example #18
Source File: KeyValueSerdeResolver.java From spring-cloud-stream-binder-kafka with Apache License 2.0
/**
 * Provide the {@link Serde} for state store value.
 * @param valueSerdeString serde class used for value
 * @return {@link Serde} for the state store value.
 */
public Serde<?> getStateStoreValueSerde(String valueSerdeString) {
    try {
        return getValueSerde(valueSerdeString);
    }
    catch (ClassNotFoundException ex) {
        throw new IllegalStateException("Serde class not found: ", ex);
    }
}
Example #19
Source File: SchemaKGroupedStream.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@SuppressWarnings("unchecked") public SchemaKTable aggregate( final Initializer initializer, final UdafAggregator aggregator, final WindowExpression windowExpression, final Serde<GenericRow> topicValueSerDe) { final KTable aggKtable; if (windowExpression != null) { final Materialized<String, GenericRow, ?> materialized = Materialized.<String, GenericRow, WindowStore<Bytes, byte[]>>with( Serdes.String(), topicValueSerDe); final KsqlWindowExpression ksqlWindowExpression = windowExpression.getKsqlWindowExpression(); aggKtable = ksqlWindowExpression.applyAggregate( kgroupedStream, initializer, aggregator, materialized ); } else { aggKtable = kgroupedStream.aggregate( initializer, aggregator, Materialized.with(Serdes.String(), topicValueSerDe) ); } return new SchemaKTable( schema, aggKtable, keyField, sourceSchemaKStreams, windowExpression != null, SchemaKStream.Type.AGGREGATE, functionRegistry, schemaRegistryClient ); }
Example #20
Source File: TwitterStreamsAnalyzer.java From kafka-streams with Apache License 2.0
public void run() {
    StreamsConfig streamsConfig = new StreamsConfig(getProperties());

    JsonSerializer<Tweet> tweetJsonSerializer = new JsonSerializer<>();
    JsonDeserializer<Tweet> tweetJsonDeserializer = new JsonDeserializer<>(Tweet.class);
    Serde<Tweet> tweetSerde = Serdes.serdeFrom(tweetJsonSerializer, tweetJsonDeserializer);

    KStreamBuilder kStreamBuilder = new KStreamBuilder();

    Classifier classifier = new Classifier();
    classifier.train(new File("src/main/resources/kafkaStreamsTwitterTrainingData_clean.csv"));

    KeyValueMapper<String, Tweet, String> languageToKey = (k, v) ->
            StringUtils.isNotBlank(v.getText()) ? classifier.classify(v.getText()) : "unknown";

    Predicate<String, Tweet> isEnglish = (k, v) -> k.equals("english");
    Predicate<String, Tweet> isFrench = (k, v) -> k.equals("french");
    Predicate<String, Tweet> isSpanish = (k, v) -> k.equals("spanish");

    KStream<String, Tweet> tweetKStream = kStreamBuilder.stream(Serdes.String(), tweetSerde, "twitterData");

    KStream<String, Tweet>[] filteredStreams =
            tweetKStream.selectKey(languageToKey).branch(isEnglish, isFrench, isSpanish);

    filteredStreams[0].to(Serdes.String(), tweetSerde, "english");
    filteredStreams[1].to(Serdes.String(), tweetSerde, "french");
    filteredStreams[2].to(Serdes.String(), tweetSerde, "spanish");

    kafkaStreams = new KafkaStreams(kStreamBuilder, streamsConfig);
    System.out.println("Starting twitter analysis streams");
    kafkaStreams.start();
    System.out.println("Started");
}
Example #21
Source File: DynamicOutputTopic.java From kafka-tutorials with Apache License 2.0
public Topology buildTopology(Properties envProps) {
    final StreamsBuilder builder = new StreamsBuilder();

    final String orderInputTopic = envProps.getProperty("input.topic.name");
    final String orderOutputTopic = envProps.getProperty("output.topic.name");
    final String specialOrderOutput = envProps.getProperty("special.order.topic.name");

    final Serde<Long> longSerde = getPrimitiveAvroSerde(envProps, true);
    final Serde<Order> orderSerde = getSpecificAvroSerde(envProps);
    final Serde<CompletedOrder> completedOrderSerde = getSpecificAvroSerde(envProps);

    final ValueMapper<Order, CompletedOrder> orderProcessingSimulator = v -> {
        double amount = v.getQuantity() * FAKE_PRICE;
        return CompletedOrder.newBuilder()
                .setAmount(amount)
                .setId(v.getId() + "-" + v.getSku())
                .setName(v.getName())
                .build();
    };

    final TopicNameExtractor<Long, CompletedOrder> orderTopicNameExtractor = (key, completedOrder, recordContext) -> {
        final String compositeId = completedOrder.getId();
        final String skuPart = compositeId.substring(compositeId.indexOf('-') + 1, 5);
        final String outTopic;
        if (skuPart.equals("QUA")) {
            outTopic = specialOrderOutput;
        } else {
            outTopic = orderOutputTopic;
        }
        return outTopic;
    };

    final KStream<Long, Order> exampleStream = builder.stream(orderInputTopic, Consumed.with(longSerde, orderSerde));

    exampleStream.mapValues(orderProcessingSimulator)
            .to(orderTopicNameExtractor, Produced.with(longSerde, completedOrderSerde));

    return builder.build();
}
Example #22
Source File: GraphSerialized.java From kafka-graphs with Apache License 2.0
private GraphSerialized(Serde<K> keySerde, Serde<VV> vertexValueSerde, Serde<EV> edgeValueSerde) {
    this.keySerde = keySerde;
    this.vertexValueSerde = vertexValueSerde;
    this.edgeValueSerde = edgeValueSerde;
}
Example #23
Source File: FkJoinTableToTable.java From kafka-tutorials with Apache License 2.0
@SuppressWarnings("unchecked") static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, boolean isKey) { final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(); final KafkaAvroSerializer serializer = new KafkaAvroSerializer(); final Map<String, String> config = new HashMap<>(); config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, envProps.getProperty("schema.registry.url")); deserializer.configure(config, isKey); serializer.configure(config, isKey); return (Serde<T>)Serdes.serdeFrom(serializer, deserializer); }
Example #24
Source File: AppSerdes.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static Serde<CampaignPerformance> CampaignPerformance() {
    CampaignPerformanceSerde serde = new CampaignPerformanceSerde();
    Map<String, Object> serdeConfigs = new HashMap<>();
    serdeConfigs.put("specific.class.name", CampaignPerformance.class);
    serde.configure(serdeConfigs, false);
    return serde;
}
Example #25
Source File: KafkaStreamsAggregateSampleTests.java From spring-cloud-stream-samples with Apache License 2.0
@Test
public void testKafkaStreamsWordCountProcessor() throws Exception {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ObjectMapper mapper = new ObjectMapper();
    Serde<DomainEvent> domainEventSerde = new JsonSerde<>(DomainEvent.class, mapper);
    senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, domainEventSerde.serializer().getClass());
    DefaultKafkaProducerFactory<String, DomainEvent> pf = new DefaultKafkaProducerFactory<>(senderProps);
    try {
        KafkaTemplate<String, DomainEvent> template = new KafkaTemplate<>(pf, true);
        template.setDefaultTopic("foobar");

        DomainEvent ddEvent = new DomainEvent();
        ddEvent.setBoardUuid("12345");
        ddEvent.setEventType("create-domain-event");

        template.sendDefault("", ddEvent);
        Thread.sleep(1000);

        RestTemplate restTemplate = new RestTemplate();
        String fooResourceUrl = "http://localhost:" + randomServerPort + "/events";
        ResponseEntity<String> response = restTemplate.getForEntity(fooResourceUrl, String.class);
        assertThat(response.getBody()).contains("create-domain-event");
    }
    finally {
        pf.destroy();
    }
}
Example #26
Source File: KStreamBinder.java From spring-cloud-stream-binder-kafka with Apache License 2.0
@Override @SuppressWarnings("unchecked") protected Binding<KStream<Object, Object>> doBindProducer(String name, KStream<Object, Object> outboundBindTarget, ExtendedProducerProperties<KafkaStreamsProducerProperties> properties) { ExtendedProducerProperties<KafkaProducerProperties> extendedProducerProperties = (ExtendedProducerProperties) properties; this.kafkaTopicProvisioner.provisionProducerDestination(name, extendedProducerProperties); Serde<?> keySerde = this.keyValueSerdeResolver .getOuboundKeySerde(properties.getExtension(), kafkaStreamsBindingInformationCatalogue.getOutboundKStreamResolvable()); LOG.info("Key Serde used for (outbound) " + name + ": " + keySerde.getClass().getName()); Serde<?> valueSerde; if (properties.isUseNativeEncoding()) { valueSerde = this.keyValueSerdeResolver.getOutboundValueSerde(properties, properties.getExtension(), kafkaStreamsBindingInformationCatalogue.getOutboundKStreamResolvable()); } else { valueSerde = Serdes.ByteArray(); } LOG.info("Value Serde used for (outbound) " + name + ": " + valueSerde.getClass().getName()); to(properties.isUseNativeEncoding(), name, outboundBindTarget, (Serde<Object>) keySerde, (Serde<Object>) valueSerde, properties.getExtension()); return new DefaultBinding<>(name, null, outboundBindTarget, null); }
Example #27
Source File: KafkaCache.java From kcache with Apache License 2.0
private void setUp(KafkaCacheConfig config,
                   Serde<K> keySerde,
                   Serde<V> valueSerde,
                   CacheUpdateHandler<K, V> cacheUpdateHandler,
                   Cache<K, V> localCache) {
    this.topic = config.getString(KafkaCacheConfig.KAFKACACHE_TOPIC_CONFIG);
    this.desiredReplicationFactor = config.getInt(KafkaCacheConfig.KAFKACACHE_TOPIC_REPLICATION_FACTOR_CONFIG);
    this.desiredNumPartitions = config.getInt(KafkaCacheConfig.KAFKACACHE_TOPIC_NUM_PARTITIONS_CONFIG);
    this.groupId = config.getString(KafkaCacheConfig.KAFKACACHE_GROUP_ID_CONFIG);
    this.clientId = config.getString(KafkaCacheConfig.KAFKACACHE_CLIENT_ID_CONFIG);
    if (this.clientId == null) {
        this.clientId = "kafka-cache-reader-" + this.topic;
    }
    this.requireCompact = config.getBoolean(KafkaCacheConfig.KAFKACACHE_TOPIC_REQUIRE_COMPACT_CONFIG);
    this.initTimeout = config.getInt(KafkaCacheConfig.KAFKACACHE_INIT_TIMEOUT_CONFIG);
    this.timeout = config.getInt(KafkaCacheConfig.KAFKACACHE_TIMEOUT_CONFIG);
    this.checkpointDir = config.getString(KafkaCacheConfig.KAFKACACHE_CHECKPOINT_DIR_CONFIG);
    this.cacheUpdateHandler = cacheUpdateHandler != null
            ? cacheUpdateHandler
            : (key, value, oldValue, tp, offset, ts) -> { };
    this.keySerde = keySerde;
    this.valueSerde = valueSerde;
    this.localCache = localCache;
    this.config = config;
    this.bootstrapBrokers = config.bootstrapBrokers();
    log.info("Initializing Kafka cache {} with broker endpoints {} ", clientId, bootstrapBrokers);
}
Example #28
Source File: AppSerdes.java From Kafka-Streams-Real-time-Stream-Processing with The Unlicense
public static Serde<AdInventories> AdInventories() {
    AdInventoriesSerde serde = new AdInventoriesSerde();
    Map<String, Object> serdeConfigs = new HashMap<>();
    serdeConfigs.put("specific.class.name", AdInventories.class);
    serde.configure(serdeConfigs, false);
    return serde;
}
Example #29
Source File: SpecificBlockingRestKiqrClientImpl.java From kiqr with Apache License 2.0
public SpecificBlockingRestKiqrClientImpl(String host, int port, String store,
                                          Class<K> keyClass, Class<V> valueClass,
                                          Serde<K> keySerde, Serde<V> valueSerde) {
    this.keyClass = keyClass;
    this.valueClass = valueClass;
    this.keySerde = keySerde;
    this.valueSerde = valueSerde;
    this.genericClient = initGenericService(host, port);
    this.store = store;
}
Example #30
Source File: AbstractKafkaStreamsBinderProcessor.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private GlobalKTable<?, ?> getGlobalKTable(KafkaStreamsConsumerProperties kafkaStreamsConsumerProperties,
        StreamsBuilder streamsBuilder, Serde<?> keySerde, Serde<?> valueSerde,
        String materializedAs, String bindingDestination, Topology.AutoOffsetReset autoOffsetReset) {
    final Consumed<?, ?> consumed = getConsumed(kafkaStreamsConsumerProperties, keySerde, valueSerde, autoOffsetReset);
    return materializedAs != null
            ? materializedAsGlobalKTable(streamsBuilder, bindingDestination, materializedAs,
                    keySerde, valueSerde, autoOffsetReset, kafkaStreamsConsumerProperties)
            : streamsBuilder.globalTable(bindingDestination, consumed);
}