org.apache.kafka.streams.StreamsConfig Scala Examples
The following examples show how to use org.apache.kafka.streams.StreamsConfig.
Example 1
Source File: TestStreamsConfig.scala From scalatest-embedded-kafka with MIT License
package net.manub.embeddedkafka.streams

import java.nio.file.Files

import net.manub.embeddedkafka.EmbeddedKafkaConfig
import org.apache.kafka.clients.consumer.{ConsumerConfig, OffsetResetStrategy}
import org.apache.kafka.streams.StreamsConfig

trait TestStreamsConfig {

  def streamConfig(streamName: String, extraConfig: Map[String, AnyRef] = Map.empty)(
      implicit kafkaConfig: EmbeddedKafkaConfig): StreamsConfig = {
    import scala.collection.JavaConverters._

    val defaultConfig = Map(
      StreamsConfig.APPLICATION_ID_CONFIG -> streamName,
      StreamsConfig.BOOTSTRAP_SERVERS_CONFIG -> s"localhost:${kafkaConfig.kafkaPort}",
      StreamsConfig.STATE_DIR_CONFIG -> Files
        .createTempDirectory(streamName)
        .toString,
      // force stream consumers to start reading from the beginning so as not to lose messages
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> OffsetResetStrategy.EARLIEST.toString.toLowerCase
    )
    val configOverwrittenByExtra = defaultConfig ++ extraConfig
    new StreamsConfig(configOverwrittenByExtra.asJava)
  }
}
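A minimal usage sketch, not part of the library: a hypothetical spec mixes in TestStreamsConfig, brings an implicit EmbeddedKafkaConfig into scope, and passes overrides through extraConfig (the stream name and thread count below are made up).

class MyStreamSpec extends TestStreamsConfig {
  implicit val kafkaConfig: EmbeddedKafkaConfig = EmbeddedKafkaConfig()

  // entries in extraConfig win over the defaults built inside streamConfig
  val config: StreamsConfig =
    streamConfig("word-count-test", Map(StreamsConfig.NUM_STREAM_THREADS_CONFIG -> "2"))
}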
Example 2
Source File: AggregationExampleWithSAM.scala From kafka-streams-scala-examples with Apache License 2.0
package com.knoldus.kafka.examples

import java.util.Properties

import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.kstream._
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig}

import scala.collection.JavaConverters._

object AggregationExampleWithSAM {
  def main(args: Array[String]): Unit = {
    val config = {
      val properties = new Properties()
      properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-application")
      properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
      properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass)
      properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass)
      properties
    }

    val stringSerde = Serdes.String()
    val longSerde = Serdes.Long()

    val builder = new KStreamBuilder()
    val originalStream = builder.stream("SourceTopic")

    // works only with Scala 2.12.x (SAM conversion of Kafka's functional interfaces)
    val mappedStream: KTable[String, java.lang.Long] =
      originalStream
        .flatMapValues((value: String) => value.toLowerCase.split("\\W+").toIterable.asJava)
        .groupBy((_, word) => word)
        .count("Counts")
    mappedStream.to(stringSerde, longSerde, "SinkTopic")

    val streams = new KafkaStreams(builder, config)
    streams.start()
  }
}
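Note that KStreamBuilder, count("Counts"), and to(keySerde, valueSerde, topic) belong to the pre-1.0 Kafka Streams DSL and were removed in later releases; Example 7 below shows the equivalent word count written against the newer kafka-streams-scala DSL.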
Example 3
Source File: AggregationExample.scala From kafka-streams-scala-examples with Apache License 2.0
package com.knoldus.kafka.examples

import java.util.Properties

import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.kstream.{KStreamBuilder, KeyValueMapper, ValueMapper}
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig}

import scala.collection.JavaConverters._

object AggregationExample {
  def main(args: Array[String]): Unit = {
    val config = {
      val properties = new Properties()
      properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-application")
      properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
      properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass)
      properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass)
      properties
    }

    val stringSerde = Serdes.String()
    val longSerde = Serdes.Long()

    val builder = new KStreamBuilder()
    val originalStream = builder.stream("SourceTopic")

    val mappedStream = originalStream.flatMapValues[String] {
      new ValueMapper[String, java.lang.Iterable[java.lang.String]]() {
        override def apply(value: String): java.lang.Iterable[java.lang.String] = {
          value.toLowerCase.split("\\W+").toIterable.asJava
        }
      }
    }.groupBy {
      new KeyValueMapper[String, String, String]() {
        override def apply(key: String, word: String): String = word
      }
    }.count("Counts")
    mappedStream.to(stringSerde, longSerde, "SinkTopic")

    val streams = new KafkaStreams(builder, config)
    streams.start()
  }
}
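This is the same topology as Example 2 with the lambdas expanded into explicit ValueMapper and KeyValueMapper anonymous classes, which is what the code must look like on Scala 2.11, where SAM conversion for Java functional interfaces is not enabled by default.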
Example 4
Source File: AppConfiguration.scala From haystack-trends with Apache License 2.0
package com.expedia.www.haystack.trends.config

import java.util.Properties

import com.expedia.www.haystack.commons.config.ConfigurationLoader
import com.expedia.www.haystack.commons.entities.encoders.EncoderFactory
import com.expedia.www.haystack.trends.config.entities.{KafkaConfiguration, TransformerConfiguration}
import com.typesafe.config.Config
import org.apache.kafka.streams.StreamsConfig
import org.apache.kafka.streams.Topology.AutoOffsetReset
import org.apache.kafka.streams.processor.TimestampExtractor

import scala.collection.JavaConverters._
import scala.util.matching.Regex

class AppConfiguration {

  private val config = ConfigurationLoader.loadConfigFileWithEnvOverrides()

  val healthStatusFilePath: String = config.getString("health.status.path")

  def kafkaConfig: KafkaConfiguration = {

    // verify that the applicationId and bootstrap servers configs are non-empty
    def verifyRequiredProps(props: Properties): Unit = {
      require(props.getProperty(StreamsConfig.APPLICATION_ID_CONFIG).nonEmpty)
      require(props.getProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG).nonEmpty)
    }

    def addProps(config: Config, props: Properties, prefix: (String) => String = identity): Unit = {
      config.entrySet().asScala.foreach(kv => {
        val propKeyName = prefix(kv.getKey)
        props.setProperty(propKeyName, kv.getValue.unwrapped().toString)
      })
    }

    val kafka = config.getConfig("kafka")
    val producerConfig = kafka.getConfig("producer")
    val consumerConfig = kafka.getConfig("consumer")
    val streamsConfig = kafka.getConfig("streams")

    val props = new Properties

    // add stream-specific properties
    addProps(streamsConfig, props)

    // validate props
    verifyRequiredProps(props)

    val timestampExtractor = Class.forName(props.getProperty("timestamp.extractor",
      "org.apache.kafka.streams.processor.WallclockTimestampExtractor"))

    KafkaConfiguration(
      new StreamsConfig(props),
      produceTopic = producerConfig.getString("topic"),
      consumeTopic = consumerConfig.getString("topic"),
      if (streamsConfig.hasPath("auto.offset.reset"))
        AutoOffsetReset.valueOf(streamsConfig.getString("auto.offset.reset").toUpperCase)
      else
        AutoOffsetReset.LATEST,
      timestampExtractor.newInstance().asInstanceOf[TimestampExtractor],
      kafka.getLong("close.timeout.ms")
    )
  }
}
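For reference, a minimal sketch of the HOCON shape this loader reads, expressed inline via Typesafe Config's parseString; all topic names, ports, and the health-file path below are hypothetical, and the real application loads the file through ConfigurationLoader with environment-variable overrides.

import com.typesafe.config.ConfigFactory

// hypothetical config document covering every path kafkaConfig dereferences
val sampleConfig = ConfigFactory.parseString(
  """
    |health.status.path = "/app/isHealthy"
    |kafka {
    |  close.timeout.ms = 30000
    |  streams {
    |    application.id = "haystack-trends"
    |    bootstrap.servers = "kafkasvc:9092"
    |    auto.offset.reset = "earliest"
    |  }
    |  producer { topic = "metrics" }
    |  consumer { topic = "spans" }
    |}
  """.stripMargin)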
Example 5
Source File: FeatureSpec.scala From haystack-trends with Apache License 2.0
package com.expedia.www.haystack.trends.feature

import java._
import java.util.Properties

import com.expedia.metrics.MetricData
import com.expedia.open.tracing.Span
import com.expedia.www.haystack.commons.entities.encoders.Base64Encoder
import com.expedia.www.haystack.trends.config.AppConfiguration
import com.expedia.www.haystack.trends.config.entities.{KafkaConfiguration, TransformerConfiguration}
import org.apache.kafka.streams.StreamsConfig
import org.easymock.EasyMock
import org.scalatest.easymock.EasyMockSugar
import org.scalatest.{FeatureSpecLike, GivenWhenThen, Matchers}

trait FeatureSpec extends FeatureSpecLike with GivenWhenThen with Matchers with EasyMockSugar {

  protected val METRIC_TYPE = "gauge"

  def generateTestSpan(duration: Long): Span = {
    val operationName = "testSpan"
    val serviceName = "testService"

    Span.newBuilder()
      .setDuration(duration)
      .setOperationName(operationName)
      .setServiceName(serviceName)
      .build()
  }

  protected def mockAppConfig: AppConfiguration = {
    val kafkaConsumeTopic = "test-consume"
    val kafkaProduceTopic = "test-produce"
    val streamsConfig = new Properties()
    streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-app")
    streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "test-kafka-broker")
    val kafkaConfig = KafkaConfiguration(new StreamsConfig(streamsConfig),
      kafkaProduceTopic, kafkaConsumeTopic, null, null, 0L)
    val transformerConfig = TransformerConfiguration(new Base64Encoder,
      enableMetricPointServiceLevelGeneration = true, List())
    val appConfiguration = mock[AppConfiguration]

    expecting {
      appConfiguration.kafkaConfig.andReturn(kafkaConfig).anyTimes()
      appConfiguration.transformerConfiguration.andReturn(transformerConfig).anyTimes()
    }
    EasyMock.replay(appConfiguration)
    appConfiguration
  }

  protected def getMetricDataTags(metricData: MetricData): util.Map[String, String] = {
    metricData.getMetricDefinition.getTags.getKv
  }
}
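Passing null for the offset-reset and timestamp-extractor fields (and 0L for the close timeout) is tolerable here only because the specs built on this mock never dereference those fields; Example 6 shows a variant of the same trait that fills them in properly.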
Example 6
Source File: FeatureSpec.scala From haystack-trends with Apache License 2.0
package com.expedia.www.haystack.trends.feature

import java.util
import java.util.Properties

import com.expedia.metrics.{MetricData, MetricDefinition, TagCollection}
import com.expedia.www.haystack.commons.entities.encoders.PeriodReplacementEncoder
import com.expedia.www.haystack.trends.config.AppConfiguration
import com.expedia.www.haystack.trends.config.entities.{KafkaConfiguration, KafkaProduceConfiguration, KafkaSinkTopic, StateStoreConfiguration}
import org.apache.kafka.streams.StreamsConfig
import org.apache.kafka.streams.Topology.AutoOffsetReset
import org.apache.kafka.streams.processor.WallclockTimestampExtractor
import org.easymock.EasyMock
import org.scalatest._
import org.scalatest.easymock.EasyMockSugar

import scala.collection.JavaConverters._

trait FeatureSpec extends FeatureSpecLike with GivenWhenThen with Matchers with EasyMockSugar {

  def currentTimeInSecs: Long = {
    System.currentTimeMillis() / 1000L
  }

  protected def mockAppConfig: AppConfiguration = {
    val kafkaConsumeTopic = "test-consume"
    val kafkaProduceTopic = "test-produce"
    val kafkaMetricTankProduceTopic = "test-mdm-produce"
    val streamsConfig = new Properties()
    streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-app")
    streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "test-kafka-broker")
    val kafkaSinkTopics = List(
      KafkaSinkTopic("metrics", "com.expedia.www.haystack.commons.kstreams.serde.metricdata.MetricDataSerde", true),
      KafkaSinkTopic("mdm", "com.expedia.www.haystack.commons.kstreams.serde.metricdata.MetricTankSerde", true))
    val kafkaConfig = KafkaConfiguration(new StreamsConfig(streamsConfig),
      KafkaProduceConfiguration(kafkaSinkTopics, None, "mdm", false), kafkaConsumeTopic,
      AutoOffsetReset.EARLIEST, new WallclockTimestampExtractor, 30000)
    val projectConfiguration = mock[AppConfiguration]

    expecting {
      projectConfiguration.kafkaConfig.andReturn(kafkaConfig).anyTimes()
      projectConfiguration.encoder.andReturn(new PeriodReplacementEncoder).anyTimes()
      projectConfiguration.stateStoreConfig.andReturn(StateStoreConfiguration(128, false, 60, Map())).anyTimes()
      projectConfiguration.additionalTags.andReturn(Map("k1" -> "v1", "k2" -> "v2")).anyTimes()
    }
    EasyMock.replay(projectConfiguration)
    projectConfiguration
  }

  protected def getMetricData(metricKey: String, tags: Map[String, String], value: Double, timeStamp: Long): MetricData = {
    val tagsMap = new java.util.LinkedHashMap[String, String] {
      if (tags != null) putAll(tags.asJava)
      put(MetricDefinition.MTYPE, "gauge")
      put(MetricDefinition.UNIT, "short")
    }
    val metricDefinition = new MetricDefinition(metricKey, new TagCollection(tagsMap), TagCollection.EMPTY)
    new MetricData(metricDefinition, value, timeStamp)
  }

  protected def containsTagInMetricData(metricData: MetricData, tagKey: String, tagValue: String): Boolean = {
    val tags = getTagsFromMetricData(metricData)
    tags.containsKey(tagKey) && tags.get(tagKey).equalsIgnoreCase(tagValue)
  }

  protected def getTagsFromMetricData(metricData: MetricData): util.Map[String, String] = {
    metricData.getMetricDefinition.getTags.getKv
  }
}
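A sketch of how a feature test might use these helpers (the metric key, tag values, and numbers are hypothetical; Matchers is already mixed in, so shouldBe is available):

val metricData = getMetricData("duration", Map("serviceName" -> "svc"), 42.0, currentTimeInSecs)
containsTagInMetricData(metricData, "serviceName", "svc") shouldBe true
// getMetricData always stamps the mtype tag
getTagsFromMetricData(metricData).get(MetricDefinition.MTYPE) shouldBe "gauge"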
Example 7
Source File: WordCount.scala From kafka-streams with Apache License 2.0
import java.time.Duration
import java.util.Properties

import org.apache.kafka.streams.scala.ImplicitConversions._
import org.apache.kafka.streams.scala._
import org.apache.kafka.streams.scala.kstream._
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig}

object WordCount extends App {

  import Serdes._

  val props: Properties = {
    val p = new Properties()
    p.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-modified")
    p.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
    p
  }

  val builder: StreamsBuilder = new StreamsBuilder

  val textLines: KStream[String, String] = builder.stream[String, String]("text_lines")

  val wordCounts: KTable[String, Long] = textLines
    .flatMapValues(textLine => textLine.toLowerCase.split("\\W+"))
    .groupBy((_, word) => word)
    .count()

  wordCounts.toStream.to("word_count_results")

  val streams: KafkaStreams = new KafkaStreams(builder.build(), props)
  streams.start()

  sys.ShutdownHookThread {
    streams.close(Duration.ofSeconds(10))
  }
}
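Because count() produces a KTable, the output topic receives a changelog rather than a single final answer: piping the line "kafka streams kafka" into text_lines yields ("kafka", 1), ("streams", 1), ("kafka", 2) on word_count_results, although record caching (enabled by default) may conflate intermediate updates.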
Example 8
Source File: WordCountTestable.scala From kafka-streams with Apache License 2.0
package com.supergloo

import java.time.Duration
import java.util.Properties

import org.apache.kafka.streams.kstream.Materialized
import org.apache.kafka.streams.scala.ImplicitConversions._
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig, Topology}
import org.apache.kafka.streams.scala.{Serdes, StreamsBuilder}
import org.apache.kafka.streams.scala.kstream.{KStream, KTable}

class WordCountTestable {

  import Serdes._

  val props: Properties = {
    val p = new Properties()
    p.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-modified")
    p.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
    p
  }

  def countNumberOfWords(inputTopic: String, outputTopic: String, storeName: String): Topology = {
    val builder: StreamsBuilder = new StreamsBuilder
    val textLines: KStream[String, String] = builder.stream[String, String](inputTopic)
    val wordCounts: KTable[String, Long] = textLines
      .flatMapValues(textLine => textLine.toLowerCase.split("\\W+"))
      .groupBy((_, word) => word)
      .count()(Materialized.as(storeName)) // use the parameter instead of a hard-coded store name
    wordCounts.toStream.to(outputTopic)
    builder.build()
  }

  def toLowerCaseStream(inputTopic: String, outputTopic: String): Topology = {
    val builder: StreamsBuilder = new StreamsBuilder()
    val textLines: KStream[String, String] = builder.stream(inputTopic)
    val wordCounts: KStream[String, String] = textLines
      .mapValues(textLine => textLine.toLowerCase)
    wordCounts.to(outputTopic)
    builder.build()
  }
}

object WordCountTestable extends WordCountTestable {
  def main(args: Array[String]): Unit = {
    val topology: Topology = countNumberOfWords("input-topic", "output-topic", "counts-store")
    val streams: KafkaStreams = new KafkaStreams(topology, props)
    streams.start()

    sys.ShutdownHookThread {
      streams.close(Duration.ofSeconds(10))
    }
  }
}
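Because countNumberOfWords returns a bare Topology, it can be exercised without a broker. A minimal test sketch, assuming kafka-streams-test-utils 2.4+ is on the classpath (the topic names are hypothetical):

import org.apache.kafka.common.serialization.{LongDeserializer, StringDeserializer, StringSerializer}
import org.apache.kafka.streams.TopologyTestDriver

val app = new WordCountTestable
val driver = new TopologyTestDriver(app.countNumberOfWords("in", "out", "counts-store"), app.props)

val input = driver.createInputTopic("in", new StringSerializer, new StringSerializer)
val output = driver.createOutputTopic("out", new StringDeserializer, new LongDeserializer)

input.pipeInput("line-1", "hello world hello")
// the changelog emits one update per word: (hello,1), (world,1), (hello,2)

driver.close()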
Example 9
Source File: Application.scala From kafka-serde-scala with Apache License 2.0
package io.github.azhur.kafkaserdescala.example

import java.util.Properties

import io.github.azhur.kafkaserdecirce.CirceSupport
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig, Topology}
import org.apache.kafka.streams.scala.StreamsBuilder

object Application extends App with CirceSupport {
  import io.circe.generic.auto._
  import org.apache.kafka.streams.scala.Serdes._
  import org.apache.kafka.streams.scala.ImplicitConversions._

  case class User(id: Long, name: String, age: Int)

  val topology = buildTopology("input_users", "output_users")

  val streamingApp = new KafkaStreams(topology, streamProperties())
  streamingApp.start()

  sys.addShutdownHook({
    streamingApp.close()
  })

  def buildTopology(inputTopic: String, outputTopic: String): Topology = {
    val streamsBuilder = new StreamsBuilder()
    streamsBuilder
      .stream[String, User](inputTopic)
      .filter((_, user) => user.age > 18)
      .to(outputTopic)
    streamsBuilder.build()
  }

  def streamProperties(): Properties = {
    val streamsConfiguration = new Properties
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-app")
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
      Topology.AutoOffsetReset.EARLIEST.toString.toLowerCase)
    streamsConfiguration
  }
}
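With io.circe.generic.auto in scope, CirceSupport derives a Serde[User] from the generated circe Encoder and Decoder, so stream[String, User] needs no hand-written serde: a record whose value is, say, {"id":1,"name":"Jane","age":25} (an illustrative payload) is decoded to User(1, "Jane", 25), passes the age > 18 filter, and is re-encoded as JSON onto output_users.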
Example 10
Source File: StreamProcessorApp.scala From event-sourcing-kafka-streams with MIT License
package org.amitayh.invoices.streamprocessor

import java.util.Properties
import java.util.concurrent.CountDownLatch

import org.amitayh.invoices.common.Config
import org.apache.kafka.streams.KafkaStreams.State
import org.apache.kafka.streams.{KafkaStreams, StreamsConfig, Topology}
import org.log4s.getLogger

trait StreamProcessorApp extends App {

  def appId: String

  def topology: Topology

  private val logger = getLogger

  private val latch = new CountDownLatch(1)

  private val streams: KafkaStreams = {
    val props = new Properties
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, appId)
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, Config.BootstrapServers)
    props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE)
    new KafkaStreams(topology, props)
  }

  streams.setStateListener((newState: State, oldState: State) => {
    logger.info(s"$oldState -> $newState")
  })

  streams.setUncaughtExceptionHandler((_: Thread, e: Throwable) => {
    logger.error(e)(s"Exception was thrown in stream processor $appId")
    latch.countDown()
  })

  def start(): Unit = {
    logger.info("Starting...")
    streams.start()
    sys.ShutdownHookThread(close())
    latch.await()
  }

  def close(): Unit = {
    logger.info("Shutting down...")
    streams.close()
  }

  start()
}
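A concrete processor then only supplies an application id and a topology. A minimal sketch, assuming the kafka-streams-scala DSL is available (the object name and topic names are hypothetical, not taken from this project):

import org.apache.kafka.streams.Topology
import org.apache.kafka.streams.scala.ImplicitConversions._
import org.apache.kafka.streams.scala.Serdes._
import org.apache.kafka.streams.scala.StreamsBuilder

object UppercaseProcessor extends StreamProcessorApp {
  override def appId: String = "uppercase-processor" // hypothetical id

  override def topology: Topology = {
    val builder = new StreamsBuilder
    builder.stream[String, String]("commands")   // hypothetical input topic
      .mapValues(_.toUpperCase)
      .to("commands-uppercased")                 // hypothetical output topic
    builder.build()
  }
}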