org.apache.flink.streaming.kafka.test.base.KafkaEvent Java Examples
The following examples show how to use
org.apache.flink.streaming.kafka.test.base.KafkaEvent.
The source file and the project each example comes from are noted above it.
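The KafkaEvent class itself is not shown on this page. In the Flink end-to-end test base it is a small POJO carrying a word, a frequency count and an event timestamp, exchanged as a comma-separated string, and KafkaEventSchema simply round-trips that string form through the connectors. The following is a minimal sketch of that shape, reconstructed from how the examples use these classes; it is an approximation, not the verbatim Flink source, and each class would live in its own file.

import java.nio.charset.StandardCharsets;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;

// Approximate shape of the event type the examples operate on.
public class KafkaEvent {

    private String word;
    private int frequency;
    private long timestamp;

    public KafkaEvent() {}

    public KafkaEvent(String word, int frequency, long timestamp) {
        this.word = word;
        this.frequency = frequency;
        this.timestamp = timestamp;
    }

    public String getWord() { return word; }
    public int getFrequency() { return frequency; }
    public long getTimestamp() { return timestamp; }

    public void setWord(String word) { this.word = word; }
    public void setFrequency(int frequency) { this.frequency = frequency; }
    public void setTimestamp(long timestamp) { this.timestamp = timestamp; }

    // Events travel as "word,frequency,timestamp" strings.
    public static KafkaEvent fromString(String eventStr) {
        String[] split = eventStr.split(",");
        return new KafkaEvent(split[0], Integer.parseInt(split[1]), Long.parseLong(split[2]));
    }

    @Override
    public String toString() {
        return word + "," + frequency + "," + timestamp;
    }
}

// Approximate schema passed to the Kafka and Kinesis connectors in the examples:
// it serializes the string form above as UTF-8 bytes and parses it back.
public class KafkaEventSchema implements DeserializationSchema<KafkaEvent>, SerializationSchema<KafkaEvent> {

    @Override
    public byte[] serialize(KafkaEvent event) {
        return event.toString().getBytes(StandardCharsets.UTF_8);
    }

    @Override
    public KafkaEvent deserialize(byte[] message) {
        return KafkaEvent.fromString(new String(message, StandardCharsets.UTF_8));
    }

    @Override
    public boolean isEndOfStream(KafkaEvent nextElement) {
        return false;
    }

    @Override
    public TypeInformation<KafkaEvent> getProducedType() {
        return TypeInformation.of(KafkaEvent.class);
    }
}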
Example #1
Source File: Kafka011Example.java From Flink-CEPplus with Apache License 2.0 | 7 votes |
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
        .addSource(
            new FlinkKafkaConsumer011<>(
                parameterTool.getRequired("input-topic"),
                new KafkaEventSchema(),
                parameterTool.getProperties())
            .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer011<>(
            parameterTool.getRequired("output-topic"),
            new KafkaEventSchema(),
            parameterTool.getProperties()));

    env.execute("Kafka 0.11 Example");
}
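Every example on this page keys the stream by the word field and applies a RollingAdditionMapper after assigning timestamps with a CustomWatermarkExtractor; neither helper is shown here. Below is a plausible reconstruction, assuming KafkaEvent exposes word, frequency and timestamp as sketched above: the extractor derives event time (and a slightly lagging watermark) from the event's own timestamp field, and the mapper keeps a running per-word total of frequency in keyed state. The Kinesis examples register the same extractor via setPeriodicWatermarkAssigner, which is why the periodic assigner interface is assumed.

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.watermark.Watermark;

// Periodic watermark assigner that reads event time from the KafkaEvent itself (sketch).
public class CustomWatermarkExtractor implements AssignerWithPeriodicWatermarks<KafkaEvent> {

    private long currentTimestamp = Long.MIN_VALUE;

    @Override
    public long extractTimestamp(KafkaEvent event, long previousElementTimestamp) {
        this.currentTimestamp = event.getTimestamp();
        return event.getTimestamp();
    }

    @Override
    public Watermark getCurrentWatermark() {
        // lag one millisecond behind the latest event time seen so far
        return new Watermark(currentTimestamp == Long.MIN_VALUE ? Long.MIN_VALUE : currentTimestamp - 1);
    }
}

// Stateful mapper that accumulates the frequency per word (sketch).
public class RollingAdditionMapper extends RichMapFunction<KafkaEvent, KafkaEvent> {

    private transient ValueState<Integer> currentTotalCount;

    @Override
    public void open(Configuration parameters) {
        currentTotalCount = getRuntimeContext().getState(
            new ValueStateDescriptor<>("currentTotalCount", Integer.class));
    }

    @Override
    public KafkaEvent map(KafkaEvent event) throws Exception {
        Integer totalCount = currentTotalCount.value();
        if (totalCount == null) {
            totalCount = 0;
        }
        totalCount += event.getFrequency();
        currentTotalCount.update(totalCount);
        return new KafkaEvent(event.getWord(), totalCount, event.getTimestamp());
    }
}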
Example #2
Source File: Kafka010Example.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                new KafkaEventSchema(),
                parameterTool.getProperties())
            .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer010<>(
            parameterTool.getRequired("output-topic"),
            new KafkaEventSchema(),
            parameterTool.getProperties()));

    env.execute("Kafka 0.10 Example");
}
Example #3
Source File: KafkaExample.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
        .addSource(
            new FlinkKafkaConsumer<>(
                parameterTool.getRequired("input-topic"),
                new KafkaEventSchema(),
                parameterTool.getProperties())
            .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer<>(
            parameterTool.getRequired("output-topic"),
            new KeyedSerializationSchemaWrapper<>(new KafkaEventSchema()),
            parameterTool.getProperties(),
            FlinkKafkaProducer.Semantic.EXACTLY_ONCE));

    env.execute("Modern Kafka Example");
}
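Example #3 (and Example #5, the same file in the flink repository) uses the universal FlinkKafkaProducer with Semantic.EXACTLY_ONCE, which makes the sink participate in Kafka transactions. One practical detail the example leaves implicit: the producer's transaction.timeout.ms must not exceed the broker's transaction.max.timeout.ms (15 minutes on a default broker), while Flink's exactly-once producer defaults to a larger value, so such jobs commonly fail at startup unless the timeout is lowered or the broker limit raised. A hedged sketch of that adjustment, reusing parameterTool and input from the example above:

// Sketch: cap the Kafka transaction timeout before building the exactly-once sink,
// so it stays within the broker's transaction.max.timeout.ms (15 minutes by default).
Properties producerProperties = parameterTool.getProperties();
producerProperties.setProperty("transaction.timeout.ms", "900000");

input.addSink(
    new FlinkKafkaProducer<>(
        parameterTool.getRequired("output-topic"),
        new KeyedSerializationSchemaWrapper<>(new KafkaEventSchema()),
        producerProperties,
        FlinkKafkaProducer.Semantic.EXACTLY_ONCE));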
Example #4
Source File: Kafka010Example.java From flink with Apache License 2.0 | 6 votes |
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                new KafkaEventSchema(),
                parameterTool.getProperties())
            .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer010<>(
            parameterTool.getRequired("output-topic"),
            new KafkaEventSchema(),
            parameterTool.getProperties()));

    env.execute("Kafka 0.10 Example");
}
Example #5
Source File: KafkaExample.java From flink with Apache License 2.0 | 6 votes |
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
        .addSource(
            new FlinkKafkaConsumer<>(
                parameterTool.getRequired("input-topic"),
                new KafkaEventSchema(),
                parameterTool.getProperties())
            .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer<>(
            parameterTool.getRequired("output-topic"),
            new KeyedSerializationSchemaWrapper<>(new KafkaEventSchema()),
            parameterTool.getProperties(),
            FlinkKafkaProducer.Semantic.EXACTLY_ONCE));

    env.execute("Modern Kafka Example");
}
Example #6
Source File: Kafka011Example.java From flink with Apache License 2.0 | 6 votes |
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
        .addSource(
            new FlinkKafkaConsumer011<>(
                parameterTool.getRequired("input-topic"),
                new KafkaEventSchema(),
                parameterTool.getProperties())
            .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer011<>(
            parameterTool.getRequired("output-topic"),
            new KafkaEventSchema(),
            parameterTool.getProperties()));

    env.execute("Kafka 0.11 Example");
}
Example #7
Source File: KinesisExample.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    String inputStream = parameterTool.getRequired("input-stream");
    String outputStream = parameterTool.getRequired("output-stream");

    FlinkKinesisConsumer<KafkaEvent> consumer = new FlinkKinesisConsumer<>(
        inputStream,
        new KafkaEventSchema(),
        parameterTool.getProperties());
    consumer.setPeriodicWatermarkAssigner(new CustomWatermarkExtractor());

    Properties producerProperties = new Properties(parameterTool.getProperties());
    // producer needs region even when URL is specified
    producerProperties.putIfAbsent(ConsumerConfigConstants.AWS_REGION, "us-east-1");
    // test driver does not deaggregate
    producerProperties.putIfAbsent("AggregationEnabled", String.valueOf(false));

    // KPL does not recognize endpoint URL..
    String kinesisUrl = producerProperties.getProperty(ConsumerConfigConstants.AWS_ENDPOINT);
    if (kinesisUrl != null) {
        URL url = new URL(kinesisUrl);
        producerProperties.put("KinesisEndpoint", url.getHost());
        producerProperties.put("KinesisPort", Integer.toString(url.getPort()));
        producerProperties.put("VerifyCertificate", "false");
    }

    FlinkKinesisProducer<KafkaEvent> producer = new FlinkKinesisProducer<>(
        new KafkaEventSchema(),
        producerProperties);
    producer.setDefaultStream(outputStream);
    producer.setDefaultPartition("fakePartition");

    DataStream<KafkaEvent> input = env
        .addSource(consumer)
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(producer);
    env.execute();
}
Example #8
Source File: KinesisExample.java From flink with Apache License 2.0 | 4 votes |
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    String inputStream = parameterTool.getRequired("input-stream");
    String outputStream = parameterTool.getRequired("output-stream");

    FlinkKinesisConsumer<KafkaEvent> consumer = new FlinkKinesisConsumer<>(
        inputStream,
        new KafkaEventSchema(),
        parameterTool.getProperties());
    consumer.setPeriodicWatermarkAssigner(new CustomWatermarkExtractor());

    Properties producerProperties = new Properties(parameterTool.getProperties());
    // producer needs region even when URL is specified
    producerProperties.putIfAbsent(ConsumerConfigConstants.AWS_REGION, "us-east-1");
    // test driver does not deaggregate
    producerProperties.putIfAbsent("AggregationEnabled", String.valueOf(false));

    // KPL does not recognize endpoint URL..
    String kinesisUrl = producerProperties.getProperty(ConsumerConfigConstants.AWS_ENDPOINT);
    if (kinesisUrl != null) {
        URL url = new URL(kinesisUrl);
        producerProperties.put("KinesisEndpoint", url.getHost());
        producerProperties.put("KinesisPort", Integer.toString(url.getPort()));
        producerProperties.put("VerifyCertificate", "false");
    }

    FlinkKinesisProducer<KafkaEvent> producer = new FlinkKinesisProducer<>(
        new KafkaEventSchema(),
        producerProperties);
    producer.setDefaultStream(outputStream);
    producer.setDefaultPartition("fakePartition");

    DataStream<KafkaEvent> input = env
        .addSource(consumer)
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(producer);
    env.execute();
}