org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011 Java Examples
The following examples show how to use
org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011.
Each example is taken from an open-source project; the source file, project, and license are noted above the code.
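The examples exercise three constructor shapes: (topic, schema, properties), (brokerList, topic, schema), and (topic, schema, properties, partitioner). As a quick orientation, here is a minimal, self-contained sketch using the first shape; the broker address, topic name, and use of SimpleStringSchema are placeholder assumptions, not taken from any example below.

import java.util.Properties;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;

public class MinimalProducerSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Producer configuration; "localhost:9092" is a placeholder broker address.
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "localhost:9092");

        // Write a few strings to the (assumed) topic "demo-topic".
        env.fromElements("a", "b", "c")
            .addSink(new FlinkKafkaProducer011<>("demo-topic", new SimpleStringSchema(), props));

        env.execute("Minimal FlinkKafkaProducer011 sketch");
    }
}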
Example #1
Source File: Kafka011Example.java From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
        .addSource(
            new FlinkKafkaConsumer011<>(
                parameterTool.getRequired("input-topic"),
                new KafkaEventSchema(),
                parameterTool.getProperties())
            .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer011<>(
            parameterTool.getRequired("output-topic"),
            new KafkaEventSchema(),
            parameterTool.getProperties()));

    env.execute("Kafka 0.11 Example");
}
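The constructor above gives at-least-once delivery under checkpointing. FlinkKafkaProducer011 also supports transactional, exactly-once writes through its Semantic argument, which requires a KeyedSerializationSchema. A minimal sketch, assuming a String stream; the helper name and timeout value are illustrative, and checkpointing must be enabled on the job:

import java.util.Properties;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import org.apache.flink.streaming.util.serialization.KeyedSerializationSchemaWrapper;

public class ExactlyOnceSinkSketch {

    // Builds a transactional producer. The producer's transaction.timeout.ms
    // must not exceed the broker's transaction.max.timeout.ms (15 minutes by
    // default), so it is lowered here; the exact value is an assumption.
    public static FlinkKafkaProducer011<String> create(String topic, Properties props) {
        props.setProperty("transaction.timeout.ms", "900000");
        return new FlinkKafkaProducer011<>(
            topic,
            new KeyedSerializationSchemaWrapper<>(new SimpleStringSchema()),
            props,
            FlinkKafkaProducer011.Semantic.EXACTLY_ONCE);
    }
}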
Example #2
Source File: Kafka011Example.java From flink with Apache License 2.0
(The code is identical to Example #1 above; this project contains the same Kafka011Example.java.)
Example #3
Source File: FlinkDataPipeline.java From tutorials with MIT License
public static void capitalize() throws Exception {
    String inputTopic = "flink_input";
    String outputTopic = "flink_output";
    String consumerGroup = "baeldung";
    String address = "localhost:9092";

    StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

    FlinkKafkaConsumer011<String> flinkKafkaConsumer =
        createStringConsumerForTopic(inputTopic, address, consumerGroup);
    flinkKafkaConsumer.setStartFromEarliest();

    DataStream<String> stringInputStream = environment.addSource(flinkKafkaConsumer);

    FlinkKafkaProducer011<String> flinkKafkaProducer = createStringProducer(outputTopic, address);

    stringInputStream
        .map(new WordsCapitalizer())
        .addSink(flinkKafkaProducer);

    environment.execute();
}
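This example calls createStringConsumerForTopic and createStringProducer; the producer helper appears in Example #10 below, but the consumer helper is not shown in this listing. A plausible implementation, using standard Kafka consumer settings (the real project may differ in details):

import java.util.Properties;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

public class Consumers {

    // One plausible shape for the helper used above; the deserialization
    // schema and property set are assumptions.
    public static FlinkKafkaConsumer011<String> createStringConsumerForTopic(
            String topic, String kafkaAddress, String kafkaGroup) {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", kafkaAddress);
        props.setProperty("group.id", kafkaGroup);
        return new FlinkKafkaConsumer011<>(topic, new SimpleStringSchema(), props);
    }
}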
Example #4
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args);
    StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool);

    DataStreamSource<MetricEvent> data = KafkaConfigUtil.buildSource(env);

    data.addSink(new FlinkKafkaProducer011<>(
            parameterTool.get("kafka.sink.brokers"),
            parameterTool.get("kafka.sink.topic"),
            new MetricSchema()))
        .name("flink-connectors-kafka")
        .setParallelism(parameterTool.getInt("stream.sink.parallelism"));

    env.execute("flink learning connectors kafka");
}
Example #5
Source File: FlinkKafkaProducerTest1.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args);
    StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool);
    env.setParallelism(1);

    env.addSource(new SourceFunction<String>() {
        @Override
        public void run(SourceContext<String> context) throws Exception {
            // Emits one synthetic "page view" JSON event every 200 ms;
            // 'random' is a java.util.Random field on the enclosing class.
            while (true) {
                TimeZone tz = TimeZone.getTimeZone("Asia/Shanghai");
                Instant instant = Instant.ofEpochMilli(
                    System.currentTimeMillis() + tz.getOffset(System.currentTimeMillis()));
                String outline = String.format(
                    "{\"user_id\": \"%s\", \"item_id\":\"%s\", \"category_id\": \"%s\", \"behavior\": \"%s\", \"ts\": \"%s\"}",
                    random.nextInt(10), random.nextInt(100), random.nextInt(1000), "pv", instant.toString());
                context.collect(outline);
                Thread.sleep(200);
            }
        }

        @Override
        public void cancel() {
        }
    })
    .addSink(new FlinkKafkaProducer011<>(
        "localhost:9092",
        "user_behavior",
        new SimpleStringSchema()))
    .name("flink-connectors-kafka");

    env.execute("flink kafka connector test");
}
Example #6
Source File: Kafka011DynamicSink.java From flink with Apache License 2.0
@Override
protected SinkFunction<RowData> createKafkaProducer(
        String topic,
        Properties properties,
        SerializationSchema<RowData> serializationSchema,
        Optional<FlinkKafkaPartitioner<RowData>> partitioner) {
    return new FlinkKafkaProducer011<>(
        topic,
        serializationSchema,
        properties,
        partitioner);
}
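The fourth constructor argument here is an Optional<FlinkKafkaPartitioner<RowData>>. For reference, a custom partitioner is a small subclass of FlinkKafkaPartitioner; the hash-based routing below is purely illustrative, not the partitioner Flink uses by default:

import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;

// Illustrative only: routes each record to a partition chosen by the hash
// of its String form. Real partitioners usually key on a specific field.
public class HashPartitionerSketch<T> extends FlinkKafkaPartitioner<T> {

    @Override
    public int partition(T record, byte[] key, byte[] value, String targetTopic, int[] partitions) {
        // 'partitions' holds the partition IDs available for the target topic.
        return partitions[Math.floorMod(record.toString().hashCode(), partitions.length)];
    }
}

Such a partitioner would be passed to the constructor as Optional.of(new HashPartitionerSketch<>()).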
Example #7
Source File: FlinkDataPipeline.java From tutorials with MIT License
public static void createBackup() throws Exception {
    String inputTopic = "flink_input";
    String outputTopic = "flink_output";
    String consumerGroup = "baeldung";
    String kafkaAddress = "localhost:9092";

    StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
    environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

    FlinkKafkaConsumer011<InputMessage> flinkKafkaConsumer =
        createInputMessageConsumer(inputTopic, kafkaAddress, consumerGroup);
    flinkKafkaConsumer.setStartFromEarliest();
    flinkKafkaConsumer.assignTimestampsAndWatermarks(new InputMessageTimestampAssigner());

    FlinkKafkaProducer011<Backup> flinkKafkaProducer = createBackupProducer(outputTopic, kafkaAddress);

    DataStream<InputMessage> inputMessagesStream = environment.addSource(flinkKafkaConsumer);

    inputMessagesStream
        .timeWindowAll(Time.hours(24))
        .aggregate(new BackupAggregator())
        .addSink(flinkKafkaProducer);

    environment.execute();
}
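InputMessageTimestampAssigner is referenced above but not shown in this listing. One plausible shape, assuming InputMessage carries a millisecond timestamp and that messages arrive roughly in order; the getter name is an assumption:

import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;

// A plausible assigner for the pipeline above; assumes getSentAt()
// returns epoch milliseconds and timestamps are ascending.
public class InputMessageTimestampAssigner extends AscendingTimestampExtractor<InputMessage> {

    @Override
    public long extractAscendingTimestamp(InputMessage element) {
        return element.getSentAt();
    }
}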
Example #8
Source File: Kafka011SinkBuilder.java From Alink with Apache License 2.0
@Override
public RichSinkFunction<Row> build() {
    SerializationSchema<Row> serializationSchema = getSerializationSchema();
    return new FlinkKafkaProducer011<Row>(topic, serializationSchema, properties);
}
Example #9
Source File: Kafka011DynamicTableFactoryTest.java From flink with Apache License 2.0
@Override
protected Class<?> getExpectedProducerClass() {
    return FlinkKafkaProducer011.class;
}
Example #10
Source File: Producers.java From tutorials with MIT License
public static FlinkKafkaProducer011<String> createStringProducer(String topic, String kafkaAddress) {
    return new FlinkKafkaProducer011<>(kafkaAddress, topic, new SimpleStringSchema());
}
Example #11
Source File: Producers.java From tutorials with MIT License
public static FlinkKafkaProducer011<Backup> createBackupProducer(String topic, String kafkaAddress) {
    return new FlinkKafkaProducer011<Backup>(kafkaAddress, topic, new BackupSerializationSchema());
}
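Both helpers above use the convenience constructor that takes only a broker list, which leaves the remaining producer settings at their defaults. When batching, retries, or timeouts need tuning, the Properties-based constructor is the usual route; a sketch, where the specific settings and values are illustrative assumptions:

import java.util.Properties;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;

public class TunedProducers {

    // Same result as createStringProducer, but with explicit producer
    // settings; the retries and linger.ms values are illustrative.
    public static FlinkKafkaProducer011<String> createTunedStringProducer(
            String topic, String kafkaAddress) {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", kafkaAddress);
        props.setProperty("retries", "3");
        props.setProperty("linger.ms", "20");
        return new FlinkKafkaProducer011<>(topic, new SimpleStringSchema(), props);
    }
}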