org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010 Java Examples
The following examples show how to use org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010, Flink's streaming sink for writing to Kafka 0.10. Each example notes its source file, the project it comes from, and that project's license.
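Before the examples, here is a minimal, self-contained sketch of the pattern they all share: build a DataStream, attach a FlinkKafkaProducer010 sink with a topic name, a serialization schema, and producer properties, then execute the job. The topic name and broker address below are placeholders, not values taken from any example on this page.

    import java.util.Properties;

    import org.apache.flink.api.common.serialization.SimpleStringSchema;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010;

    public class MinimalKafka010Sink {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

            // Producer configuration; "localhost:9092" is a placeholder broker address.
            Properties properties = new Properties();
            properties.setProperty("bootstrap.servers", "localhost:9092");

            DataStream<String> stream = env.fromElements("a", "b", "c");

            // Write each element to the (hypothetical) topic "my-topic" as a UTF-8 string.
            stream.addSink(new FlinkKafkaProducer010<>(
                "my-topic",
                new SimpleStringSchema(),
                properties));

            env.execute("Minimal FlinkKafkaProducer010 example");
        }
    }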
Example #1
Source File: Kafka010Example.java From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                new KafkaEventSchema(),
                parameterTool.getProperties())
            .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer010<>(
            parameterTool.getRequired("output-topic"),
            new KafkaEventSchema(),
            parameterTool.getProperties()));

    env.execute("Kafka 0.10 Example");
}
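CustomWatermarkExtractor is defined elsewhere in the example project. A plausible shape for it, assuming KafkaEvent exposes a getTimestamp() accessor, is a periodic watermark assigner such as the following sketch; treat it as a hypothetical reconstruction, not the project's actual class.

    import javax.annotation.Nullable;

    import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
    import org.apache.flink.streaming.api.watermark.Watermark;

    // Hypothetical reconstruction: emits watermarks that trail the highest
    // timestamp seen so far. Assumes KafkaEvent has a getTimestamp() accessor.
    public class CustomWatermarkExtractor implements AssignerWithPeriodicWatermarks<KafkaEvent> {

        private static final long serialVersionUID = 1L;

        private long currentTimestamp = Long.MIN_VALUE;

        @Override
        public long extractTimestamp(KafkaEvent event, long previousElementTimestamp) {
            this.currentTimestamp = event.getTimestamp();
            return event.getTimestamp();
        }

        @Nullable
        @Override
        public Watermark getCurrentWatermark() {
            // Hold the watermark one millisecond behind the latest seen timestamp.
            return new Watermark(currentTimestamp == Long.MIN_VALUE
                ? Long.MIN_VALUE
                : currentTimestamp - 1);
        }
    }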
Example #2
Source File: Kafka010Example.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    StreamExecutionEnvironment env = KafkaExampleUtil.prepareExecutionEnv(parameterTool);

    DataStream<KafkaEvent> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                new KafkaEventSchema(),
                parameterTool.getProperties())
            .assignTimestampsAndWatermarks(new CustomWatermarkExtractor()))
        .keyBy("word")
        .map(new RollingAdditionMapper());

    input.addSink(
        new FlinkKafkaProducer010<>(
            parameterTool.getRequired("output-topic"),
            new KafkaEventSchema(),
            parameterTool.getProperties()));

    env.execute("Kafka 0.10 Example");
}
Example #3
Source File: FlinkKafkaSinkExample.java From huaweicloud-cs-sdk with Apache License 2.0
public void writeKafka() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    DataStream<String> messageStream = env.addSource(new KafkaSourceGenerator());
    messageStream.addSink(new FlinkKafkaProducer010<String>(topic,
        new SimpleStringSchema(),
        properties));

    try {
        env.execute();
    } catch (Exception e) {
        System.out.println(e.getMessage());
    }
}
Example #4
Source File: WriteIntoKafka.java From flinkDemo with Apache License 2.0
public static void main(String[] args) throws Exception {
    // create execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Note: several of these are Kafka consumer settings; only
    // "bootstrap.servers" and "topic" are actually read below.
    Map<String, String> properties = new HashMap<>();
    properties.put("bootstrap.servers", "192.168.10.63:6667,192.168.10.64:6667,192.168.10.65:6667");
    properties.put("group.id", "t10");
    properties.put("enable.auto.commit", "false");
    properties.put("auto.commit.interval.ms", "1000");
    properties.put("auto.offset.reset", "earliest");
    properties.put("session.timeout.ms", "30000");
    properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    properties.put("topic", "kks-topic-FFT");

    // parse user parameters
    ParameterTool parameterTool = ParameterTool.fromMap(properties);

    // add a simple source which is writing some strings
    DataStream<String> messageStream = env.addSource(new SimpleStringGenerator());

    // write stream to Kafka
    messageStream.addSink(new FlinkKafkaProducer010<>(
        parameterTool.getRequired("bootstrap.servers"),
        parameterTool.getRequired("topic"),
        new SimpleStringSchema()));

    messageStream.rebalance().map(new MapFunction<String, String>() {
        private static final long serialVersionUID = 1L;

        @Override
        public String map(String value) throws Exception {
            return value;
        }
    });

    messageStream.print();
    env.execute();
}
Example #5
Source File: Kafka010DynamicSink.java From flink with Apache License 2.0
@Override
protected FlinkKafkaProducerBase<RowData> createKafkaProducer(
        String topic,
        Properties properties,
        SerializationSchema<RowData> serializationSchema,
        Optional<FlinkKafkaPartitioner<RowData>> partitioner) {
    return new FlinkKafkaProducer010<>(
        topic,
        serializationSchema,
        properties,
        partitioner.orElse(null));
}
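Here, partitioner.orElse(null) means that when no custom partitioner is configured, the producer falls back to its default partitioning. For illustration, a custom FlinkKafkaPartitioner might look like the sketch below; the key-extraction logic (hashing the first field, assumed to be a string) is hypothetical and not part of the example above.

    import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
    import org.apache.flink.table.data.RowData;

    // Hypothetical sketch: route each row to a partition based on the hash of
    // its first field, assumed here to be a string key.
    public class FirstFieldPartitioner extends FlinkKafkaPartitioner<RowData> {

        private static final long serialVersionUID = 1L;

        @Override
        public int partition(RowData record, byte[] key, byte[] value,
                String targetTopic, int[] partitions) {
            // Spread records across the available partitions of the target topic.
            int hash = record.getString(0).toString().hashCode();
            return partitions[Math.abs(hash % partitions.length)];
        }
    }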
Example #6
Source File: TestAvroConsumerConfluent.java From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    if (parameterTool.getNumberOfParameters() < 6) {
        System.out.println("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--zookeeper.connect <zk quorum> " +
            "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }

    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    config.setProperty("zookeeper.connect", parameterTool.getRequired("zookeeper.connect"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataStreamSource<User> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                config).setStartFromEarliest());

    SingleOutputStreamOperator<String> mapToString = input
        .map((MapFunction<User, String>) SpecificRecordBase::toString);

    FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
        parameterTool.getRequired("output-topic"),
        new SimpleStringSchema(),
        config);

    mapToString.addSink(stringFlinkKafkaProducer010);
    env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}
Example #7
Source File: KafkaEventsGeneratorJob.java From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool params = ParameterTool.fromArgs(args);
    double errorRate = params.getDouble("error-rate", 0.0);
    int sleep = params.getInt("sleep", 1);
    String kafkaTopic = params.get("kafka-topic");
    String brokers = params.get("brokers", "localhost:9092");

    System.out.printf("Generating events to Kafka with standalone source with error rate %f and sleep delay %s millis\n", errorRate, sleep);
    System.out.println();

    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env
        .addSource(new EventsGeneratorSource(errorRate, sleep))
        .addSink(new FlinkKafkaProducer010<>(brokers, kafkaTopic, new EventDeSerializer()));

    // trigger program execution
    env.execute("State machine example Kafka events generator job");
}
Example #8
Source File: TestAvroConsumerConfluent.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    if (parameterTool.getNumberOfParameters() < 6) {
        System.out.println("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--zookeeper.connect <zk quorum> " +
            "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }

    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    config.setProperty("zookeeper.connect", parameterTool.getRequired("zookeeper.connect"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataStreamSource<User> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                config).setStartFromEarliest());

    SingleOutputStreamOperator<String> mapToString = input
        .map((MapFunction<User, String>) SpecificRecordBase::toString);

    FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
        parameterTool.getRequired("output-topic"),
        new SimpleStringSchema(),
        config);

    mapToString.addSink(stringFlinkKafkaProducer010);
    env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}
Example #9
Source File: Kafka010SinkBuilder.java From Alink with Apache License 2.0
@Override
public RichSinkFunction<Row> build() {
    SerializationSchema<Row> serializationSchema = getSerializationSchema();
    return new FlinkKafkaProducer010<Row>(topic, serializationSchema, properties);
}
Example #10
Source File: Kafka010DynamicTableFactoryTest.java From flink with Apache License 2.0
@Override
protected Class<?> getExpectedProducerClass() {
    return FlinkKafkaProducer010.class;
}
Example #11
Source File: TestAvroConsumerConfluent.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    if (parameterTool.getNumberOfParameters() < 6) {
        System.out.println("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-string-topic <topic> --output-avro-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }

    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStreamSource<User> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                config).setStartFromEarliest());

    SingleOutputStreamOperator<String> mapToString = input
        .map((MapFunction<User, String>) SpecificRecordBase::toString);

    FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
        parameterTool.getRequired("output-string-topic"),
        new SimpleStringSchema(),
        config);
    mapToString.addSink(stringFlinkKafkaProducer010);

    FlinkKafkaProducer010<User> avroFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
        parameterTool.getRequired("output-avro-topic"),
        ConfluentRegistryAvroSerializationSchema.forSpecific(User.class, parameterTool.getRequired("output-subject"), schemaRegistryUrl),
        config);
    input.addSink(avroFlinkKafkaProducer010);

    env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}