Java Code Examples for org.apache.flink.api.java.utils.ParameterTool#getNumberOfParameters()
The following examples show how to use org.apache.flink.api.java.utils.ParameterTool#getNumberOfParameters().
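All of the examples share the same pattern: parse the command line with ParameterTool.fromArgs(args), use getNumberOfParameters() as a coarse arity check before reading individual values, and rely on getRequired(...) or get(key, default) for per-key validation. The minimal sketch below distills that pattern; the class name and argument keys are illustrative and not taken from any of the quoted projects.

import org.apache.flink.api.java.utils.ParameterTool;

public class UsageCheckSketch {

    public static void main(String[] args) {
        // "--host localhost --port 9092" parses into two parameters: "host" and "port"
        ParameterTool params = ParameterTool.fromArgs(args);

        // Coarse check: getNumberOfParameters() counts distinct keys, so it catches
        // "too few arguments" but not a misspelled or missing specific key.
        if (params.getNumberOfParameters() < 2) {
            System.out.println("Missing parameters!\n" +
                "Usage: UsageCheckSketch --host <host> --port <port>");
            return;
        }

        // Fine-grained check: getRequired throws a RuntimeException if the key is absent;
        // getInt with a default tolerates it.
        String host = params.getRequired("host");
        int port = params.getInt("port", 9092);

        System.out.println("Connecting to " + host + ":" + port);
    }
}

Because getNumberOfParameters() counts parsed keys rather than raw args.length, the examples below compare it against the number of expected flags.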
Example 1
Source File: KafkaExampleUtil.java From Flink-CEPplus with Apache License 2.0
public static StreamExecutionEnvironment prepareExecutionEnv(ParameterTool parameterTool) throws Exception {
    if (parameterTool.getNumberOfParameters() < 5) {
        System.out.println("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--zookeeper.connect <zk quorum> --group.id <some id>");
        throw new Exception("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--zookeeper.connect <zk quorum> --group.id <some id>");
    }

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();
    env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 10000));
    env.enableCheckpointing(5000); // create a checkpoint every 5 seconds
    env.getConfig().setGlobalJobParameters(parameterTool); // make parameters available in the web interface
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

    return env;
}
Example 2
Source File: PubSubExample.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 3) {
        System.out.println("Missing parameters!\n" +
            "Usage: flink run PubSub.jar --input-subscription <subscription> --input-topicName <topic> --output-topicName <output-topic> " +
            "--google-project <google project name> ");
        return;
    }

    String projectName = parameterTool.getRequired("google-project");
    String inputTopicName = parameterTool.getRequired("input-topicName");
    String subscriptionName = parameterTool.getRequired("input-subscription");
    String outputTopicName = parameterTool.getRequired("output-topicName");

    PubSubPublisher pubSubPublisher = new PubSubPublisher(projectName, inputTopicName);
    pubSubPublisher.publish(10);

    runFlinkJob(projectName, subscriptionName, outputTopicName);
}
Example 3
Source File: KafkaExampleUtil.java From flink with Apache License 2.0
public static StreamExecutionEnvironment prepareExecutionEnv(ParameterTool parameterTool) throws Exception {
    if (parameterTool.getNumberOfParameters() < 5) {
        System.out.println("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--group.id <some id>");
        throw new Exception("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--group.id <some id>");
    }

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 10000));
    env.enableCheckpointing(5000); // create a checkpoint every 5 seconds
    env.getConfig().setGlobalJobParameters(parameterTool); // make parameters available in the web interface
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

    return env;
}
Example 4
Source File: KafkaExampleUtil.java From flink with Apache License 2.0
public static StreamExecutionEnvironment prepareExecutionEnv(ParameterTool parameterTool) throws Exception {
    if (parameterTool.getNumberOfParameters() < 5) {
        System.out.println("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--zookeeper.connect <zk quorum> --group.id <some id>");
        throw new Exception("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--zookeeper.connect <zk quorum> --group.id <some id>");
    }

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();
    env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 10000));
    env.enableCheckpointing(5000); // create a checkpoint every 5 seconds
    env.getConfig().setGlobalJobParameters(parameterTool); // make parameters available in the web interface
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

    return env;
}
Example 5
Source File: Elasticsearch5SinkExample.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 3) {
        System.out.println("Missing parameters!\n" +
            "Usage: --numRecords <numRecords> --index <index> --type <type>");
        return;
    }

    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();
    env.enableCheckpointing(5000);

    DataStream<String> source = env.generateSequence(0, parameterTool.getInt("numRecords") - 1)
        .map(new MapFunction<Long, String>() {
            @Override
            public String map(Long value) throws Exception {
                return "message #" + value;
            }
        });

    Map<String, String> userConfig = new HashMap<>();
    userConfig.put("cluster.name", "elasticsearch");
    // This instructs the sink to emit after every element, otherwise they would be buffered
    userConfig.put(ElasticsearchSink.CONFIG_KEY_BULK_FLUSH_MAX_ACTIONS, "1");

    List<InetSocketAddress> transports = new ArrayList<>();
    transports.add(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 9300));

    source.addSink(new ElasticsearchSink<>(userConfig, transports, new ElasticsearchSinkFunction<String>() {
        @Override
        public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
            indexer.add(createIndexRequest(element, parameterTool));
        }
    }));

    env.execute("Elasticsearch5.x end to end sink test example");
}
Example 6
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args);

    if (parameterTool.getNumberOfParameters() < 4) {
        System.out.println("Missing parameters!\n" +
            "Usage: flink run PubSub.jar --stream.project.name <project> --stream.input.topicName <topic> " +
            "--stream.input.subscription <subscription> --stream.output.topicName <output-topic>");
        return;
    }

    String projectName = parameterTool.getRequired("stream.project.name");
    String inputTopicName = parameterTool.getRequired("stream.input.topicName");
    String subscriptionName = parameterTool.getRequired("stream.input.subscription");
    String outputTopicName = parameterTool.getRequired("stream.output.topicName");

    PubSubPublisherUtil pubSubPublisher = new PubSubPublisherUtil(projectName, inputTopicName);
    pubSubPublisher.publish(10);

    env.addSource(PubSubSource.newBuilder()
            .withDeserializationSchema(new IntegerSerializer())
            .withProjectName(projectName)
            .withSubscriptionName(subscriptionName)
            .withMessageRateLimit(1)
            .build())
        .map(Main::printAndReturn).disableChaining()
        .addSink(PubSubSink.newBuilder()
            .withSerializationSchema(new IntegerSerializer())
            .withProjectName(projectName)
            .withTopicName(outputTopicName).build());

    env.enableCheckpointing(parameterTool.getLong(PropertiesConstants.STREAM_CHECKPOINT_INTERVAL, 1000L));
    env.execute("Flink connector gcp pubsub test");
}
Example 7
Source File: Elasticsearch5SinkExample.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 3) {
        System.out.println("Missing parameters!\n" +
            "Usage: --numRecords <numRecords> --index <index> --type <type>");
        return;
    }

    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.enableCheckpointing(5000);

    DataStream<String> source = env.generateSequence(0, parameterTool.getInt("numRecords") - 1)
        .map(new MapFunction<Long, String>() {
            @Override
            public String map(Long value) throws Exception {
                return "message #" + value;
            }
        });

    Map<String, String> userConfig = new HashMap<>();
    userConfig.put("cluster.name", "elasticsearch");
    // This instructs the sink to emit after every element, otherwise they would be buffered
    userConfig.put(ElasticsearchSink.CONFIG_KEY_BULK_FLUSH_MAX_ACTIONS, "1");

    List<InetSocketAddress> transports = new ArrayList<>();
    transports.add(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 9300));

    source.addSink(new ElasticsearchSink<>(userConfig, transports, new ElasticsearchSinkFunction<String>() {
        @Override
        public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
            indexer.add(createIndexRequest(element, parameterTool));
        }
    }));

    env.execute("Elasticsearch5.x end to end sink test example");
}
Example 8
Source File: Elasticsearch5SinkExample.java From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 3) {
        System.out.println("Missing parameters!\n" +
            "Usage: --numRecords <numRecords> --index <index> --type <type>");
        return;
    }

    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();
    env.enableCheckpointing(5000);

    DataStream<String> source = env.generateSequence(0, parameterTool.getInt("numRecords") - 1)
        .map(new MapFunction<Long, String>() {
            @Override
            public String map(Long value) throws Exception {
                return "message #" + value;
            }
        });

    Map<String, String> userConfig = new HashMap<>();
    userConfig.put("cluster.name", "elasticsearch");
    // This instructs the sink to emit after every element, otherwise they would be buffered
    userConfig.put(ElasticsearchSink.CONFIG_KEY_BULK_FLUSH_MAX_ACTIONS, "1");

    List<InetSocketAddress> transports = new ArrayList<>();
    transports.add(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 9300));

    source.addSink(new ElasticsearchSink<>(userConfig, transports, new ElasticsearchSinkFunction<String>() {
        @Override
        public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
            indexer.add(createIndexRequest(element, parameterTool));
        }
    }));

    env.execute("Elasticsearch5.x end to end sink test example");
}
Example 9
Source File: TestAvroConsumerConfluent.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 6) {
        System.out.println("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--zookeeper.connect <zk quorum> " +
            "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }

    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    config.setProperty("zookeeper.connect", parameterTool.getRequired("zookeeper.connect"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataStreamSource<User> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                config).setStartFromEarliest());

    SingleOutputStreamOperator<String> mapToString = input
        .map((MapFunction<User, String>) SpecificRecordBase::toString);

    FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
        parameterTool.getRequired("output-topic"),
        new SimpleStringSchema(),
        config);

    mapToString.addSink(stringFlinkKafkaProducer010);
    env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}
Example 10
Source File: FlinkPulsarBatchJsonSinkExample.java From pulsar with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 2) {
        System.out.println("Missing parameters!");
        System.out.println("Usage: pulsar --service-url <pulsar-service-url> --topic <topic>");
        return;
    }

    // set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setGlobalJobParameters(parameterTool);

    String serviceUrl = parameterTool.getRequired("service-url");
    String topic = parameterTool.getRequired("topic");

    System.out.println("Parameters:");
    System.out.println("\tServiceUrl:\t" + serviceUrl);
    System.out.println("\tTopic:\t" + topic);

    // create PulsarJsonOutputFormat instance
    final OutputFormat<NasaMission> pulsarJsonOutputFormat =
        new PulsarJsonOutputFormat<>(serviceUrl, topic, new AuthenticationDisabled());

    // create DataSet
    DataSet<NasaMission> nasaMissionDS = env.fromCollection(nasaMissions);
    // map nasa mission names to upper-case
    nasaMissionDS.map(nasaMission -> new NasaMission(
            nasaMission.id,
            nasaMission.missionName.toUpperCase(),
            nasaMission.startYear,
            nasaMission.endYear))
        // filter missions which started after 1970
        .filter(nasaMission -> nasaMission.startYear > 1970)
        // write batch data to Pulsar
        .output(pulsarJsonOutputFormat);

    // set parallelism to write Pulsar in parallel (optional)
    env.setParallelism(2);

    // execute program
    env.execute("Flink - Pulsar Batch Json");
}
Example 11
Source File: FlinkPulsarBatchCsvSinkExample.java From pulsar with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 2) {
        System.out.println("Missing parameters!");
        System.out.println("Usage: pulsar --service-url <pulsar-service-url> --topic <topic>");
        return;
    }

    // set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setGlobalJobParameters(parameterTool);

    String serviceUrl = parameterTool.getRequired("service-url");
    String topic = parameterTool.getRequired("topic");

    System.out.println("Parameters:");
    System.out.println("\tServiceUrl:\t" + serviceUrl);
    System.out.println("\tTopic:\t" + topic);

    // create PulsarCsvOutputFormat instance
    final OutputFormat<Tuple4<Integer, String, Integer, Integer>> pulsarCsvOutputFormat =
        new PulsarCsvOutputFormat<>(serviceUrl, topic, new AuthenticationDisabled());

    // create DataSet
    DataSet<Tuple4<Integer, String, Integer, Integer>> nasaMissionDS = env.fromCollection(nasaMissions);
    // map nasa mission names to upper-case
    nasaMissionDS.map(
        new MapFunction<Tuple4<Integer, String, Integer, Integer>, Tuple4<Integer, String, Integer, Integer>>() {
            @Override
            public Tuple4<Integer, String, Integer, Integer> map(
                    Tuple4<Integer, String, Integer, Integer> nasaMission) throws Exception {
                return new Tuple4(
                    nasaMission.f0,
                    nasaMission.f1.toUpperCase(),
                    nasaMission.f2,
                    nasaMission.f3);
            }
        }
    )
    // filter missions which started after 1970
    .filter(nasaMission -> nasaMission.f2 > 1970)
    // write batch data to Pulsar
    .output(pulsarCsvOutputFormat);

    // set parallelism to write Pulsar in parallel (optional)
    env.setParallelism(2);

    // execute program
    env.execute("Flink - Pulsar Batch Csv");
}
Example 12
Source File: FlinkPulsarBatchAvroSinkExample.java From pulsar with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 2) {
        System.out.println("Missing parameters!");
        System.out.println("Usage: pulsar --service-url <pulsar-service-url> --topic <topic>");
        return;
    }

    // set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setGlobalJobParameters(parameterTool);

    String serviceUrl = parameterTool.getRequired("service-url");
    String topic = parameterTool.getRequired("topic");

    System.out.println("Parameters:");
    System.out.println("\tServiceUrl:\t" + serviceUrl);
    System.out.println("\tTopic:\t" + topic);

    // create PulsarAvroOutputFormat instance
    final OutputFormat<NasaMission> pulsarAvroOutputFormat =
        new PulsarAvroOutputFormat<>(serviceUrl, topic, new AuthenticationDisabled());

    // create DataSet
    DataSet<NasaMission> nasaMissionDS = env.fromCollection(nasaMissions);
    // map nasa mission names to upper-case
    nasaMissionDS.map(nasaMission -> new NasaMission(
            nasaMission.getId(),
            nasaMission.getName(),
            nasaMission.getStartYear(),
            nasaMission.getEndYear()))
        // filter missions which started after 1970
        .filter(nasaMission -> nasaMission.getStartYear() > 1970)
        // write batch data to Pulsar
        .output(pulsarAvroOutputFormat);

    // set parallelism to write Pulsar in parallel (optional)
    env.setParallelism(2);

    // execute program
    env.execute("Flink - Pulsar Batch Avro");
}
Example 13
Source File: PulsarConsumerSourceWordCount.java From pulsar with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 2) {
        System.out.println("Missing parameters!");
        System.out.println("Usage: pulsar --service-url <pulsar-service-url> --input-topic <topic> --subscription <sub> --output-topic <topic>");
        return;
    }

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();
    env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 10000));
    env.enableCheckpointing(5000);
    env.getConfig().setGlobalJobParameters(parameterTool);
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime);

    String serviceUrl = parameterTool.getRequired("service-url");
    String inputTopic = parameterTool.getRequired("input-topic");
    String subscription = parameterTool.get("subscription", "flink-examples");
    String outputTopic = parameterTool.get("output-topic", null);
    int parallelism = parameterTool.getInt("parallelism", 1);

    System.out.println("Parameters:");
    System.out.println("\tServiceUrl:\t" + serviceUrl);
    System.out.println("\tInputTopic:\t" + inputTopic);
    System.out.println("\tSubscription:\t" + subscription);
    System.out.println("\tOutputTopic:\t" + outputTopic);
    System.out.println("\tParallelism:\t" + parallelism);

    PulsarSourceBuilder<String> builder = PulsarSourceBuilder.builder(new SimpleStringSchema())
        .serviceUrl(serviceUrl)
        .topic(inputTopic)
        .subscriptionName(subscription);
    SourceFunction<String> src = builder.build();
    DataStream<String> input = env.addSource(src);

    DataStream<WordWithCount> wc = input
        .flatMap((FlatMapFunction<String, WordWithCount>) (line, collector) -> {
            for (String word : line.split("\\s")) {
                collector.collect(new WordWithCount(word, 1));
            }
        })
        .returns(WordWithCount.class)
        .keyBy("word")
        .timeWindow(Time.seconds(5))
        .reduce((ReduceFunction<WordWithCount>) (c1, c2) ->
            new WordWithCount(c1.word, c1.count + c2.count));

    if (null != outputTopic) {
        wc.addSink(new FlinkPulsarProducer<>(
            serviceUrl,
            outputTopic,
            new AuthenticationDisabled(),
            wordWithCount -> wordWithCount.toString().getBytes(UTF_8),
            wordWithCount -> wordWithCount.word,
            null
        )).setParallelism(parallelism);
    } else {
        // print the results with a single thread, rather than in parallel
        wc.print().setParallelism(1);
    }

    env.execute("Pulsar Stream WordCount");
}
Example 14
Source File: TestAvroConsumerConfluent.java From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 6) {
        System.out.println("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-topic <topic> " +
            "--bootstrap.servers <kafka brokers> " +
            "--zookeeper.connect <zk quorum> " +
            "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }

    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    config.setProperty("zookeeper.connect", parameterTool.getRequired("zookeeper.connect"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataStreamSource<User> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                config).setStartFromEarliest());

    SingleOutputStreamOperator<String> mapToString = input
        .map((MapFunction<User, String>) SpecificRecordBase::toString);

    FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
        parameterTool.getRequired("output-topic"),
        new SimpleStringSchema(),
        config);

    mapToString.addSink(stringFlinkKafkaProducer010);
    env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}
Example 15
Source File: TestAvroConsumerConfluent.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse input arguments
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);

    if (parameterTool.getNumberOfParameters() < 6) {
        System.out.println("Missing parameters!\n" +
            "Usage: Kafka --input-topic <topic> --output-string-topic <topic> --output-avro-topic <topic> " +
            "--output-subject <subject> " +
            "--bootstrap.servers <kafka brokers> " +
            "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }

    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStreamSource<User> input = env
        .addSource(
            new FlinkKafkaConsumer010<>(
                parameterTool.getRequired("input-topic"),
                ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                config).setStartFromEarliest());

    SingleOutputStreamOperator<String> mapToString = input
        .map((MapFunction<User, String>) SpecificRecordBase::toString);

    FlinkKafkaProducer010<String> stringFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
        parameterTool.getRequired("output-string-topic"),
        new SimpleStringSchema(),
        config);
    mapToString.addSink(stringFlinkKafkaProducer010);

    FlinkKafkaProducer010<User> avroFlinkKafkaProducer010 = new FlinkKafkaProducer010<>(
        parameterTool.getRequired("output-avro-topic"),
        ConfluentRegistryAvroSerializationSchema.forSpecific(User.class, parameterTool.getRequired("output-subject"), schemaRegistryUrl),
        config);
    input.addSink(avroFlinkKafkaProducer010);

    env.execute("Kafka 0.10 Confluent Schema Registry AVRO Example");
}