Java Code Examples for org.apache.flink.streaming.api.datastream.DataStreamSource#print()
The following examples show how to use
org.apache.flink.streaming.api.datastream.DataStreamSource#print().
You can go to the original project or source file by following the link above each example.
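Before the project examples, here is a minimal, self-contained sketch of what print() does; the element values and job name are illustrative, not taken from any project below. print() attaches a sink that writes each record to the TaskManager's standard output via toString(), prefixing each line with the subtask index when the sink runs with parallelism greater than 1.

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class PrintExample {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // fromElements returns a DataStreamSource backed by an in-memory collection
        DataStreamSource<String> source = env.fromElements("a", "b", "c");

        // print() adds a sink that writes each element to stdout
        source.print();

        env.execute("DataStreamSource print example");
    }
}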
Example 1
Source File: FlinkKafkaSchemaTest1.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args);
    StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool);
    Properties props = buildKafkaProps(parameterTool);

    // Kafka topic list
    List<String> topics = Arrays.asList(parameterTool.get("metrics.topic"));
    FlinkKafkaConsumer011<MetricEvent> consumer = new FlinkKafkaConsumer011<>(topics,
            new KafkaDeserializationSchemaWrapper<>(new MetricSchema()), props);

    DataStreamSource<MetricEvent> data = env.addSource(consumer);

    data.print();

    env.execute("flink kafka connector test");
}
Example 2
Source File: FlinkKafkaConsumerTest1.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args);
    StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool);
    Properties props = buildKafkaProps(parameterTool);

    // Kafka topic list
    List<String> topics = Arrays.asList(parameterTool.get("metrics.topic"), parameterTool.get("logs.topic"));
    FlinkKafkaConsumer011<MetricEvent> consumer = new FlinkKafkaConsumer011<>(topics, new MetricSchema(), props);

    // Kafka topic pattern (alternative to the explicit topic list)
    // FlinkKafkaConsumer011<MetricEvent> consumer = new FlinkKafkaConsumer011<>(
    //         java.util.regex.Pattern.compile("test-topic-[0-9]"), new MetricSchema(), props);

    // optional start position:
    // consumer.setStartFromLatest();
    // consumer.setStartFromEarliest();

    DataStreamSource<MetricEvent> data = env.addSource(consumer);

    data.print();

    env.execute("flink kafka connector test");
}
Example 3
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;

    // These hard-coded parameters could be moved into a config file and read via parameterTool
    final RMQConnectionConfig connectionConfig = new RMQConnectionConfig.Builder()
            .setHost("localhost").setVirtualHost("/")
            .setPort(5672).setUserName("admin").setPassword("admin")
            .build();

    DataStreamSource<String> zhisheng = env.addSource(new RMQSource<>(connectionConfig,
            "zhisheng", true, new SimpleStringSchema()))
            .setParallelism(1);
    zhisheng.print();

    // Enable checkpointing if you need exactly-once or at-least-once guarantees
    // env.enableCheckpointing(10000);

    env.execute("flink learning connectors rabbitmq");
}
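The checkpointing the comment above refers to is enabled on the environment. A minimal sketch, using the 10-second interval from the commented-out line; the explicit CheckpointingMode argument is an assumption added here for illustration:

import org.apache.flink.streaming.api.CheckpointingMode;

// checkpoint every 10 seconds with exactly-once semantics; the RMQSource needs
// checkpointing (plus usesCorrelationId = true and a non-parallel source)
// to provide exactly-once guarantees
env.enableCheckpointing(10000, CheckpointingMode.EXACTLY_ONCE);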
Example 4
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");
    props.put("zookeeper.connect", "localhost:2181"); // not needed by the 0.11 consumer; kept from the original example
    props.put("group.id", "metric-group");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");   // key deserializer
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); // value deserializer
    props.put("auto.offset.reset", "latest");

    DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer011<>(
            "metric",                 // Kafka topic
            new SimpleStringSchema(), // String deserialization schema
            props)).setParallelism(1);

    dataStreamSource.print(); // print the data read from Kafka to the console

    env.execute("Flink add data source");
}
Example 5
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    DataStreamSource<String> data = env.readTextFile("file:///usr/local/blink-1.5.1/README.txt");
    data.print();

    // Both path formats work; writing to HDFS is also supported
    // data.writeAsText("file:///usr/local/blink-1.5.1/README1.txt");
    data.writeAsText("/usr/local/blink-1.5.1/README1.txt");

    env.execute();
}
Example 6
Source File: KafkaDataSource.java From flink-simple-tutorial with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Kafka connection settings
    String topic = "flink";
    String bootStrapServers = "192.168.56.103:9092";
    String zkConnect = "192.168.56.103:2181";
    String groupID = "group_A";

    Properties prop = new Properties();
    prop.setProperty("bootstrap.servers", bootStrapServers);
    // prop.setProperty("zookeeper.connect", zkConnect);
    prop.setProperty("group.id", groupID);

    // Create the Kafka connector source
    // FlinkKafkaConsumer010<String> consumer010 = new FlinkKafkaConsumer010<>(topic, new SimpleStringSchema(), prop);
    FlinkKafkaConsumer<String> stringFlinkKafkaConsumer = new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), prop);

    // Add the source
    DataStreamSource<String> dataStream = env.addSource(stringFlinkKafkaConsumer);
    dataStream.print();

    env.execute("Flink kafka test");
}
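All of the examples above call the no-argument print(). The DataStream API also offers print(String sinkIdentifier) and printToErr(), which behave the same way but label the output or write to stderr. A minimal sketch; the stream contents and identifier are illustrative:

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<String> data = env.fromElements("x", "y");

// print(String) prefixes each output line with the given identifier,
// which helps distinguish several print sinks in the TaskManager logs
data.print("debug-stream");

// printToErr() behaves like print() but writes to stderr
data.printToErr();

env.execute("print variants");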