Java Code Examples for org.apache.flink.streaming.api.datastream.DataStream#writeUsingOutputFormat()
The following examples show how to use
org.apache.flink.streaming.api.datastream.DataStream#writeUsingOutputFormat().
Each example is taken from an open-source project; the source file, project, and license are noted above the code.
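As a quick orientation before the examples, here is a minimal, self-contained sketch of the basic call. writeUsingOutputFormat hands every record of the stream to an org.apache.flink.api.common.io.OutputFormat; the output path /tmp/flink-out below is an illustrative placeholder, not taken from any of the projects. Note that writeUsingOutputFormat does not participate in Flink's checkpointing, so it is unsuitable when exactly-once guarantees are needed.

import org.apache.flink.api.java.io.TextOutputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class WriteUsingOutputFormatSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStream<String> words = env.fromElements("alpha", "beta", "gamma");

        // Hand every record of the stream to an OutputFormat; the built-in
        // TextOutputFormat writes each record as one line of text.
        // "/tmp/flink-out" is an illustrative placeholder path.
        words.writeUsingOutputFormat(new TextOutputFormat<>(new Path("/tmp/flink-out")));

        env.execute("writeUsingOutputFormat sketch");
    }
}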
Example 1
Source File: HBaseWriteStreamExample.java From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // data stream with random numbers
    DataStream<String> dataStream = env.addSource(new SourceFunction<String>() {
        private static final long serialVersionUID = 1L;

        private volatile boolean isRunning = true;

        @Override
        public void run(SourceContext<String> out) throws Exception {
            while (isRunning) {
                out.collect(String.valueOf(Math.floor(Math.random() * 100)));
            }
        }

        @Override
        public void cancel() {
            isRunning = false;
        }
    });

    dataStream.writeUsingOutputFormat(new HBaseOutputFormat());

    env.execute();
}
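The HBaseOutputFormat used above is a class defined elsewhere in the project and not shown on this page. For orientation, a rough sketch of what such an implementation of org.apache.flink.api.common.io.OutputFormat could look like follows; the table name "flink-example", the column family "cf", and the row-key scheme are illustrative assumptions, not the original source.

import java.io.IOException;

import org.apache.flink.api.common.io.OutputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class HBaseOutputFormat implements OutputFormat<String> {

    private static final long serialVersionUID = 1L;

    // HBase client objects are not serializable, so they are created in open()
    private transient Connection connection;
    private transient Table table;
    private long rowNumber = 0;

    @Override
    public void configure(Configuration parameters) {
        // no Flink-side configuration needed for this sketch
    }

    @Override
    public void open(int taskNumber, int numTasks) throws IOException {
        // "flink-example" is an assumed table name, not from the original code
        connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
        table = connection.getTable(TableName.valueOf("flink-example"));
    }

    @Override
    public void writeRecord(String record) throws IOException {
        // write each record under an incrementing row key; "cf"/"value" are assumed names
        Put put = new Put(Bytes.toBytes(String.valueOf(rowNumber++)));
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("value"), Bytes.toBytes(record));
        table.put(put);
    }

    @Override
    public void close() throws IOException {
        if (table != null) {
            table.close();
        }
        if (connection != null) {
            connection.close();
        }
    }
}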
Example 2
Source File: HBaseStreamWriteMain.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args);
    StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool);
    Properties props = KafkaConfigUtil.buildKafkaProps(parameterTool);

    /*env.addSource(new FlinkKafkaConsumer011<>(
            parameterTool.get(METRICS_TOPIC),   // this Kafka topic must match the topic used by the utility class above
            new SimpleStringSchema(),
            props))
            .writeUsingOutputFormat(new HBaseOutputFormat());*/

    DataStream<String> dataStream = env.addSource(new SourceFunction<String>() {
        private static final long serialVersionUID = 1L;

        private volatile boolean isRunning = true;

        @Override
        public void run(SourceContext<String> out) throws Exception {
            while (isRunning) {
                out.collect(String.valueOf(Math.floor(Math.random() * 100)));
            }
        }

        @Override
        public void cancel() {
            isRunning = false;
        }
    });

    dataStream.writeUsingOutputFormat(new HBaseOutputFormat());

    env.execute("Flink HBase connector sink");
}
Example 3
Source File: HBaseWriteStreamExample.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // data stream with random numbers
    DataStream<String> dataStream = env.addSource(new SourceFunction<String>() {
        private static final long serialVersionUID = 1L;

        private volatile boolean isRunning = true;

        @Override
        public void run(SourceContext<String> out) throws Exception {
            while (isRunning) {
                out.collect(String.valueOf(Math.floor(Math.random() * 100)));
            }
        }

        @Override
        public void cancel() {
            isRunning = false;
        }
    });

    dataStream.writeUsingOutputFormat(new HBaseOutputFormat());

    env.execute();
}
Example 4
Source File: JdbcRetractSinkStreamOp.java From Alink with Apache License 2.0
@Override
public JdbcRetractSinkStreamOp linkFrom(StreamOperator<?>... inputs) {
    StreamOperator<?> in = checkAndGetFirst(inputs);
    in = in.link(new VectorSerializeStreamOp().setMLEnvironmentId(getMLEnvironmentId()));

    if (this.primaryColNames == null || this.primaryColNames.length == 0) {
        throw new RuntimeException("primary key must not be empty.");
    }

    try {
        if (!jdbcDB.hasTable(tableName)) {
            jdbcDB.createTable(tableName, in.getSchema());
        }
    } catch (Exception ex) {
        throw new RuntimeException(ex.getMessage());
    }

    DataStream<Tuple2<Boolean, Row>> tuple2Stream = MLEnvironmentFactory.get(getMLEnvironmentId())
        .getStreamTableEnvironment()
        .toRetractStream(in.getOutputTable(), Row.class);

    tuple2Stream.writeUsingOutputFormat(new JDBCUpserOutputFormat(
        jdbcDB.getUserName(), jdbcDB.getPassword(), jdbcDB.getDriverName(),
        jdbcDB.getDbUrl(), tableName, primaryColNames));

    return this;
}
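Here, toRetractStream emits Tuple2<Boolean, Row> records in which the flag marks an insertion (true) or a retraction (false); JDBCUpserOutputFormat is Alink's own class (not shown on this page) that presumably maps these to upserts and deletes keyed on primaryColNames. A hypothetical sketch of that dispatch follows, with upsert and delete left abstract as stand-ins for real JDBC statements.

import java.io.IOException;

import org.apache.flink.api.common.io.OutputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.types.Row;

// Hypothetical sketch, not the Alink source: shows how a retract-aware
// OutputFormat can dispatch on the Boolean flag of a retract stream.
public abstract class RetractAwareOutputFormat implements OutputFormat<Tuple2<Boolean, Row>> {

    @Override
    public void configure(Configuration parameters) {
    }

    @Override
    public void open(int taskNumber, int numTasks) throws IOException {
        // open a JDBC connection and prepare upsert/delete statements here
    }

    @Override
    public void writeRecord(Tuple2<Boolean, Row> record) throws IOException {
        if (record.f0) {
            upsert(record.f1);   // true  -> the row was added or updated
        } else {
            delete(record.f1);   // false -> the row was retracted
        }
    }

    @Override
    public void close() throws IOException {
        // flush pending batches and close the connection here
    }

    // stand-ins for real JDBC statements keyed on the primary key columns
    protected abstract void upsert(Row row) throws IOException;

    protected abstract void delete(Row row) throws IOException;
}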
Example 5
Source File: EmulatedPubSubSourceTest.java From flink with Apache License 2.0
@Test
public void testFlinkSource() throws Exception {
    // Create some messages and put them into pubsub
    List<String> input = Arrays.asList("One", "Two", "Three", "Four", "Five",
        "Six", "Seven", "Eight", "Nine", "Ten");

    List<String> messagesToSend = new ArrayList<>(input);
    messagesToSend.add("End");

    // Publish the messages into PubSub
    Publisher publisher = pubsubHelper.createPublisher(PROJECT_NAME, TOPIC_NAME);
    messagesToSend.forEach(s -> {
        try {
            publisher
                .publish(PubsubMessage.newBuilder()
                    .setData(ByteString.copyFromUtf8(s))
                    .build())
                .get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
        }
    });

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.enableCheckpointing(1000);
    env.setParallelism(1);
    env.setRestartStrategy(RestartStrategies.noRestart());

    DataStream<String> fromPubSub = env
        .addSource(PubSubSource.newBuilder()
            .withDeserializationSchema(new BoundedStringDeserializer(10))
            .withProjectName(PROJECT_NAME)
            .withSubscriptionName(SUBSCRIPTION_NAME)
            .withCredentials(NoCredentials.getInstance())
            .withPubSubSubscriberFactory(new PubSubSubscriberFactoryForEmulator(
                getPubSubHostPort(), PROJECT_NAME, SUBSCRIPTION_NAME, 10, Duration.ofSeconds(15), 100))
            .build())
        .name("PubSub source");

    List<String> output = new ArrayList<>();
    fromPubSub.writeUsingOutputFormat(new LocalCollectionOutputFormat<>(output));

    env.execute();

    assertEquals("Wrong number of elements", input.size(), output.size());
    for (String test : input) {
        assertTrue("Missing " + test, output.contains(test));
    }
}
Example 6
Source File: MockAppendStreamTableSink.java From AthenaX with Apache License 2.0
@Override
public void emitDataStream(DataStream<Row> dataStream) {
    dataStream.writeUsingOutputFormat(new LocalCollectionOutputFormat<>(rows));
}
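The LocalCollectionOutputFormat used in Examples 5 and 6 writes every record into an in-memory collection, which makes it convenient for tests: after env.execute() returns, the test can inspect the collection directly. It only works when the job runs in the same JVM as the collection, i.e. with local execution. A minimal standalone sketch of the pattern (not taken from the projects above):

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.java.io.LocalCollectionOutputFormat;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class CollectResultsSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // the sink appends every record to this list as the job runs;
        // this only works for local execution in the same JVM
        List<String> results = new ArrayList<>();

        env.fromElements("a", "b", "c")
            .writeUsingOutputFormat(new LocalCollectionOutputFormat<>(results));

        // execute() blocks until the job finishes, so results is complete afterwards
        env.execute("collect to local list");

        System.out.println(results); // prints [a, b, c]
    }
}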