org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator Java Examples
The following examples show how to use
org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator.
Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
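Before the examples, a minimal sketch of what this type offers: every single-output transformation (map, filter, process, and so on) returns a SingleOutputStreamOperator, which adds operator-level configuration on top of DataStream. The pipeline below is hypothetical and not taken from any project on this page; only the Flink API calls themselves are standard.

// A minimal sketch: configuring the operator returned by map().
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class SingleOutputStreamOperatorSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        SingleOutputStreamOperator<Integer> lengths = env
            .fromElements("a", "bb", "ccc")
            .map(new MapFunction<String, Integer>() {
                @Override
                public Integer map(String value) {
                    return value.length();
                }
            })
            .name("string-length")   // operator name shown in the web UI
            .uid("string-length")    // stable id used when restoring from savepoints
            .setParallelism(2)       // operator-level parallelism
            .startNewChain();        // break operator chaining in front of this operator

        lengths.print();
        env.execute("SingleOutputStreamOperator sketch");
    }
}

Setting uid() explicitly, as several of the restore tests below also do, is what keeps operator state addressable across savepoint restores.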
Example #1
Source File: ChainBreakTest.java From flink with Apache License 2.0
@Override
public void createRestoredJob(StreamExecutionEnvironment env) {
    /**
     * Original job: Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map -> StatefulMap3)
     * Modified job: Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map) -> StatefulMap3
     */
    DataStream<Integer> source = createSource(env, ExecutionMode.RESTORE);

    SingleOutputStreamOperator<Integer> first = createFirstStatefulMap(ExecutionMode.RESTORE, source);
    first.startNewChain();

    SingleOutputStreamOperator<Integer> second = createSecondStatefulMap(ExecutionMode.RESTORE, first);
    second.startNewChain();

    SingleOutputStreamOperator<Integer> stateless = createStatelessMap(second);

    SingleOutputStreamOperator<Integer> third = createThirdStatefulMap(ExecutionMode.RESTORE, stateless);
    third.startNewChain();
}
Example #2
Source File: CsvTableSink.java From flink with Apache License 2.0
@Override
public DataStreamSink<?> consumeDataStream(DataStream<Row> dataStream) {
    SingleOutputStreamOperator<String> csvRows =
        dataStream.map(new CsvFormatter(fieldDelim == null ? "," : fieldDelim));

    DataStreamSink<String> sink;
    if (writeMode != null) {
        sink = csvRows.writeAsText(path, writeMode);
    } else {
        sink = csvRows.writeAsText(path);
    }

    if (numFiles > 0) {
        csvRows.setParallelism(numFiles);
        sink.setParallelism(numFiles);
    } else {
        // if file number is not set, use input parallelism to make it chained.
        csvRows.setParallelism(dataStream.getParallelism());
        sink.setParallelism(dataStream.getParallelism());
    }

    sink.name(TableConnectorUtils.generateRuntimeName(CsvTableSink.class, fieldNames));
    return sink;
}
Example #3
Source File: ChainLengthDecreaseTest.java From Flink-CEPplus with Apache License 2.0
@Override
public void createRestoredJob(StreamExecutionEnvironment env) {
    /**
     * Original job: Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map -> StatefulMap3)
     * Modified job: Source -> StatefulMap1 -> CHAIN(Map -> StatefulMap3)
     */
    DataStream<Integer> source = createSource(env, ExecutionMode.RESTORE);

    SingleOutputStreamOperator<Integer> first = createFirstStatefulMap(ExecutionMode.RESTORE, source);
    first.startNewChain();

    SingleOutputStreamOperator<Integer> stateless = createStatelessMap(first);
    stateless.startNewChain();

    SingleOutputStreamOperator<Integer> third = createThirdStatefulMap(ExecutionMode.RESTORE, stateless);
}
Example #4
Source File: WordCountIntegrationTest.java From tutorials with MIT License
@Test
public void givenStreamOfEvents_whenProcessEvents_thenShouldApplyWindowingOnTransformation() throws Exception {
    // given
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    SingleOutputStreamOperator<Tuple2<Integer, Long>> windowed = env
        .fromElements(
            new Tuple2<>(16, ZonedDateTime.now().plusMinutes(25).toInstant().getEpochSecond()),
            new Tuple2<>(15, ZonedDateTime.now().plusMinutes(2).toInstant().getEpochSecond()))
        .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<Tuple2<Integer, Long>>(Time.seconds(20)) {
            @Override
            public long extractTimestamp(Tuple2<Integer, Long> element) {
                return element.f1 * 1000;
            }
        });

    SingleOutputStreamOperator<Tuple2<Integer, Long>> reduced = windowed
        .windowAll(TumblingEventTimeWindows.of(Time.seconds(5)))
        .maxBy(0, true);

    reduced.print();

    // when
    env.execute();
}
Example #5
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    // set parallelism to 1
    env.setParallelism(1);
    // env.setParallelism(4);

    SingleOutputStreamOperator<Word> data = env.socketTextStream("localhost", 9001)
        .map(new MapFunction<String, Word>() {
            @Override
            public Word map(String value) throws Exception {
                String[] split = value.split(",");
                return new Word(split[0], Integer.valueOf(split[1]), Long.valueOf(split[2]));
            }
        });

    // punctuated watermark
    data.assignTimestampsAndWatermarks(new WordPunctuatedWatermark());

    data.print();
    env.execute("watermark demo");
}
Example #6
Source File: ChainUnionTest.java From Flink-CEPplus with Apache License 2.0
@Override
public void createRestoredJob(StreamExecutionEnvironment env) {
    /**
     * Original job: Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map -> StatefulMap3)
     * Modified job: Source -> CHAIN(StatefulMap1 -> StatefulMap2 -> Map -> StatefulMap3)
     */
    DataStream<Integer> source = createSource(env, ExecutionMode.RESTORE);

    SingleOutputStreamOperator<Integer> first = createFirstStatefulMap(ExecutionMode.RESTORE, source);
    first.startNewChain();

    SingleOutputStreamOperator<Integer> second = createSecondStatefulMap(ExecutionMode.RESTORE, first);

    SingleOutputStreamOperator<Integer> stateless = createStatelessMap(second);

    SingleOutputStreamOperator<Integer> third = createThirdStatefulMap(ExecutionMode.RESTORE, stateless);
}
Example #7
Source File: AbstractNonKeyedOperatorRestoreTestBase.java From Flink-CEPplus with Apache License 2.0
@Override
public void createMigrationJob(StreamExecutionEnvironment env) {
    /**
     * Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map -> StatefulMap3)
     */
    DataStream<Integer> source = createSource(env, ExecutionMode.MIGRATE);

    SingleOutputStreamOperator<Integer> first = createFirstStatefulMap(ExecutionMode.MIGRATE, source);
    first.startNewChain();

    SingleOutputStreamOperator<Integer> second = createSecondStatefulMap(ExecutionMode.MIGRATE, first);
    second.startNewChain();

    SingleOutputStreamOperator<Integer> stateless = createStatelessMap(second);

    SingleOutputStreamOperator<Integer> third = createThirdStatefulMap(ExecutionMode.MIGRATE, stateless);
}
Example #8
Source File: StateDescriptorPassingTest.java From flink with Apache License 2.0
@Test
public void testApplyWindowAllState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
        .timeWindowAll(Time.milliseconds(1000))
        .apply(new AllWindowFunction<File, String, TimeWindow>() {
            @Override
            public void apply(TimeWindow window, Iterable<File> input, Collector<String> out) {}
        });

    validateListStateDescriptorConfigured(result);
}
Example #9
Source File: StateDescriptorPassingTest.java From flink with Apache License 2.0
@Test
public void testReduceWindowState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
        .keyBy(new KeySelector<File, String>() {
            @Override
            public String getKey(File value) {
                return null;
            }
        })
        .timeWindow(Time.milliseconds(1000))
        .reduce(new ReduceFunction<File>() {
            @Override
            public File reduce(File value1, File value2) {
                return null;
            }
        });

    validateStateDescriptorConfigured(result);
}
Example #10
Source File: PatternStream.java From Flink-CEPplus with Apache License 2.0
/**
 * Applies a process function to the detected pattern sequence. For each pattern sequence the
 * provided {@link PatternProcessFunction} is called. In order to process timed out partial matches as well one can
 * use {@link TimedOutPartialMatchHandler} as additional interface.
 *
 * @param patternProcessFunction The pattern process function which is called for each detected
 *                               pattern sequence.
 * @param <R> Type of the resulting elements
 * @param outTypeInfo Explicit specification of output type.
 * @return {@link DataStream} which contains the resulting elements from the pattern process
 *         function.
 */
public <R> SingleOutputStreamOperator<R> process(
        final PatternProcessFunction<T, R> patternProcessFunction,
        final TypeInformation<R> outTypeInfo) {
    // This call builds the actual NFA factory, which contains the NFA state.
    // First check whether a listener was registered on the client side; if so, inject it.
    // ------------
    if (hasListener) {
        patternProcessFunction.registerListener(cepListener);
    }
    // ------------
    return builder.build(
        outTypeInfo,
        builder.clean(patternProcessFunction));
}
Example #11
Source File: PatternStream.java From flink with Apache License 2.0
/**
 * Applies a select function to the detected pattern sequence. For each pattern sequence the
 * provided {@link PatternSelectFunction} is called. The pattern select function can produce
 * exactly one resulting element.
 *
 * @param patternSelectFunction The pattern select function which is called for each detected
 *                              pattern sequence.
 * @param <R> Type of the resulting elements
 * @return {@link DataStream} which contains the resulting elements from the pattern select
 *         function.
 */
public <R> SingleOutputStreamOperator<R> select(final PatternSelectFunction<T, R> patternSelectFunction) {
    // we have to extract the output type from the provided pattern selection function manually
    // because the TypeExtractor cannot do that if the method is wrapped in a MapFunction
    final TypeInformation<R> returnType = TypeExtractor.getUnaryOperatorReturnType(
        patternSelectFunction,
        PatternSelectFunction.class,
        0,
        1,
        TypeExtractor.NO_INDEX,
        builder.getInputType(),
        null,
        false);

    return select(patternSelectFunction, returnType);
}
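For context, here is a hedged usage sketch of select() from the caller's side, assuming the Flink CEP 1.x API; the input elements, the pattern name "start", and the class name PatternSelectSketch are illustrative, not from the original source.

import java.util.List;
import java.util.Map;

import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class PatternSelectSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<String> input = env.fromElements("error", "ok", "error");

        // Match every single "error" element.
        Pattern<String, ?> pattern = Pattern.<String>begin("start")
            .where(new SimpleCondition<String>() {
                @Override
                public boolean filter(String value) {
                    return value.equals("error");
                }
            });

        PatternStream<String> patternStream = CEP.pattern(input, pattern);

        // select() wraps the select function and returns a SingleOutputStreamOperator.
        SingleOutputStreamOperator<String> alerts = patternStream.select(
            new PatternSelectFunction<String, String>() {
                @Override
                public String select(Map<String, List<String>> match) {
                    return "alert: " + match.get("start").get(0);
                }
            });

        alerts.print();
        env.execute("pattern select sketch");
    }
}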
Example #12
Source File: AbstractNonKeyedOperatorRestoreTestBase.java From flink with Apache License 2.0
@Override
public void createMigrationJob(StreamExecutionEnvironment env) {
    /**
     * Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map -> StatefulMap3)
     */
    DataStream<Integer> source = createSource(env, ExecutionMode.MIGRATE);

    SingleOutputStreamOperator<Integer> first = createFirstStatefulMap(ExecutionMode.MIGRATE, source);
    first.startNewChain();

    SingleOutputStreamOperator<Integer> second = createSecondStatefulMap(ExecutionMode.MIGRATE, first);
    second.startNewChain();

    SingleOutputStreamOperator<Integer> stateless = createStatelessMap(second);

    SingleOutputStreamOperator<Integer> third = createThirdStatefulMap(ExecutionMode.MIGRATE, stateless);
}
Example #13
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties props = KafkaConfigUtil.buildKafkaProps(parameterTool);

    SingleOutputStreamOperator<Student> student = env.addSource(new FlinkKafkaConsumer011<>(
            parameterTool.get(METRICS_TOPIC),   // this Kafka topic must match the topic in the config utility above
            new SimpleStringSchema(),
            props)).setParallelism(parameterTool.getInt(STREAM_PARALLELISM, 1))
        .map(string -> GsonUtil.fromJson(string, Student.class)).setParallelism(4); // parse the JSON string into a Student object

    // timeWindowAll can only run with parallelism 1
    student.timeWindowAll(Time.minutes(1)).apply(new AllWindowFunction<Student, List<Student>, TimeWindow>() {
        @Override
        public void apply(TimeWindow window, Iterable<Student> values, Collector<List<Student>> out) throws Exception {
            ArrayList<Student> students = Lists.newArrayList(values);
            if (students.size() > 0) {
                log.info("Number of Student records collected within 1 minute: " + students.size());
                out.collect(students);
            }
        }
    }).addSink(new SinkToMySQL()).setParallelism(parameterTool.getInt(STREAM_SINK_PARALLELISM, 1));

    env.execute("flink learning connectors mysql");
}
Example #14
Source File: StateDescriptorPassingTest.java From flink with Apache License 2.0
@Test
public void testFoldWindowState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<String> src = env.fromElements("abc");

    SingleOutputStreamOperator<?> result = src
        .keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String value) {
                return null;
            }
        })
        .timeWindow(Time.milliseconds(1000))
        .fold(new File("/"), new FoldFunction<String, File>() {
            @Override
            public File fold(File a, String e) {
                return null;
            }
        });

    validateStateDescriptorConfigured(result);
}
Example #15
Source File: ChainLengthIncreaseTest.java From flink with Apache License 2.0
@Override
public void createRestoredJob(StreamExecutionEnvironment env) {
    /**
     * Original job: Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map -> StatefulMap3)
     * Modified job: Source -> StatefulMap1 -> CHAIN(StatefulMap2 -> Map -> StatefulMap3 -> StatefulMap4)
     */
    DataStream<Integer> source = createSource(env, ExecutionMode.RESTORE);

    SingleOutputStreamOperator<Integer> first = createFirstStatefulMap(ExecutionMode.RESTORE, source);
    first.startNewChain();

    SingleOutputStreamOperator<Integer> second = createSecondStatefulMap(ExecutionMode.RESTORE, first);
    second.startNewChain();

    SingleOutputStreamOperator<Integer> stateless = createStatelessMap(second);

    SingleOutputStreamOperator<Integer> stateless2 = createStatelessMap(stateless);

    SingleOutputStreamOperator<Integer> third = createThirdStatefulMap(ExecutionMode.RESTORE, stateless2);
}
Example #16
Source File: StateDescriptorPassingTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testProcessAllWindowState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
        .timeWindowAll(Time.milliseconds(1000))
        .process(new ProcessAllWindowFunction<File, String, TimeWindow>() {
            @Override
            public void process(Context ctx, Iterable<File> input, Collector<String> out) {}
        });

    validateListStateDescriptorConfigured(result);
}
Example #17
Source File: StateDescriptorPassingTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testProcessWindowState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
        .keyBy(new KeySelector<File, String>() {
            @Override
            public String getKey(File value) {
                return null;
            }
        })
        .timeWindow(Time.milliseconds(1000))
        .process(new ProcessWindowFunction<File, String, String, TimeWindow>() {
            @Override
            public void process(String s, Context ctx, Iterable<File> input, Collector<String> out) {}
        });

    validateListStateDescriptorConfigured(result);
}
Example #18
Source File: StateDescriptorPassingTest.java From flink with Apache License 2.0
@Test
public void testProcessWindowState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
        .keyBy(new KeySelector<File, String>() {
            @Override
            public String getKey(File value) {
                return null;
            }
        })
        .timeWindow(Time.milliseconds(1000))
        .process(new ProcessWindowFunction<File, String, String, TimeWindow>() {
            @Override
            public void process(String s, Context ctx, Iterable<File> input, Collector<String> out) {}
        });

    validateListStateDescriptorConfigured(result);
}
Example #19
Source File: StateDescriptorPassingTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testReduceWindowState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
        .keyBy(new KeySelector<File, String>() {
            @Override
            public String getKey(File value) {
                return null;
            }
        })
        .timeWindow(Time.milliseconds(1000))
        .reduce(new ReduceFunction<File>() {
            @Override
            public File reduce(File value1, File value2) {
                return null;
            }
        });

    validateStateDescriptorConfigured(result);
}
Example #20
Source File: StateDescriptorPassingTest.java From flink with Apache License 2.0
@Test
public void testProcessAllWindowState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
        .timeWindowAll(Time.milliseconds(1000))
        .process(new ProcessAllWindowFunction<File, String, TimeWindow>() {
            @Override
            public void process(Context ctx, Iterable<File> input, Collector<String> out) {}
        });

    validateListStateDescriptorConfigured(result);
}
Example #21
Source File: Main.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties props = KafkaConfigUtil.buildKafkaProps(parameterTool);

    SingleOutputStreamOperator<MetricEvent> metricData = env.addSource(new FlinkKafkaConsumer011<>(
            parameterTool.get(METRICS_TOPIC),
            new SimpleStringSchema(),
            props)).setParallelism(1)
        .map(string -> GsonUtil.fromJson(string, MetricEvent.class));

    metricData.print();

    CheckPointUtil.setCheckpointConfig(env, parameterTool)
        .execute("zhisheng --- checkpoint config example");
}
Example #22
Source File: DataStreamConversionUtil.java From flink with Apache License 2.0
/**
 * Convert the given DataStream to Table with specified colNames and colTypes.
 *
 * @param session the MLEnvironment used to convert the DataStream to a Table.
 * @param data the DataStream to convert.
 * @param colNames the specified colNames.
 * @param colTypes the specified colTypes. This variable is used only when the
 *                 DataStream is produced by a function and Flink cannot determine
 *                 automatically what the produced type is.
 * @return the converted Table.
 */
public static Table toTable(MLEnvironment session, DataStream<Row> data, String[] colNames, TypeInformation<?>[] colTypes) {
    try {
        if (null != colTypes) {
            // Try to add row type information for the datastream to be converted.
            // In most cases, this keeps us from the rolling-back logic in the catch block,
            // which adds an unnecessary map function just in order to add row type information.
            if (data instanceof SingleOutputStreamOperator) {
                ((SingleOutputStreamOperator) data).returns(new RowTypeInfo(colTypes, colNames));
            }
        }
        return toTable(session, data, colNames);
    } catch (ValidationException ex) {
        if (null == colTypes) {
            throw ex;
        } else {
            DataStream<Row> t = fallbackToExplicitTypeDefine(data, colNames, colTypes);
            return toTable(session, t, colNames);
        }
    }
}
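The colTypes branch above relies on SingleOutputStreamOperator#returns to attach explicit row type information. Below is a minimal sketch of that trick in isolation, assuming standard Flink 1.x APIs; the pipeline, the class name ReturnsRowTypeSketch, and the sample data are hypothetical.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.Row;

public class ReturnsRowTypeSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        TypeInformation<?>[] colTypes = {Types.STRING, Types.INT};
        String[] colNames = {"name", "score"};

        // The map lambda produces Row, whose field types Flink cannot infer on its own;
        // returns(...) attaches them explicitly, like the instanceof branch in the utility above.
        SingleOutputStreamOperator<Row> rows = env
            .fromElements("alice,1", "bob,2")
            .map(s -> {
                String[] parts = s.split(",");
                return Row.of(parts[0], Integer.valueOf(parts[1]));
            })
            .returns(new RowTypeInfo(colTypes, colNames));

        rows.print();
        env.execute("returns RowTypeInfo sketch");
    }
}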
Example #23
Source File: SideOutputITCase.java From flink with Apache License 2.0
@Test
public void testSideOutputWithMultipleConsumersWithObjectReuse() throws Exception {
    final OutputTag<String> sideOutputTag = new OutputTag<String>("side"){};

    TestListResultSink<String> sideOutputResultSink1 = new TestListResultSink<>();
    TestListResultSink<String> sideOutputResultSink2 = new TestListResultSink<>();
    TestListResultSink<Integer> resultSink = new TestListResultSink<>();

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();
    env.setParallelism(3);

    DataStream<Integer> dataStream = env.fromCollection(elements);

    SingleOutputStreamOperator<Integer> passThroughtStream = dataStream
        .process(new ProcessFunction<Integer, Integer>() {
            private static final long serialVersionUID = 1L;

            @Override
            public void processElement(Integer value, Context ctx, Collector<Integer> out) throws Exception {
                out.collect(value);
                ctx.output(sideOutputTag, "sideout-" + String.valueOf(value));
            }
        });

    passThroughtStream.getSideOutput(sideOutputTag).addSink(sideOutputResultSink1);
    passThroughtStream.getSideOutput(sideOutputTag).addSink(sideOutputResultSink2);
    passThroughtStream.addSink(resultSink);
    env.execute();

    assertEquals(Arrays.asList("sideout-1", "sideout-2", "sideout-3", "sideout-4", "sideout-5"), sideOutputResultSink1.getSortedResult());
    assertEquals(Arrays.asList("sideout-1", "sideout-2", "sideout-3", "sideout-4", "sideout-5"), sideOutputResultSink2.getSortedResult());
    assertEquals(Arrays.asList(1, 2, 3, 4, 5), resultSink.getSortedResult());
}
Example #24
Source File: SideOutputITCase.java From flink with Apache License 2.0
/**
 * Test ProcessFunction side output.
 */
@Test
public void testProcessFunctionSideOutput() throws Exception {
    final OutputTag<String> sideOutputTag = new OutputTag<String>("side"){};

    TestListResultSink<String> sideOutputResultSink = new TestListResultSink<>();
    TestListResultSink<Integer> resultSink = new TestListResultSink<>();

    StreamExecutionEnvironment see = StreamExecutionEnvironment.getExecutionEnvironment();
    see.setParallelism(3);

    DataStream<Integer> dataStream = see.fromCollection(elements);

    SingleOutputStreamOperator<Integer> passThroughtStream = dataStream
        .process(new ProcessFunction<Integer, Integer>() {
            private static final long serialVersionUID = 1L;

            @Override
            public void processElement(Integer value, Context ctx, Collector<Integer> out) throws Exception {
                out.collect(value);
                ctx.output(sideOutputTag, "sideout-" + String.valueOf(value));
            }
        });

    passThroughtStream.getSideOutput(sideOutputTag).addSink(sideOutputResultSink);
    passThroughtStream.addSink(resultSink);
    see.execute();

    assertEquals(Arrays.asList("sideout-1", "sideout-2", "sideout-3", "sideout-4", "sideout-5"), sideOutputResultSink.getSortedResult());
    assertEquals(Arrays.asList(1, 2, 3, 4, 5), resultSink.getSortedResult());
}
Example #25
Source File: KeyedJob.java From flink with Apache License 2.0
public static SingleOutputStreamOperator<Integer> createSecondStatefulMap(ExecutionMode mode, DataStream<Integer> input) {
    SingleOutputStreamOperator<Integer> map = input
        .map(new StatefulStringStoringMap(mode, "second"))
        .setParallelism(4)
        .uid("second");

    return map;
}
Example #26
Source File: SideOutputITCase.java From flink with Apache License 2.0
@Test
public void testSideOutputWithMultipleConsumers() throws Exception {
    final OutputTag<String> sideOutputTag = new OutputTag<String>("side"){};

    TestListResultSink<String> sideOutputResultSink1 = new TestListResultSink<>();
    TestListResultSink<String> sideOutputResultSink2 = new TestListResultSink<>();
    TestListResultSink<Integer> resultSink = new TestListResultSink<>();

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(3);

    DataStream<Integer> dataStream = env.fromCollection(elements);

    SingleOutputStreamOperator<Integer> passThroughtStream = dataStream
        .process(new ProcessFunction<Integer, Integer>() {
            private static final long serialVersionUID = 1L;

            @Override
            public void processElement(Integer value, Context ctx, Collector<Integer> out) throws Exception {
                out.collect(value);
                ctx.output(sideOutputTag, "sideout-" + String.valueOf(value));
            }
        });

    passThroughtStream.getSideOutput(sideOutputTag).addSink(sideOutputResultSink1);
    passThroughtStream.getSideOutput(sideOutputTag).addSink(sideOutputResultSink2);
    passThroughtStream.addSink(resultSink);
    env.execute();

    assertEquals(Arrays.asList("sideout-1", "sideout-2", "sideout-3", "sideout-4", "sideout-5"), sideOutputResultSink1.getSortedResult());
    assertEquals(Arrays.asList("sideout-1", "sideout-2", "sideout-3", "sideout-4", "sideout-5"), sideOutputResultSink2.getSortedResult());
    assertEquals(Arrays.asList(1, 2, 3, 4, 5), resultSink.getSortedResult());
}
Example #27
Source File: FlinkUniverse.java From stateful-functions with Apache License 2.0
private SingleOutputStreamOperator<Message> functionOperator(
    DataStream<Message> input, Map<EgressIdentifier<?>, OutputTag<Object>> sideOutputs) {

    TypeInformation<Message> typeInfo = input.getType();
    FunctionGroupDispatchFactory operatorFactory = new FunctionGroupDispatchFactory(sideOutputs);

    return DataStreamUtils.reinterpretAsKeyedStream(input, new MessageKeySelector())
        .transform(StatefulFunctionsJobConstants.FUNCTION_OPERATOR_NAME, typeInfo, operatorFactory)
        .uid(StatefulFunctionsJobConstants.FUNCTION_OPERATOR_UID);
}
Example #28
Source File: StateDescriptorPassingTest.java From Flink-CEPplus with Apache License 2.0
private void validateStateDescriptorConfigured(SingleOutputStreamOperator<?> result) {
    OneInputTransformation<?, ?> transform = (OneInputTransformation<?, ?>) result.getTransformation();
    WindowOperator<?, ?, ?, ?, ?> op = (WindowOperator<?, ?, ?, ?, ?>) transform.getOperator();
    StateDescriptor<?, ?> descr = op.getStateDescriptor();

    // this would be the first statement to fail if state descriptors were not properly initialized
    TypeSerializer<?> serializer = descr.getSerializer();
    assertTrue(serializer instanceof KryoSerializer);

    Kryo kryo = ((KryoSerializer<?>) serializer).getKryo();

    assertTrue("serializer registration was not properly passed on",
        kryo.getSerializer(File.class) instanceof JavaSerializer);
}