org.apache.flink.util.OutputTag Java Examples
The following examples show how to use
org.apache.flink.util.OutputTag.
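Before the examples, here is a minimal end-to-end sketch of the usual OutputTag pattern: the tag is created as an anonymous subclass so its element type is captured, records are routed to it from a ProcessFunction via ctx.output, and the side-output stream is read back with getSideOutput. The class and tag names below (SideOutputSketch, REJECTED) are illustrative only and are not taken from any example on this page.

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

public class SideOutputSketch {

    // created as an anonymous subclass so Flink can capture the element type
    static final OutputTag<String> REJECTED = new OutputTag<String>("rejected") {};

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        SingleOutputStreamOperator<Integer> parsed = env
            .fromElements("1", "2", "x", "3")
            .process(new ProcessFunction<String, Integer>() {
                @Override
                public void processElement(String value, Context ctx, Collector<Integer> out) {
                    try {
                        out.collect(Integer.parseInt(value));  // main output
                    } catch (NumberFormatException e) {
                        ctx.output(REJECTED, value);           // routed to the side output via the OutputTag
                    }
                }
            });

        DataStream<String> rejected = parsed.getSideOutput(REJECTED);
        rejected.print();

        env.execute("side-output sketch");
    }
}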
Example #1
Source File: RecordWriterOutput.java From flink with Apache License 2.0
@SuppressWarnings("unchecked")
public RecordWriterOutput(
        RecordWriter<SerializationDelegate<StreamRecord<OUT>>> recordWriter,
        TypeSerializer<OUT> outSerializer,
        OutputTag outputTag,
        StreamStatusProvider streamStatusProvider) {

    checkNotNull(recordWriter);
    this.outputTag = outputTag;

    // generic hack: cast the writer to generic Object type so we can use it
    // with multiplexed records and watermarks
    this.recordWriter = (RecordWriter<SerializationDelegate<StreamElement>>)
            (RecordWriter<?>) recordWriter;

    TypeSerializer<StreamElement> outRecordSerializer =
            new StreamElementSerializer<>(outSerializer);

    if (outSerializer != null) {
        serializationDelegate = new SerializationDelegate<StreamElement>(outRecordSerializer);
    }

    this.streamStatusProvider = checkNotNull(streamStatusProvider);
}
Example #2
Source File: SingleOutputStreamOperator.java From flink with Apache License 2.0
/**
 * Gets the {@link DataStream} that contains the elements that are emitted from an operation
 * into the side output with the given {@link OutputTag}.
 *
 * @see org.apache.flink.streaming.api.functions.ProcessFunction.Context#output(OutputTag, Object)
 */
public <X> DataStream<X> getSideOutput(OutputTag<X> sideOutputTag) {
    if (wasSplitApplied) {
        throw new UnsupportedOperationException("getSideOutput() and split() may not be called on the same DataStream. " +
            "As a work-around, please add a no-op map function before the split() call.");
    }

    sideOutputTag = clean(requireNonNull(sideOutputTag));

    // make a defensive copy
    sideOutputTag = new OutputTag<X>(sideOutputTag.getId(), sideOutputTag.getTypeInfo());

    TypeInformation<?> type = requestedSideOutputs.get(sideOutputTag);
    if (type != null && !type.equals(sideOutputTag.getTypeInfo())) {
        throw new UnsupportedOperationException("A side output with a matching id was " +
            "already requested with a different type. This is not allowed, side output " +
            "ids need to be unique.");
    }

    requestedSideOutputs.put(sideOutputTag, sideOutputTag.getTypeInfo());

    SideOutputTransformation<X> sideOutputTransformation =
            new SideOutputTransformation<>(this.getTransformation(), sideOutputTag);

    return new DataStream<>(this.getExecutionEnvironment(), sideOutputTransformation);
}
Example #3
Source File: StreamEdge.java From flink with Apache License 2.0
public StreamEdge(StreamNode sourceVertex, StreamNode targetVertex, int typeNumber,
        List<String> selectedNames, StreamPartitioner<?> outputPartitioner, OutputTag outputTag,
        ShuffleMode shuffleMode) {

    this.sourceId = sourceVertex.getId();
    this.targetId = targetVertex.getId();
    this.typeNumber = typeNumber;
    this.selectedNames = selectedNames;
    this.outputPartitioner = outputPartitioner;
    this.outputTag = outputTag;
    this.sourceOperatorName = sourceVertex.getOperatorName();
    this.targetOperatorName = targetVertex.getOperatorName();
    this.shuffleMode = checkNotNull(shuffleMode);
    this.edgeId = sourceVertex + "_" + targetVertex + "_" + typeNumber + "_" + selectedNames + "_" + outputPartitioner;
}
Example #4
Source File: CepOperator.java From flink with Apache License 2.0
public CepOperator(
        final TypeSerializer<IN> inputSerializer,
        final boolean isProcessingTime,
        final NFACompiler.NFAFactory<IN> nfaFactory,
        @Nullable final EventComparator<IN> comparator,
        @Nullable final AfterMatchSkipStrategy afterMatchSkipStrategy,
        final PatternProcessFunction<IN, OUT> function,
        @Nullable final OutputTag<IN> lateDataOutputTag) {
    super(function);

    this.inputSerializer = Preconditions.checkNotNull(inputSerializer);
    this.nfaFactory = Preconditions.checkNotNull(nfaFactory);
    this.isProcessingTime = isProcessingTime;
    this.comparator = comparator;
    this.lateDataOutputTag = lateDataOutputTag;

    if (afterMatchSkipStrategy == null) {
        this.afterMatchSkipStrategy = AfterMatchSkipStrategy.noSkip();
    } else {
        this.afterMatchSkipStrategy = afterMatchSkipStrategy;
    }
}
Example #5
Source File: EvictingWindowOperator.java From flink with Apache License 2.0
public EvictingWindowOperator(WindowAssigner<? super IN, W> windowAssigner,
        TypeSerializer<W> windowSerializer,
        KeySelector<IN, K> keySelector,
        TypeSerializer<K> keySerializer,
        StateDescriptor<? extends ListState<StreamRecord<IN>>, ?> windowStateDescriptor,
        InternalWindowFunction<Iterable<IN>, OUT, K, W> windowFunction,
        Trigger<? super IN, ? super W> trigger,
        Evictor<? super IN, ? super W> evictor,
        long allowedLateness,
        OutputTag<IN> lateDataOutputTag) {

    super(windowAssigner, windowSerializer, keySelector, keySerializer,
        null, windowFunction, trigger, allowedLateness, lateDataOutputTag);

    this.evictor = checkNotNull(evictor);
    this.evictingWindowStateDescriptor = checkNotNull(windowStateDescriptor);
}
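The lateDataOutputTag passed to window operators like the one above is normally supplied through the DataStream windowing API. The following is a hedged sketch of that wiring; the tag name "late-data" and the helper method are assumptions, and the input stream is assumed to already carry event-time timestamps and watermarks.

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;

public class LateDataSketch {

    // anonymous subclass so the element type is captured
    static final OutputTag<String> LATE = new OutputTag<String>("late-data") {};

    /** Windows the input and returns the stream of records that arrived too late. */
    static DataStream<String> lateRecords(DataStream<String> input) {
        SingleOutputStreamOperator<String> windowed = input
            .keyBy(value -> value)
            .window(TumblingEventTimeWindows.of(Time.minutes(1)))
            .allowedLateness(Time.seconds(30))
            .sideOutputLateData(LATE)   // too-late records go to the side output instead of being dropped
            .reduce((a, b) -> a + "," + b);

        return windowed.getSideOutput(LATE);
    }
}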
Example #6
Source File: CepOperatorBuilder.java From flink with Apache License 2.0
private CepOperatorBuilder(
        boolean isProcessingTime,
        NFACompiler.NFAFactory<Event> nfaFactory,
        EventComparator<Event> comparator,
        AfterMatchSkipStrategy skipStrategy,
        PatternProcessFunction<Event, OUT> processFunction,
        OutputTag<Event> lateDataOutputTag) {
    this.isProcessingTime = isProcessingTime;
    this.nfaFactory = nfaFactory;
    this.comparator = comparator;
    this.skipStrategy = skipStrategy;
    function = processFunction;
    this.lateDataOutputTag = lateDataOutputTag;
}
Example #7
Source File: RecordWriterOutput.java From flink with Apache License 2.0
@Override
public <X> void collect(OutputTag<X> outputTag, StreamRecord<X> record) {
    if (this.outputTag == null || !this.outputTag.equals(outputTag)) {
        // we are only responsible for emitting to the side-output specified by our
        // OutputTag.
        return;
    }

    pushToRecordWriter(record);
}
Example #8
Source File: CoProcessOperator.java From flink with Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
    if (outputTag == null) {
        throw new IllegalArgumentException("OutputTag must not be null.");
    }

    output.collect(outputTag, new StreamRecord<>(value, element.getTimestamp()));
}
Example #9
Source File: Sinks.java From flink-statefun with Apache License 2.0
private Sinks(
        Map<EgressIdentifier<?>, OutputTag<Object>> sideOutputs,
        Map<EgressIdentifier<?>, DecoratedSink> sinks) {
    this.sideOutputs = Objects.requireNonNull(sideOutputs);
    this.sinks = Objects.requireNonNull(sinks);
}
Example #10
Source File: BroadcastStateTransformationTest.java From bravo with Apache License 2.0
public DataStream<String> constructTestPipeline(DataStream<String> source) {

    OutputTag<Integer> filtered = new OutputTag<>("filter", BasicTypeInfo.INT_TYPE_INFO);
    OutputTag<Integer> process = new OutputTag<>("process", BasicTypeInfo.INT_TYPE_INFO);

    SingleOutputStreamOperator<String> input = source.process(new ProcessFunction<String, String>() {
        private static final long serialVersionUID = 1L;

        @Override
        public void processElement(String s, Context ctx, Collector<String> out) throws Exception {
            if (s.startsWith("filter ")) {
                ctx.output(filtered, Integer.parseInt(s.substring(7)));
            } else if (s.startsWith("process ")) {
                ctx.output(process, Integer.parseInt(s.substring(8)));
            } else {
                throw new RuntimeException("oOoO");
            }
        }
    });

    BroadcastStream<Integer> broadcast = input.getSideOutput(filtered).broadcast(bcstate);

    return input.getSideOutput(process).keyBy(i -> i).connect(broadcast).process(new BroadcastProcessor(bcstate))
            .uid("stateful");
}
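Note that this test also illustrates the recommended migration away from the deprecated DataStream.split(): each category gets its own OutputTag, elements are routed with ctx.output(...), and the per-category streams are recovered with getSideOutput(...), which is exactly the restriction enforced in Example #2 above.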
Example #11
Source File: RegularWindowOperatorContractTest.java From flink with Apache License 2.0
@Override
protected <W extends Window, OUT> KeyedOneInputStreamOperatorTestHarness<Integer, Integer, OUT> createWindowOperator(
        WindowAssigner<Integer, W> assigner,
        Trigger<Integer, W> trigger,
        long allowedLatenss,
        InternalWindowFunction<Iterable<Integer>, OUT, Integer, W> windowFunction,
        OutputTag<Integer> lateOutputTag) throws Exception {

    KeySelector<Integer, Integer> keySelector = new KeySelector<Integer, Integer>() {
        private static final long serialVersionUID = 1L;

        @Override
        public Integer getKey(Integer value) throws Exception {
            return value;
        }
    };

    ListStateDescriptor<Integer> intListDescriptor =
            new ListStateDescriptor<>("int-list", IntSerializer.INSTANCE);

    @SuppressWarnings("unchecked")
    WindowOperator<Integer, Integer, Iterable<Integer>, OUT, W> operator = new WindowOperator<>(
            assigner,
            assigner.getWindowSerializer(new ExecutionConfig()),
            keySelector,
            IntSerializer.INSTANCE,
            intListDescriptor,
            windowFunction,
            trigger,
            allowedLatenss,
            lateOutputTag);

    return new KeyedOneInputStreamOperatorTestHarness<>(
            operator,
            keySelector,
            BasicTypeInfo.INT_TYPE_INFO);
}
Example #12
Source File: PatternTimeoutFlatSelectAdapter.java From flink with Apache License 2.0
public PatternTimeoutFlatSelectAdapter(
        PatternFlatSelectFunction<IN, OUT> flatSelectFunction,
        PatternFlatTimeoutFunction<IN, T> flatTimeoutFunction,
        OutputTag<T> timedOutPartialMatchesTag) {
    super(flatSelectFunction);
    this.flatTimeoutFunction = checkNotNull(flatTimeoutFunction);
    this.timedOutPartialMatchesTag = checkNotNull(timedOutPartialMatchesTag);
}
Example #13
Source File: CepProcessFunctionContextTest.java From flink with Apache License 2.0
@Test
public void testCurrentProcessingTimeForTimedOutInEventTime() throws Exception {

    OutputTag<String> sideOutputTag = new OutputTag<String>("timedOut") {};

    try (
        OneInputStreamOperatorTestHarness<Event, String> harness = getCepTestHarness(
            createCepOperator(
                extractCurrentProcessingTimeAndNames(2, sideOutputTag),
                new NFATimingOutFactory(),
                EVENT_TIME))) {
        harness.open();

        // events out of order to test if internal sorting does not mess up the timestamps
        harness.processElement(event().withName("A").withTimestamp(5).asStreamRecord());
        harness.processElement(event().withName("B").withTimestamp(20).asStreamRecord());
        harness.processElement(event().withName("C").withTimestamp(3).asStreamRecord());

        harness.setProcessingTime(100);
        harness.processWatermark(22);

        assertOutput(harness.getOutput())
            .nextElementEquals("100:C:A")
            .watermarkEquals(22)
            .hasNoMoreElements();

        assertOutput(harness.getSideOutput(sideOutputTag))
            .nextElementEquals("100:A")
            .hasNoMoreElements();
    }
}
Example #14
Source File: LegacyKeyedProcessOperator.java From Flink-CEPplus with Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
    if (outputTag == null) {
        throw new IllegalArgumentException("OutputTag must not be null.");
    }

    output.collect(outputTag, new StreamRecord<>(value, element.getTimestamp()));
}
Example #15
Source File: OperatorChain.java From flink with Apache License 2.0
public CopyingChainingOutput(
        OneInputStreamOperator<T, ?> operator,
        TypeSerializer<T> serializer,
        OutputTag<T> outputTag,
        StreamStatusProvider streamStatusProvider) {
    super(operator, streamStatusProvider, outputTag);
    this.serializer = serializer;
}
Example #16
Source File: SideOutputTranslator.java From stateful-functions with Apache License 2.0
private static OutputTag<Object> outputTagFromId(
        EgressIdentifier<?> id, StaticallyRegisteredTypes types) {
    @SuppressWarnings("unchecked")
    EgressIdentifier<Object> casted = (EgressIdentifier<Object>) id;
    String name = String.format("%s.%s", id.namespace(), id.name());
    TypeInformation<Object> typeInformation = types.registerType(casted.consumedType());
    return new OutputTag<>(name, typeInformation);
}
Example #17
Source File: KeyedProcessOperator.java From flink with Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
    if (outputTag == null) {
        throw new IllegalArgumentException("OutputTag must not be null.");
    }

    output.collect(outputTag, new StreamRecord<>(value, element.getTimestamp()));
}
Example #18
Source File: WindowOperator.java From Flink-CEPplus with Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
    if (outputTag == null) {
        throw new IllegalArgumentException("OutputTag must not be null.");
    }

    output.collect(outputTag, new StreamRecord<>(value, window.maxTimestamp()));
}
Example #19
Source File: LegacyKeyedCoProcessOperator.java From flink with Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
    if (outputTag == null) {
        throw new IllegalArgumentException("OutputTag must not be null.");
    }

    output.collect(outputTag, new StreamRecord<>(value, timer.getTimestamp()));
}
Example #20
Source File: LegacyKeyedProcessOperator.java From flink with Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
    if (outputTag == null) {
        throw new IllegalArgumentException("OutputTag must not be null.");
    }

    output.collect(outputTag, new StreamRecord<>(value, timer.getTimestamp()));
}
Example #21
Source File: PatternTimeoutSelectAdapter.java From flink with Apache License 2.0
public PatternTimeoutSelectAdapter(
        final PatternSelectFunction<IN, OUT> selectFunction,
        final PatternTimeoutFunction<IN, T> timeoutFunction,
        final OutputTag<T> timedOutPartialMatchesTag) {
    super(selectFunction);
    this.timeoutFunction = checkNotNull(timeoutFunction);
    this.timedOutPartialMatchesTag = checkNotNull(timedOutPartialMatchesTag);
}
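Adapters like the two above back the timeout-aware select()/flatSelect() methods on PatternStream, where the same OutputTag used for timed-out partial matches is later passed to getSideOutput(). A rough, hedged sketch of that caller side follows; the generic event type E, the tag name, and the lambda bodies are placeholders rather than code from the examples on this page.

import java.util.List;
import java.util.Map;

import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.util.OutputTag;

public class TimedOutMatchesSketch {

    static final OutputTag<String> TIMED_OUT = new OutputTag<String>("timed-out-partial-matches") {};

    /** Applies the pattern and returns the side output holding timed-out partial matches. */
    static <E> DataStream<String> timedOutMatches(DataStream<E> events, Pattern<E, ?> pattern) {
        PatternStream<E> patternStream = CEP.pattern(events, pattern);

        SingleOutputStreamOperator<String> matches = patternStream.select(
            TIMED_OUT,
            (Map<String, List<E>> partialMatch, long timeoutTimestamp) ->
                "timed out at " + timeoutTimestamp,                       // PatternTimeoutFunction
            (Map<String, List<E>> match) -> "matched " + match.keySet()); // PatternSelectFunction

        return matches.getSideOutput(TIMED_OUT);
    }
}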
Example #22
Source File: SideOutputTranslator.java From flink-statefun with Apache License 2.0
private static OutputTag<Object> outputTagFromId(
        EgressIdentifier<?> id, StaticallyRegisteredTypes types) {
    @SuppressWarnings("unchecked")
    EgressIdentifier<Object> casted = (EgressIdentifier<Object>) id;
    String name = String.format("%s.%s", id.namespace(), id.name());
    TypeInformation<Object> typeInformation = types.registerType(casted.consumedType());
    return new OutputTag<>(name, typeInformation);
}
Example #23
Source File: KeyedCoProcessOperator.java From flink with Apache License 2.0
@Override
public <X> void output(OutputTag<X> outputTag, X value) {
    if (outputTag == null) {
        throw new IllegalArgumentException("OutputTag must not be null.");
    }

    output.collect(outputTag, new StreamRecord<>(value, element.getTimestamp()));
}
Example #24
Source File: RecordWriterOutput.java From flink with Apache License 2.0
@Override
public <X> void collect(OutputTag<X> outputTag, StreamRecord<X> record) {
    if (this.outputTag == null || !this.outputTag.equals(outputTag)) {
        // we are not responsible for emitting to the side-output specified by this
        // OutputTag.
        return;
    }

    pushToRecordWriter(record);
}
Example #25
Source File: CepProcessFunctionContextTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testCurrentProcessingTimeForTimedOutInEventTime() throws Exception {

    OutputTag<String> sideOutputTag = new OutputTag<String>("timedOut") {};

    try (
        OneInputStreamOperatorTestHarness<Event, String> harness = getCepTestHarness(
            createCepOperator(
                extractCurrentProcessingTimeAndNames(2, sideOutputTag),
                new NFATimingOutFactory(),
                EVENT_TIME))) {
        harness.open();

        // events out of order to test if internal sorting does not mess up the timestamps
        harness.processElement(event().withName("A").withTimestamp(5).asStreamRecord());
        harness.processElement(event().withName("B").withTimestamp(20).asStreamRecord());
        harness.processElement(event().withName("C").withTimestamp(3).asStreamRecord());

        harness.setProcessingTime(100);
        harness.processWatermark(22);

        assertOutput(harness.getOutput())
            .nextElementEquals("100:C:A")
            .watermarkEquals(22)
            .hasNoMoreElements();

        assertOutput(harness.getSideOutput(sideOutputTag))
            .nextElementEquals("100:A")
            .hasNoMoreElements();
    }
}
Example #26
Source File: WindowOperator.java From Flink-CEPplus with Apache License 2.0
/**
 * Creates a new {@code WindowOperator} based on the given policies and user functions.
 */
public WindowOperator(
        WindowAssigner<? super IN, W> windowAssigner,
        TypeSerializer<W> windowSerializer,
        KeySelector<IN, K> keySelector,
        TypeSerializer<K> keySerializer,
        StateDescriptor<? extends AppendingState<IN, ACC>, ?> windowStateDescriptor,
        InternalWindowFunction<ACC, OUT, K, W> windowFunction,
        Trigger<? super IN, ? super W> trigger,
        long allowedLateness,
        OutputTag<IN> lateDataOutputTag) {

    super(windowFunction);

    checkArgument(!(windowAssigner instanceof BaseAlignedWindowAssigner),
        "The " + windowAssigner.getClass().getSimpleName() + " cannot be used with a WindowOperator. " +
            "This assigner is only used with the AccumulatingProcessingTimeWindowOperator and " +
            "the AggregatingProcessingTimeWindowOperator");

    checkArgument(allowedLateness >= 0);

    checkArgument(windowStateDescriptor == null || windowStateDescriptor.isSerializerInitialized(),
        "window state serializer is not properly initialized");

    this.windowAssigner = checkNotNull(windowAssigner);
    this.windowSerializer = checkNotNull(windowSerializer);
    this.keySelector = checkNotNull(keySelector);
    this.keySerializer = checkNotNull(keySerializer);
    this.windowStateDescriptor = windowStateDescriptor;
    this.trigger = checkNotNull(trigger);
    this.allowedLateness = allowedLateness;
    this.lateDataOutputTag = lateDataOutputTag;

    setChainingStrategy(ChainingStrategy.ALWAYS);
}
Example #27
Source File: CepProcessFunctionContextTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testTimestampPassingForTimedOutInEventTime() throws Exception {

    OutputTag<String> timedOut = new OutputTag<String>("timedOut") {};

    try (
        OneInputStreamOperatorTestHarness<Event, String> harness = getCepTestHarness(
            createCepOperator(
                extractTimestampAndNames(2, timedOut),
                new NFATimingOutFactory(),
                EVENT_TIME))) {
        harness.open();

        // events out of order to test if internal sorting does not mess up the timestamps
        harness.processElement(event().withName("A").withTimestamp(5).asStreamRecord());
        harness.processElement(event().withName("C").withTimestamp(20).asStreamRecord());
        harness.processElement(event().withName("B").withTimestamp(3).asStreamRecord());

        harness.processWatermark(22);

        assertOutput(harness.getOutput())
            .nextElementEquals("5:B:A")
            .watermarkEquals(22)
            .hasNoMoreElements();

        assertOutput(harness.getSideOutput(timedOut))
            .nextElementEquals("15:A")
            .hasNoMoreElements();
    }
}