org.apache.flink.streaming.api.operators.ChainingStrategy Java Examples
The following examples show how to use org.apache.flink.streaming.api.operators.ChainingStrategy. Each example lists the source file and the open-source project it was taken from.
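ChainingStrategy controls whether an operator may be fused into the same task (and thread) as its upstream operator: ALWAYS chains eagerly whenever possible, HEAD starts a new chain (successors may still chain to it), and NEVER disables chaining for the operator. For orientation before the examples, here is a minimal sketch of a custom operator that declares its chaining preference in its constructor, the same pattern most of the examples below use. The PassThroughOperator class itself is hypothetical and only for illustration; the ChainingStrategy calls mirror the real examples.

// Hypothetical illustration of setting a ChainingStrategy on a custom operator.
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.ChainingStrategy;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;

public class PassThroughOperator<T>
        extends AbstractStreamOperator<T>
        implements OneInputStreamOperator<T, T> {

    public PassThroughOperator() {
        // ALWAYS: eagerly chain with the upstream operator when possible.
        // Other options are HEAD (start a new chain) and NEVER (never chain).
        setChainingStrategy(ChainingStrategy.ALWAYS);
    }

    @Override
    public void processElement(StreamRecord<T> element) throws Exception {
        // Forward the element unchanged.
        output.collect(element);
    }
}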
Example #1
Source File: StreamingJobGraphGenerator.java From Flink-CEPplus with Apache License 2.0
public static boolean isChainable(StreamEdge edge, StreamGraph streamGraph) {
    StreamNode upStreamVertex = streamGraph.getSourceVertex(edge);
    StreamNode downStreamVertex = streamGraph.getTargetVertex(edge);

    StreamOperator<?> headOperator = upStreamVertex.getOperator();
    StreamOperator<?> outOperator = downStreamVertex.getOperator();

    return downStreamVertex.getInEdges().size() == 1
            && outOperator != null
            && headOperator != null
            && upStreamVertex.isSameSlotSharingGroup(downStreamVertex)
            && outOperator.getChainingStrategy() == ChainingStrategy.ALWAYS
            && (headOperator.getChainingStrategy() == ChainingStrategy.HEAD ||
                headOperator.getChainingStrategy() == ChainingStrategy.ALWAYS)
            && (edge.getPartitioner() instanceof ForwardPartitioner)
            && upStreamVertex.getParallelism() == downStreamVertex.getParallelism()
            && streamGraph.isChainingEnabled();
}
Example #2
Source File: StreamGraphHasherV2.java From Flink-CEPplus with Apache License 2.0
private boolean isChainable(StreamEdge edge, boolean isChainingEnabled, StreamGraph streamGraph) {
    StreamNode upStreamVertex = streamGraph.getSourceVertex(edge);
    StreamNode downStreamVertex = streamGraph.getTargetVertex(edge);

    StreamOperator<?> headOperator = upStreamVertex.getOperator();
    StreamOperator<?> outOperator = downStreamVertex.getOperator();

    return downStreamVertex.getInEdges().size() == 1
            && outOperator != null
            && headOperator != null
            && upStreamVertex.isSameSlotSharingGroup(downStreamVertex)
            && outOperator.getChainingStrategy() == ChainingStrategy.ALWAYS
            && (headOperator.getChainingStrategy() == ChainingStrategy.HEAD ||
                headOperator.getChainingStrategy() == ChainingStrategy.ALWAYS)
            && (edge.getPartitioner() instanceof ForwardPartitioner)
            && upStreamVertex.getParallelism() == downStreamVertex.getParallelism()
            && isChainingEnabled;
}
Example #3
Source File: StreamingJobGraphGenerator.java From flink with Apache License 2.0
public static boolean isChainable(StreamEdge edge, StreamGraph streamGraph) {
    StreamNode upStreamVertex = streamGraph.getSourceVertex(edge);
    StreamNode downStreamVertex = streamGraph.getTargetVertex(edge);

    StreamOperatorFactory<?> headOperator = upStreamVertex.getOperatorFactory();
    StreamOperatorFactory<?> outOperator = downStreamVertex.getOperatorFactory();

    return downStreamVertex.getInEdges().size() == 1
            && outOperator != null
            && headOperator != null
            && upStreamVertex.isSameSlotSharingGroup(downStreamVertex)
            && outOperator.getChainingStrategy() == ChainingStrategy.ALWAYS
            && (headOperator.getChainingStrategy() == ChainingStrategy.HEAD ||
                headOperator.getChainingStrategy() == ChainingStrategy.ALWAYS)
            && (edge.getPartitioner() instanceof ForwardPartitioner)
            && edge.getShuffleMode() != ShuffleMode.BATCH
            && upStreamVertex.getParallelism() == downStreamVertex.getParallelism()
            && streamGraph.isChainingEnabled();
}
Example #4
Source File: AsyncWaitOperator.java From flink with Apache License 2.0
public AsyncWaitOperator(
        AsyncFunction<IN, OUT> asyncFunction,
        long timeout,
        int capacity,
        AsyncDataStream.OutputMode outputMode) {
    super(asyncFunction);

    // TODO this is a temporary fix for the problems described under FLINK-13063 at the cost of breaking chains for
    // AsyncOperators.
    setChainingStrategy(ChainingStrategy.HEAD);

    Preconditions.checkArgument(capacity > 0, "The number of concurrent async operation should be greater than 0.");
    this.capacity = capacity;

    this.outputMode = Preconditions.checkNotNull(outputMode, "outputMode");

    this.timeout = timeout;
}
Example #5
Source File: StreamGraphHasherV2.java From flink with Apache License 2.0
private boolean isChainable(StreamEdge edge, boolean isChainingEnabled, StreamGraph streamGraph) {
    StreamNode upStreamVertex = streamGraph.getSourceVertex(edge);
    StreamNode downStreamVertex = streamGraph.getTargetVertex(edge);

    StreamOperatorFactory<?> headOperator = upStreamVertex.getOperatorFactory();
    StreamOperatorFactory<?> outOperator = downStreamVertex.getOperatorFactory();

    return downStreamVertex.getInEdges().size() == 1
            && outOperator != null
            && headOperator != null
            && upStreamVertex.isSameSlotSharingGroup(downStreamVertex)
            && outOperator.getChainingStrategy() == ChainingStrategy.ALWAYS
            && (headOperator.getChainingStrategy() == ChainingStrategy.HEAD ||
                headOperator.getChainingStrategy() == ChainingStrategy.ALWAYS)
            && (edge.getPartitioner() instanceof ForwardPartitioner)
            && upStreamVertex.getParallelism() == downStreamVertex.getParallelism()
            && isChainingEnabled;
}
Example #6
Source File: AbstractMapBundleOperator.java From flink with Apache License 2.0
AbstractMapBundleOperator(
        MapBundleFunction<K, V, IN, OUT> function,
        BundleTrigger<IN> bundleTrigger) {
    chainingStrategy = ChainingStrategy.ALWAYS;
    this.function = checkNotNull(function, "function is null");
    this.bundleTrigger = checkNotNull(bundleTrigger, "bundleTrigger is null");
}
Example #7
Source File: FeedbackUnionOperator.java From flink-statefun with Apache License 2.0
FeedbackUnionOperator(
        FeedbackKey<T> feedbackKey,
        SerializableFunction<T, OptionalLong> isBarrierMessage,
        SerializableFunction<T, ?> keySelector,
        long totalMemoryUsedForFeedbackCheckpointing,
        TypeSerializer<T> elementSerializer,
        MailboxExecutor mailboxExecutor) {
    this.feedbackKey = Objects.requireNonNull(feedbackKey);
    this.isBarrierMessage = Objects.requireNonNull(isBarrierMessage);
    this.keySelector = Objects.requireNonNull(keySelector);
    this.totalMemoryUsedForFeedbackCheckpointing = totalMemoryUsedForFeedbackCheckpointing;
    this.elementSerializer = Objects.requireNonNull(elementSerializer);
    this.mailboxExecutor = Objects.requireNonNull(mailboxExecutor);
    this.chainingStrategy = ChainingStrategy.ALWAYS;
}
Example #8
Source File: FunctionGroupOperator.java From stateful-functions with Apache License 2.0
FunctionGroupOperator(
        Map<EgressIdentifier<?>, OutputTag<Object>> sideOutputs,
        MailboxExecutor mailboxExecutor,
        ChainingStrategy chainingStrategy) {
    this.sideOutputs = Objects.requireNonNull(sideOutputs);
    this.mailboxExecutor = Objects.requireNonNull(mailboxExecutor);
    this.chainingStrategy = chainingStrategy;
}
Example #9
Source File: WindowOperator.java From flink with Apache License 2.0
/**
 * Creates a new {@code WindowOperator} based on the given policies and user functions.
 */
public WindowOperator(
        WindowAssigner<? super IN, W> windowAssigner,
        TypeSerializer<W> windowSerializer,
        KeySelector<IN, K> keySelector,
        TypeSerializer<K> keySerializer,
        StateDescriptor<? extends AppendingState<IN, ACC>, ?> windowStateDescriptor,
        InternalWindowFunction<ACC, OUT, K, W> windowFunction,
        Trigger<? super IN, ? super W> trigger,
        long allowedLateness,
        OutputTag<IN> lateDataOutputTag) {

    super(windowFunction);

    checkArgument(!(windowAssigner instanceof BaseAlignedWindowAssigner),
        "The " + windowAssigner.getClass().getSimpleName() + " cannot be used with a WindowOperator. " +
            "This assigner is only used with the AccumulatingProcessingTimeWindowOperator and " +
            "the AggregatingProcessingTimeWindowOperator");

    checkArgument(allowedLateness >= 0);

    checkArgument(windowStateDescriptor == null || windowStateDescriptor.isSerializerInitialized(),
            "window state serializer is not properly initialized");

    this.windowAssigner = checkNotNull(windowAssigner);
    this.windowSerializer = checkNotNull(windowSerializer);
    this.keySelector = checkNotNull(keySelector);
    this.keySerializer = checkNotNull(keySerializer);
    this.windowStateDescriptor = windowStateDescriptor;
    this.trigger = checkNotNull(trigger);
    this.allowedLateness = allowedLateness;
    this.lateDataOutputTag = lateDataOutputTag;

    setChainingStrategy(ChainingStrategy.ALWAYS);
}
Example #10
Source File: FeedbackUnionOperator.java From stateful-functions with Apache License 2.0
FeedbackUnionOperator(
        FeedbackKey<T> feedbackKey,
        SerializablePredicate<T> isBarrierMessage,
        SerializableFunction<T, ?> keySelector,
        long totalMemoryUsedForFeedbackCheckpointing,
        TypeSerializer<T> elementSerializer,
        MailboxExecutor mailboxExecutor) {
    this.feedbackKey = Objects.requireNonNull(feedbackKey);
    this.isBarrierMessage = Objects.requireNonNull(isBarrierMessage);
    this.keySelector = Objects.requireNonNull(keySelector);
    this.totalMemoryUsedForFeedbackCheckpointing = totalMemoryUsedForFeedbackCheckpointing;
    this.elementSerializer = Objects.requireNonNull(elementSerializer);
    this.mailboxExecutor = Objects.requireNonNull(mailboxExecutor);
    this.chainingStrategy = ChainingStrategy.ALWAYS;
}
Example #11
Source File: StreamTaskTimerITCase.java From Flink-CEPplus with Apache License 2.0
/**
 * Note: this test fails if we don't check for exceptions in the source contexts and do not
 * synchronize in the source contexts.
 */
@Test
public void testOperatorChainedToSource() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(timeCharacteristic);
    env.setParallelism(1);

    DataStream<String> source = env.addSource(new InfiniteTestSource());

    source.transform("Custom Operator", BasicTypeInfo.STRING_TYPE_INFO, new TimerOperator(ChainingStrategy.ALWAYS));

    boolean testSuccess = false;
    try {
        env.execute("Timer test");
    } catch (JobExecutionException e) {
        if (e.getCause() instanceof TimerException) {
            TimerException te = (TimerException) e.getCause();
            if (te.getCause() instanceof RuntimeException) {
                RuntimeException re = (RuntimeException) te.getCause();
                if (re.getMessage().equals("TEST SUCCESS")) {
                    testSuccess = true;
                } else {
                    throw e;
                }
            } else {
                throw e;
            }
        } else {
            throw e;
        }
    }
    Assert.assertTrue(testSuccess);
}
Example #12
Source File: AsyncWaitOperatorTest.java From flink with Apache License 2.0
/**
 * This helper function is needed to check that the temporary fix for FLINK-13063 can be backwards compatible with
 * the old chaining behavior by setting the ChainingStrategy manually. TODO: remove after a proper fix for
 * FLINK-13063 is in place that allows chaining.
 */
private <IN, OUT> SingleOutputStreamOperator<OUT> addAsyncOperatorLegacyChained(
        DataStream<IN> in,
        AsyncFunction<IN, OUT> func,
        long timeout,
        int bufSize,
        AsyncDataStream.OutputMode mode) {

    TypeInformation<OUT> outTypeInfo = TypeExtractor.getUnaryOperatorReturnType(
        func,
        AsyncFunction.class,
        0,
        1,
        new int[]{1, 0},
        in.getType(),
        Utils.getCallLocationName(),
        true);

    // create transform
    AsyncWaitOperator<IN, OUT> operator = new AsyncWaitOperator<>(
        in.getExecutionEnvironment().clean(func),
        timeout,
        bufSize,
        mode);

    operator.setChainingStrategy(ChainingStrategy.ALWAYS);

    return in.transform("async wait operator", outTypeInfo, operator);
}
Example #13
Source File: StreamTaskTimerITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testTwoInputOperatorWithoutChaining() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(timeCharacteristic);
    env.setParallelism(1);

    DataStream<String> source = env.addSource(new InfiniteTestSource());

    source.connect(source).transform(
        "Custom Operator",
        BasicTypeInfo.STRING_TYPE_INFO,
        new TwoInputTimerOperator(ChainingStrategy.NEVER));

    boolean testSuccess = false;
    try {
        env.execute("Timer test");
    } catch (JobExecutionException e) {
        if (e.getCause() instanceof TimerException) {
            TimerException te = (TimerException) e.getCause();
            if (te.getCause() instanceof RuntimeException) {
                RuntimeException re = (RuntimeException) te.getCause();
                if (re.getMessage().equals("TEST SUCCESS")) {
                    testSuccess = true;
                } else {
                    throw e;
                }
            } else {
                throw e;
            }
        } else {
            throw e;
        }
    }
    Assert.assertTrue(testSuccess);
}
Example #14
Source File: StreamTaskTimerITCase.java From Flink-CEPplus with Apache License 2.0
/**
 * Note: this test fails if we don't check for exceptions in the source contexts and do not
 * synchronize in the source contexts.
 */
@Test
public void testOneInputOperatorWithoutChaining() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(timeCharacteristic);
    env.setParallelism(1);

    DataStream<String> source = env.addSource(new InfiniteTestSource());

    source.transform("Custom Operator", BasicTypeInfo.STRING_TYPE_INFO, new TimerOperator(ChainingStrategy.NEVER));

    boolean testSuccess = false;
    try {
        env.execute("Timer test");
    } catch (JobExecutionException e) {
        if (e.getCause() instanceof TimerException) {
            TimerException te = (TimerException) e.getCause();
            if (te.getCause() instanceof RuntimeException) {
                RuntimeException re = (RuntimeException) te.getCause();
                if (re.getMessage().equals("TEST SUCCESS")) {
                    testSuccess = true;
                } else {
                    throw e;
                }
            } else {
                throw e;
            }
        } else {
            throw e;
        }
    }
    Assert.assertTrue(testSuccess);
}
Example #15
Source File: AsyncWaitOperator.java From Flink-CEPplus with Apache License 2.0
public AsyncWaitOperator(
        AsyncFunction<IN, OUT> asyncFunction,
        long timeout,
        int capacity,
        AsyncDataStream.OutputMode outputMode) {
    super(asyncFunction);
    chainingStrategy = ChainingStrategy.ALWAYS;

    Preconditions.checkArgument(capacity > 0, "The number of concurrent async operation should be greater than 0.");
    this.capacity = capacity;

    this.outputMode = Preconditions.checkNotNull(outputMode, "outputMode");

    this.timeout = timeout;
}
Example #16
Source File: SingleOutputStreamOperator.java From flink with Apache License 2.0
/**
 * Sets the {@link ChainingStrategy} for the given operator affecting the
 * way operators will possibly be co-located on the same thread for
 * increased performance.
 *
 * @param strategy The selected {@link ChainingStrategy}
 * @return The operator with the modified chaining strategy
 */
@PublicEvolving
private SingleOutputStreamOperator<T> setChainingStrategy(ChainingStrategy strategy) {
    if (transformation instanceof PhysicalTransformation) {
        ((PhysicalTransformation<T>) transformation).setChainingStrategy(strategy);
    } else {
        throw new UnsupportedOperationException("Cannot set chaining strategy on " + transformation);
    }
    return this;
}
Example #17
Source File: StreamTaskTimerITCase.java From flink with Apache License 2.0
@Test
public void testTwoInputOperatorWithoutChaining() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(timeCharacteristic);
    env.setParallelism(1);

    DataStream<String> source = env.addSource(new InfiniteTestSource());

    source.connect(source).transform(
        "Custom Operator",
        BasicTypeInfo.STRING_TYPE_INFO,
        new TwoInputTimerOperator(ChainingStrategy.NEVER));

    boolean testSuccess = false;
    try {
        env.execute("Timer test");
    } catch (JobExecutionException e) {
        if (e.getCause() instanceof TimerException) {
            TimerException te = (TimerException) e.getCause();
            if (te.getCause() instanceof RuntimeException) {
                RuntimeException re = (RuntimeException) te.getCause();
                if (re.getMessage().equals("TEST SUCCESS")) {
                    testSuccess = true;
                } else {
                    throw e;
                }
            } else {
                throw e;
            }
        } else {
            throw e;
        }
    }
    Assert.assertTrue(testSuccess);
}
Example #18
Source File: StreamTaskTimerITCase.java From flink with Apache License 2.0
/**
 * Note: this test fails if we don't check for exceptions in the source contexts and do not
 * synchronize in the source contexts.
 */
@Test
public void testOneInputOperatorWithoutChaining() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(timeCharacteristic);
    env.setParallelism(1);

    DataStream<String> source = env.addSource(new InfiniteTestSource());

    source.transform("Custom Operator", BasicTypeInfo.STRING_TYPE_INFO, new TimerOperator(ChainingStrategy.NEVER));

    boolean testSuccess = false;
    try {
        env.execute("Timer test");
    } catch (JobExecutionException e) {
        if (e.getCause() instanceof TimerException) {
            TimerException te = (TimerException) e.getCause();
            if (te.getCause() instanceof RuntimeException) {
                RuntimeException re = (RuntimeException) te.getCause();
                if (re.getMessage().equals("TEST SUCCESS")) {
                    testSuccess = true;
                } else {
                    throw e;
                }
            } else {
                throw e;
            }
        } else {
            throw e;
        }
    }
    Assert.assertTrue(testSuccess);
}
Example #19
Source File: StreamTaskTimerITCase.java From flink with Apache License 2.0
/**
 * Note: this test fails if we don't check for exceptions in the source contexts and do not
 * synchronize in the source contexts.
 */
@Test
public void testOperatorChainedToSource() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(timeCharacteristic);
    env.setParallelism(1);

    DataStream<String> source = env.addSource(new InfiniteTestSource());

    source.transform("Custom Operator", BasicTypeInfo.STRING_TYPE_INFO, new TimerOperator(ChainingStrategy.ALWAYS));

    boolean testSuccess = false;
    try {
        env.execute("Timer test");
    } catch (JobExecutionException e) {
        if (e.getCause() instanceof TimerException) {
            TimerException te = (TimerException) e.getCause();
            if (te.getCause() instanceof RuntimeException) {
                RuntimeException re = (RuntimeException) te.getCause();
                if (re.getMessage().equals("TEST SUCCESS")) {
                    testSuccess = true;
                } else {
                    throw e;
                }
            } else {
                throw e;
            }
        } else {
            throw e;
        }
    }
    Assert.assertTrue(testSuccess);
}
Example #20
Source File: WindowOperator.java From Flink-CEPplus with Apache License 2.0
/**
 * Creates a new {@code WindowOperator} based on the given policies and user functions.
 */
public WindowOperator(
        WindowAssigner<? super IN, W> windowAssigner,
        TypeSerializer<W> windowSerializer,
        KeySelector<IN, K> keySelector,
        TypeSerializer<K> keySerializer,
        StateDescriptor<? extends AppendingState<IN, ACC>, ?> windowStateDescriptor,
        InternalWindowFunction<ACC, OUT, K, W> windowFunction,
        Trigger<? super IN, ? super W> trigger,
        long allowedLateness,
        OutputTag<IN> lateDataOutputTag) {

    super(windowFunction);

    checkArgument(!(windowAssigner instanceof BaseAlignedWindowAssigner),
        "The " + windowAssigner.getClass().getSimpleName() + " cannot be used with a WindowOperator. " +
            "This assigner is only used with the AccumulatingProcessingTimeWindowOperator and " +
            "the AggregatingProcessingTimeWindowOperator");

    checkArgument(allowedLateness >= 0);

    checkArgument(windowStateDescriptor == null || windowStateDescriptor.isSerializerInitialized(),
            "window state serializer is not properly initialized");

    this.windowAssigner = checkNotNull(windowAssigner);
    this.windowSerializer = checkNotNull(windowSerializer);
    this.keySelector = checkNotNull(keySelector);
    this.keySerializer = checkNotNull(keySerializer);
    this.windowStateDescriptor = windowStateDescriptor;
    this.trigger = checkNotNull(trigger);
    this.allowedLateness = allowedLateness;
    this.lateDataOutputTag = lateDataOutputTag;

    setChainingStrategy(ChainingStrategy.ALWAYS);
}
Example #21
Source File: WatermarkAssignerOperator.java From flink with Apache License 2.0
/**
 * Create a watermark assigner operator.
 *
 * @param rowtimeFieldIndex the field index to extract event timestamp
 * @param watermarkDelay the delay by which watermarks are behind the maximum observed timestamp.
 * @param idleTimeout (idleness checking timeout)
 */
public WatermarkAssignerOperator(int rowtimeFieldIndex, long watermarkDelay, long idleTimeout) {
    this.rowtimeFieldIndex = rowtimeFieldIndex;
    this.watermarkDelay = watermarkDelay;
    this.idleTimeout = idleTimeout;

    this.chainingStrategy = ChainingStrategy.ALWAYS;
}
Example #22
Source File: MiniBatchedWatermarkAssignerOperator.java From flink with Apache License 2.0
public MiniBatchedWatermarkAssignerOperator(
        int rowtimeFieldIndex,
        long watermarkDelay,
        long tzOffset,
        long idleTimeout,
        long watermarkInterval) {
    this.rowtimeFieldIndex = rowtimeFieldIndex;
    this.watermarkDelay = watermarkDelay;
    this.tzOffset = tzOffset;
    this.chainingStrategy = ChainingStrategy.ALWAYS;
    this.watermarkInterval = watermarkInterval;
    this.idleTimeout = idleTimeout;
}
Example #23
Source File: WindowOperator.java From flink with Apache License 2.0
WindowOperator(
        NamespaceAggsHandleFunctionBase<W> windowAggregator,
        WindowAssigner<W> windowAssigner,
        Trigger<W> trigger,
        TypeSerializer<W> windowSerializer,
        LogicalType[] inputFieldTypes,
        LogicalType[] accumulatorTypes,
        LogicalType[] aggResultTypes,
        LogicalType[] windowPropertyTypes,
        int rowtimeIndex,
        boolean sendRetraction,
        long allowedLateness) {
    checkArgument(allowedLateness >= 0);
    this.windowAggregator = checkNotNull(windowAggregator);
    this.windowAssigner = checkNotNull(windowAssigner);
    this.trigger = checkNotNull(trigger);
    this.windowSerializer = checkNotNull(windowSerializer);
    this.inputFieldTypes = checkNotNull(inputFieldTypes);
    this.accumulatorTypes = checkNotNull(accumulatorTypes);
    this.aggResultTypes = checkNotNull(aggResultTypes);
    this.windowPropertyTypes = checkNotNull(windowPropertyTypes);
    this.allowedLateness = allowedLateness;
    this.sendRetraction = sendRetraction;

    // rowtime index should >= 0 when in event time mode
    checkArgument(!windowAssigner.isEventTime() || rowtimeIndex >= 0);
    this.rowtimeIndex = rowtimeIndex;

    setChainingStrategy(ChainingStrategy.ALWAYS);
}
Example #24
Source File: WindowOperator.java From flink with Apache License 2.0
WindowOperator(
        WindowAssigner<W> windowAssigner,
        Trigger<W> trigger,
        TypeSerializer<W> windowSerializer,
        LogicalType[] inputFieldTypes,
        LogicalType[] accumulatorTypes,
        LogicalType[] aggResultTypes,
        LogicalType[] windowPropertyTypes,
        int rowtimeIndex,
        boolean sendRetraction,
        long allowedLateness) {
    checkArgument(allowedLateness >= 0);
    this.windowAssigner = checkNotNull(windowAssigner);
    this.trigger = checkNotNull(trigger);
    this.windowSerializer = checkNotNull(windowSerializer);
    this.inputFieldTypes = checkNotNull(inputFieldTypes);
    this.accumulatorTypes = checkNotNull(accumulatorTypes);
    this.aggResultTypes = checkNotNull(aggResultTypes);
    this.windowPropertyTypes = checkNotNull(windowPropertyTypes);
    this.allowedLateness = allowedLateness;
    this.sendRetraction = sendRetraction;

    // rowtime index should >= 0 when in event time mode
    checkArgument(!windowAssigner.isEventTime() || rowtimeIndex >= 0);
    this.rowtimeIndex = rowtimeIndex;

    setChainingStrategy(ChainingStrategy.ALWAYS);
}
Example #25
Source File: FunctionGroupOperator.java From flink-statefun with Apache License 2.0
FunctionGroupOperator(
        Map<EgressIdentifier<?>, OutputTag<Object>> sideOutputs,
        StatefulFunctionsConfig configuration,
        MailboxExecutor mailboxExecutor,
        ChainingStrategy chainingStrategy) {
    this.sideOutputs = Objects.requireNonNull(sideOutputs);
    this.configuration = Objects.requireNonNull(configuration);
    this.mailboxExecutor = Objects.requireNonNull(mailboxExecutor);
    this.chainingStrategy = chainingStrategy;
}
Example #26
Source File: EventTimeOrderingOperator.java From flink-connectors with Apache License 2.0
/**
 * Creates an event time-based reordering operator.
 */
public EventTimeOrderingOperator() {
    chainingStrategy = ChainingStrategy.ALWAYS;
}
Example #27
Source File: TimestampITCase.java From flink with Apache License 2.0
public CustomOperator(boolean timestampsEnabled) {
    setChainingStrategy(ChainingStrategy.ALWAYS);
    this.timestampsEnabled = timestampsEnabled;
}
Example #28
Source File: ContinuousFileReaderOperatorITCase.java From flink with Apache License 2.0
TestBoundedOneInputStreamOperator(int expectedProcessedElementCount) {
    // this operator must be chained with ContinuousFileReaderOperator
    // that way, this end input would be triggered after ContinuousFileReaderOperator
    chainingStrategy = ChainingStrategy.ALWAYS;
    this.expectedProcessedElementCount = expectedProcessedElementCount;
}
Example #29
Source File: IngressRouterOperator.java From flink-statefun with Apache License 2.0
IngressRouterOperator(StatefulFunctionsConfig configuration, IngressIdentifier<T> id) {
    this.configuration = configuration;
    this.id = Objects.requireNonNull(id);
    this.chainingStrategy = ChainingStrategy.ALWAYS;
}
Example #30
Source File: StreamTaskSelectiveReadingITCase.java From flink with Apache License 2.0
@Test
public void testSequentialReading() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    DataStream<String> source0 = env.addSource(
        new TestStringSource("Source0", new String[] {
            "Hello-1", "Hello-2", "Hello-3", "Hello-4", "Hello-5", "Hello-6"
        }));
    DataStream<Integer> source1 = env.addSource(
        new TestIntegerSource("Source1", new Integer[] {
            1, 2, 3
        }))
        .setParallelism(2);
    TestListResultSink<String> resultSink = new TestListResultSink<>();

    TwoInputStreamOperator<String, Integer, String> twoInputStreamOperator = new SequentialReadingStreamOperator("Operator0");
    twoInputStreamOperator.setChainingStrategy(ChainingStrategy.NEVER);

    source0.connect(source1)
        .transform(
            "Custom Operator",
            BasicTypeInfo.STRING_TYPE_INFO,
            twoInputStreamOperator
        )
        .addSink(resultSink);

    env.execute("Selective reading test");

    List<String> result = resultSink.getResult();

    List<String> expected1 = Arrays.asList(
        "[Operator0-1]: [Source0-0]: Hello-1",
        "[Operator0-1]: [Source0-0]: Hello-2",
        "[Operator0-1]: [Source0-0]: Hello-3",
        "[Operator0-1]: [Source0-0]: Hello-4",
        "[Operator0-1]: [Source0-0]: Hello-5",
        "[Operator0-1]: [Source0-0]: Hello-6"
    );

    List<String> expected2 = Arrays.asList(
        "[Operator0-2]: 1",
        "[Operator0-2]: 2",
        "[Operator0-2]: 3",
        "[Operator0-2]: 2",
        "[Operator0-2]: 4",
        "[Operator0-2]: 6"
    );
    Collections.sort(expected2);

    assertEquals(expected1.size() + expected2.size(), result.size());
    assertEquals(expected1, result.subList(0, expected1.size()));

    List<String> result2 = result.subList(expected1.size(), expected1.size() + expected2.size());
    Collections.sort(result2);
    assertEquals(expected2, result2);
}