org.apache.flink.streaming.runtime.tasks.OneInputStreamTask Java Examples
The following examples show how to use
org.apache.flink.streaming.runtime.tasks.OneInputStreamTask.
The examples are taken from open-source projects; the originating project and source file are noted above each example.
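Most of the examples below follow the same pattern: wrap a OneInputStreamTask in the OneInputStreamTaskTestHarness test utility, configure one or more operators, start the task, feed it StreamRecords, and inspect the emitted output. The following minimal sketch illustrates that pattern. It is an illustration only: the harness constructor and helper methods are modeled on the examples below, the class name OneInputStreamTaskUsageSketch and the identity map function are hypothetical, and exact signatures may differ between Flink versions.

// Minimal sketch (assumed API, modeled on the examples below; verify against your Flink version).
import java.util.List;
import java.util.stream.Collectors;

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.operators.StreamMap;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.OneInputStreamTask;
import org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness;

public class OneInputStreamTaskUsageSketch {

    public static void main(String[] args) throws Exception {
        // 1. Create a harness that runs a OneInputStreamTask with String input and output.
        final OneInputStreamTaskTestHarness<String, String> testHarness =
            new OneInputStreamTaskTestHarness<>(
                OneInputStreamTask::new,
                BasicTypeInfo.STRING_TYPE_INFO,
                BasicTypeInfo.STRING_TYPE_INFO);

        // 2. Configure a single chained operator (here a simple identity map).
        testHarness.setupOutputForSingletonOperatorChain();
        StreamConfig streamConfig = testHarness.getStreamConfig();
        streamConfig.setStreamOperator(new StreamMap<String, String>(value -> value));
        streamConfig.setOperatorID(new OperatorID());

        // 3. Start the task and wait until it is running.
        testHarness.invoke();
        testHarness.waitForTaskRunning();

        // 4. Feed a record, signal end of input, and wait for completion.
        testHarness.processElement(new StreamRecord<>("hello"));
        testHarness.endInput();
        testHarness.waitForTaskCompletion();

        // 5. Inspect the emitted records.
        List<String> output = testHarness.getOutput().stream()
            .map(element -> ((StreamRecord<String>) element).getValue())
            .collect(Collectors.toList());
        System.out.println(output); // expected: [hello]
    }
}

Note that OneInputStreamTaskTestHarness is a test utility from the flink-streaming-java test artifacts, so this pattern is intended for operator and task tests rather than production code.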
Example #1
Source File: StreamTaskOperatorTimerTest.java From flink with Apache License 2.0
@Test
public void testOperatorYieldExecutesSelectedTimers() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOperatorChain(new OperatorID(), new TestOperatorFactory())
        .chain(new OperatorID(), new TestOperatorFactory(), StringSerializer.INSTANCE)
        .finish();

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    final String trigger = TRIGGER_PREFIX + 42;
    testHarness.processElement(new StreamRecord<>(trigger));

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    List<String> events = new ArrayList<>();
    testHarness.getOutput().forEach(element -> events.add(((StreamRecord<String>) element).getValue()));

    assertThat(events, is(Arrays.asList(trigger, RESULT_PREFIX + "1:0", RESULT_PREFIX + "0:0")));
}
Example #2
Source File: StreamTaskTimerTest.java From flink with Apache License 2.0
private StreamTaskTestHarness<?> startTestHarness() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    streamConfig.setChainIndex(0);
    streamConfig.setStreamOperator(new StreamMap<String, String>(new DummyMapFunction<>()));

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    return testHarness;
}
Example #3
Source File: MailboxOperatorTest.java From flink with Apache License 2.0
@Test
public void testAvoidTaskStarvation() throws Exception {
    final OneInputStreamTaskTestHarness<Integer, Integer> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.INT_TYPE_INFO);

    final int maxProcessingElements = 3;

    testHarness.setupOperatorChain(new OperatorID(), new ReplicatingMailOperatorFactory(maxProcessingElements))
        .chain(new OperatorID(), new ReplicatingMailOperatorFactory(maxProcessingElements), IntSerializer.INSTANCE)
        .finish();

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    for (int i = 0; i < maxProcessingElements; i++) {
        testHarness.processElement(new StreamRecord<>(0));
    }
    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    // with each input two mails should be processed, one of each operator in the chain
    List<Integer> expected = new ArrayList<>();
    for (int i = 0; i < maxProcessingElements; i++) {
        expected.add(i * 2);
    }

    List<Integer> numMailsProcessed = testHarness.getOutput().stream()
        .map(element -> ((StreamRecord<Integer>) element).getValue())
        .collect(Collectors.toList());
    assertThat(numMailsProcessed, is(expected));
}
Example #4
Source File: StreamGraph.java From Flink-CEPplus with Apache License 2.0
public <IN, OUT> void addOperator(
        Integer vertexID,
        String slotSharingGroup,
        @Nullable String coLocationGroup,
        StreamOperator<OUT> operatorObject,
        TypeInformation<IN> inTypeInfo,
        TypeInformation<OUT> outTypeInfo,
        String operatorName) {

    if (operatorObject instanceof StoppableStreamSource) {
        addNode(vertexID, slotSharingGroup, coLocationGroup, StoppableSourceStreamTask.class, operatorObject, operatorName);
    } else if (operatorObject instanceof StreamSource) {
        addNode(vertexID, slotSharingGroup, coLocationGroup, SourceStreamTask.class, operatorObject, operatorName);
    } else {
        addNode(vertexID, slotSharingGroup, coLocationGroup, OneInputStreamTask.class, operatorObject, operatorName);
    }

    TypeSerializer<IN> inSerializer = inTypeInfo != null && !(inTypeInfo instanceof MissingTypeInfo)
        ? inTypeInfo.createSerializer(executionConfig)
        : null;

    TypeSerializer<OUT> outSerializer = outTypeInfo != null && !(outTypeInfo instanceof MissingTypeInfo)
        ? outTypeInfo.createSerializer(executionConfig)
        : null;

    setSerializers(vertexID, inSerializer, null, outSerializer);

    if (operatorObject instanceof OutputTypeConfigurable && outTypeInfo != null) {
        @SuppressWarnings("unchecked")
        OutputTypeConfigurable<OUT> outputTypeConfigurable = (OutputTypeConfigurable<OUT>) operatorObject;
        // sets the output type, which must be known at StreamGraph creation time
        outputTypeConfigurable.setOutputType(outTypeInfo, executionConfig);
    }

    if (operatorObject instanceof InputTypeConfigurable) {
        InputTypeConfigurable inputTypeConfigurable = (InputTypeConfigurable) operatorObject;
        inputTypeConfigurable.setInputType(inTypeInfo, executionConfig);
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Vertex: {}", vertexID);
    }
}
Example #5
Source File: StreamGraph.java From flink with Apache License 2.0
public <IN, OUT> void addOperator(
        Integer vertexID,
        @Nullable String slotSharingGroup,
        @Nullable String coLocationGroup,
        StreamOperatorFactory<OUT> operatorFactory,
        TypeInformation<IN> inTypeInfo,
        TypeInformation<OUT> outTypeInfo,
        String operatorName) {

    Class<? extends AbstractInvokable> invokableClass =
        operatorFactory.isStreamSource() ? SourceStreamTask.class : OneInputStreamTask.class;
    addOperator(vertexID, slotSharingGroup, coLocationGroup, operatorFactory, inTypeInfo,
        outTypeInfo, operatorName, invokableClass);
}
Example #6
Source File: StreamGraph.java From flink with Apache License 2.0
public <IN, OUT> void addOperator(
        Integer vertexID,
        @Nullable String slotSharingGroup,
        @Nullable String coLocationGroup,
        StreamOperatorFactory<OUT> operatorFactory,
        TypeInformation<IN> inTypeInfo,
        TypeInformation<OUT> outTypeInfo,
        String operatorName) {

    if (operatorFactory.isStreamSource()) {
        addNode(vertexID, slotSharingGroup, coLocationGroup, SourceStreamTask.class, operatorFactory, operatorName);
    } else {
        addNode(vertexID, slotSharingGroup, coLocationGroup, OneInputStreamTask.class, operatorFactory, operatorName);
    }

    TypeSerializer<IN> inSerializer = inTypeInfo != null && !(inTypeInfo instanceof MissingTypeInfo)
        ? inTypeInfo.createSerializer(executionConfig)
        : null;

    TypeSerializer<OUT> outSerializer = outTypeInfo != null && !(outTypeInfo instanceof MissingTypeInfo)
        ? outTypeInfo.createSerializer(executionConfig)
        : null;

    setSerializers(vertexID, inSerializer, null, outSerializer);

    if (operatorFactory.isOutputTypeConfigurable() && outTypeInfo != null) {
        // sets the output type, which must be known at StreamGraph creation time
        operatorFactory.setOutputType(outTypeInfo, executionConfig);
    }

    if (operatorFactory.isInputTypeConfigurable()) {
        operatorFactory.setInputType(inTypeInfo, executionConfig);
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Vertex: {}", vertexID);
    }
}
Example #7
Source File: StreamTaskTimerTest.java From flink with Apache License 2.0
@Test
public void testOpenCloseAndTimestamps() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    StreamMap<String, String> mapOperator = new StreamMap<>(new DummyMapFunction<String>());
    streamConfig.setStreamOperator(mapOperator);
    streamConfig.setOperatorID(new OperatorID());

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    final OneInputStreamTask<String, String> mapTask = testHarness.getTask();

    // first one spawns thread
    mapTask.getProcessingTimeService().registerTimer(System.currentTimeMillis(), new ProcessingTimeCallback() {
        @Override
        public void onProcessingTime(long timestamp) {
        }
    });

    assertEquals(1, StreamTask.TRIGGER_THREAD_GROUP.activeCount());

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    // thread needs to die in time
    long deadline = System.currentTimeMillis() + 4000;
    while (StreamTask.TRIGGER_THREAD_GROUP.activeCount() > 0 && System.currentTimeMillis() < deadline) {
        Thread.sleep(10);
    }

    assertEquals("Trigger timer thread did not properly shut down",
        0, StreamTask.TRIGGER_THREAD_GROUP.activeCount());
}
Example #8
Source File: AsyncWaitOperatorTest.java From flink with Apache License 2.0
/**
 * Tests that the AsyncWaitOperator works together with chaining.
 */
@Test
public void testOperatorChainWithProcessingTime() throws Exception {
    JobVertex chainedVertex = createChainedVertex(new MyAsyncFunction(), new MyAsyncFunction());

    final OneInputStreamTaskTestHarness<Integer, Integer> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        1, 1,
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.INT_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    testHarness.taskConfig = chainedVertex.getConfiguration();

    final StreamConfig streamConfig = testHarness.getStreamConfig();
    final StreamConfig operatorChainStreamConfig = new StreamConfig(chainedVertex.getConfiguration());
    streamConfig.setStreamOperatorFactory(
        operatorChainStreamConfig.getStreamOperatorFactory(AsyncWaitOperatorTest.class.getClassLoader()));

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    long initialTimestamp = 0L;

    testHarness.processElement(new StreamRecord<>(5, initialTimestamp));
    testHarness.processElement(new StreamRecord<>(6, initialTimestamp + 1L));
    testHarness.processElement(new StreamRecord<>(7, initialTimestamp + 2L));
    testHarness.processElement(new StreamRecord<>(8, initialTimestamp + 3L));
    testHarness.processElement(new StreamRecord<>(9, initialTimestamp + 4L));

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    List<Object> expectedOutput = new LinkedList<>();
    expectedOutput.add(new StreamRecord<>(22, initialTimestamp));
    expectedOutput.add(new StreamRecord<>(26, initialTimestamp + 1L));
    expectedOutput.add(new StreamRecord<>(30, initialTimestamp + 2L));
    expectedOutput.add(new StreamRecord<>(34, initialTimestamp + 3L));
    expectedOutput.add(new StreamRecord<>(38, initialTimestamp + 4L));

    TestHarnessUtil.assertOutputEqualsSorted(
        "Test for chained operator with AsyncWaitOperator failed",
        expectedOutput,
        testHarness.getOutput(),
        new StreamRecordComparator());
}
Example #9
Source File: StatefulOperatorChainedTaskTest.java From flink with Apache License 2.0
private void triggerCheckpoint(
        OneInputStreamTaskTestHarness<String, String> testHarness,
        OneInputStreamTask<String, String> streamTask) throws Exception {

    long checkpointId = 1L;
    CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointId, 1L);

    testHarness.getTaskStateManager().setWaitForReportLatch(new OneShotLatch());

    while (!streamTask.triggerCheckpointAsync(checkpointMetaData, CheckpointOptions.forCheckpointWithDefaultLocation(), false).get()) {}

    testHarness.getTaskStateManager().getWaitForReportLatch().await();
    long reportedCheckpointId = testHarness.getTaskStateManager().getReportedCheckpointId();

    assertEquals(checkpointId, reportedCheckpointId);
}
Example #10
Source File: StatefulOperatorChainedTaskTest.java From flink with Apache License 2.0
private JobManagerTaskRestore createRunAndCheckpointOperatorChain(
        OperatorID headId,
        OneInputStreamOperator<String, String> headOperator,
        OperatorID tailId,
        OneInputStreamOperator<String, String> tailOperator,
        Optional<JobManagerTaskRestore> restore) throws Exception {

    File localRootDir = temporaryFolder.newFolder();
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        1, 1,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO,
        localRootDir);

    testHarness.setupOperatorChain(headId, headOperator)
        .chain(tailId, tailOperator, StringSerializer.INSTANCE, true)
        .finish();

    if (restore.isPresent()) {
        JobManagerTaskRestore taskRestore = restore.get();
        testHarness.setTaskStateSnapshot(
            taskRestore.getRestoreCheckpointId(),
            taskRestore.getTaskStateSnapshot());
    }

    StreamMockEnvironment environment = new StreamMockEnvironment(
        testHarness.jobConfig,
        testHarness.taskConfig,
        testHarness.getExecutionConfig(),
        testHarness.memorySize,
        new MockInputSplitProvider(),
        testHarness.bufferSize,
        testHarness.getTaskStateManager());

    Configuration configuration = new Configuration();
    configuration.setString(STATE_BACKEND.key(), "rocksdb");
    File file = temporaryFolder.newFolder();
    configuration.setString(CHECKPOINTS_DIRECTORY.key(), file.toURI().toString());
    configuration.setString(INCREMENTAL_CHECKPOINTS.key(), "true");
    environment.setTaskManagerInfo(
        new TestingTaskManagerRuntimeInfo(
            configuration,
            System.getProperty("java.io.tmpdir").split(",|" + File.pathSeparator)));

    testHarness.invoke(environment);
    testHarness.waitForTaskRunning();

    OneInputStreamTask<String, String> streamTask = testHarness.getTask();

    processRecords(testHarness);
    triggerCheckpoint(testHarness, streamTask);

    TestTaskStateManager taskStateManager = testHarness.getTaskStateManager();

    JobManagerTaskRestore jobManagerTaskRestore = new JobManagerTaskRestore(
        taskStateManager.getReportedCheckpointId(),
        taskStateManager.getLastJobManagerTaskStateSnapshot());

    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    return jobManagerTaskRestore;
}
Example #11
Source File: StreamTaskTimerTest.java From flink with Apache License 2.0
@Test
public void checkScheduledTimestampe() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    StreamMap<String, String> mapOperator = new StreamMap<>(new DummyMapFunction<String>());
    streamConfig.setStreamOperator(mapOperator);

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    final OneInputStreamTask<String, String> mapTask = testHarness.getTask();

    final AtomicReference<Throwable> errorRef = new AtomicReference<>();

    final long t1 = System.currentTimeMillis();
    final long t2 = System.currentTimeMillis() - 200;
    final long t3 = System.currentTimeMillis() + 100;
    final long t4 = System.currentTimeMillis() + 200;

    ProcessingTimeService timeService = mapTask.getProcessingTimeService();
    timeService.registerTimer(t1, new ValidatingProcessingTimeCallback(errorRef, t1, 0));
    timeService.registerTimer(t2, new ValidatingProcessingTimeCallback(errorRef, t2, 1));
    timeService.registerTimer(t3, new ValidatingProcessingTimeCallback(errorRef, t3, 2));
    timeService.registerTimer(t4, new ValidatingProcessingTimeCallback(errorRef, t4, 3));

    long deadline = System.currentTimeMillis() + 20000;
    while (errorRef.get() == null
            && ValidatingProcessingTimeCallback.numInSequence < 4
            && System.currentTimeMillis() < deadline) {
        Thread.sleep(100);
    }

    // handle errors
    if (errorRef.get() != null) {
        errorRef.get().printStackTrace();
        fail(errorRef.get().getMessage());
    }

    assertEquals(4, ValidatingProcessingTimeCallback.numInSequence);

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    // wait until the trigger thread is shut down. otherwise, the other tests may become unstable
    deadline = System.currentTimeMillis() + 4000;
    while (StreamTask.TRIGGER_THREAD_GROUP.activeCount() > 0 && System.currentTimeMillis() < deadline) {
        Thread.sleep(10);
    }

    assertEquals("Trigger timer thread did not properly shut down",
        0, StreamTask.TRIGGER_THREAD_GROUP.activeCount());
}
Example #12
Source File: StatefulOperatorChainedTaskTest.java From Flink-CEPplus with Apache License 2.0
private JobManagerTaskRestore createRunAndCheckpointOperatorChain(
        OperatorID headId,
        OneInputStreamOperator<String, String> headOperator,
        OperatorID tailId,
        OneInputStreamOperator<String, String> tailOperator,
        Optional<JobManagerTaskRestore> restore) throws Exception {

    File localRootDir = temporaryFolder.newFolder();
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        1, 1,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO,
        localRootDir);

    testHarness.setupOperatorChain(headId, headOperator)
        .chain(tailId, tailOperator, StringSerializer.INSTANCE, true)
        .finish();

    if (restore.isPresent()) {
        JobManagerTaskRestore taskRestore = restore.get();
        testHarness.setTaskStateSnapshot(
            taskRestore.getRestoreCheckpointId(),
            taskRestore.getTaskStateSnapshot());
    }

    StreamMockEnvironment environment = new StreamMockEnvironment(
        testHarness.jobConfig,
        testHarness.taskConfig,
        testHarness.getExecutionConfig(),
        testHarness.memorySize,
        new MockInputSplitProvider(),
        testHarness.bufferSize,
        testHarness.getTaskStateManager());

    Configuration configuration = new Configuration();
    configuration.setString(STATE_BACKEND.key(), "rocksdb");
    File file = temporaryFolder.newFolder();
    configuration.setString(CHECKPOINTS_DIRECTORY.key(), file.toURI().toString());
    configuration.setString(INCREMENTAL_CHECKPOINTS.key(), "true");
    environment.setTaskManagerInfo(
        new TestingTaskManagerRuntimeInfo(
            configuration,
            System.getProperty("java.io.tmpdir").split(",|" + File.pathSeparator)));

    testHarness.invoke(environment);
    testHarness.waitForTaskRunning();

    OneInputStreamTask<String, String> streamTask = testHarness.getTask();

    processRecords(testHarness);
    triggerCheckpoint(testHarness, streamTask);

    TestTaskStateManager taskStateManager = testHarness.getTaskStateManager();

    JobManagerTaskRestore jobManagerTaskRestore = new JobManagerTaskRestore(
        taskStateManager.getReportedCheckpointId(),
        taskStateManager.getLastJobManagerTaskStateSnapshot());

    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    return jobManagerTaskRestore;
}
Example #13
Source File: AsyncWaitOperatorTest.java From flink with Apache License 2.0
/**
 * Tests that the AsyncWaitOperator works together with chaining.
 */
@Test
public void testOperatorChainWithProcessingTime() throws Exception {
    JobVertex chainedVertex = createChainedVertex(false);

    final OneInputStreamTaskTestHarness<Integer, Integer> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        1, 1,
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.INT_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    testHarness.taskConfig = chainedVertex.getConfiguration();

    final StreamConfig streamConfig = testHarness.getStreamConfig();
    final StreamConfig operatorChainStreamConfig = new StreamConfig(chainedVertex.getConfiguration());
    final AsyncWaitOperator<Integer, Integer> headOperator =
        operatorChainStreamConfig.getStreamOperator(AsyncWaitOperatorTest.class.getClassLoader());
    streamConfig.setStreamOperator(headOperator);

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    long initialTimestamp = 0L;

    testHarness.processElement(new StreamRecord<>(5, initialTimestamp));
    testHarness.processElement(new StreamRecord<>(6, initialTimestamp + 1L));
    testHarness.processElement(new StreamRecord<>(7, initialTimestamp + 2L));
    testHarness.processElement(new StreamRecord<>(8, initialTimestamp + 3L));
    testHarness.processElement(new StreamRecord<>(9, initialTimestamp + 4L));

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    expectedOutput.add(new StreamRecord<>(22, initialTimestamp));
    expectedOutput.add(new StreamRecord<>(26, initialTimestamp + 1L));
    expectedOutput.add(new StreamRecord<>(30, initialTimestamp + 2L));
    expectedOutput.add(new StreamRecord<>(34, initialTimestamp + 3L));
    expectedOutput.add(new StreamRecord<>(38, initialTimestamp + 4L));

    TestHarnessUtil.assertOutputEqualsSorted(
        "Test for chained operator with AsyncWaitOperator failed",
        expectedOutput,
        testHarness.getOutput(),
        new StreamRecordComparator());
}
Example #14
Source File: StatefulOperatorChainedTaskTest.java From flink with Apache License 2.0
private void triggerCheckpoint(
        OneInputStreamTaskTestHarness<String, String> testHarness,
        OneInputStreamTask<String, String> streamTask) throws Exception {

    long checkpointId = 1L;
    CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointId, 1L);

    testHarness.getTaskStateManager().setWaitForReportLatch(new OneShotLatch());

    while (!streamTask.triggerCheckpoint(checkpointMetaData, CheckpointOptions.forCheckpointWithDefaultLocation(), false)) {}

    testHarness.getTaskStateManager().getWaitForReportLatch().await();
    long reportedCheckpointId = testHarness.getTaskStateManager().getReportedCheckpointId();

    assertEquals(checkpointId, reportedCheckpointId);
}
Example #15
Source File: StatefulOperatorChainedTaskTest.java From flink with Apache License 2.0
private JobManagerTaskRestore createRunAndCheckpointOperatorChain(
        OperatorID headId,
        OneInputStreamOperator<String, String> headOperator,
        OperatorID tailId,
        OneInputStreamOperator<String, String> tailOperator,
        Optional<JobManagerTaskRestore> restore) throws Exception {

    File localRootDir = temporaryFolder.newFolder();
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        1, 1,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO,
        localRootDir);

    testHarness.setupOperatorChain(headId, headOperator)
        .chain(tailId, tailOperator, StringSerializer.INSTANCE, true)
        .finish();

    if (restore.isPresent()) {
        JobManagerTaskRestore taskRestore = restore.get();
        testHarness.setTaskStateSnapshot(
            taskRestore.getRestoreCheckpointId(),
            taskRestore.getTaskStateSnapshot());
    }

    StreamMockEnvironment environment = new StreamMockEnvironment(
        testHarness.jobConfig,
        testHarness.taskConfig,
        testHarness.getExecutionConfig(),
        testHarness.memorySize,
        new MockInputSplitProvider(),
        testHarness.bufferSize,
        testHarness.getTaskStateManager());

    Configuration configuration = new Configuration();
    configuration.setString(STATE_BACKEND.key(), "rocksdb");
    File file = temporaryFolder.newFolder();
    configuration.setString(CHECKPOINTS_DIRECTORY.key(), file.toURI().toString());
    configuration.setString(INCREMENTAL_CHECKPOINTS.key(), "true");
    environment.setTaskManagerInfo(
        new TestingTaskManagerRuntimeInfo(
            configuration,
            System.getProperty("java.io.tmpdir").split(",|" + File.pathSeparator)));

    testHarness.invoke(environment);
    testHarness.waitForTaskRunning();

    OneInputStreamTask<String, String> streamTask = testHarness.getTask();

    processRecords(testHarness);
    triggerCheckpoint(testHarness, streamTask);

    TestTaskStateManager taskStateManager = testHarness.getTaskStateManager();

    JobManagerTaskRestore jobManagerTaskRestore = new JobManagerTaskRestore(
        taskStateManager.getReportedCheckpointId(),
        taskStateManager.getLastJobManagerTaskStateSnapshot());

    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    return jobManagerTaskRestore;
}
Example #16
Source File: StreamTaskTimerTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void checkScheduledTimestampe() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    StreamMap<String, String> mapOperator = new StreamMap<>(new DummyMapFunction<String>());
    streamConfig.setStreamOperator(mapOperator);

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    final OneInputStreamTask<String, String> mapTask = testHarness.getTask();

    final AtomicReference<Throwable> errorRef = new AtomicReference<>();

    final long t1 = System.currentTimeMillis();
    final long t2 = System.currentTimeMillis() - 200;
    final long t3 = System.currentTimeMillis() + 100;
    final long t4 = System.currentTimeMillis() + 200;

    ProcessingTimeService timeService = mapTask.getProcessingTimeService();
    timeService.registerTimer(t1, new ValidatingProcessingTimeCallback(errorRef, t1, 0));
    timeService.registerTimer(t2, new ValidatingProcessingTimeCallback(errorRef, t2, 1));
    timeService.registerTimer(t3, new ValidatingProcessingTimeCallback(errorRef, t3, 2));
    timeService.registerTimer(t4, new ValidatingProcessingTimeCallback(errorRef, t4, 3));

    long deadline = System.currentTimeMillis() + 20000;
    while (errorRef.get() == null
            && ValidatingProcessingTimeCallback.numInSequence < 4
            && System.currentTimeMillis() < deadline) {
        Thread.sleep(100);
    }

    // handle errors
    if (errorRef.get() != null) {
        errorRef.get().printStackTrace();
        fail(errorRef.get().getMessage());
    }

    assertEquals(4, ValidatingProcessingTimeCallback.numInSequence);

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    // wait until the trigger thread is shut down. otherwise, the other tests may become unstable
    deadline = System.currentTimeMillis() + 4000;
    while (StreamTask.TRIGGER_THREAD_GROUP.activeCount() > 0 && System.currentTimeMillis() < deadline) {
        Thread.sleep(10);
    }

    assertEquals("Trigger timer thread did not properly shut down",
        0, StreamTask.TRIGGER_THREAD_GROUP.activeCount());
}
Example #17
Source File: StreamTaskTimerTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testOpenCloseAndTimestamps() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    StreamMap<String, String> mapOperator = new StreamMap<>(new DummyMapFunction<String>());
    streamConfig.setStreamOperator(mapOperator);
    streamConfig.setOperatorID(new OperatorID());

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    final OneInputStreamTask<String, String> mapTask = testHarness.getTask();

    // first one spawns thread
    mapTask.getProcessingTimeService().registerTimer(System.currentTimeMillis(), new ProcessingTimeCallback() {
        @Override
        public void onProcessingTime(long timestamp) {
        }
    });

    assertEquals(1, StreamTask.TRIGGER_THREAD_GROUP.activeCount());

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    // thread needs to die in time
    long deadline = System.currentTimeMillis() + 4000;
    while (StreamTask.TRIGGER_THREAD_GROUP.activeCount() > 0 && System.currentTimeMillis() < deadline) {
        Thread.sleep(10);
    }

    assertEquals("Trigger timer thread did not properly shut down",
        0, StreamTask.TRIGGER_THREAD_GROUP.activeCount());
}
Example #18
Source File: AsyncWaitOperatorTest.java From Flink-CEPplus with Apache License 2.0
/**
 * Tests that the AsyncWaitOperator works together with chaining.
 */
@Test
public void testOperatorChainWithProcessingTime() throws Exception {
    JobVertex chainedVertex = createChainedVertex(false);

    final OneInputStreamTaskTestHarness<Integer, Integer> testHarness = new OneInputStreamTaskTestHarness<>(
        OneInputStreamTask::new,
        1, 1,
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.INT_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    testHarness.taskConfig = chainedVertex.getConfiguration();

    final StreamConfig streamConfig = testHarness.getStreamConfig();
    final StreamConfig operatorChainStreamConfig = new StreamConfig(chainedVertex.getConfiguration());
    final AsyncWaitOperator<Integer, Integer> headOperator =
        operatorChainStreamConfig.getStreamOperator(AsyncWaitOperatorTest.class.getClassLoader());
    streamConfig.setStreamOperator(headOperator);

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    long initialTimestamp = 0L;

    testHarness.processElement(new StreamRecord<>(5, initialTimestamp));
    testHarness.processElement(new StreamRecord<>(6, initialTimestamp + 1L));
    testHarness.processElement(new StreamRecord<>(7, initialTimestamp + 2L));
    testHarness.processElement(new StreamRecord<>(8, initialTimestamp + 3L));
    testHarness.processElement(new StreamRecord<>(9, initialTimestamp + 4L));

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    expectedOutput.add(new StreamRecord<>(22, initialTimestamp));
    expectedOutput.add(new StreamRecord<>(26, initialTimestamp + 1L));
    expectedOutput.add(new StreamRecord<>(30, initialTimestamp + 2L));
    expectedOutput.add(new StreamRecord<>(34, initialTimestamp + 3L));
    expectedOutput.add(new StreamRecord<>(38, initialTimestamp + 4L));

    TestHarnessUtil.assertOutputEqualsSorted(
        "Test for chained operator with AsyncWaitOperator failed",
        expectedOutput,
        testHarness.getOutput(),
        new StreamRecordComparator());
}
Example #19
Source File: StatefulOperatorChainedTaskTest.java From Flink-CEPplus with Apache License 2.0
private void triggerCheckpoint(
        OneInputStreamTaskTestHarness<String, String> testHarness,
        OneInputStreamTask<String, String> streamTask) throws Exception {

    long checkpointId = 1L;
    CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointId, 1L);

    testHarness.getTaskStateManager().setWaitForReportLatch(new OneShotLatch());

    while (!streamTask.triggerCheckpoint(checkpointMetaData, CheckpointOptions.forCheckpointWithDefaultLocation())) {}

    testHarness.getTaskStateManager().getWaitForReportLatch().await();
    long reportedCheckpointId = testHarness.getTaskStateManager().getReportedCheckpointId();

    assertEquals(checkpointId, reportedCheckpointId);
}
Example #20
Source File: TestProcessingTimeServiceTest.java From flink with Apache License 2.0
@Test
public void testCustomTimeServiceProvider() throws Throwable {
    final TestProcessingTimeService tp = new TestProcessingTimeService();

    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
        (env) -> new OneInputStreamTask<>(env, tp),
        BasicTypeInfo.STRING_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    StreamMap<String, String> mapOperator = new StreamMap<>(new StreamTaskTimerTest.DummyMapFunction<>());
    streamConfig.setStreamOperator(mapOperator);
    streamConfig.setOperatorID(new OperatorID());

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    ProcessingTimeService processingTimeService =
        ((StreamMap<?, ?>) testHarness.getHeadOperator()).getProcessingTimeService();

    assertEquals(Long.MIN_VALUE, processingTimeService.getCurrentProcessingTime());

    tp.setCurrentTime(11);
    assertEquals(processingTimeService.getCurrentProcessingTime(), 11);

    tp.setCurrentTime(15);
    tp.setCurrentTime(16);
    assertEquals(processingTimeService.getCurrentProcessingTime(), 16);

    // register 2 tasks
    processingTimeService.registerTimer(30, timestamp -> {});
    processingTimeService.registerTimer(40, timestamp -> {});

    assertEquals(2, tp.getNumActiveTimers());

    tp.setCurrentTime(35);
    assertEquals(1, tp.getNumActiveTimers());

    tp.setCurrentTime(40);
    assertEquals(0, tp.getNumActiveTimers());

    tp.shutdownService();
}