Java Code Examples for org.apache.flink.api.common.typeinfo.BasicTypeInfo#STRING_TYPE_INFO
The following examples show how to use org.apache.flink.api.common.typeinfo.BasicTypeInfo#STRING_TYPE_INFO.
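BasicTypeInfo.STRING_TYPE_INFO is Flink's built-in TypeInformation for java.lang.String. The examples below use it to declare operator input and output types, build composite type infos, create serializers, and describe keyed or broadcast state. As a quick orientation before the examples, here is a minimal, self-contained sketch of the two most common uses; it is not taken from any of the projects below, and the variable names and state name are illustrative:

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;

TypeInformation<String> stringType = BasicTypeInfo.STRING_TYPE_INFO;

// Create a serializer for String, as several of the test harnesses below do.
TypeSerializer<String> serializer = stringType.createSerializer(new ExecutionConfig());

// Use the type info to describe state, here a map from Long keys to String values.
MapStateDescriptor<Long, String> stateDesc = new MapStateDescriptor<>(
        "example-broadcast-state", // illustrative state name
        BasicTypeInfo.LONG_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);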
Example 1
Source File: GenericDataSourceBaseTest.java From flink with Apache License 2.0
@Test
public void testDataSourcePlain() {
    try {
        TestNonRichInputFormat in = new TestNonRichInputFormat();
        GenericDataSourceBase<String, TestNonRichInputFormat> source =
                new GenericDataSourceBase<String, TestNonRichInputFormat>(
                        in, new OperatorInformation<String>(BasicTypeInfo.STRING_TYPE_INFO), "testSource");

        ExecutionConfig executionConfig = new ExecutionConfig();
        executionConfig.disableObjectReuse();
        List<String> resultMutableSafe = source.executeOnCollections(null, executionConfig);

        in.reset();
        executionConfig.enableObjectReuse();
        List<String> resultRegular = source.executeOnCollections(null, executionConfig);

        assertEquals(asList(TestIOData.NAMES), resultMutableSafe);
        assertEquals(asList(TestIOData.NAMES), resultRegular);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example 2
Source File: KeyedCoProcessOperatorTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testEventTimeTimers() throws Exception {

    KeyedCoProcessOperator<String, Integer, String, String> operator =
            new KeyedCoProcessOperator<>(new EventTimeTriggeringProcessFunction());

    TwoInputStreamOperatorTestHarness<Integer, String, String> testHarness =
            new KeyedTwoInputStreamOperatorTestHarness<>(
                    operator,
                    new IntToStringKeySelector<>(),
                    new IdentityKeySelector<String>(),
                    BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setup();
    testHarness.open();

    testHarness.processElement1(new StreamRecord<>(17, 42L));
    testHarness.processElement2(new StreamRecord<>("18", 42L));

    testHarness.processWatermark1(new Watermark(5));
    testHarness.processWatermark2(new Watermark(5));
    testHarness.processWatermark1(new Watermark(6));
    testHarness.processWatermark2(new Watermark(6));

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    expectedOutput.add(new StreamRecord<>("INPUT1:17", 42L));
    expectedOutput.add(new StreamRecord<>("INPUT2:18", 42L));
    expectedOutput.add(new StreamRecord<>("1777", 5L));
    expectedOutput.add(new Watermark(5L));
    expectedOutput.add(new StreamRecord<>("1777", 6L));
    expectedOutput.add(new Watermark(6L));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.close();
}
Example 3
Source File: OneInputStreamTaskTest.java From Flink-CEPplus with Apache License 2.0
/**
 * This test verifies that open() and close() are correctly called. This test also verifies
 * that timestamps of emitted elements are correct. {@link StreamMap} assigns the input
 * timestamp to emitted elements.
 */
@Test
public void testOpenCloseAndTimestamps() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
            OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    StreamMap<String, String> mapOperator = new StreamMap<String, String>(new TestOpenCloseMapFunction());
    streamConfig.setStreamOperator(mapOperator);
    streamConfig.setOperatorID(new OperatorID());

    long initialTime = 0L;

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    testHarness.processElement(new StreamRecord<String>("Hello", initialTime + 1));
    testHarness.processElement(new StreamRecord<String>("Ciao", initialTime + 2));
    expectedOutput.add(new StreamRecord<String>("Hello", initialTime + 1));
    expectedOutput.add(new StreamRecord<String>("Ciao", initialTime + 2));

    testHarness.waitForInputProcessing();

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    assertTrue("RichFunction methods were not called.", TestOpenCloseMapFunction.closeCalled);

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Example 4
Source File: ParquetTableSource.java From flink with Apache License 2.0
@Nullable
private Tuple2<Column, Comparable> extractColumnAndLiteral(BinaryComparison comp) {
    TypeInformation<?> typeInfo = getLiteralType(comp);
    String columnName = getColumnName(comp);

    // fetch literal and ensure it is comparable
    Object value = getLiteral(comp);
    // validate that literal is comparable
    if (!(value instanceof Comparable)) {
        LOG.warn("Encountered a non-comparable literal of type {}. " +
            "Cannot push predicate [{}] into ParquetTableSource. " +
            "This is a bug and should be reported.",
            value.getClass().getCanonicalName(), comp);
        return null;
    }

    if (typeInfo == BasicTypeInfo.BYTE_TYPE_INFO ||
        typeInfo == BasicTypeInfo.SHORT_TYPE_INFO ||
        typeInfo == BasicTypeInfo.INT_TYPE_INFO) {
        return new Tuple2<>(FilterApi.intColumn(columnName), (Integer) value);
    } else if (typeInfo == BasicTypeInfo.LONG_TYPE_INFO) {
        return new Tuple2<>(FilterApi.longColumn(columnName), (Long) value);
    } else if (typeInfo == BasicTypeInfo.FLOAT_TYPE_INFO) {
        return new Tuple2<>(FilterApi.floatColumn(columnName), (Float) value);
    } else if (typeInfo == BasicTypeInfo.BOOLEAN_TYPE_INFO) {
        return new Tuple2<>(FilterApi.booleanColumn(columnName), (Boolean) value);
    } else if (typeInfo == BasicTypeInfo.DOUBLE_TYPE_INFO) {
        return new Tuple2<>(FilterApi.doubleColumn(columnName), (Double) value);
    } else if (typeInfo == BasicTypeInfo.STRING_TYPE_INFO) {
        return new Tuple2<>(FilterApi.binaryColumn(columnName), Binary.fromString((String) value));
    } else {
        // unsupported type
        return null;
    }
}
Example 5
Source File: CollectionDataSets.java From flink with Apache License 2.0
public static DataSet<Tuple2<Tuple2<Integer, Integer>, String>> getSmallNestedTupleDataSet(ExecutionEnvironment env) {

    List<Tuple2<Tuple2<Integer, Integer>, String>> data = new ArrayList<>();
    data.add(new Tuple2<>(new Tuple2<>(1, 1), "one"));
    data.add(new Tuple2<>(new Tuple2<>(2, 2), "two"));
    data.add(new Tuple2<>(new Tuple2<>(3, 3), "three"));

    TupleTypeInfo<Tuple2<Tuple2<Integer, Integer>, String>> type = new TupleTypeInfo<>(
            new TupleTypeInfo<Tuple2<Integer, Integer>>(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO),
            BasicTypeInfo.STRING_TYPE_INFO
    );

    return env.fromCollection(data, type);
}
Example 6
Source File: KeyedCoProcessOperatorTest.java From Flink-CEPplus with Apache License 2.0
/**
 * Verifies that we don't have leakage between different keys.
 */
@Test
public void testProcessingTimeTimerWithState() throws Exception {

    KeyedCoProcessOperator<String, Integer, String, String> operator =
            new KeyedCoProcessOperator<>(new ProcessingTimeTriggeringStatefulProcessFunction());

    TwoInputStreamOperatorTestHarness<Integer, String, String> testHarness =
            new KeyedTwoInputStreamOperatorTestHarness<>(
                    operator,
                    new IntToStringKeySelector<>(),
                    new IdentityKeySelector<String>(),
                    BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setup();
    testHarness.open();

    testHarness.setProcessingTime(1);
    testHarness.processElement1(new StreamRecord<>(17)); // should set timer for 6
    testHarness.processElement1(new StreamRecord<>(13)); // should set timer for 6

    testHarness.setProcessingTime(2);
    testHarness.processElement1(new StreamRecord<>(13)); // should delete timer again
    testHarness.processElement2(new StreamRecord<>("42")); // should set timer for 7

    testHarness.setProcessingTime(6);
    testHarness.setProcessingTime(7);

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    expectedOutput.add(new StreamRecord<>("INPUT1:17"));
    expectedOutput.add(new StreamRecord<>("INPUT1:13"));
    expectedOutput.add(new StreamRecord<>("INPUT2:42"));
    expectedOutput.add(new StreamRecord<>("STATE:17"));
    expectedOutput.add(new StreamRecord<>("STATE:42"));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.close();
}
Example 7
Source File: TypeExtractorTest.java From flink with Apache License 2.0
@Test
public void testEither() {
    MapFunction<?, ?> function = new MapFunction<Either<String, Boolean>, Either<String, Boolean>>() {
        @Override
        public Either<String, Boolean> map(Either<String, Boolean> value) throws Exception {
            return null;
        }
    };

    TypeInformation<?> expected = new EitherTypeInfo(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.BOOLEAN_TYPE_INFO);
    TypeInformation<?> ti = TypeExtractor.getMapReturnTypes((MapFunction) function, expected);

    Assert.assertEquals(expected, ti);
}
Example 8
Source File: SelectorFunctionKeysTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testAreCompatible3() throws Keys.IncompatibleKeysException {
    TypeInformation<String> t1 = BasicTypeInfo.STRING_TYPE_INFO;
    TypeInformation<Pojo2> t2 = TypeExtractor.getForClass(Pojo2.class);

    Keys.ExpressionKeys<String> ek1 = new Keys.ExpressionKeys<>("*", t1);
    Keys<Pojo2> sk2 = new Keys.SelectorFunctionKeys<>(
            new KeySelector1(),
            t2,
            BasicTypeInfo.STRING_TYPE_INFO
    );

    Assert.assertTrue(sk2.areCompatible(ek1));
}
Example 9
Source File: StatefulJobWBroadcastStateMigrationITCase.java From flink with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    super.open(parameters);

    stateDesc = new MapStateDescriptor<>(
            "broadcast-state-3",
            BasicTypeInfo.LONG_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO
    );
}
Example 10
Source File: RowTypeInfoTest.java From Flink-CEPplus with Apache License 2.0
@Override
protected RowTypeInfo[] getTestData() {
    return new RowTypeInfo[] {
        new RowTypeInfo(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO),
        new RowTypeInfo(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.BOOLEAN_TYPE_INFO),
        new RowTypeInfo(typeList),
        new RowTypeInfo(
            new TypeInformation[]{BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO},
            new String[]{"int", "int2"})
    };
}
Example 11
Source File: WindowOperatorMigrationTest.java From flink with Apache License 2.0
/**
 * Manually run this to write binary snapshot data.
 */
@Ignore
@Test
public void writeSessionWindowsWithCountTriggerSnapshot() throws Exception {
    final int sessionSize = 3;

    ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
            STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

    WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(sessionSize)),
            new TimeWindow.Serializer(),
            new TupleKeySelector<String>(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalIterableWindowFunction<>(new SessionWindowFunction()),
            PurgingTrigger.of(CountTrigger.of(4)),
            0,
            null /* late data output tag */);

    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector<>(), BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setup();
    testHarness.open();

    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 3500));

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));

    // do snapshot and save to file
    OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
    OperatorSnapshotUtil.writeStateHandle(
        snapshot,
        "src/test/resources/win-op-migration-test-session-with-stateful-trigger-flink" + flinkGenerateSavepointVersion + "-snapshot");

    testHarness.close();
}
Example 12
Source File: InternalWindowFunctionTest.java From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked") @Test public void testInternalIterableAllWindowFunction() throws Exception { AllWindowFunctionMock mock = mock(AllWindowFunctionMock.class); InternalIterableAllWindowFunction<Long, String, TimeWindow> windowFunction = new InternalIterableAllWindowFunction<>(mock); // check setOutputType TypeInformation<String> stringType = BasicTypeInfo.STRING_TYPE_INFO; ExecutionConfig execConf = new ExecutionConfig(); execConf.setParallelism(42); StreamingFunctionUtils.setOutputType(windowFunction, stringType, execConf); verify(mock).setOutputType(stringType, execConf); // check open Configuration config = new Configuration(); windowFunction.open(config); verify(mock).open(config); // check setRuntimeContext RuntimeContext rCtx = mock(RuntimeContext.class); windowFunction.setRuntimeContext(rCtx); verify(mock).setRuntimeContext(rCtx); // check apply TimeWindow w = mock(TimeWindow.class); Iterable<Long> i = (Iterable<Long>) mock(Iterable.class); Collector<String> c = (Collector<String>) mock(Collector.class); InternalWindowFunction.InternalWindowContext ctx = mock(InternalWindowFunction.InternalWindowContext.class); windowFunction.process(((byte) 0), w, ctx, i, c); verify(mock).apply(w, i, c); // check close windowFunction.close(); verify(mock).close(); }
Example 13
Source File: TwoInputStreamTaskTest.java From Flink-CEPplus with Apache License 2.0
/**
 * This test verifies that open() and close() are correctly called. This test also verifies
 * that timestamps of emitted elements are correct. {@link CoStreamMap} assigns the input
 * timestamp to emitted elements.
 */
@Test
@SuppressWarnings("unchecked")
public void testOpenCloseAndTimestamps() throws Exception {
    final TwoInputStreamTaskTestHarness<String, Integer, String> testHarness =
        new TwoInputStreamTaskTestHarness<>(
            TwoInputStreamTask::new,
            BasicTypeInfo.STRING_TYPE_INFO,
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    CoStreamMap<String, Integer, String> coMapOperator = new CoStreamMap<String, Integer, String>(new TestOpenCloseMapFunction());
    streamConfig.setStreamOperator(coMapOperator);
    streamConfig.setOperatorID(new OperatorID());

    long initialTime = 0L;

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    testHarness.processElement(new StreamRecord<String>("Hello", initialTime + 1), 0, 0);
    expectedOutput.add(new StreamRecord<String>("Hello", initialTime + 1));

    // wait until the input is processed to ensure ordering of the output
    testHarness.waitForInputProcessing();

    testHarness.processElement(new StreamRecord<Integer>(1337, initialTime + 2), 1, 0);
    expectedOutput.add(new StreamRecord<String>("1337", initialTime + 2));

    testHarness.waitForInputProcessing();

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    Assert.assertTrue("RichFunction methods were not called.", TestOpenCloseMapFunction.closeCalled);

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Example 14
Source File: InternalWindowFunctionTest.java From flink with Apache License 2.0
@SuppressWarnings("unchecked") @Test public void testInternalIterableAllWindowFunction() throws Exception { AllWindowFunctionMock mock = mock(AllWindowFunctionMock.class); InternalIterableAllWindowFunction<Long, String, TimeWindow> windowFunction = new InternalIterableAllWindowFunction<>(mock); // check setOutputType TypeInformation<String> stringType = BasicTypeInfo.STRING_TYPE_INFO; ExecutionConfig execConf = new ExecutionConfig(); execConf.setParallelism(42); StreamingFunctionUtils.setOutputType(windowFunction, stringType, execConf); verify(mock).setOutputType(stringType, execConf); // check open Configuration config = new Configuration(); windowFunction.open(config); verify(mock).open(config); // check setRuntimeContext RuntimeContext rCtx = mock(RuntimeContext.class); windowFunction.setRuntimeContext(rCtx); verify(mock).setRuntimeContext(rCtx); // check apply TimeWindow w = mock(TimeWindow.class); Iterable<Long> i = (Iterable<Long>) mock(Iterable.class); Collector<String> c = (Collector<String>) mock(Collector.class); InternalWindowFunction.InternalWindowContext ctx = mock(InternalWindowFunction.InternalWindowContext.class); windowFunction.process(((byte) 0), w, ctx, i, c); verify(mock).apply(w, i, c); // check close windowFunction.close(); verify(mock).close(); }
Example 15
Source File: KeyedCoProcessOperatorTest.java From Flink-CEPplus with Apache License 2.0
/**
 * Verifies that we don't have leakage between different keys.
 */
@Test
public void testEventTimeTimerWithState() throws Exception {

    KeyedCoProcessOperator<String, Integer, String, String> operator =
            new KeyedCoProcessOperator<>(new EventTimeTriggeringStatefulProcessFunction());

    TwoInputStreamOperatorTestHarness<Integer, String, String> testHarness =
            new KeyedTwoInputStreamOperatorTestHarness<>(
                    operator,
                    new IntToStringKeySelector<>(),
                    new IdentityKeySelector<String>(),
                    BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setup();
    testHarness.open();

    testHarness.processWatermark1(new Watermark(1));
    testHarness.processWatermark2(new Watermark(1));
    testHarness.processElement1(new StreamRecord<>(17, 0L)); // should set timer for 6
    testHarness.processElement1(new StreamRecord<>(13, 0L)); // should set timer for 6

    testHarness.processWatermark1(new Watermark(2));
    testHarness.processWatermark2(new Watermark(2));
    testHarness.processElement1(new StreamRecord<>(13, 1L)); // should delete timer
    testHarness.processElement2(new StreamRecord<>("42", 1L)); // should set timer for 7

    testHarness.processWatermark1(new Watermark(6));
    testHarness.processWatermark2(new Watermark(6));

    testHarness.processWatermark1(new Watermark(7));
    testHarness.processWatermark2(new Watermark(7));

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    expectedOutput.add(new Watermark(1L));
    expectedOutput.add(new StreamRecord<>("INPUT1:17", 0L));
    expectedOutput.add(new StreamRecord<>("INPUT1:13", 0L));
    expectedOutput.add(new Watermark(2L));
    expectedOutput.add(new StreamRecord<>("INPUT2:42", 1L));
    expectedOutput.add(new StreamRecord<>("STATE:17", 6L));
    expectedOutput.add(new Watermark(6L));
    expectedOutput.add(new StreamRecord<>("STATE:42", 7L));
    expectedOutput.add(new Watermark(7L));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.close();
}
Example 16
Source File: EvictingWindowOperatorTest.java From flink with Apache License 2.0
/**
 * Tests CountEvictor evictAfter behavior.
 */
@Test
public void testCountEvictorEvictAfter() throws Exception {
    AtomicInteger closeCalled = new AtomicInteger(0);
    final int windowSize = 4;
    final int triggerCount = 2;
    final boolean evictAfter = true;

    @SuppressWarnings({"unchecked", "rawtypes"})
    TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer =
        (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc =
        new ListStateDescriptor<>("window-contents", streamRecordSerializer);

    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(
        GlobalWindows.create(),
        new GlobalWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)),
        CountTrigger.of(triggerCount),
        CountEvictor.of(windowSize, evictAfter),
        0,
        null /* late data output tag */);

    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
        new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

    long initialTime = 0L;

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    testHarness.open();

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));

    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 6), Long.MAX_VALUE));

    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));

    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 6), Long.MAX_VALUE));

    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());

    testHarness.close();

    Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
Example 17
Source File: OneInputStreamTaskTest.java From Flink-CEPplus with Apache License 2.0
/**
 * This test verifies that checkpoint barriers and barrier buffers work correctly with
 * concurrent checkpoint barriers where one checkpoint is "overtaking" another checkpoint, i.e.
 * some inputs receive barriers from an earlier checkpoint, thereby blocking,
 * then all inputs receive barriers from a later checkpoint.
 */
@Test
public void testOvertakingCheckpointBarriers() throws Exception {

    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(
            OneInputStreamTask::new,
            2, 2,
            BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    StreamMap<String, String> mapOperator = new StreamMap<String, String>(new IdentityMap());
    streamConfig.setStreamOperator(mapOperator);
    streamConfig.setOperatorID(new OperatorID());

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
    long initialTime = 0L;

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);

    // These elements should be buffered until we receive barriers from
    // all inputs
    testHarness.processElement(new StreamRecord<String>("Hello-0-0", initialTime), 0, 0);
    testHarness.processElement(new StreamRecord<String>("Ciao-0-0", initialTime), 0, 0);

    // These elements should be forwarded, since we did not yet receive a checkpoint barrier
    // on that input, only add to same input, otherwise we would not know the ordering
    // of the output since the Task might read the inputs in any order
    testHarness.processElement(new StreamRecord<String>("Hello-1-1", initialTime), 1, 1);
    testHarness.processElement(new StreamRecord<String>("Ciao-1-1", initialTime), 1, 1);
    expectedOutput.add(new StreamRecord<String>("Hello-1-1", initialTime));
    expectedOutput.add(new StreamRecord<String>("Ciao-1-1", initialTime));

    testHarness.waitForInputProcessing();

    // we should not yet see the barrier, only the two elements from non-blocked input
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

    // Now give a later barrier to all inputs, this should unblock the first channel,
    // thereby allowing the two blocked elements through
    testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 0);
    testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
    testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
    testHarness.processEvent(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

    expectedOutput.add(new CancelCheckpointMarker(0));
    expectedOutput.add(new StreamRecord<String>("Hello-0-0", initialTime));
    expectedOutput.add(new StreamRecord<String>("Ciao-0-0", initialTime));
    expectedOutput.add(new CheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation()));

    testHarness.waitForInputProcessing();

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

    // Then give the earlier barrier, these should be ignored
    testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 0, 1);
    testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 0);
    testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forCheckpointWithDefaultLocation()), 1, 1);

    testHarness.waitForInputProcessing();

    testHarness.endInput();
    testHarness.waitForTaskCompletion();

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Example 18
Source File: DataGenerators.java From flink with Apache License 2.0
public MockTransformation() {
    super("MockTransform", BasicTypeInfo.STRING_TYPE_INFO, 1);
}
Example 19
Source File: EvictingWindowOperatorTest.java From Flink-CEPplus with Apache License 2.0
@Test @SuppressWarnings("unchecked") public void testCountTrigger() throws Exception { final int windowSize = 4; final int windowSlide = 2; @SuppressWarnings({"unchecked", "rawtypes"}) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig())); ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer); EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>( GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>( new ReduceApplyWindowFunction<>( new SumReducer(), // on some versions of Java we seem to need the explicit type new PassThroughWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>())), CountTrigger.of(windowSlide), CountEvictor.of(windowSize), 0, null /* late data output tag */); OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO); long initialTime = 0L; ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>(); testHarness.open(); // The global window actually ignores these timestamps... // add elements out-of-order testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000)); testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999)); testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20)); testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime)); testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999)); testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998)); testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999)); testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000)); expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE)); expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE)); expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE)); TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator()); testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999)); testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000)); expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE)); expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE)); TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator()); testHarness.close(); }
Example 20
Source File: StreamExecutionEnvironment.java From flink with Apache License 2.0
/**
 * Reads the given file line-by-line and creates a data stream that contains a string with the
 * contents of each such line. The {@link java.nio.charset.Charset} with the given name will be
 * used to read the files.
 *
 * <p><b>NOTES ON CHECKPOINTING: </b> The source monitors the path, creates the
 * {@link org.apache.flink.core.fs.FileInputSplit FileInputSplits} to be processed,
 * forwards them to the downstream readers to read the actual data,
 * and exits, without waiting for the readers to finish reading. This implies that no more checkpoint
 * barriers are going to be forwarded after the source exits, thus having no checkpoints after that point.
 *
 * @param filePath
 *     The path of the file, as a URI (e.g., "file:///some/local/file" or "hdfs://host:port/file/path")
 * @param charsetName
 *     The name of the character set used to read the file
 * @return The data stream that represents the data read from the given file as text lines
 */
public DataStreamSource<String> readTextFile(String filePath, String charsetName) {
    Preconditions.checkArgument(!StringUtils.isNullOrWhitespaceOnly(filePath),
        "The file path must not be null or blank.");

    TextInputFormat format = new TextInputFormat(new Path(filePath));
    format.setFilesFilter(FilePathFilter.createDefaultFilter());
    TypeInformation<String> typeInfo = BasicTypeInfo.STRING_TYPE_INFO;
    format.setCharsetName(charsetName);

    return readFile(format, filePath, FileProcessingMode.PROCESS_ONCE, -1, typeInfo);
}