org.apache.flink.shaded.guava18.com.google.common.collect.Lists Java Examples
The following examples show how to use org.apache.flink.shaded.guava18.com.google.common.collect.Lists, Flink's shaded copy of Guava's Lists factory class. Each example is taken from an open-source project; the source file, originating project, and license are noted above it.
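Because the class is only a relocated Guava 18 Lists, the standard Guava factory methods behave exactly as documented upstream. The minimal sketch below is not taken from the Flink codebase: the class name ListsUsageSketch is invented for illustration, and it assumes flink-shaded-guava 18 is on the classpath. It exercises the three factories that recur throughout the examples: newArrayList from varargs, newArrayList from an Iterator, and newArrayListWithCapacity.

import java.util.Iterator;
import java.util.List;

import org.apache.flink.shaded.guava18.com.google.common.collect.Lists;

public class ListsUsageSketch {

    public static void main(String[] args) {
        // Varargs factory: build a mutable ArrayList in one call.
        List<String> fromVarargs = Lists.newArrayList("a", "b", "c");

        // Iterator factory: drain an iterator into a list. Several examples
        // below use this to materialize the iterator returned by
        // Table#execute()#collect().
        Iterator<String> it = fromVarargs.iterator();
        List<String> fromIterator = Lists.newArrayList(it);

        // Capacity hint: pre-size the backing array when the element count
        // is known up front, as in the RecordWriter examples below.
        List<String> presized = Lists.newArrayListWithCapacity(2);
        presized.add("pipelined");
        presized.add("blocking");

        System.out.println(fromVarargs);  // [a, b, c]
        System.out.println(fromIterator); // [a, b, c]
        System.out.println(presized);     // [pipelined, blocking]
    }
}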
Example #1
Source File: HiveTableSourceITCase.java From flink with Apache License 2.0
@Test
public void testReadNonPartitionedTable() throws Exception {
    final String dbName = "source_db";
    final String tblName = "test";
    TableEnvironment tEnv = createTableEnv();
    tEnv.executeSql("CREATE TABLE source_db.test ( a INT, b INT, c STRING, d BIGINT, e DOUBLE)");
    HiveTestUtils.createTextTableInserter(hiveShell, dbName, tblName)
            .addRow(new Object[] { 1, 1, "a", 1000L, 1.11 })
            .addRow(new Object[] { 2, 2, "b", 2000L, 2.22 })
            .addRow(new Object[] { 3, 3, "c", 3000L, 3.33 })
            .addRow(new Object[] { 4, 4, "d", 4000L, 4.44 })
            .commit();
    Table src = tEnv.sqlQuery("select * from hive.source_db.test");
    List<Row> rows = Lists.newArrayList(src.execute().collect());
    Assert.assertEquals(4, rows.size());
    Assert.assertEquals("1,1,a,1000,1.11", rows.get(0).toString());
    Assert.assertEquals("2,2,b,2000,2.22", rows.get(1).toString());
    Assert.assertEquals("3,3,c,3000,3.33", rows.get(2).toString());
    Assert.assertEquals("4,4,d,4000,4.44", rows.get(3).toString());
}
Example #2
Source File: DynamicProcessingTimeSessionWindowsTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testMergeCoveringWindow() {
    MergingWindowAssigner.MergeCallback callback = mock(MergingWindowAssigner.MergeCallback.class);
    SessionWindowTimeGapExtractor extractor = mock(SessionWindowTimeGapExtractor.class);
    when(extractor.extract(any())).thenReturn(5000L);

    DynamicProcessingTimeSessionWindows assigner = DynamicProcessingTimeSessionWindows.withDynamicGap(extractor);

    assigner.mergeWindows(
            Lists.newArrayList(
                    new TimeWindow(1, 1),
                    new TimeWindow(0, 2),
                    new TimeWindow(4, 7),
                    new TimeWindow(5, 6)),
            callback);

    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(1, 1), new TimeWindow(0, 2))),
            eq(new TimeWindow(0, 2)));
    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(5, 6), new TimeWindow(4, 7))),
            eq(new TimeWindow(4, 7)));
    verify(callback, times(2)).merge(anyCollection(), Matchers.anyObject());
}
Example #3
Source File: SecurityUtilsTest.java From flink with Apache License 2.0
/**
 * Verify that we fall back to a second configuration if the first one is incompatible.
 */
@Test
public void testSecurityContextShouldFallbackToSecond() throws Exception {
    Configuration testFlinkConf = new Configuration();
    testFlinkConf.set(
            SecurityOptions.SECURITY_CONTEXT_FACTORY_CLASSES,
            Lists.newArrayList(
                    IncompatibleTestSecurityContextFactory.class.getCanonicalName(),
                    TestSecurityContextFactory.class.getCanonicalName()));
    SecurityConfiguration testSecurityConf = new SecurityConfiguration(testFlinkConf);
    SecurityUtils.install(testSecurityConf);
    assertEquals(
            TestSecurityContextFactory.TestSecurityContext.class,
            SecurityUtils.getInstalledContext().getClass());
    SecurityUtils.uninstall();
    assertEquals(NoOpSecurityContext.class, SecurityUtils.getInstalledContext().getClass());
}
Example #4
Source File: TableEnvHiveConnectorITCase.java From flink with Apache License 2.0
@Test
public void testInsertPartitionWithStarSource() throws Exception {
    TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
    tableEnv.executeSql("create table src (x int,y string)");
    HiveTestUtils.createTextTableInserter(hiveShell, "default", "src")
            .addRow(new Object[]{1, "a"})
            .commit();
    tableEnv.executeSql("create table dest (x int) partitioned by (p1 int,p2 string)");
    TableEnvUtil.execInsertSqlAndWaitResult(tableEnv, "insert into dest partition (p1=1) select * from src");
    List<Row> results = Lists.newArrayList(tableEnv.sqlQuery("select * from dest").execute().collect());
    assertEquals("[1,1,a]", results.toString());
    tableEnv.executeSql("drop table if exists src");
    tableEnv.executeSql("drop table if exists dest");
}
Example #5
Source File: ProcessingTimeSessionWindowsTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testMergeConsecutiveWindows() {
    MergingWindowAssigner.MergeCallback callback = mock(MergingWindowAssigner.MergeCallback.class);

    ProcessingTimeSessionWindows assigner = ProcessingTimeSessionWindows.withGap(Time.milliseconds(5000));

    assigner.mergeWindows(
            Lists.newArrayList(
                    new TimeWindow(0, 1),
                    new TimeWindow(1, 2),
                    new TimeWindow(2, 3),
                    new TimeWindow(4, 5),
                    new TimeWindow(5, 6)),
            callback);

    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(0, 1), new TimeWindow(1, 2), new TimeWindow(2, 3))),
            eq(new TimeWindow(0, 3)));
    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(4, 5), new TimeWindow(5, 6))),
            eq(new TimeWindow(4, 6)));
    verify(callback, times(2)).merge(anyCollection(), Matchers.anyObject());
}
Example #6
Source File: NFAITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testSkipTillAnyZeroOrMore() throws Exception {
    List<List<Event>> resultingPatterns = testZeroOrMore(Quantifier.ConsumingStrategy.SKIP_TILL_ANY);

    compareMaps(resultingPatterns, Lists.<List<Event>>newArrayList(
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.middleEvent3, ConsecutiveData.middleEvent4, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.middleEvent4, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent3, ConsecutiveData.middleEvent4, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent4, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.middleEvent3, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent3, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.end)
    ));
}
Example #7
Source File: NFAITCase.java From Flink-CEPplus with Apache License 2.0
private void testStartWithOneOrZeroOrMoreStrict(Pattern<Event, ?> pattern) throws Exception {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();
    inputEvents.add(new StreamRecord<>(ConsecutiveData.startEvent, 1));
    inputEvents.add(new StreamRecord<>(ConsecutiveData.middleEvent1, 3));
    inputEvents.add(new StreamRecord<>(ConsecutiveData.startEvent, 4));
    inputEvents.add(new StreamRecord<>(ConsecutiveData.middleEvent2, 5));
    inputEvents.add(new StreamRecord<>(ConsecutiveData.middleEvent3, 6));

    NFA<Event> nfa = compile(pattern, false);

    List<List<Event>> resultingPatterns = feedNFA(inputEvents, nfa);

    compareMaps(resultingPatterns, Lists.<List<Event>>newArrayList(
            Lists.newArrayList(ConsecutiveData.middleEvent1),
            Lists.newArrayList(ConsecutiveData.middleEvent2, ConsecutiveData.middleEvent3),
            Lists.newArrayList(ConsecutiveData.middleEvent2),
            Lists.newArrayList(ConsecutiveData.middleEvent3)
    ));
}
Example #8
Source File: EventTimeSessionWindowsTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testMergeCoveringWindow() {
    MergingWindowAssigner.MergeCallback callback = mock(MergingWindowAssigner.MergeCallback.class);

    EventTimeSessionWindows assigner = EventTimeSessionWindows.withGap(Time.milliseconds(5000));

    assigner.mergeWindows(
            Lists.newArrayList(
                    new TimeWindow(1, 1),
                    new TimeWindow(0, 2),
                    new TimeWindow(4, 7),
                    new TimeWindow(5, 6)),
            callback);

    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(1, 1), new TimeWindow(0, 2))),
            eq(new TimeWindow(0, 2)));
    verify(callback, times(1)).merge(
            (Collection<TimeWindow>) argThat(containsInAnyOrder(new TimeWindow(5, 6), new TimeWindow(4, 7))),
            eq(new TimeWindow(4, 7)));
    verify(callback, times(2)).merge(anyCollection(), Matchers.anyObject());
}
Example #9
Source File: FlinkKafkaProducer011.java From Flink-CEPplus with Apache License 2.0
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
    if (semantic != Semantic.NONE && !((StreamingRuntimeContext) this.getRuntimeContext()).isCheckpointingEnabled()) {
        LOG.warn("Using {} semantic, but checkpointing is not enabled. Switching to {} semantic.", semantic, Semantic.NONE);
        semantic = Semantic.NONE;
    }

    nextTransactionalIdHintState = context.getOperatorStateStore().getUnionListState(
            NEXT_TRANSACTIONAL_ID_HINT_DESCRIPTOR);
    transactionalIdsGenerator = new TransactionalIdsGenerator(
            getRuntimeContext().getTaskName() + "-" + ((StreamingRuntimeContext) getRuntimeContext()).getOperatorUniqueID(),
            getRuntimeContext().getIndexOfThisSubtask(),
            getRuntimeContext().getNumberOfParallelSubtasks(),
            kafkaProducersPoolSize,
            SAFE_SCALE_DOWN_FACTOR);

    if (semantic != Semantic.EXACTLY_ONCE) {
        nextTransactionalIdHint = null;
    } else {
        ArrayList<NextTransactionalIdHint> transactionalIdHints = Lists.newArrayList(nextTransactionalIdHintState.get());
        if (transactionalIdHints.size() > 1) {
            throw new IllegalStateException(
                    "There should be at most one next transactional id hint written by the first subtask");
        } else if (transactionalIdHints.size() == 0) {
            nextTransactionalIdHint = new NextTransactionalIdHint(0, 0);

            // this means that this is either:
            // (1) the first execution of this application
            // (2) previous execution has failed before first checkpoint completed
            //
            // in case of (2) we have to abort all previous transactions
            abortTransactions(transactionalIdsGenerator.generateIdsToAbort());
        } else {
            nextTransactionalIdHint = transactionalIdHints.get(0);
        }
    }

    super.initializeState(context);
}
Example #10
Source File: DynamicEventTimeSessionWindowsTest.java From flink with Apache License 2.0
@Test
public void testMergeSingleWindow() {
    MergingWindowAssigner.MergeCallback callback = mock(MergingWindowAssigner.MergeCallback.class);
    SessionWindowTimeGapExtractor extractor = mock(SessionWindowTimeGapExtractor.class);
    when(extractor.extract(any())).thenReturn(5000L);

    DynamicEventTimeSessionWindows assigner = DynamicEventTimeSessionWindows.withDynamicGap(extractor);

    assigner.mergeWindows(Lists.newArrayList(new TimeWindow(0, 1)), callback);

    verify(callback, never()).merge(anyCollection(), Matchers.anyObject());
}
Example #11
Source File: NFAITCase.java From flink with Apache License 2.0
@Test
public void testStrictOneOrMore() throws Exception {
    List<List<Event>> resultingPatterns = testOneOrMore(Quantifier.ConsumingStrategy.STRICT);

    comparePatterns(resultingPatterns, Lists.<List<Event>>newArrayList(
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.middleEvent3, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.end)
    ));
}
Example #12
Source File: ProcessingTimeTriggerTest.java From flink with Apache License 2.0
@Test
public void testMergingWindows() throws Exception {
    TriggerTestHarness<Object, TimeWindow> testHarness =
            new TriggerTestHarness<>(ProcessingTimeTrigger.create(), new TimeWindow.Serializer());

    assertTrue(ProcessingTimeTrigger.create().canMerge());

    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));

    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numEventTimeTimers());
    assertEquals(2, testHarness.numProcessingTimeTimers());
    assertEquals(1, testHarness.numProcessingTimeTimers(new TimeWindow(0, 2)));
    assertEquals(1, testHarness.numProcessingTimeTimers(new TimeWindow(2, 4)));

    testHarness.mergeWindows(new TimeWindow(0, 4), Lists.newArrayList(new TimeWindow(0, 2), new TimeWindow(2, 4)));

    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numEventTimeTimers());
    assertEquals(1, testHarness.numProcessingTimeTimers());
    assertEquals(0, testHarness.numProcessingTimeTimers(new TimeWindow(0, 2)));
    assertEquals(0, testHarness.numProcessingTimeTimers(new TimeWindow(2, 4)));
    assertEquals(1, testHarness.numProcessingTimeTimers(new TimeWindow(0, 4)));

    assertEquals(TriggerResult.FIRE, testHarness.advanceProcessingTime(4, new TimeWindow(0, 4)));

    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(0, testHarness.numEventTimeTimers());
}
Example #13
Source File: AfterMatchSkipITCase.java From flink with Apache License 2.0
@Test
public void testSkipToNextWithFollowedByAny() throws Exception {
    List<List<Event>> resultingPatterns = TwoVariablesFollowedByAny.compute(AfterMatchSkipStrategy.skipToNext());

    compareMaps(resultingPatterns, Lists.newArrayList(
            Lists.newArrayList(TwoVariablesFollowedByAny.a1, TwoVariablesFollowedByAny.b1),
            Lists.newArrayList(TwoVariablesFollowedByAny.a2, TwoVariablesFollowedByAny.b2)
    ));
}
Example #14
Source File: NFAITCase.java From flink with Apache License 2.0
@Test
public void testNoConditionLoopingNFA() throws Exception {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();

    Event a = new Event(40, "a", 1.0);
    Event b = new Event(41, "b", 2.0);
    Event c = new Event(42, "c", 3.0);
    Event d = new Event(43, "d", 4.0);
    Event e = new Event(44, "e", 5.0);

    inputEvents.add(new StreamRecord<>(a, 1));
    inputEvents.add(new StreamRecord<>(b, 2));
    inputEvents.add(new StreamRecord<>(c, 3));
    inputEvents.add(new StreamRecord<>(d, 4));
    inputEvents.add(new StreamRecord<>(e, 5));

    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").followedBy("end").oneOrMore();

    NFA<Event> nfa = compile(pattern, false);

    List<List<Event>> resultingPatterns = feedNFA(inputEvents, nfa);

    comparePatterns(resultingPatterns, Lists.<List<Event>>newArrayList(
            Lists.newArrayList(a, b, c, d, e),
            Lists.newArrayList(a, b, c, d),
            Lists.newArrayList(a, b, c),
            Lists.newArrayList(a, b),
            Lists.newArrayList(b, c, d, e),
            Lists.newArrayList(b, c, d),
            Lists.newArrayList(b, c),
            Lists.newArrayList(c, d, e),
            Lists.newArrayList(c, d),
            Lists.newArrayList(d, e)
    ));
}
Example #15
Source File: UnsignedTypeConversionITCase.java From flink with Apache License 2.0
@Test
public void testUnsignedType() throws Exception {
    // write data to db
    TableEnvUtil.execInsertSqlAndWaitResult(tEnv,
            "insert into jdbc_sink select" +
            " tiny_c," +
            " tiny_un_c," +
            " small_c," +
            " small_un_c ," +
            " int_c," +
            " int_un_c," +
            " big_c ," +
            " big_un_c from data");

    // read data from db using jdbc connection and compare
    PreparedStatement query = connection.prepareStatement(String.format(
            "select tiny_c, tiny_un_c, small_c, small_un_c," +
            " int_c, int_un_c, big_c, big_un_c from %s", TABLE_NAME));
    ResultSet resultSet = query.executeQuery();
    while (resultSet.next()) {
        assertEquals(Integer.valueOf(127), resultSet.getObject("tiny_c"));
        assertEquals(Integer.valueOf(255), resultSet.getObject("tiny_un_c"));
        assertEquals(Integer.valueOf(32767), resultSet.getObject("small_c"));
        assertEquals(Integer.valueOf(65535), resultSet.getObject("small_un_c"));
        assertEquals(Integer.valueOf(2147483647), resultSet.getObject("int_c"));
        assertEquals(Long.valueOf(4294967295L), resultSet.getObject("int_un_c"));
        assertEquals(Long.valueOf(9223372036854775807L), resultSet.getObject("big_c"));
        assertEquals(new BigInteger("18446744073709551615"), resultSet.getObject("big_un_c"));
    }

    // read data from db using flink and compare
    Iterator<Row> collected = tEnv.executeSql(
            "select tiny_c, tiny_un_c, small_c, small_un_c," +
            " int_c, int_un_c, big_c, big_un_c from jdbc_source")
            .collect();
    List<String> result = Lists.newArrayList(collected).stream()
            .map(Row::toString)
            .sorted()
            .collect(Collectors.toList());
    List<String> expected = Collections.singletonList(
            "127,255,32767,65535,2147483647,4294967295,9223372036854775807,18446744073709551615");
    assertEquals(expected, result);
}
Example #16
Source File: SideOutputTransformation.java From Flink-CEPplus with Apache License 2.0
@Override
public Collection<StreamTransformation<?>> getTransitivePredecessors() {
    List<StreamTransformation<?>> result = Lists.newArrayList();
    result.add(this);
    result.addAll(input.getTransitivePredecessors());
    return result;
}
Example #17
Source File: ScheduleOrUpdateConsumersTest.java From flink with Apache License 2.0
@Override
public void invoke() throws Exception {
    List<RecordWriter<IntValue>> writers = Lists.newArrayListWithCapacity(2);

    // The order of intermediate result creation in the job graph specifies which produced
    // result partition is pipelined/blocking.
    final RecordWriter<IntValue> pipelinedWriter =
            new RecordWriterBuilder<IntValue>().build(getEnvironment().getWriter(0));
    final RecordWriter<IntValue> blockingWriter =
            new RecordWriterBuilder<IntValue>().build(getEnvironment().getWriter(1));
    writers.add(pipelinedWriter);
    writers.add(blockingWriter);

    final int numberOfTimesToSend = getTaskConfiguration().getInteger(CONFIG_KEY, 0);

    final IntValue subtaskIndex = new IntValue(
            getEnvironment().getTaskInfo().getIndexOfThisSubtask());

    // Produce the first intermediate result and then the second in a serial fashion.
    for (RecordWriter<IntValue> writer : writers) {
        try {
            for (int i = 0; i < numberOfTimesToSend; i++) {
                writer.emit(subtaskIndex);
            }
            writer.flushAll();
        } finally {
            writer.clearBuffers();
        }
    }
}
Example #18
Source File: FlinkKafkaProducer011.java From flink with Apache License 2.0
private void migrateNextTransactionalIdHindState(FunctionInitializationContext context) throws Exception {
    ListState<NextTransactionalIdHint> oldNextTransactionalIdHintState =
            context.getOperatorStateStore().getUnionListState(NEXT_TRANSACTIONAL_ID_HINT_DESCRIPTOR);
    nextTransactionalIdHintState =
            context.getOperatorStateStore().getUnionListState(NEXT_TRANSACTIONAL_ID_HINT_DESCRIPTOR_V2);

    ArrayList<NextTransactionalIdHint> oldTransactionalIdHints = Lists.newArrayList(oldNextTransactionalIdHintState.get());
    if (!oldTransactionalIdHints.isEmpty()) {
        nextTransactionalIdHintState.addAll(oldTransactionalIdHints);
        // clear old state
        oldNextTransactionalIdHintState.clear();
    }
}
Example #19
Source File: UnionTransformation.java From Flink-CEPplus with Apache License 2.0
@Override
public Collection<StreamTransformation<?>> getTransitivePredecessors() {
    List<StreamTransformation<?>> result = Lists.newArrayList();
    result.add(this);
    for (StreamTransformation<T> input : inputs) {
        result.addAll(input.getTransitivePredecessors());
    }
    return result;
}
Example #20
Source File: AfterMatchSkipITCase.java From flink with Apache License 2.0
@Test
public void testNoSkip() throws Exception {
    List<StreamRecord<Event>> streamEvents = new ArrayList<>();

    Event a1 = new Event(1, "a", 0.0);
    Event a2 = new Event(2, "a", 0.0);
    Event a3 = new Event(3, "a", 0.0);
    Event a4 = new Event(4, "a", 0.0);
    Event a5 = new Event(5, "a", 0.0);
    Event a6 = new Event(6, "a", 0.0);

    streamEvents.add(new StreamRecord<Event>(a1));
    streamEvents.add(new StreamRecord<Event>(a2));
    streamEvents.add(new StreamRecord<Event>(a3));
    streamEvents.add(new StreamRecord<Event>(a4));
    streamEvents.add(new StreamRecord<Event>(a5));
    streamEvents.add(new StreamRecord<Event>(a6));

    Pattern<Event, ?> pattern = Pattern.<Event>begin("start", AfterMatchSkipStrategy.noSkip())
            .where(new SimpleCondition<Event>() {
                @Override
                public boolean filter(Event value) throws Exception {
                    return value.getName().equals("a");
                }
            }).times(3);

    NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).build();

    List<List<Event>> resultingPatterns = nfaTestHarness.feedRecords(streamEvents);

    comparePatterns(resultingPatterns, Lists.newArrayList(
            Lists.newArrayList(a1, a2, a3),
            Lists.newArrayList(a2, a3, a4),
            Lists.newArrayList(a3, a4, a5),
            Lists.newArrayList(a4, a5, a6)
    ));
}
Example #21
Source File: TwoInputTransformation.java From Flink-CEPplus with Apache License 2.0
@Override
public Collection<StreamTransformation<?>> getTransitivePredecessors() {
    List<StreamTransformation<?>> result = Lists.newArrayList();
    result.add(this);
    result.addAll(input1.getTransitivePredecessors());
    result.addAll(input2.getTransitivePredecessors());
    return result;
}
Example #22
Source File: OneInputTransformation.java From Flink-CEPplus with Apache License 2.0
@Override
public Collection<StreamTransformation<?>> getTransitivePredecessors() {
    List<StreamTransformation<?>> result = Lists.newArrayList();
    result.add(this);
    result.addAll(input.getTransitivePredecessors());
    return result;
}
Example #23
Source File: SelectTransformation.java From Flink-CEPplus with Apache License 2.0
@Override
public Collection<StreamTransformation<?>> getTransitivePredecessors() {
    List<StreamTransformation<?>> result = Lists.newArrayList();
    result.add(this);
    result.addAll(input.getTransitivePredecessors());
    return result;
}
Example #24
Source File: NFAITCase.java From flink with Apache License 2.0
@Test
public void testAnyWithNoConditionNFA() throws Exception {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();

    Event a = new Event(40, "a", 1.0);
    Event b = new Event(41, "b", 2.0);
    Event c = new Event(42, "c", 3.0);
    Event d = new Event(43, "d", 4.0);
    Event e = new Event(44, "e", 5.0);

    inputEvents.add(new StreamRecord<>(a, 1));
    inputEvents.add(new StreamRecord<>(b, 2));
    inputEvents.add(new StreamRecord<>(c, 3));
    inputEvents.add(new StreamRecord<>(d, 4));
    inputEvents.add(new StreamRecord<>(e, 5));

    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").followedByAny("end");

    NFA<Event> nfa = compile(pattern, false);

    List<List<Event>> resultingPatterns = feedNFA(inputEvents, nfa);

    compareMaps(resultingPatterns, Lists.<List<Event>>newArrayList(
            Lists.newArrayList(a, b),
            Lists.newArrayList(a, c),
            Lists.newArrayList(a, d),
            Lists.newArrayList(a, e),
            Lists.newArrayList(b, c),
            Lists.newArrayList(b, d),
            Lists.newArrayList(b, e),
            Lists.newArrayList(c, d),
            Lists.newArrayList(c, e),
            Lists.newArrayList(d, e)
    ));
}
Example #25
Source File: AfterMatchSkipITCase.java From flink with Apache License 2.0
@Test
public void testSkipToNextWithQuantifierAtTheEnd() throws Exception {
    List<List<Event>> resultingPatterns = QuantifierAtEndOfPattern.compute(AfterMatchSkipStrategy.skipToNext());

    comparePatterns(resultingPatterns, Lists.<List<Event>>newArrayList(
            Lists.newArrayList(QuantifierAtEndOfPattern.a1, QuantifierAtEndOfPattern.b1)
    ));
}
Example #26
Source File: ContinuousEventTimeTriggerTest.java From flink with Apache License 2.0
@Test
public void testMergingWindows() throws Exception {
    TriggerTestHarness<Object, TimeWindow> testHarness =
            new TriggerTestHarness<>(ContinuousEventTimeTrigger.<TimeWindow>of(Time.hours(1)), new TimeWindow.Serializer());

    assertTrue(ContinuousEventTimeTrigger.<TimeWindow>of(Time.hours(1)).canMerge());

    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));

    assertEquals(2, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(4, testHarness.numEventTimeTimers());
    assertEquals(2, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    assertEquals(2, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));

    testHarness.mergeWindows(new TimeWindow(0, 4), Lists.newArrayList(new TimeWindow(0, 2), new TimeWindow(2, 4)));

    assertEquals(1, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(5, testHarness.numEventTimeTimers()); // on merging, timers are not cleaned up
    assertEquals(2, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    assertEquals(2, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));
    assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 4)));

    Collection<Tuple2<TimeWindow, TriggerResult>> triggerResults = testHarness.advanceWatermark(4);
    boolean sawFiring = false;
    for (Tuple2<TimeWindow, TriggerResult> r : triggerResults) {
        if (r.f0.equals(new TimeWindow(0, 4))) {
            sawFiring = true;
            assertTrue(r.f1.equals(TriggerResult.FIRE));
        }
    }
    assertTrue(sawFiring);

    assertEquals(1, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(1, testHarness.numEventTimeTimers());
}
Example #27
Source File: ScheduleOrUpdateConsumersTest.java From Flink-CEPplus with Apache License 2.0
@Override
public void invoke() throws Exception {
    List<RecordWriter<IntValue>> writers = Lists.newArrayListWithCapacity(2);

    // The order of intermediate result creation in the job graph specifies which produced
    // result partition is pipelined/blocking.
    final RecordWriter<IntValue> pipelinedWriter = new RecordWriter<>(getEnvironment().getWriter(0));
    final RecordWriter<IntValue> blockingWriter = new RecordWriter<>(getEnvironment().getWriter(1));
    writers.add(pipelinedWriter);
    writers.add(blockingWriter);

    final int numberOfTimesToSend = getTaskConfiguration().getInteger(CONFIG_KEY, 0);

    final IntValue subtaskIndex = new IntValue(
            getEnvironment().getTaskInfo().getIndexOfThisSubtask());

    // Produce the first intermediate result and then the second in a serial fashion.
    for (RecordWriter<IntValue> writer : writers) {
        try {
            for (int i = 0; i < numberOfTimesToSend; i++) {
                writer.emit(subtaskIndex);
            }
            writer.flushAll();
        } finally {
            writer.clearBuffers();
        }
    }
}
Example #28
Source File: AfterMatchSkipITCase.java From flink with Apache License 2.0
@Test
public void testSkipToFirstNonExistentPositionWithoutException() throws Exception {
    List<List<Event>> resultingPatterns = MissedSkipTo.compute(AfterMatchSkipStrategy.skipToFirst("b"));

    compareMaps(resultingPatterns, Collections.singletonList(
            Lists.newArrayList(MissedSkipTo.a, MissedSkipTo.c)
    ));
}
Example #29
Source File: EventTimeTriggerTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testMergingWindows() throws Exception {
    TriggerTestHarness<Object, TimeWindow> testHarness =
            new TriggerTestHarness<>(EventTimeTrigger.create(), new TimeWindow.Serializer());

    assertTrue(EventTimeTrigger.create().canMerge());

    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));

    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(2, testHarness.numEventTimeTimers());
    assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));

    testHarness.mergeWindows(new TimeWindow(0, 4), Lists.newArrayList(new TimeWindow(0, 2), new TimeWindow(2, 4)));

    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(1, testHarness.numEventTimeTimers());
    assertEquals(0, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    assertEquals(0, testHarness.numEventTimeTimers(new TimeWindow(2, 4)));
    assertEquals(1, testHarness.numEventTimeTimers(new TimeWindow(0, 4)));

    assertEquals(TriggerResult.FIRE, testHarness.advanceWatermark(4, new TimeWindow(0, 4)));

    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numProcessingTimeTimers());
    assertEquals(0, testHarness.numEventTimeTimers());
}
Example #30
Source File: NFAITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testSkipTillAnyOneOrMore() throws Exception {
    List<List<Event>> resultingPatterns = testOneOrMore(Quantifier.ConsumingStrategy.SKIP_TILL_ANY);

    compareMaps(resultingPatterns, Lists.<List<Event>>newArrayList(
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.middleEvent3, ConsecutiveData.middleEvent4, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent3, ConsecutiveData.middleEvent4, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.middleEvent3, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.middleEvent4, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent2, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent3, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.middleEvent4, ConsecutiveData.end),
            Lists.newArrayList(ConsecutiveData.startEvent, ConsecutiveData.middleEvent1, ConsecutiveData.end)
    ));
}