org.apache.flink.api.java.functions.KeySelector Java Examples
The following examples show how to use org.apache.flink.api.java.functions.KeySelector. Each snippet is taken from an open source project; the source file, originating project, and license are noted above each example.
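Before the project examples, it may help to see the shape of the interface itself: a KeySelector maps each record to the key that grouping, partitioning, and join operations use. The sketch below is a minimal, self-contained illustration and is not taken from any of the projects listed here; the Event POJO and its field names are assumptions made only for this sketch.

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class KeySelectorSketch {

    // Hypothetical event type, used only for this illustration.
    public static class Event {
        public String userId;
        public long amount;

        public Event() {}

        public Event(String userId, long amount) {
            this.userId = userId;
            this.amount = amount;
        }
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStream<Event> events = env.fromElements(
            new Event("alice", 10),
            new Event("bob", 20));

        // The KeySelector extracts the grouping key from each record;
        // keyBy uses it to partition the stream by user id.
        KeyedStream<Event, String> byUser = events.keyBy(new KeySelector<Event, String>() {
            @Override
            public String getKey(Event value) throws Exception {
                return value.userId;
            }
        });

        byUser.print();
        env.execute("KeySelector sketch");
    }
}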
Example #1
Source File: KeySelectorUtil.java From flink with Apache License 2.0
public static <X, K> KeySelector<X, K> getSelectorForOneKey(
        Keys<X> keys, Partitioner<K> partitioner, TypeInformation<X> typeInfo, ExecutionConfig executionConfig) {
    if (!(typeInfo instanceof CompositeType)) {
        throw new InvalidTypesException(
            "This key operation requires a composite type such as Tuples, POJOs, case classes, etc");
    }
    if (partitioner != null) {
        keys.validateCustomPartitioner(partitioner, null);
    }

    CompositeType<X> compositeType = (CompositeType<X>) typeInfo;
    int[] logicalKeyPositions = keys.computeLogicalKeyPositions();
    if (logicalKeyPositions.length != 1) {
        throw new IllegalArgumentException("There must be exactly 1 key specified");
    }

    TypeComparator<X> comparator = compositeType.createComparator(
        logicalKeyPositions, new boolean[] { true }, 0, executionConfig);
    return new OneKeySelector<>(comparator);
}
Example #2
Source File: JoinOperatorTest.java From flink with Apache License 2.0
@Test(expected = InvalidProgramException.class)
public void testJoinKeyMixing3() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple5<Integer, Long, String, Long, Integer>> ds1 = env.fromCollection(emptyTupleData, tupleTypeInfo);
    DataSet<CustomType> ds2 = env.fromCollection(customTypeData);

    // should not work, incompatible types
    ds1.join(ds2)
        .where(2)
        .equalTo(
            new KeySelector<CustomType, Long>() {
                @Override
                public Long getKey(CustomType value) {
                    return value.myLong;
                }
            });
}
Example #3
Source File: Utils.java From incubator-samoa with Apache License 2.0
public static DataStream subscribe(DataStream<SamoaType> stream, PartitioningScheme partitioning) {
    switch (partitioning) {
        case BROADCAST:
            return stream.broadcast();
        case GROUP_BY_KEY:
            return stream.keyBy(new KeySelector<SamoaType, String>() {
                @Override
                public String getKey(SamoaType samoaType) throws Exception {
                    return samoaType.f0;
                }
            });
        case SHUFFLE:
        default:
            return stream.shuffle();
    }
}
Example #4
Source File: SortPartitionTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testSortPartitionWithKeySelector4() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple4<Integer, Long, CustomType, Long[]>> tupleDs = env.fromCollection(tupleWithCustomData, tupleWithCustomInfo);

    // should work
    try {
        tupleDs.sortPartition(new KeySelector<Tuple4<Integer, Long, CustomType, Long[]>, Tuple2<Integer, Long>>() {
            @Override
            public Tuple2<Integer, Long> getKey(Tuple4<Integer, Long, CustomType, Long[]> value) throws Exception {
                return new Tuple2<>(value.f0, value.f1);
            }
        }, Order.ASCENDING);
    } catch (Exception e) {
        Assert.fail();
    }
}
Example #5
Source File: JoinOperatorTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testJoinKeyMixedKeySelectorTurned() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds1 = env.fromCollection(customTypeData);
    DataSet<CustomType> ds2 = env.fromCollection(customTypeData);
    try {
        ds1.join(ds2).where(new KeySelector<CustomType, Integer>() {
            @Override
            public Integer getKey(CustomType value) throws Exception {
                return value.myInt;
            }
        }).equalTo("myInt");
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail();
    }
}
Example #6
Source File: PartitionOperatorTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testRangePartitionCustomPartitionerByKeySelector() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final DataSet<CustomPojo> ds = getPojoDataSet(env);
    ds.partitionCustom(new Partitioner<Integer>() {
        @Override
        public int partition(Integer key, int numPartitions) {
            return 1;
        }
    }, new KeySelector<CustomPojo, Integer>() {
        @Override
        public Integer getKey(CustomPojo value) throws Exception {
            return value.getNumber();
        }
    });
}
Example #7
Source File: CoGroupOperatorTest.java From flink with Apache License 2.0
@Test
public void testCoGroupKeyMixing2() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple5<Integer, Long, String, Long, Integer>> ds1 = env.fromCollection(emptyTupleData, tupleTypeInfo);
    DataSet<CustomType> ds2 = env.fromCollection(customTypeData);

    // should work
    try {
        ds1.coGroup(ds2)
            .where(3)
            .equalTo(
                new KeySelector<CustomType, Long>() {
                    @Override
                    public Long getKey(CustomType value) {
                        return value.myLong;
                    }
                });
    } catch (Exception e) {
        Assert.fail();
    }
}
Example #8
Source File: GroupingTest.java From flink with Apache License 2.0
@Test
@SuppressWarnings("serial")
public void testGroupByKeySelector3() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    this.customTypeData.add(new CustomType());

    try {
        DataSet<CustomType> customDs = env.fromCollection(customTypeData);
        // should not work
        customDs.groupBy(
            new KeySelector<GroupingTest.CustomType, CustomType>() {
                @Override
                public CustomType getKey(CustomType value) {
                    return value;
                }
            });
    } catch (Exception e) {
        Assert.fail();
    }
}
Example #9
Source File: SortPartitionTest.java From flink with Apache License 2.0
@Test
public void testSortPartitionWithKeySelector4() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple4<Integer, Long, CustomType, Long[]>> tupleDs = env.fromCollection(tupleWithCustomData, tupleWithCustomInfo);

    // should work
    try {
        tupleDs.sortPartition(new KeySelector<Tuple4<Integer, Long, CustomType, Long[]>, Tuple2<Integer, Long>>() {
            @Override
            public Tuple2<Integer, Long> getKey(Tuple4<Integer, Long, CustomType, Long[]> value) throws Exception {
                return new Tuple2<>(value.f0, value.f1);
            }
        }, Order.ASCENDING);
    } catch (Exception e) {
        Assert.fail();
    }
}
Example #10
Source File: JoinOperatorTest.java From Flink-CEPplus with Apache License 2.0
@Test(expected = InvalidProgramException.class)
public void testJoinKeyMixing4() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple5<Integer, Long, String, Long, Integer>> ds1 = env.fromCollection(emptyTupleData, tupleTypeInfo);
    DataSet<CustomType> ds2 = env.fromCollection(customTypeData);

    // should not work, more than one key field position
    ds1.join(ds2)
        .where(1, 3)
        .equalTo(
            new KeySelector<CustomType, Long>() {
                @Override
                public Long getKey(CustomType value) {
                    return value.myLong;
                }
            });
}
Example #11
Source File: SortPartitionITCase.java From flink with Apache License 2.0
@Test
public void testSortPartitionWithKeySelector1() throws Exception {
    /*
     * Test sort partition on an extracted key
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    List<Tuple1<Boolean>> result = ds
        .map(new IdMapper<Tuple3<Integer, Long, String>>()).setParallelism(4) // parallelize input
        .sortPartition(new KeySelector<Tuple3<Integer, Long, String>, Long>() {
            @Override
            public Long getKey(Tuple3<Integer, Long, String> value) throws Exception {
                return value.f1;
            }
        }, Order.ASCENDING)
        .mapPartition(new OrderCheckMapper<>(new Tuple3AscendingChecker()))
        .distinct().collect();

    String expected = "(true)\n";
    compareResultAsText(result, expected);
}
Example #12
Source File: PartitionITCase.java From flink with Apache License 2.0
@Test
public void testRangePartitionerOnSequenceData() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSource<Long> dataSource = env.generateSequence(0, 10000);
    KeySelector<Long, Long> keyExtractor = new ObjectSelfKeySelector();

    MapPartitionFunction<Long, Tuple2<Long, Long>> minMaxSelector = new MinMaxSelector<>(new LongComparator(true));

    Comparator<Tuple2<Long, Long>> tuple2Comparator = new Tuple2Comparator(new LongComparator(true));

    List<Tuple2<Long, Long>> collected = dataSource.partitionByRange(keyExtractor).mapPartition(minMaxSelector).collect();
    Collections.sort(collected, tuple2Comparator);

    long previousMax = -1;
    for (Tuple2<Long, Long> tuple2 : collected) {
        if (previousMax == -1) {
            previousMax = tuple2.f1;
        } else {
            long currentMin = tuple2.f0;
            assertTrue(tuple2.f0 < tuple2.f1);
            assertEquals(previousMax + 1, currentMin);
            previousMax = tuple2.f1;
        }
    }
}
Example #13
Source File: WeatherDataComplexEventProcessingExample.java From FlinkExperiments with MIT License
private static <TWarningType extends IWarning> DataStream<TWarningType> toWarningStream(
        DataStream<LocalWeatherData> localWeatherDataDataStream,
        IWarningPattern<LocalWeatherData, TWarningType> warningPattern) {

    PatternStream<LocalWeatherData> tempPatternStream = CEP.pattern(
        localWeatherDataDataStream.keyBy(new KeySelector<LocalWeatherData, String>() {
            @Override
            public String getKey(LocalWeatherData localWeatherData) throws Exception {
                return localWeatherData.getStation().getWban();
            }
        }),
        warningPattern.getEventPattern());

    DataStream<TWarningType> warnings = tempPatternStream.select(new PatternSelectFunction<LocalWeatherData, TWarningType>() {
        @Override
        public TWarningType select(Map<String, List<LocalWeatherData>> map) throws Exception {
            return warningPattern.create(map);
        }
    }, new GenericTypeInfo<TWarningType>(warningPattern.getWarningTargetType()));

    return warnings;
}
Example #14
Source File: StateDescriptorPassingTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testProcessWindowState() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);

    DataStream<File> src = env.fromElements(new File("/"));

    SingleOutputStreamOperator<?> result = src
        .keyBy(new KeySelector<File, String>() {
            @Override
            public String getKey(File value) {
                return null;
            }
        })
        .timeWindow(Time.milliseconds(1000))
        .process(new ProcessWindowFunction<File, String, String, TimeWindow>() {
            @Override
            public void process(String s, Context ctx, Iterable<File> input, Collector<String> out) {}
        });

    validateListStateDescriptorConfigured(result);
}
Example #15
Source File: KeyedTwoInputStreamOperatorTestHarness.java From Flink-CEPplus with Apache License 2.0
public KeyedTwoInputStreamOperatorTestHarness(
        TwoInputStreamOperator<IN1, IN2, OUT> operator,
        KeySelector<IN1, K> keySelector1,
        KeySelector<IN2, K> keySelector2,
        TypeInformation<K> keyType,
        int maxParallelism,
        int numSubtasks,
        int subtaskIndex) throws Exception {
    super(operator, maxParallelism, numSubtasks, subtaskIndex);

    ClosureCleaner.clean(keySelector1, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    ClosureCleaner.clean(keySelector2, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    config.setStatePartitioner(0, keySelector1);
    config.setStatePartitioner(1, keySelector2);
    config.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #16
Source File: WindowOperator.java From flink with Apache License 2.0
/**
 * Creates a new {@code WindowOperator} based on the given policies and user functions.
 */
public WindowOperator(
        WindowAssigner<? super IN, W> windowAssigner,
        TypeSerializer<W> windowSerializer,
        KeySelector<IN, K> keySelector,
        TypeSerializer<K> keySerializer,
        StateDescriptor<? extends AppendingState<IN, ACC>, ?> windowStateDescriptor,
        InternalWindowFunction<ACC, OUT, K, W> windowFunction,
        Trigger<? super IN, ? super W> trigger,
        long allowedLateness,
        OutputTag<IN> lateDataOutputTag) {

    super(windowFunction);

    checkArgument(!(windowAssigner instanceof BaseAlignedWindowAssigner),
        "The " + windowAssigner.getClass().getSimpleName() + " cannot be used with a WindowOperator. " +
            "This assigner is only used with the AccumulatingProcessingTimeWindowOperator and " +
            "the AggregatingProcessingTimeWindowOperator");

    checkArgument(allowedLateness >= 0);

    checkArgument(windowStateDescriptor == null || windowStateDescriptor.isSerializerInitialized(),
        "window state serializer is not properly initialized");

    this.windowAssigner = checkNotNull(windowAssigner);
    this.windowSerializer = checkNotNull(windowSerializer);
    this.keySelector = checkNotNull(keySelector);
    this.keySerializer = checkNotNull(keySerializer);
    this.windowStateDescriptor = windowStateDescriptor;
    this.trigger = checkNotNull(trigger);
    this.allowedLateness = allowedLateness;
    this.lateDataOutputTag = lateDataOutputTag;

    setChainingStrategy(ChainingStrategy.ALWAYS);
}
Example #17
Source File: PartitionOperatorTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testRangePartitionBySelectorComplexKeyWithOrders() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final DataSet<NestedPojo> ds = getNestedPojoDataSet(env);
    ds.partitionByRange(new KeySelector<NestedPojo, CustomPojo>() {
        @Override
        public CustomPojo getKey(NestedPojo value) throws Exception {
            return value.getNested();
        }
    }).withOrders(Order.ASCENDING);
}
Example #18
Source File: JoinOperatorTest.java From Flink-CEPplus with Apache License 2.0
@Test(expected = InvalidProgramException.class)
public void testJoinKeyMixedWrong() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds1 = env.fromCollection(customTypeData);
    DataSet<CustomType> ds2 = env.fromCollection(customTypeData);

    // wrongly mix String and Integer
    ds1.join(ds2).where("myString").equalTo(new KeySelector<CustomType, Integer>() {
        @Override
        public Integer getKey(CustomType value) throws Exception {
            return value.myInt;
        }
    });
}
Example #19
Source File: BootstrapTransformationTest.java From flink with Apache License 2.0
@Test
public void testStreamConfig() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSource<String> input = env.fromElements("");

    BootstrapTransformation<String> transformation = OperatorTransformation
        .bootstrapWith(input)
        .keyBy(new CustomKeySelector())
        .transform(new ExampleKeyedStateBootstrapFunction());

    StreamConfig config = transformation.getConfig(OperatorIDGenerator.fromUid("uid"), new MemoryStateBackend(), null);
    KeySelector selector = config.getStatePartitioner(0, Thread.currentThread().getContextClassLoader());
    Assert.assertEquals("Incorrect key selector forwarded to stream operator", CustomKeySelector.class, selector.getClass());
}
Example #20
Source File: StreamGroupedReduceTest.java From flink with Apache License 2.0
@Test
public void testGroupedReduce() throws Exception {
    KeySelector<Integer, Integer> keySelector = new IntegerKeySelector();

    StreamGroupedReduce<Integer> operator = new StreamGroupedReduce<>(new MyReducer(), IntSerializer.INSTANCE);

    OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
        new KeyedOneInputStreamOperatorTestHarness<>(operator, keySelector, BasicTypeInfo.INT_TYPE_INFO);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    testHarness.open();

    testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
    testHarness.processElement(new StreamRecord<>(1, initialTime + 2));
    testHarness.processWatermark(new Watermark(initialTime + 2));
    testHarness.processElement(new StreamRecord<>(2, initialTime + 3));
    testHarness.processElement(new StreamRecord<>(2, initialTime + 4));
    testHarness.processElement(new StreamRecord<>(3, initialTime + 5));

    expectedOutput.add(new StreamRecord<>(1, initialTime + 1));
    expectedOutput.add(new StreamRecord<>(2, initialTime + 2));
    expectedOutput.add(new Watermark(initialTime + 2));
    expectedOutput.add(new StreamRecord<>(2, initialTime + 3));
    expectedOutput.add(new StreamRecord<>(4, initialTime + 4));
    expectedOutput.add(new StreamRecord<>(3, initialTime + 5));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Example #21
Source File: CepOperatorTestUtilities.java From Flink-CEPplus with Apache License 2.0
public static <T> OneInputStreamOperatorTestHarness<Event, T> getCepTestHarness(
        CepOperator<Event, Integer, T> cepOperator) throws Exception {
    KeySelector<Event, Integer> keySelector = new TestKeySelector();

    return new KeyedOneInputStreamOperatorTestHarness<>(
        cepOperator,
        keySelector,
        BasicTypeInfo.INT_TYPE_INFO);
}
Example #22
Source File: WindowTranslationTest.java From flink with Apache License 2.0
@Test
@SuppressWarnings("rawtypes")
public void testMergingWindowsWithEvictor() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Integer> source = env.fromElements(1, 2);

    DataStream<String> window1 = source
        .keyBy(new KeySelector<Integer, String>() {
            @Override
            public String getKey(Integer value) throws Exception {
                return value.toString();
            }
        })
        .window(EventTimeSessionWindows.withGap(Time.seconds(5)))
        .evictor(CountEvictor.of(5))
        .process(new TestProcessWindowFunction());

    final OneInputTransformation<Integer, String> transform = (OneInputTransformation<Integer, String>) window1.getTransformation();
    final OneInputStreamOperator<Integer, String> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Integer, ?, ?, ?> winOperator = (WindowOperator<String, Integer, ?, ?, ?>) operator;

    Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof EventTimeSessionWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, 1);
}
Example #23
Source File: RocksIncrementalCheckpointRescalingTest.java From Flink-CEPplus with Apache License 2.0
private KeyedOneInputStreamOperatorTestHarness<String, String, Integer> getHarnessTest(
        KeySelector<String, String> keySelector,
        int maxParallelism,
        int taskParallelism,
        int subtaskIdx) throws Exception {
    return new KeyedOneInputStreamOperatorTestHarness<>(
        new KeyedProcessOperator<>(new TestKeyedFunction()),
        keySelector,
        BasicTypeInfo.STRING_TYPE_INFO,
        maxParallelism,
        taskParallelism,
        subtaskIdx);
}
Example #24
Source File: RescalingITCase.java From flink with Apache License 2.0
private static JobGraph createJobGraphWithKeyedState(
        int parallelism,
        int maxParallelism,
        int numberKeys,
        int numberElements,
        boolean terminateAfterEmission,
        int checkpointingInterval) {

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);
    if (0 < maxParallelism) {
        env.getConfig().setMaxParallelism(maxParallelism);
    }
    env.enableCheckpointing(checkpointingInterval);
    env.setRestartStrategy(RestartStrategies.noRestart());
    env.getConfig().setUseSnapshotCompression(true);

    DataStream<Integer> input = env.addSource(new SubtaskIndexSource(
            numberKeys,
            numberElements,
            terminateAfterEmission))
        .keyBy(new KeySelector<Integer, Integer>() {
            private static final long serialVersionUID = -7952298871120320940L;

            @Override
            public Integer getKey(Integer value) throws Exception {
                return value;
            }
        });

    SubtaskIndexFlatMapper.workCompletedLatch = new CountDownLatch(numberKeys);

    DataStream<Tuple2<Integer, Integer>> result = input.flatMap(new SubtaskIndexFlatMapper(numberElements));

    result.addSink(new CollectionSink<Tuple2<Integer, Integer>>());

    return env.getStreamGraph().getJobGraph();
}
Example #25
Source File: MultipleInputITCase.java From flink with Apache License 2.0
@Test
public void testKeyedState() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    TestListResultSink<Long> resultSink = new TestListResultSink<>();

    DataStream<Long> source1 = env.fromElements(0L, 3L);
    DataStream<Long> source2 = env.fromElements(13L, 16L);
    DataStream<Long> source3 = env.fromElements(101L, 104L);

    KeyedMultipleInputTransformation<Long> transform = new KeyedMultipleInputTransformation<>(
        "My Operator",
        new KeyedSumMultipleInputOperatorFactory(),
        BasicTypeInfo.LONG_TYPE_INFO,
        1,
        BasicTypeInfo.LONG_TYPE_INFO);
    KeySelector<Long, Long> keySelector = (KeySelector<Long, Long>) value -> value % 3;

    env.addOperator(transform
        .addInput(source1.getTransformation(), keySelector)
        .addInput(source2.getTransformation(), keySelector)
        .addInput(source3.getTransformation(), keySelector));

    new MultipleConnectedStreams(env)
        .transform(transform)
        .addSink(resultSink);

    env.execute();

    List<Long> result = resultSink.getResult();
    Collections.sort(result);
    assertThat(result, contains(0L, 3L, 13L, 13L + 16L, 101L, 101L + 104L));
}
Example #26
Source File: MockContext.java From Flink-CEPplus with Apache License 2.0
public static <IN, OUT, KEY> List<OUT> createAndExecuteForKeyedStream(
        OneInputStreamOperator<IN, OUT> operator,
        List<IN> inputs,
        KeySelector<IN, KEY> keySelector,
        TypeInformation<KEY> keyType) throws Exception {

    OneInputStreamOperatorTestHarness<IN, OUT> testHarness =
        new KeyedOneInputStreamOperatorTestHarness<>(operator, keySelector, keyType);

    testHarness.setup();
    testHarness.open();

    operator.open();

    for (IN in : inputs) {
        testHarness.processElement(new StreamRecord<>(in));
    }

    testHarness.close();

    ConcurrentLinkedQueue<Object> output = testHarness.getOutput();

    List<OUT> result = new ArrayList<>();

    for (Object o : output) {
        if (o instanceof StreamRecord) {
            result.add((OUT) ((StreamRecord) o).getValue());
        }
    }

    return result;
}
Example #27
Source File: SortPartitionTest.java From Flink-CEPplus with Apache License 2.0
@Test(expected = InvalidProgramException.class)
public void testSortPartitionWithKeySelector2() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple4<Integer, Long, CustomType, Long[]>> tupleDs = env.fromCollection(tupleWithCustomData, tupleWithCustomInfo);

    // must not work
    tupleDs.sortPartition(new KeySelector<Tuple4<Integer, Long, CustomType, Long[]>, Long[]>() {
        @Override
        public Long[] getKey(Tuple4<Integer, Long, CustomType, Long[]> value) throws Exception {
            return value.f3;
        }
    }, Order.ASCENDING);
}
Example #28
Source File: OneInputStreamTaskTestHarness.java From Flink-CEPplus with Apache License 2.0
public <K> void configureForKeyedStream(
        KeySelector<IN, K> keySelector,
        TypeInformation<K> keyType) {
    ClosureCleaner.clean(keySelector, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, false);
    streamConfig.setStatePartitioner(0, keySelector);
    streamConfig.setStateKeySerializer(keyType.createSerializer(executionConfig));
}
Example #29
Source File: StreamGroupedFoldTest.java From flink with Apache License 2.0
@Test
public void testOpenClose() throws Exception {
    KeySelector<Integer, Integer> keySelector = new KeySelector<Integer, Integer>() {
        @Override
        public Integer getKey(Integer value) {
            return value;
        }
    };

    StreamGroupedFold<Integer, String, Integer> operator = new StreamGroupedFold<>(new TestOpenCloseFoldFunction(), "init");
    operator.setOutputType(BasicTypeInfo.STRING_TYPE_INFO, new ExecutionConfig());

    OneInputStreamOperatorTestHarness<Integer, String> testHarness =
        new KeyedOneInputStreamOperatorTestHarness<>(operator, keySelector, BasicTypeInfo.INT_TYPE_INFO);

    long initialTime = 0L;

    testHarness.open();

    testHarness.processElement(new StreamRecord<>(1, initialTime));
    testHarness.processElement(new StreamRecord<>(2, initialTime));

    testHarness.close();

    assertTrue("RichFunction methods where not called.", TestOpenCloseFoldFunction.closeCalled);
    assertTrue("Output contains no elements.", testHarness.getOutput().size() > 0);
}