Java Code Examples for org.apache.flink.api.java.typeutils.TypeExtractor#getForObject()
The following examples show how to use org.apache.flink.api.java.typeutils.TypeExtractor#getForObject().
Each example is taken from an open-source project; the source file, originating project, and license are noted above each example.
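Before the examples, a minimal, self-contained sketch of what the method does (the class name GetForObjectDemo is illustrative, and it assumes Flink's flink-core dependency is on the classpath): getForObject takes a concrete instance and returns a TypeInformation describing that instance's type.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class GetForObjectDemo {
    public static void main(String[] args) {
        // For a tuple instance, the extractor returns tuple type information
        // carrying the type of each field.
        TypeInformation<Tuple2<String, Integer>> info =
                TypeExtractor.getForObject(Tuple2.of("hello", 42));
        System.out.println(info); // prints the extracted tuple type
    }
}

As the examples below show, callers often cast the result to a concrete subclass such as TupleTypeInfo or WritableTypeInfo when they need type-specific operations like creating serializers and comparators.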
Example 1
Source File: WritableSerializerTest.java From flink with Apache License 2.0
@Test
public void testStringArrayWritable() {
    StringArrayWritable[] data = new StringArrayWritable[]{
            new StringArrayWritable(new String[]{}),
            new StringArrayWritable(new String[]{""}),
            new StringArrayWritable(new String[]{"a", "a"}),
            new StringArrayWritable(new String[]{"a", "b"}),
            new StringArrayWritable(new String[]{"c", "c"}),
            new StringArrayWritable(new String[]{"d", "f"}),
            new StringArrayWritable(new String[]{"d", "m"}),
            new StringArrayWritable(new String[]{"z", "x"}),
            new StringArrayWritable(new String[]{"a", "a", "a"})
    };

    WritableTypeInfo<StringArrayWritable> writableTypeInfo =
            (WritableTypeInfo<StringArrayWritable>) TypeExtractor.getForObject(data[0]);
    WritableSerializer<StringArrayWritable> writableSerializer =
            (WritableSerializer<StringArrayWritable>) writableTypeInfo.createSerializer(new ExecutionConfig());

    SerializerTestInstance<StringArrayWritable> testInstance =
            new SerializerTestInstance<StringArrayWritable>(writableSerializer, writableTypeInfo.getTypeClass(), -1, data);

    testInstance.testAll();
}
Example 2
Source File: StreamExecutionEnvironment.java From flink with Apache License 2.0
/**
 * Creates a new data stream that contains the given elements. The elements must all be of the
 * same type, for example, all of the {@link String} or {@link Integer}.
 *
 * <p>The framework will try and determine the exact type from the elements. In case of generic
 * elements, it may be necessary to manually supply the type information via
 * {@link #fromCollection(java.util.Collection, org.apache.flink.api.common.typeinfo.TypeInformation)}.
 *
 * <p>Note that this operation will result in a non-parallel data stream source, i.e. a data
 * stream source with a degree of parallelism one.
 *
 * @param data
 *        The array of elements to create the data stream from.
 * @param <OUT>
 *        The type of the returned data stream
 * @return The data stream representing the given array of elements
 */
@SafeVarargs
public final <OUT> DataStreamSource<OUT> fromElements(OUT... data) {
    if (data.length == 0) {
        throw new IllegalArgumentException("fromElements needs at least one element as argument");
    }

    TypeInformation<OUT> typeInfo;
    try {
        typeInfo = TypeExtractor.getForObject(data[0]);
    }
    catch (Exception e) {
        throw new RuntimeException("Could not create TypeInformation for type " + data[0].getClass().getName()
                + "; please specify the TypeInformation manually via "
                + "StreamExecutionEnvironment#fromElements(Collection, TypeInformation)", e);
    }
    return fromCollection(Arrays.asList(data), typeInfo);
}
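For context, a hedged usage sketch (not from the original page; the variable names are illustrative): because this overload probes the first argument with getForObject, a caller can omit explicit type information for non-generic element types.

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// TypeExtractor.getForObject("a") runs behind the scenes and yields String type information.
DataStreamSource<String> letters = env.fromElements("a", "b", "c");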
Example 3
Source File: StreamExecutionEnvironment.java From flink with Apache License 2.0
/**
 * Creates a data stream from the given non-empty collection. The type of the data stream is that of the
 * elements in the collection.
 *
 * <p>The framework will try and determine the exact type from the collection elements. In case of generic
 * elements, it may be necessary to manually supply the type information via
 * {@link #fromCollection(java.util.Collection, org.apache.flink.api.common.typeinfo.TypeInformation)}.
 *
 * <p>Note that this operation will result in a non-parallel data stream source, i.e. a data stream source with
 * parallelism one.
 *
 * @param data
 *        The collection of elements to create the data stream from.
 * @param <OUT>
 *        The generic type of the returned data stream.
 * @return
 *        The data stream representing the given collection
 */
public <OUT> DataStreamSource<OUT> fromCollection(Collection<OUT> data) {
    Preconditions.checkNotNull(data, "Collection must not be null");

    if (data.isEmpty()) {
        throw new IllegalArgumentException("Collection must not be empty");
    }

    OUT first = data.iterator().next();
    if (first == null) {
        throw new IllegalArgumentException("Collection must not contain null elements");
    }

    TypeInformation<OUT> typeInfo;
    try {
        typeInfo = TypeExtractor.getForObject(first);
    }
    catch (Exception e) {
        throw new RuntimeException("Could not create TypeInformation for type " + first.getClass()
                + "; please specify the TypeInformation manually via "
                + "StreamExecutionEnvironment#fromElements(Collection, TypeInformation)", e);
    }
    return fromCollection(data, typeInfo);
}
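Similarly, a hedged usage sketch for this overload (names are illustrative): the element type is probed from the first element of the collection, so an explicit TypeInformation is only needed for generic element types.

import java.util.Arrays;
import java.util.List;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
List<Integer> values = Arrays.asList(1, 2, 3);
// getForObject(values.iterator().next()) infers Integer type information.
DataStreamSource<Integer> numbers = env.fromCollection(values);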
Example 4
Source File: ReduceDriverTest.java From flink with Apache License 2.0
@Test
public void testReduceDriverImmutableEmpty() {
    try {
        TestTaskContext<ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
                new TestTaskContext<ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>>();

        List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
        TupleTypeInfo<Tuple2<String, Integer>> typeInfo =
                (TupleTypeInfo<Tuple2<String, Integer>>) TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<String, Integer>> input = EmptyMutableObjectIterator.get();
        context.setDriverStrategy(DriverStrategy.SORTED_REDUCE);

        TypeComparator<Tuple2<String, Integer>> comparator =
                typeInfo.createComparator(new int[]{0}, new boolean[]{true}, 0, new ExecutionConfig());
        GatheringCollector<Tuple2<String, Integer>> result =
                new GatheringCollector<Tuple2<String, Integer>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setComparator1(comparator);
        context.setCollector(result);

        ReduceDriver<Tuple2<String, Integer>> driver = new ReduceDriver<Tuple2<String, Integer>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Assert.assertEquals(0, result.getList().size());
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 5
Source File: AllGroupReduceDriverTest.java From flink with Apache License 2.0
@Test
public void testAllReduceDriverMutable() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>>();

        List<Tuple2<StringValue, IntValue>> data = DriverTestData.createReduceMutableData();
        TypeInformation<Tuple2<StringValue, IntValue>> typeInfo = TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<StringValue, IntValue>> input =
                new RegularToMutableObjectIterator<Tuple2<StringValue, IntValue>>(
                        data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));

        GatheringCollector<Tuple2<StringValue, IntValue>> result =
                new GatheringCollector<Tuple2<StringValue, IntValue>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setDriverStrategy(DriverStrategy.ALL_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setCollector(result);
        context.setUdf(new ConcatSumMutableReducer());

        AllGroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>> driver =
                new AllGroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Tuple2<StringValue, IntValue> res = result.getList().get(0);

        char[] foundString = res.f0.getValue().toCharArray();
        Arrays.sort(foundString);

        char[] expectedString = "abcddeeeffff".toCharArray();
        Arrays.sort(expectedString);

        Assert.assertArrayEquals(expectedString, foundString);
        Assert.assertEquals(78, res.f1.getValue());
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 6
Source File: LargeRecordHandler.java From Flink-CEPplus with Apache License 2.0
private TypeSerializer<Object> createSerializer(Object key, int pos) {
    if (key == null) {
        throw new NullKeyFieldException(pos);
    }
    try {
        TypeInformation<Object> info = TypeExtractor.getForObject(key);
        return info.createSerializer(executionConfig);
    }
    catch (Throwable t) {
        throw new RuntimeException("Could not create key serializer for type " + key);
    }
}
Example 7
Source File: GroupReduceDriverTest.java From flink with Apache License 2.0
@Test
public void testAllReduceDriverAccumulatingImmutable() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>>();

        List<Tuple2<StringValue, IntValue>> data = DriverTestData.createReduceMutableData();
        TupleTypeInfo<Tuple2<StringValue, IntValue>> typeInfo =
                (TupleTypeInfo<Tuple2<StringValue, IntValue>>) TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<StringValue, IntValue>> input =
                new RegularToMutableObjectIterator<Tuple2<StringValue, IntValue>>(
                        data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));
        TypeComparator<Tuple2<StringValue, IntValue>> comparator =
                typeInfo.createComparator(new int[]{0}, new boolean[]{true}, 0, new ExecutionConfig());

        GatheringCollector<Tuple2<StringValue, IntValue>> result =
                new GatheringCollector<Tuple2<StringValue, IntValue>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setComparator1(comparator);
        context.setCollector(result);
        context.setUdf(new ConcatSumMutableAccumulatingReducer());
        context.setMutableObjectMode(false);

        GroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>> driver =
                new GroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Object[] res = result.getList().toArray();
        Object[] expected = DriverTestData.createReduceMutableDataGroupedResult().toArray();

        DriverTestData.compareTupleArrays(expected, res);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 8
Source File: AllGroupReduceDriverTest.java From flink with Apache License 2.0
@Test
public void testAllReduceDriverImmutable() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>>();

        List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
        TypeInformation<Tuple2<String, Integer>> typeInfo = TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<String, Integer>> input =
                new RegularToMutableObjectIterator<Tuple2<String, Integer>>(
                        data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));

        GatheringCollector<Tuple2<String, Integer>> result =
                new GatheringCollector<Tuple2<String, Integer>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setDriverStrategy(DriverStrategy.ALL_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setCollector(result);
        context.setUdf(new ConcatSumReducer());

        AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>> driver =
                new AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Tuple2<String, Integer> res = result.getList().get(0);

        char[] foundString = res.f0.toCharArray();
        Arrays.sort(foundString);

        char[] expectedString = "abcddeeeffff".toCharArray();
        Arrays.sort(expectedString);

        Assert.assertArrayEquals(expectedString, foundString);
        Assert.assertEquals(78, res.f1.intValue());
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 9
Source File: GroupReduceDriverTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testAllReduceDriverImmutableEmpty() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>>();

        List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
        TupleTypeInfo<Tuple2<String, Integer>> typeInfo =
                (TupleTypeInfo<Tuple2<String, Integer>>) TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<String, Integer>> input = EmptyMutableObjectIterator.get();
        TypeComparator<Tuple2<String, Integer>> comparator =
                typeInfo.createComparator(new int[]{0}, new boolean[]{true}, 0, new ExecutionConfig());

        context.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);

        GatheringCollector<Tuple2<String, Integer>> result =
                new GatheringCollector<Tuple2<String, Integer>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setComparator1(comparator);
        context.setCollector(result);

        GroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>> driver =
                new GroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Assert.assertTrue(result.getList().isEmpty());
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 10
Source File: GroupReduceDriverTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testAllReduceDriverMutable() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>>();

        List<Tuple2<StringValue, IntValue>> data = DriverTestData.createReduceMutableData();
        TupleTypeInfo<Tuple2<StringValue, IntValue>> typeInfo =
                (TupleTypeInfo<Tuple2<StringValue, IntValue>>) TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<StringValue, IntValue>> input =
                new RegularToMutableObjectIterator<Tuple2<StringValue, IntValue>>(
                        data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));
        TypeComparator<Tuple2<StringValue, IntValue>> comparator =
                typeInfo.createComparator(new int[]{0}, new boolean[]{true}, 0, new ExecutionConfig());

        GatheringCollector<Tuple2<StringValue, IntValue>> result =
                new GatheringCollector<Tuple2<StringValue, IntValue>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setComparator1(comparator);
        context.setCollector(result);
        context.setUdf(new ConcatSumMutableReducer());

        GroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>> driver =
                new GroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Object[] res = result.getList().toArray();
        Object[] expected = DriverTestData.createReduceMutableDataGroupedResult().toArray();

        DriverTestData.compareTupleArrays(expected, res);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 11
Source File: ReduceDriverTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testReduceDriverImmutableEmpty() {
    try {
        TestTaskContext<ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
                new TestTaskContext<ReduceFunction<Tuple2<String, Integer>>, Tuple2<String, Integer>>();

        List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
        TupleTypeInfo<Tuple2<String, Integer>> typeInfo =
                (TupleTypeInfo<Tuple2<String, Integer>>) TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<String, Integer>> input = EmptyMutableObjectIterator.get();
        context.setDriverStrategy(DriverStrategy.SORTED_REDUCE);

        TypeComparator<Tuple2<String, Integer>> comparator =
                typeInfo.createComparator(new int[]{0}, new boolean[]{true}, 0, new ExecutionConfig());
        GatheringCollector<Tuple2<String, Integer>> result =
                new GatheringCollector<Tuple2<String, Integer>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setComparator1(comparator);
        context.setCollector(result);

        ReduceDriver<Tuple2<String, Integer>> driver = new ReduceDriver<Tuple2<String, Integer>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Assert.assertEquals(0, result.getList().size());
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 12
Source File: StreamProjectTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testProject() throws Exception {
    TypeInformation<Tuple5<Integer, String, Integer, String, Integer>> inType = TypeExtractor
            .getForObject(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "b", 4));

    int[] fields = new int[]{4, 4, 3};

    TupleSerializer<Tuple3<Integer, Integer, String>> serializer =
            new TupleTypeInfo<Tuple3<Integer, Integer, String>>(StreamProjection.extractFieldTypes(fields, inType))
                    .createSerializer(new ExecutionConfig());
    @SuppressWarnings("unchecked")
    StreamProject<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>> operator =
            new StreamProject<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>>(
                    fields, serializer);

    OneInputStreamOperatorTestHarness<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>> testHarness =
            new OneInputStreamOperatorTestHarness<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>>(operator);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

    testHarness.open();

    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "b", 4), initialTime + 1));
    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "s", 3, "c", 2), initialTime + 2));
    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "c", 2), initialTime + 3));
    testHarness.processWatermark(new Watermark(initialTime + 2));
    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "a", 7), initialTime + 4));

    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(4, 4, "b"), initialTime + 1));
    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(2, 2, "c"), initialTime + 2));
    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(2, 2, "c"), initialTime + 3));
    expectedOutput.add(new Watermark(initialTime + 2));
    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(7, 7, "a"), initialTime + 4));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Example 13
Source File: StreamProjectTest.java From flink with Apache License 2.0
@Test
public void testProject() throws Exception {
    TypeInformation<Tuple5<Integer, String, Integer, String, Integer>> inType = TypeExtractor
            .getForObject(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "b", 4));

    int[] fields = new int[]{4, 4, 3};

    TupleSerializer<Tuple3<Integer, Integer, String>> serializer =
            new TupleTypeInfo<Tuple3<Integer, Integer, String>>(StreamProjection.extractFieldTypes(fields, inType))
                    .createSerializer(new ExecutionConfig());
    @SuppressWarnings("unchecked")
    StreamProject<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>> operator =
            new StreamProject<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>>(
                    fields, serializer);

    OneInputStreamOperatorTestHarness<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>> testHarness =
            new OneInputStreamOperatorTestHarness<Tuple5<Integer, String, Integer, String, Integer>, Tuple3<Integer, Integer, String>>(operator);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();

    testHarness.open();

    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "b", 4), initialTime + 1));
    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "s", 3, "c", 2), initialTime + 2));
    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "c", 2), initialTime + 3));
    testHarness.processWatermark(new Watermark(initialTime + 2));
    testHarness.processElement(new StreamRecord<Tuple5<Integer, String, Integer, String, Integer>>(new Tuple5<Integer, String, Integer, String, Integer>(2, "a", 3, "a", 7), initialTime + 4));

    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(4, 4, "b"), initialTime + 1));
    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(2, 2, "c"), initialTime + 2));
    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(2, 2, "c"), initialTime + 3));
    expectedOutput.add(new Watermark(initialTime + 2));
    expectedOutput.add(new StreamRecord<Tuple3<Integer, Integer, String>>(new Tuple3<Integer, Integer, String>(7, 7, "a"), initialTime + 4));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Example 14
Source File: FieldAccessorTest.java From flink with Apache License 2.0
@Test(expected = CompositeType.InvalidFieldReferenceException.class)
public void testIllegalTupleInPojoInTuple() {
    Tuple2<String, Foo> t = Tuple2.of("aa", new Foo(8, Tuple2.of("ddd", 9L), (short) 2));
    TupleTypeInfo<Tuple2<String, Foo>> tpeInfo =
            (TupleTypeInfo<Tuple2<String, Foo>>) TypeExtractor.getForObject(t);

    FieldAccessorFactory.getAccessor(tpeInfo, "illegal.illegal.illegal", null);
}
Example 15
Source File: AggregationFunctionTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void groupSumIntegerTest() throws Exception {

    // preparing expected outputs
    List<Tuple2<Integer, Integer>> expectedGroupSumList = new ArrayList<>();
    List<Tuple2<Integer, Integer>> expectedGroupMinList = new ArrayList<>();
    List<Tuple2<Integer, Integer>> expectedGroupMaxList = new ArrayList<>();

    int groupedSum0 = 0;
    int groupedSum1 = 0;
    int groupedSum2 = 0;

    for (int i = 0; i < 9; i++) {
        int groupedSum;
        switch (i % 3) {
            case 0:
                groupedSum = groupedSum0 += i;
                break;
            case 1:
                groupedSum = groupedSum1 += i;
                break;
            default:
                groupedSum = groupedSum2 += i;
                break;
        }

        expectedGroupSumList.add(new Tuple2<>(i % 3, groupedSum));
        expectedGroupMinList.add(new Tuple2<>(i % 3, i % 3));
        expectedGroupMaxList.add(new Tuple2<>(i % 3, i));
    }

    // some necessary boiler plate
    TypeInformation<Tuple2<Integer, Integer>> typeInfo = TypeExtractor.getForObject(new Tuple2<>(0, 0));

    ExecutionConfig config = new ExecutionConfig();

    KeySelector<Tuple2<Integer, Integer>, Tuple> keySelector = KeySelectorUtil.getSelectorForKeys(
            new Keys.ExpressionKeys<>(new int[]{0}, typeInfo),
            typeInfo, config);
    TypeInformation<Tuple> keyType = TypeExtractor.getKeySelectorTypes(keySelector, typeInfo);

    // aggregations tested
    ReduceFunction<Tuple2<Integer, Integer>> sumFunction =
            new SumAggregator<>(1, typeInfo, config);
    ReduceFunction<Tuple2<Integer, Integer>> minFunction = new ComparableAggregator<>(
            1, typeInfo, AggregationType.MIN, config);
    ReduceFunction<Tuple2<Integer, Integer>> maxFunction = new ComparableAggregator<>(
            1, typeInfo, AggregationType.MAX, config);

    List<Tuple2<Integer, Integer>> groupedSumList = MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(sumFunction, typeInfo.createSerializer(config)),
            getInputList(),
            keySelector, keyType);

    List<Tuple2<Integer, Integer>> groupedMinList = MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(minFunction, typeInfo.createSerializer(config)),
            getInputList(),
            keySelector, keyType);

    List<Tuple2<Integer, Integer>> groupedMaxList = MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(maxFunction, typeInfo.createSerializer(config)),
            getInputList(),
            keySelector, keyType);

    assertEquals(expectedGroupSumList, groupedSumList);
    assertEquals(expectedGroupMinList, groupedMinList);
    assertEquals(expectedGroupMaxList, groupedMaxList);
}
Example 16
Source File: AggregationFunctionTest.java From flink with Apache License 2.0
@Test
public void pojoMinMaxByTest() throws Exception {
    // Pojos are grouped on field 0, aggregated on field 1

    // preparing expected outputs
    List<MyPojo3> maxByFirstExpected = ImmutableList.of(
            new MyPojo3(0, 0), new MyPojo3(1, 1), new MyPojo3(2, 2),
            new MyPojo3(2, 2), new MyPojo3(2, 2), new MyPojo3(2, 2),
            new MyPojo3(2, 2), new MyPojo3(2, 2), new MyPojo3(2, 2));

    List<MyPojo3> maxByLastExpected = ImmutableList.of(
            new MyPojo3(0, 0), new MyPojo3(1, 1), new MyPojo3(2, 2),
            new MyPojo3(2, 2), new MyPojo3(2, 2), new MyPojo3(2, 5),
            new MyPojo3(2, 5), new MyPojo3(2, 5), new MyPojo3(2, 8));

    List<MyPojo3> minByFirstExpected = ImmutableList.of(
            new MyPojo3(0, 0), new MyPojo3(0, 0), new MyPojo3(0, 0),
            new MyPojo3(0, 0), new MyPojo3(0, 0), new MyPojo3(0, 0),
            new MyPojo3(0, 0), new MyPojo3(0, 0), new MyPojo3(0, 0));

    List<MyPojo3> minByLastExpected = ImmutableList.of(
            new MyPojo3(0, 0), new MyPojo3(0, 0), new MyPojo3(0, 0),
            new MyPojo3(0, 3), new MyPojo3(0, 3), new MyPojo3(0, 3),
            new MyPojo3(0, 6), new MyPojo3(0, 6), new MyPojo3(0, 6));

    // some necessary boiler plate
    TypeInformation<MyPojo3> typeInfo = TypeExtractor.getForObject(new MyPojo3(0, 0));

    ExecutionConfig config = new ExecutionConfig();

    KeySelector<MyPojo3, Tuple> keySelector = KeySelectorUtil.getSelectorForKeys(
            new Keys.ExpressionKeys<>(new String[]{"f0"}, typeInfo),
            typeInfo, config);
    TypeInformation<Tuple> keyType = TypeExtractor.getKeySelectorTypes(keySelector, typeInfo);

    // aggregations tested
    ReduceFunction<MyPojo3> maxByFunctionFirst =
            new ComparableAggregator<>("f1", typeInfo, AggregationType.MAXBY, true, config);
    ReduceFunction<MyPojo3> maxByFunctionLast =
            new ComparableAggregator<>("f1", typeInfo, AggregationType.MAXBY, false, config);
    ReduceFunction<MyPojo3> minByFunctionFirst =
            new ComparableAggregator<>("f1", typeInfo, AggregationType.MINBY, true, config);
    ReduceFunction<MyPojo3> minByFunctionLast =
            new ComparableAggregator<>("f1", typeInfo, AggregationType.MINBY, false, config);

    assertEquals(maxByFirstExpected, MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(maxByFunctionFirst, typeInfo.createSerializer(config)),
            getInputByPojoList(),
            keySelector, keyType));

    assertEquals(maxByLastExpected, MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(maxByFunctionLast, typeInfo.createSerializer(config)),
            getInputByPojoList(),
            keySelector, keyType));

    assertEquals(minByLastExpected, MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(minByFunctionLast, typeInfo.createSerializer(config)),
            getInputByPojoList(),
            keySelector, keyType));

    assertEquals(minByFirstExpected, MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(minByFunctionFirst, typeInfo.createSerializer(config)),
            getInputByPojoList(),
            keySelector, keyType));
}
Example 17
Source File: AggregationFunctionTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void minMaxByTest() throws Exception {
    // Tuples are grouped on field 0, aggregated on field 1

    // preparing expected outputs
    List<Tuple3<Integer, Integer, Integer>> maxByFirstExpected = ImmutableList.of(
            Tuple3.of(0, 0, 0), Tuple3.of(0, 1, 1), Tuple3.of(0, 2, 2),
            Tuple3.of(0, 2, 2), Tuple3.of(0, 2, 2), Tuple3.of(0, 2, 2),
            Tuple3.of(0, 2, 2), Tuple3.of(0, 2, 2), Tuple3.of(0, 2, 2));

    List<Tuple3<Integer, Integer, Integer>> maxByLastExpected = ImmutableList.of(
            Tuple3.of(0, 0, 0), Tuple3.of(0, 1, 1), Tuple3.of(0, 2, 2),
            Tuple3.of(0, 2, 2), Tuple3.of(0, 2, 2), Tuple3.of(0, 2, 5),
            Tuple3.of(0, 2, 5), Tuple3.of(0, 2, 5), Tuple3.of(0, 2, 8));

    List<Tuple3<Integer, Integer, Integer>> minByFirstExpected = ImmutableList.of(
            Tuple3.of(0, 0, 0), Tuple3.of(0, 0, 0), Tuple3.of(0, 0, 0),
            Tuple3.of(0, 0, 0), Tuple3.of(0, 0, 0), Tuple3.of(0, 0, 0),
            Tuple3.of(0, 0, 0), Tuple3.of(0, 0, 0), Tuple3.of(0, 0, 0));

    List<Tuple3<Integer, Integer, Integer>> minByLastExpected = ImmutableList.of(
            Tuple3.of(0, 0, 0), Tuple3.of(0, 0, 0), Tuple3.of(0, 0, 0),
            Tuple3.of(0, 0, 3), Tuple3.of(0, 0, 3), Tuple3.of(0, 0, 3),
            Tuple3.of(0, 0, 6), Tuple3.of(0, 0, 6), Tuple3.of(0, 0, 6));

    // some necessary boiler plate
    TypeInformation<Tuple3<Integer, Integer, Integer>> typeInfo = TypeExtractor
            .getForObject(Tuple3.of(0, 0, 0));

    ExecutionConfig config = new ExecutionConfig();

    KeySelector<Tuple3<Integer, Integer, Integer>, Tuple> keySelector = KeySelectorUtil.getSelectorForKeys(
            new Keys.ExpressionKeys<>(new int[]{0}, typeInfo),
            typeInfo, config);
    TypeInformation<Tuple> keyType = TypeExtractor.getKeySelectorTypes(keySelector, typeInfo);

    // aggregations tested
    ReduceFunction<Tuple3<Integer, Integer, Integer>> maxByFunctionFirst =
            new ComparableAggregator<>(1, typeInfo, AggregationType.MAXBY, true, config);
    ReduceFunction<Tuple3<Integer, Integer, Integer>> maxByFunctionLast =
            new ComparableAggregator<>(1, typeInfo, AggregationType.MAXBY, false, config);
    ReduceFunction<Tuple3<Integer, Integer, Integer>> minByFunctionFirst =
            new ComparableAggregator<>(1, typeInfo, AggregationType.MINBY, true, config);
    ReduceFunction<Tuple3<Integer, Integer, Integer>> minByFunctionLast =
            new ComparableAggregator<>(1, typeInfo, AggregationType.MINBY, false, config);

    assertEquals(maxByFirstExpected, MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(maxByFunctionFirst, typeInfo.createSerializer(config)),
            getInputByList(),
            keySelector, keyType));

    assertEquals(maxByLastExpected, MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(maxByFunctionLast, typeInfo.createSerializer(config)),
            getInputByList(),
            keySelector, keyType));

    assertEquals(minByLastExpected, MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(minByFunctionLast, typeInfo.createSerializer(config)),
            getInputByList(),
            keySelector, keyType));

    assertEquals(minByFirstExpected, MockContext.createAndExecuteForKeyedStream(
            new StreamGroupedReduce<>(minByFunctionFirst, typeInfo.createSerializer(config)),
            getInputByList(),
            keySelector, keyType));
}
Example 18
Source File: PojoSerializerTest.java From flink with Apache License 2.0
/**
 * This tests if the hashes returned by the pojo and tuple comparators are the same
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testTuplePojoTestEquality() {

    // test with a simple, string-key first.
    PojoTypeInfo<TestUserClass> pType = (PojoTypeInfo<TestUserClass>) type;
    List<FlatFieldDescriptor> result = new ArrayList<FlatFieldDescriptor>();
    pType.getFlatFields("nestedClass.dumm2", 0, result);
    int[] fields = new int[1]; // see below
    fields[0] = result.get(0).getPosition();
    TypeComparator<TestUserClass> pojoComp =
            pType.createComparator(fields, new boolean[]{true}, 0, new ExecutionConfig());

    TestUserClass pojoTestRecord = new TestUserClass(0, "abc", 3d, new int[] {1, 2, 3}, new Date(),
            new NestedTestUserClass(1, "haha", 4d, new int[] {5, 4, 3}));
    int pHash = pojoComp.hash(pojoTestRecord);

    Tuple1<String> tupleTest = new Tuple1<String>("haha");
    TupleTypeInfo<Tuple1<String>> tType =
            (TupleTypeInfo<Tuple1<String>>) TypeExtractor.getForObject(tupleTest);
    TypeComparator<Tuple1<String>> tupleComp =
            tType.createComparator(new int[] {0}, new boolean[] {true}, 0, new ExecutionConfig());

    int tHash = tupleComp.hash(tupleTest);

    Assert.assertTrue("The hashing for tuples and pojos must be the same, so that they are mixable", pHash == tHash);

    Tuple3<Integer, String, Double> multiTupleTest =
            new Tuple3<Integer, String, Double>(1, "haha", 4d); // its important here to use the same values.
    TupleTypeInfo<Tuple3<Integer, String, Double>> multiTupleType =
            (TupleTypeInfo<Tuple3<Integer, String, Double>>) TypeExtractor.getForObject(multiTupleTest);

    ExpressionKeys fieldKey = new ExpressionKeys(new int[]{1, 0, 2}, multiTupleType);
    ExpressionKeys expressKey = new ExpressionKeys(
            new String[] {"nestedClass.dumm2", "nestedClass.dumm1", "nestedClass.dumm3"}, pType);
    try {
        Assert.assertTrue("Expecting the keys to be compatible", fieldKey.areCompatible(expressKey));
    } catch (IncompatibleKeysException e) {
        e.printStackTrace();
        Assert.fail("Keys must be compatible: " + e.getMessage());
    }
    TypeComparator<TestUserClass> multiPojoComp = pType.createComparator(
            expressKey.computeLogicalKeyPositions(), new boolean[]{true, true, true}, 0, new ExecutionConfig());
    int multiPojoHash = multiPojoComp.hash(pojoTestRecord);

    // pojo order is: dumm2 (str), dumm1 (int), dumm3 (double).
    TypeComparator<Tuple3<Integer, String, Double>> multiTupleComp = multiTupleType.createComparator(
            fieldKey.computeLogicalKeyPositions(), new boolean[] {true, true, true}, 0, new ExecutionConfig());
    int multiTupleHash = multiTupleComp.hash(multiTupleTest);

    Assert.assertTrue("The hashing for tuples and pojos must be the same, so that they are mixable. Also for those with multiple key fields", multiPojoHash == multiTupleHash);
}