Java Code Examples for org.apache.flink.test.operators.util.CollectionDataSets#getCustomTypeDataSet()

The following examples show how to use org.apache.flink.test.operators.util.CollectionDataSets#getCustomTypeDataSet(). Each example is taken from an open-source project's integration tests; the source file, project, and license are listed above the code.
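Before the examples, here is a minimal, self-contained sketch of the call itself. It assumes the standard Flink DataSet API setup used in the tests below, and that CollectionDataSets.CustomType is a POJO whose toString() renders its fields as "myInt,myLong,myString", which is why the expected strings in the examples look like "1,0,Hi". The class name CustomTypeDataSetSketch is made up for illustration and is not part of Flink.

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.test.operators.util.CollectionDataSets;
import org.apache.flink.test.operators.util.CollectionDataSets.CustomType;

public class CustomTypeDataSetSketch {

	public static void main(String[] args) throws Exception {
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// Build the small, fixed test data set of CustomType POJOs that the ITCases below operate on.
		DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);

		// Printing triggers execution; CustomType#toString() is assumed to produce "myInt,myLong,myString".
		ds.print();
	}
}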
Example 1
Source File: GroupReduceITCase.java    From flink with Apache License 2.0
@Test
public void testCorrectnessOfGroupReduceOnCustomTypeWithKeyExtractorAndCombine() throws Exception {
	/*
	 * check correctness of groupReduce on custom type with key extractor and combine
	 */
	org.junit.Assume.assumeTrue(mode != TestExecutionMode.COLLECTION);

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> reduceDs = ds.
			groupBy(new KeySelector3()).reduceGroup(new CustomTypeGroupReduceWithCombine());

	List<CustomType> result = reduceDs.collect();

	String expected = "1,0,test1\n" +
			"2,3,test2\n" +
			"3,12,test3\n" +
			"4,30,test4\n" +
			"5,60,test5\n" +
			"6,105,test6\n";

	compareResultAsText(result, expected);
}
 
Example 2
Source File: JoinITCase.java    From flink with Apache License 2.0
@Test
public void testJoinOnATupleInputWithKeyFieldSelectorAndACustomTypeInputWithKeyExtractor()
		throws Exception {
	/*
	 * Join on a tuple input with key field selector and a custom type input with key extractor
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple3<Integer, Long, String>> ds1 = CollectionDataSets.getSmall3TupleDataSet(env);
	DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<Tuple2<String, String>> joinDs =
			ds1.join(ds2)
					.where(1).equalTo(new KeySelector2())
					.with(new T3CustJoin());

	List<Tuple2<String, String>> result = joinDs.collect();

	String expected = "Hi,Hello\n" +
			"Hello,Hello world\n" +
			"Hello world,Hello world\n";

	compareResultAsTuples(result, expected);
}
 
Example 3
Source File: CoGroupITCase.java    From flink with Apache License 2.0
@Test
public void testCoGroupTwoCustomTypeInputsWithExpressionKeys() throws Exception {
	/*
	 * CoGroup on two custom type inputs using expression keys
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> coGroupDs = ds.coGroup(ds2).where("myInt").equalTo("myInt").with(new CustomTypeCoGroup());

	List<CustomType> result = coGroupDs.collect();

	String expected = "1,0,test\n" +
			"2,6,test\n" +
			"3,24,test\n" +
			"4,60,test\n" +
			"5,120,test\n" +
			"6,210,test\n";

	compareResultAsText(result, expected);
}
 
Example 4
Source File: DistinctITCase.java    From flink with Apache License 2.0
@Test
public void testCorrectnessOfDistinctOnCustomTypeWithTypeExtractor() throws Exception {
	/*
	 * check correctness of distinct on custom type with type extractor
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<Tuple1<Integer>> reduceDs = ds
			.distinct(new KeySelector3())
			.map(new Mapper3());

	List<Tuple1<Integer>> result = reduceDs.collect();

	String expected = "1\n" +
			"2\n" +
			"3\n" +
			"4\n" +
			"5\n" +
			"6\n";

	compareResultAsTuples(result, expected);
}
 
Example 5
Source File: FilterITCase.java    From flink with Apache License 2.0
@Test
public void testFilterOnCustomType() throws Exception {
	/*
	 * Test filter on custom type
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> filterDs = ds.
			filter(new Filter6());
	List<CustomType> result = filterDs.collect();

	String expected = "3,3,Hello world, how are you?\n"
			+
			"3,4,I am fine.\n" +
			"3,5,Luke Skywalker\n";

	compareResultAsText(result, expected);
}
 
Example 6
Source File: GroupReduceITCase.java    From flink with Apache License 2.0
@Test
public void testCorrectnessOfAllGroupReduceForCustomTypes() throws Exception {
	/*
	 * check correctness of all-groupreduce for custom types
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> reduceDs = ds.reduceGroup(new AllAddingCustomTypeGroupReduce());

	List<CustomType> result = reduceDs.collect();

	String expected = "91,210,Hello!";

	compareResultAsText(result, expected);
}
 
Example 7
Source File: GroupReduceITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testPojoKeySelectorGroupSort() throws Exception {
	/*
	 * check correctness of sorted groupReduce on custom type with keyselector sorting
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> reduceDs = ds
			.groupBy(new TwoTuplePojoExtractor())
			.sortGroup(new StringPojoExtractor(), Order.DESCENDING)
			.reduceGroup(new CustomTypeSortedGroupReduce());

	List<CustomType> result = reduceDs.collect();

	String expected = "1,0,Hi\n"
			+
			"2,3,Hello world-Hello\n" +
			"3,12,Luke Skywalker-I am fine.-Hello world, how are you?\n" +
			"4,30,Comment#4-Comment#3-Comment#2-Comment#1\n" +
			"5,60,Comment#9-Comment#8-Comment#7-Comment#6-Comment#5\n" +
			"6,105,Comment#15-Comment#14-Comment#13-Comment#12-Comment#11-Comment#10\n";

	compareResultAsText(result, expected);
}
 
Example 8
Source File: GroupReduceITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCorrectnessOfGroupReduceOnCustomTypeWithKeyExtractorAndCombine() throws Exception {
	/*
	 * check correctness of groupReduce on custom type with key extractor and combine
	 */
	org.junit.Assume.assumeTrue(mode != TestExecutionMode.COLLECTION);

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> reduceDs = ds.
			groupBy(new KeySelector3()).reduceGroup(new CustomTypeGroupReduceWithCombine());

	List<CustomType> result = reduceDs.collect();

	String expected = "1,0,test1\n" +
			"2,3,test2\n" +
			"3,12,test3\n" +
			"4,30,test4\n" +
			"5,60,test5\n" +
			"6,105,test6\n";

	compareResultAsText(result, expected);
}
 
Example 9
Source File: ReduceITCase.java    From flink with Apache License 2.0
@Test
public void testAllReduceForCustomTypes() throws Exception {
	/*
	 * All-reduce for custom types
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> reduceDs = ds.
			reduce(new AllAddingCustomTypeReduce());

	List<CustomType> result = reduceDs.collect();

	String expected = "91,210,Hello!";

	compareResultAsText(result, expected);
}
 
Example 10
Source File: GroupReduceITCase.java    From flink with Apache License 2.0
@Test
public void testPojoKeySelectorGroupSort() throws Exception {
	/*
	 * check correctness of sorted groupReduce on custom type with keyselector sorting
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> reduceDs = ds
			.groupBy(new TwoTuplePojoExtractor())
			.sortGroup(new StringPojoExtractor(), Order.DESCENDING)
			.reduceGroup(new CustomTypeSortedGroupReduce());

	List<CustomType> result = reduceDs.collect();

	String expected = "1,0,Hi\n"
			+
			"2,3,Hello world-Hello\n" +
			"3,12,Luke Skywalker-I am fine.-Hello world, how are you?\n" +
			"4,30,Comment#4-Comment#3-Comment#2-Comment#1\n" +
			"5,60,Comment#9-Comment#8-Comment#7-Comment#6-Comment#5\n" +
			"6,105,Comment#15-Comment#14-Comment#13-Comment#12-Comment#11-Comment#10\n";

	compareResultAsText(result, expected);
}
 
Example 11
Source File: GroupReduceITCase.java    From flink with Apache License 2.0
@Test
public void testCorrectnessOfGroupReduceOnCustomTypeWithTypeExtractor() throws Exception {
	/*
	 * check correctness of groupReduce on custom type with type extractor
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> reduceDs = ds.
			groupBy(new KeySelector2()).reduceGroup(new CustomTypeGroupReduce());

	List<CustomType> result = reduceDs.collect();

	String expected = "1,0,Hello!\n" +
			"2,3,Hello!\n" +
			"3,12,Hello!\n" +
			"4,30,Hello!\n" +
			"5,60,Hello!\n" +
			"6,105,Hello!\n";

	compareResultAsText(result, expected);
}
 
Example 12
Source File: CoGroupITCase.java    From flink with Apache License 2.0
@Test
public void testCoGroupOnATupleInputWithKeyFieldSelectorAndACustomTypeInputWithKeyExtractor()
		throws Exception {
	/*
	 * CoGroup on a tuple input with key field selector and a custom type input with key extractor
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env);
	DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<Tuple3<Integer, Long, String>> coGroupDs = ds.coGroup(ds2)
			.where(2).equalTo(new KeySelector2()).with(new MixedCoGroup());

	List<Tuple3<Integer, Long, String>> result = coGroupDs.collect();

	String expected = "0,1,test\n" +
			"1,2,test\n" +
			"2,5,test\n" +
			"3,15,test\n" +
			"4,33,test\n" +
			"5,63,test\n" +
			"6,109,test\n" +
			"7,4,test\n" +
			"8,4,test\n" +
			"9,4,test\n" +
			"10,5,test\n" +
			"11,5,test\n" +
			"12,5,test\n" +
			"13,5,test\n" +
			"14,5,test\n";

	compareResultAsTuples(result, expected);
}
 
Example 13
Source File: FlatMapITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testTypeConversionFlatMapperCustomToTuple() throws Exception {
	/*
	 * Test type conversion flatmapper (Custom -> Tuple)
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<Tuple3<Integer, Long, String>> typeConversionFlatMapDs = ds.
			flatMap(new FlatMapper4());

	List<Tuple3<Integer, Long, String>> result = typeConversionFlatMapDs.collect();

	String expected = "1,0,Hi\n" +
			"2,1,Hello\n" +
			"2,2,Hello world\n" +
			"3,3,Hello world, how are you?\n" +
			"3,4,I am fine.\n" +
			"3,5,Luke Skywalker\n" +
			"4,6,Comment#1\n" +
			"4,7,Comment#2\n" +
			"4,8,Comment#3\n" +
			"4,9,Comment#4\n" +
			"5,10,Comment#5\n" +
			"5,11,Comment#6\n" +
			"5,12,Comment#7\n" +
			"5,13,Comment#8\n" +
			"5,14,Comment#9\n" +
			"6,15,Comment#10\n" +
			"6,16,Comment#11\n" +
			"6,17,Comment#12\n" +
			"6,18,Comment#13\n" +
			"6,19,Comment#14\n" +
			"6,20,Comment#15\n";

	compareResultAsTuples(result, expected);
}
 
Example 14
Source File: MapITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testMapperOnCustomLowercaseString() throws Exception {
	/*
	 * Test mapper on Custom - lowercase myString
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> customMapDs = ds.
			map(new Mapper6());

	List<CustomType> result = customMapDs.collect();

	String expected = "1,0,hi\n" +
			"2,1,hello\n" +
			"2,2,hello world\n" +
			"3,3,hello world, how are you?\n" +
			"3,4,i am fine.\n" +
			"3,5,luke skywalker\n" +
			"4,6,comment#1\n" +
			"4,7,comment#2\n" +
			"4,8,comment#3\n" +
			"4,9,comment#4\n" +
			"5,10,comment#5\n" +
			"5,11,comment#6\n" +
			"5,12,comment#7\n" +
			"5,13,comment#8\n" +
			"5,14,comment#9\n" +
			"6,15,comment#10\n" +
			"6,16,comment#11\n" +
			"6,17,comment#12\n" +
			"6,18,comment#13\n" +
			"6,19,comment#14\n" +
			"6,20,comment#15\n";

	compareResultAsText(result, expected);
}
 
Example 15
Source File: MapITCase.java    From flink with Apache License 2.0
@Test
public void testTypeConversionMapperCustomToTuple() throws Exception {
	/*
	 * Test type conversion mapper (Custom -> Tuple)
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<Tuple3<Integer, Long, String>> typeConversionMapDs = ds.
			map(new Mapper3());

	List<Tuple3<Integer, Long, String>> result = typeConversionMapDs.collect();

	String expected = "1,0,Hi\n" +
			"2,1,Hello\n" +
			"2,2,Hello world\n" +
			"3,3,Hello world, how are you?\n" +
			"3,4,I am fine.\n" +
			"3,5,Luke Skywalker\n" +
			"4,6,Comment#1\n" +
			"4,7,Comment#2\n" +
			"4,8,Comment#3\n" +
			"4,9,Comment#4\n" +
			"5,10,Comment#5\n" +
			"5,11,Comment#6\n" +
			"5,12,Comment#7\n" +
			"5,13,Comment#8\n" +
			"5,14,Comment#9\n" +
			"6,15,Comment#10\n" +
			"6,16,Comment#11\n" +
			"6,17,Comment#12\n" +
			"6,18,Comment#13\n" +
			"6,19,Comment#14\n" +
			"6,20,Comment#15\n";

	compareResultAsTuples(result, expected);
}
 
Example 16
Source File: CoGroupITCase.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCoGroupOnACustomTypeWithKeyExtractorAndATupleInputWithKeyFieldSelector()
		throws Exception {
	/*
	 * CoGroup on a custom type input with key extractor and a tuple input with key field selector
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env);
	DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> coGroupDs = ds2.coGroup(ds)
			.where(new KeySelector3()).equalTo(2).with(new MixedCoGroup2());

	List<CustomType> result = coGroupDs.collect();

	String expected = "0,1,test\n" +
			"1,2,test\n" +
			"2,5,test\n" +
			"3,15,test\n" +
			"4,33,test\n" +
			"5,63,test\n" +
			"6,109,test\n" +
			"7,4,test\n" +
			"8,4,test\n" +
			"9,4,test\n" +
			"10,5,test\n" +
			"11,5,test\n" +
			"12,5,test\n" +
			"13,5,test\n" +
			"14,5,test\n";

	compareResultAsText(result, expected);
}
 
Example 17
Source File: FlatMapITCase.java    From flink with Apache License 2.0
@Test
public void testTypeConversionFlatMapperCustomToTuple() throws Exception {
	/*
	 * Test type conversion flatmapper (Custom -> Tuple)
	 */

	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<Tuple3<Integer, Long, String>> typeConversionFlatMapDs = ds.
			flatMap(new FlatMapper4());

	List<Tuple3<Integer, Long, String>> result = typeConversionFlatMapDs.collect();

	String expected = "1,0,Hi\n" +
			"2,1,Hello\n" +
			"2,2,Hello world\n" +
			"3,3,Hello world, how are you?\n" +
			"3,4,I am fine.\n" +
			"3,5,Luke Skywalker\n" +
			"4,6,Comment#1\n" +
			"4,7,Comment#2\n" +
			"4,8,Comment#3\n" +
			"4,9,Comment#4\n" +
			"5,10,Comment#5\n" +
			"5,11,Comment#6\n" +
			"5,12,Comment#7\n" +
			"5,13,Comment#8\n" +
			"5,14,Comment#9\n" +
			"6,15,Comment#10\n" +
			"6,16,Comment#11\n" +
			"6,17,Comment#12\n" +
			"6,18,Comment#13\n" +
			"6,19,Comment#14\n" +
			"6,20,Comment#15\n";

	compareResultAsTuples(result, expected);
}