org.apache.flink.test.operators.util.CollectionDataSets Java Examples
The following examples show how to use
org.apache.flink.test.operators.util.CollectionDataSets.
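Every example below follows the same basic pattern: obtain an ExecutionEnvironment, build one of the small fixed test data sets provided by CollectionDataSets, apply the transformation under test, collect the result, and compare it against an expected string. A minimal sketch of that pattern (assuming the same Flink DataSet API the tests use; the wrapper class name, the lambda filter, and the printed output are illustrative assumptions, not part of any test shown here) looks like:

import java.util.List;

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.test.operators.util.CollectionDataSets;

public class CollectionDataSetsSketch {

    public static void main(String[] args) throws Exception {
        // Obtain an execution environment, as the test cases below do.
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // CollectionDataSets exposes small, fixed test data sets, e.g. a 3-tuple set.
        DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.getSmall3TupleDataSet(env);

        // Apply a transformation and collect the result for verification.
        List<Tuple3<Integer, Long, String>> result = ds.filter(t -> t.f0 > 1).collect();

        // The ITCases compare such a result against an expected newline-separated string.
        result.forEach(System.out::println);
    }
}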
Example #1
Source File: TypeHintITCase.java From flink with Apache License 2.0
@Test
public void testUnsortedGroupReduceWithTypeInformationTypeHint() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.getSmall3TupleDataSet(env);
    DataSet<Integer> resultDs = ds
        .groupBy(0)
        .reduceGroup(new GroupReducer<Tuple3<Integer, Long, String>, Integer>())
        .returns(BasicTypeInfo.INT_TYPE_INFO);
    List<Integer> result = resultDs.collect();

    String expectedResult = "2\n" +
        "3\n" +
        "1\n";

    compareResultAsText(result, expectedResult);
}
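The GroupReducer used above is a generic helper defined elsewhere in TypeHintITCase and is not shown on this page; because its output type is a type variable, Flink cannot infer it, which is exactly what the .returns(BasicTypeInfo.INT_TYPE_INFO) hint supplies. A hedged sketch of a reducer with that shape (the class name, bound, and field choice are assumptions, not the test's actual code) might be:

import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.util.Collector;

// Hypothetical generic reducer: emits the first field of each record in the
// group. Because OUT is a type variable, the caller must declare the result
// type via a type hint such as .returns(BasicTypeInfo.INT_TYPE_INFO).
public class GroupReducerSketch<IN extends Tuple, OUT> implements GroupReduceFunction<IN, OUT> {
    @Override
    public void reduce(Iterable<IN> values, Collector<OUT> out) {
        for (IN value : values) {
            out.collect(value.getField(0));
        }
    }
}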
Example #2
Source File: JavaTableEnvironmentITCase.java From flink with Apache License 2.0
@Test
public void testSimpleRegister() throws Exception {
    final String tableName = "MyTable";
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    BatchTableEnvironment tableEnv = BatchTableEnvironment.create(env, config());

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    tableEnv.createTemporaryView(tableName, ds);
    Table t = tableEnv.from(tableName);

    Table result = t.select($("f0"), $("f1"));

    DataSet<Row> resultSet = tableEnv.toDataSet(result, Row.class);
    List<Row> results = resultSet.collect();

    String expected = "1,1\n" + "2,2\n" + "3,2\n" + "4,3\n" + "5,3\n" + "6,3\n" + "7,4\n" +
        "8,4\n" + "9,4\n" + "10,4\n" + "11,5\n" + "12,5\n" + "13,5\n" + "14,5\n" + "15,5\n" +
        "16,6\n" + "17,6\n" + "18,6\n" + "19,6\n" + "20,6\n" + "21,6\n";

    compareResultAsText(results, expected);
}
Example #3
Source File: JoinITCase.java From flink with Apache License 2.0
@Test
public void testJoinNestedPojoAgainstTupleSelectedUsingInteger() throws Exception {
    /*
     * Join nested pojo against tuple (selected as an integer)
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds1 = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
    DataSet<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> joinDs =
        ds1.join(ds2).where("nestedPojo.longNumber").equalTo(6); // <--- difference!

    List<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> result = joinDs.collect();

    String expected = "1 First (10,100,1000,One) 10000,(1,First,10,100,1000,One,10000)\n" +
        "2 Second (20,200,2000,Two) 20000,(2,Second,20,200,2000,Two,20000)\n" +
        "3 Third (30,300,3000,Three) 30000,(3,Third,30,300,3000,Three,30000)\n";

    compareResultAsTuples(result, expected);
}
Example #4
Source File: FirstNITCase.java From flink with Apache License 2.0
@Test
public void testFirstNOnGroupedAndSortedDS() throws Exception {
    /*
     * First-n on grouped and sorted data set
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple2<Long, Integer>> first = ds.groupBy(1).sortGroup(0, Order.DESCENDING).first(3)
        .project(1, 0);

    List<Tuple2<Long, Integer>> result = first.collect();

    String expected = "(1,1)\n" +
        "(2,3)\n(2,2)\n" +
        "(3,6)\n(3,5)\n(3,4)\n" +
        "(4,10)\n(4,9)\n(4,8)\n" +
        "(5,15)\n(5,14)\n(5,13)\n" +
        "(6,21)\n(6,20)\n(6,19)\n";

    compareResultAsText(result, expected);
}
Example #5
Source File: JoinITCase.java From flink with Apache License 2.0
@Test
public void testNestedIntoTupleIntoPojo() throws Exception {
    /*
     * nested into tuple into pojo
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds1 = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
    DataSet<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> joinDs =
        ds1.join(ds2)
            .where("nestedTupleWithCustom.f0", "nestedTupleWithCustom.f1.myInt", "nestedTupleWithCustom.f1.myLong")
            .equalTo("f2", "f3", "f4");

    env.setParallelism(1);
    List<Tuple2<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>>> result = joinDs.collect();

    String expected = "1 First (10,100,1000,One) 10000,(1,First,10,100,1000,One,10000)\n" +
        "2 Second (20,200,2000,Two) 20000,(2,Second,20,200,2000,Two,20000)\n" +
        "3 Third (30,300,3000,Three) 30000,(3,Third,30,300,3000,Three,30000)\n";

    compareResultAsTuples(result, expected);
}
Example #6
Source File: FilterITCase.java From flink with Apache License 2.0
@Test
public void testRichFilterOnStringTupleField() throws Exception {
    /*
     * Test filter on String tuple field.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Integer> ints = CollectionDataSets.getIntegerDataSet(env);

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> filterDs = ds
        .filter(new RichFilter1()).withBroadcastSet(ints, "ints");
    List<Tuple3<Integer, Long, String>> result = filterDs.collect();

    String expected = "1,1,Hi\n" +
        "2,2,Hello\n" +
        "3,2,Hello world\n" +
        "4,3,Hello world, how are you?\n";

    compareResultAsTuples(result, expected);
}
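The RichFilter1 function is defined elsewhere in FilterITCase and is not shown on this page. As a hedged illustration of the broadcast-set pattern this example (and Example #11 below) relies on, a rich filter that reads the "ints" broadcast set in open() might look like the following sketch; the class name, the threshold logic, and the choice of the String field are assumptions, not the test's actual implementation:

import java.util.Collections;
import java.util.List;

import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;

// Hypothetical sketch: keep tuples whose String field is no longer than the
// largest value in the "ints" broadcast set. Not the actual RichFilter1.
public class BroadcastLengthFilter extends RichFilterFunction<Tuple3<Integer, Long, String>> {

    private int maxLength;

    @Override
    public void open(Configuration parameters) {
        // Broadcast sets are materialized per task and fetched by name.
        List<Integer> ints = getRuntimeContext().getBroadcastVariable("ints");
        maxLength = Collections.max(ints);
    }

    @Override
    public boolean filter(Tuple3<Integer, Long, String> value) {
        return value.f2.length() <= maxLength;
    }
}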
Example #7
Source File: SortPartitionITCase.java From flink with Apache License 2.0
@Test
public void testSortPartitionParallelismChange() throws Exception {
    /*
     * Test sort partition with parallelism change
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(3);

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    List<Tuple1<Boolean>> result = ds
        .sortPartition(1, Order.DESCENDING).setParallelism(3) // change parallelism
        .mapPartition(new OrderCheckMapper<>(new Tuple3Checker()))
        .distinct().collect();

    String expected = "(true)\n";

    compareResultAsText(result, expected);
}
Example #8
Source File: CoGroupITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testCoGroupTwoCustomTypeInputsWithExpressionKeys() throws Exception {
    /*
     * CoGroup on two custom type inputs using expression keys
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> coGroupDs = ds.coGroup(ds2).where("myInt").equalTo("myInt").with(new CustomTypeCoGroup());

    List<CustomType> result = coGroupDs.collect();

    String expected = "1,0,test\n" +
        "2,6,test\n" +
        "3,24,test\n" +
        "4,60,test\n" +
        "5,120,test\n" +
        "6,210,test\n";

    compareResultAsText(result, expected);
}
Example #9
Source File: TypeHintITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testFlatMapWithClassTypeHint() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.getSmall3TupleDataSet(env);
    DataSet<Integer> identityMapDs = ds
        .flatMap(new FlatMapper<Tuple3<Integer, Long, String>, Integer>())
        .returns(Integer.class);
    List<Integer> result = identityMapDs.collect();

    String expectedResult = "2\n" +
        "3\n" +
        "1\n";

    compareResultAsText(result, expectedResult);
}
Example #10
Source File: DataSinkITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testPojoSortingNestedParallelism1() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CollectionDataSets.POJO> ds = CollectionDataSets.getMixedPojoDataSet(env);
    ds.writeAsText(resultPath)
        .sortLocalOutput("nestedTupleWithCustom.f0", Order.ASCENDING)
        .sortLocalOutput("nestedTupleWithCustom.f1.myInt", Order.DESCENDING)
        .sortLocalOutput("nestedPojo.longNumber", Order.ASCENDING)
        .setParallelism(1);

    env.execute();

    String expected =
        "2 First_ (10,105,1000,One) 10200\n" +
        "1 First (10,100,1000,One) 10100\n" +
        "4 First_ (11,106,1000,One) 10300\n" +
        "5 First (11,102,2000,One) 10100\n" +
        "3 First (11,102,3000,One) 10200\n" +
        "6 Second_ (20,200,2000,Two) 10100\n" +
        "8 Third_ (30,300,1000,Three) 10100\n" +
        "7 Third (31,301,2000,Three) 10200\n";

    compareResultsByLinesInMemoryWithStrictOrder(expected, resultPath);
}
Example #11
Source File: FilterITCase.java From flink with Apache License 2.0
@Test
public void testFilterWithBroadcastVariables() throws Exception {
    /*
     * Test filter with broadcast variables
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Integer> intDs = CollectionDataSets.getIntegerDataSet(env);

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> filterDs = ds
        .filter(new RichFilter2()).withBroadcastSet(intDs, "ints");
    List<Tuple3<Integer, Long, String>> result = filterDs.collect();

    String expected = "11,5,Comment#5\n" +
        "12,5,Comment#6\n" +
        "13,5,Comment#7\n" +
        "14,5,Comment#8\n" +
        "15,5,Comment#9\n";

    compareResultAsTuples(result, expected);
}
Example #12
Source File: DistinctITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testCorrectnessOfDistinctOnAtomic() throws Exception {
    /*
     * check correctness of distinct on Integers
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Integer> ds = CollectionDataSets.getIntegerDataSet(env);
    DataSet<Integer> reduceDs = ds.distinct();

    List<Integer> result = reduceDs.collect();

    String expected = "1\n2\n3\n4\n5";

    compareResultAsText(result, expected);
}
Example #13
Source File: UnionITCase.java From flink with Apache License 2.0
@Test
public void testUnion5IdenticalDataSets() throws Exception {
    /*
     * Union of 5 same Data Sets, with multiple unions
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> unionDs = ds.union(CollectionDataSets.get3TupleDataSet(env))
        .union(CollectionDataSets.get3TupleDataSet(env))
        .union(CollectionDataSets.get3TupleDataSet(env))
        .union(CollectionDataSets.get3TupleDataSet(env));

    List<Tuple3<Integer, Long, String>> result = unionDs.collect();

    String expected = FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING +
        FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING;

    compareResultAsTuples(result, expected);
}
Example #14
Source File: CoGroupITCase.java From flink with Apache License 2.0
@Test
public void testCoGroupTwoCustomTypeInputsWithExpressionKeys() throws Exception {
    /*
     * CoGroup on two custom type inputs using expression keys
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> coGroupDs = ds.coGroup(ds2).where("myInt").equalTo("myInt").with(new CustomTypeCoGroup());

    List<CustomType> result = coGroupDs.collect();

    String expected = "1,0,test\n" +
        "2,6,test\n" +
        "3,24,test\n" +
        "4,60,test\n" +
        "5,120,test\n" +
        "6,210,test\n";

    compareResultAsText(result, expected);
}
Example #15
Source File: GroupReduceITCase.java From flink with Apache License 2.0
@Test
public void testPojoKeySelectorGroupSort() throws Exception {
    /*
     * check correctness of sorted groupReduce on custom type with keyselector sorting
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
    DataSet<CustomType> reduceDs = ds
        .groupBy(new TwoTuplePojoExtractor())
        .sortGroup(new StringPojoExtractor(), Order.DESCENDING)
        .reduceGroup(new CustomTypeSortedGroupReduce());

    List<CustomType> result = reduceDs.collect();

    String expected = "1,0,Hi\n" +
        "2,3,Hello world-Hello\n" +
        "3,12,Luke Skywalker-I am fine.-Hello world, how are you?\n" +
        "4,30,Comment#4-Comment#3-Comment#2-Comment#1\n" +
        "5,60,Comment#9-Comment#8-Comment#7-Comment#6-Comment#5\n" +
        "6,105,Comment#15-Comment#14-Comment#13-Comment#12-Comment#11-Comment#10\n";

    compareResultAsText(result, expected);
}
Example #16
Source File: OuterJoinITCase.java From flink with Apache License 2.0
@Test
public void testJoinWithMixedKeyTypes2() throws Exception {
    /*
     * Join on a tuple input with key field selector and a custom type input with key extractor
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds1 = CollectionDataSets.getSmall3TupleDataSet(env);
    DataSet<CustomType> ds2 = CollectionDataSets.getSmallCustomTypeDataSet(env);
    DataSet<Tuple2<String, String>> joinDs = ds1.fullOuterJoin(ds2)
        .where(1)
        .equalTo(new KeySelector2())
        .with(new T3CustJoin());

    List<Tuple2<String, String>> result = joinDs.collect();

    String expected = "null,Hi\n" +
        "Hi,Hello\n" +
        "Hello,Hello world\n" +
        "Hello world,Hello world\n";

    compareResultAsTuples(result, expected);
}
Example #17
Source File: CoGroupITCase.java From flink with Apache License 2.0
@Test
public void testCoGroupWithMultipleKeyFieldsWithFieldSelector() throws Exception {
    /*
     * CoGroup with multiple key fields
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds1 = CollectionDataSets.get5TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env);

    DataSet<Tuple3<Integer, Long, String>> coGrouped = ds1.coGroup(ds2)
        .where(0, 4).equalTo(0, 1).with(new Tuple5Tuple3CoGroup());

    List<Tuple3<Integer, Long, String>> result = coGrouped.collect();

    String expected = "1,1,Hallo\n" +
        "2,2,Hallo Welt\n" +
        "3,2,Hallo Welt wie gehts?\n" +
        "3,2,ABC\n" +
        "5,3,HIJ\n" +
        "5,3,IJK\n";

    compareResultAsTuples(result, expected);
}
Example #18
Source File: GroupReduceITCase.java From flink with Apache License 2.0
@Test
public void testTupleContainingPojosAndRegularFields() throws Exception {
    /*
     * Test Tuple containing pojos and regular fields
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, CrazyNested, POJO>> ds = CollectionDataSets.getTupleContainingPojos(env);

    DataSet<Integer> reduceDs = ds.groupBy("f0", "f1.*") // nested full tuple selection
        .reduceGroup(new GroupReducer4());
    List<Integer> result = reduceDs.collect();

    String expected = "3\n1\n";

    compareResultAsText(result, expected);
}
Example #19
Source File: JoinITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testJoinOnACustomTypeInputWithKeyExtractorAndATupleInputWithKeyFieldSelector() throws Exception {
    /*
     * Join on a tuple input with key field selector and a custom type input with key extractor
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CustomType> ds1 = CollectionDataSets.getSmallCustomTypeDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple2<String, String>> joinDs = ds1.join(ds2)
        .where(new KeySelector1())
        .equalTo(0)
        .with(new CustT3Join());

    List<Tuple2<String, String>> result = joinDs.collect();

    String expected = "Hi,Hi\n" +
        "Hello,Hello\n" +
        "Hello world,Hello\n";

    compareResultAsTuples(result, expected);
}
Example #20
Source File: DataSinkITCase.java From flink with Apache License 2.0
@Test
public void testPojoSortingDualParallelism1() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<CollectionDataSets.POJO> ds = CollectionDataSets.getMixedPojoDataSet(env);
    ds.writeAsText(resultPath)
        .sortLocalOutput("str", Order.ASCENDING)
        .sortLocalOutput("number", Order.DESCENDING)
        .setParallelism(1);

    env.execute();

    String expected =
        "5 First (11,102,2000,One) 10100\n" +
        "3 First (11,102,3000,One) 10200\n" +
        "1 First (10,100,1000,One) 10100\n" +
        "4 First_ (11,106,1000,One) 10300\n" +
        "2 First_ (10,105,1000,One) 10200\n" +
        "6 Second_ (20,200,2000,Two) 10100\n" +
        "7 Third (31,301,2000,Three) 10200\n" +
        "8 Third_ (30,300,1000,Three) 10100\n";

    compareResultsByLinesInMemoryWithStrictOrder(expected, resultPath);
}
Example #21
Source File: CoGroupITCase.java From flink with Apache License 2.0
@Test
public void testCoGroupFieldSelectorAndComplicatedKeySelector() throws Exception {
    /*
     * CoGroup field-selector (expression keys) + key selector function
     * The key selector is unnecessary complicated (Tuple1) ;)
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
    DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);

    DataSet<CustomType> coGroupDs = ds.coGroup(ds2)
        .where(new KeySelector6()).equalTo(6).with(new CoGroup3());

    List<CustomType> result = coGroupDs.collect();

    String expected = "-1,20000,Flink\n" +
        "-1,10000,Flink\n" +
        "-1,30000,Flink\n";

    compareResultAsText(result, expected);
}
Example #22
Source File: JoinITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testJoinThatReturnsTheLeftInputObject() throws Exception {
    /*
     * Join that returns the left input object
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds1 = CollectionDataSets.getSmall3TupleDataSet(env);
    DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.get5TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> joinDs = ds1.join(ds2)
        .where(1)
        .equalTo(1)
        .with(new LeftReturningJoin());

    List<Tuple3<Integer, Long, String>> result = joinDs.collect();

    String expected = "1,1,Hi\n" +
        "2,2,Hello\n" +
        "3,2,Hello world\n";

    compareResultAsTuples(result, expected);
}
Example #23
Source File: FilterITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testAllRejectingFilter() throws Exception {
    /*
     * Test all-rejecting filter.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> filterDs = ds
        .filter(new Filter1());

    List<Tuple3<Integer, Long, String>> result = filterDs.collect();

    String expected = "\n";

    compareResultAsTuples(result, expected);
}
Example #24
Source File: DistinctITCase.java From flink with Apache License 2.0
@Test
public void testCorrectnessOfDistinctOnTuples() throws Exception {
    /*
     * check correctness of distinct on tuples
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.getSmall3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> distinctDs = ds.union(ds).distinct();

    List<Tuple3<Integer, Long, String>> result = distinctDs.collect();

    String expected = "1,1,Hi\n" +
        "2,2,Hello\n" +
        "3,2,Hello world\n";

    compareResultAsTuples(result, expected);
}
Example #25
Source File: CrossITCase.java From flink with Apache License 2.0
@Test
public void testCorrectnessOfCrossWithTiny() throws Exception {
    /*
     * check correctness of crossWithTiny (only correctness of result -> should be the same as with normal cross)
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.getSmall5TupleDataSet(env);
    DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.getSmall5TupleDataSet(env);
    DataSet<Tuple2<Integer, String>> crossDs = ds.crossWithTiny(ds2).with(new Tuple5Cross());

    List<Tuple2<Integer, String>> result = crossDs.collect();

    String expected = "0,HalloHallo\n" +
        "1,HalloHallo Welt\n" +
        "2,HalloHallo Welt wie\n" +
        "1,Hallo WeltHallo\n" +
        "2,Hallo WeltHallo Welt\n" +
        "3,Hallo WeltHallo Welt wie\n" +
        "2,Hallo Welt wieHallo\n" +
        "3,Hallo Welt wieHallo Welt\n" +
        "4,Hallo Welt wieHallo Welt wie\n";

    compareResultAsTuples(result, expected);
}
Example #26
Source File: TypeHintITCase.java From flink with Apache License 2.0
@Test
public void testFlatMapWithClassTypeHint() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableSysoutLogging();

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.getSmall3TupleDataSet(env);
    DataSet<Integer> identityMapDs = ds
        .flatMap(new FlatMapper<Tuple3<Integer, Long, String>, Integer>())
        .returns(Integer.class);
    List<Integer> result = identityMapDs.collect();

    String expectedResult = "2\n" +
        "3\n" +
        "1\n";

    compareResultAsText(result, expectedResult);
}
Example #27
Source File: GroupReduceITCase.java From flink with Apache License 2.0
@Test
public void testJavaCollectionsWithinPojos() throws Exception {
    /*
     * Test Java collections within pojos ( == test kryo)
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    DataSet<CollectionDataSets.PojoWithCollection> ds = CollectionDataSets.getPojoWithCollection(env);
    // f0.f0 is first integer
    DataSet<String> reduceDs = ds.groupBy("key")
        .reduceGroup(new GroupReducer7());
    List<String> result = reduceDs.collect();

    String expected = "callFor key 0 we got: pojo.a=apojo.a=bFor key 0 we got: pojo.a=a2pojo.a=b2\n";

    compareResultAsText(result, expected);
}
Example #28
Source File: JoinITCase.java From flink with Apache License 2.0
@Test
public void testNonPojoToVerifyFullTupleKeys() throws Exception {
    /*
     * Non-POJO test to verify that full-tuple keys are working.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple2<Tuple2<Integer, Integer>, String>> ds1 = CollectionDataSets.getSmallNestedTupleDataSet(env);
    DataSet<Tuple2<Tuple2<Integer, Integer>, String>> ds2 = CollectionDataSets.getSmallNestedTupleDataSet(env);
    DataSet<Tuple2<Tuple2<Tuple2<Integer, Integer>, String>, Tuple2<Tuple2<Integer, Integer>, String>>> joinDs =
        ds1.join(ds2).where(0).equalTo("f0.f0", "f0.f1"); // key is now Tuple2<Integer, Integer>

    env.setParallelism(1);
    List<Tuple2<Tuple2<Tuple2<Integer, Integer>, String>, Tuple2<Tuple2<Integer, Integer>, String>>> result = joinDs.collect();

    String expected = "((1,1),one),((1,1),one)\n" +
        "((2,2),two),((2,2),two)\n" +
        "((3,3),three),((3,3),three)\n";

    compareResultAsTuples(result, expected);
}
Example #29
Source File: PartitionITCase.java From flink with Apache License 2.0
@Test
public void testRangePartitionByKeySelector() throws Exception {
    /*
     * Test range partition by key selector
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Long> uniqLongs = ds
        .partitionByRange(new KeySelector1())
        .mapPartition(new UniqueTupleLongMapper());
    List<Long> result = uniqLongs.collect();

    String expected = "1\n" +
        "2\n" +
        "3\n" +
        "4\n" +
        "5\n" +
        "6\n";

    compareResultAsText(result, expected);
}
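KeySelector1 and UniqueTupleLongMapper are helpers defined elsewhere in PartitionITCase and are not shown on this page. As a hedged sketch of the key-selector pattern the example relies on, a selector that range-partitions the 3-tuples by their Long field might look like this; the class name and the choice of f1 as the key are assumptions, not necessarily the test's actual KeySelector1:

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;

// Hypothetical key selector: uses the Long field (f1) of the 3-tuple as the
// range-partitioning key.
public class LongFieldKeySelector implements KeySelector<Tuple3<Integer, Long, String>, Long> {
    @Override
    public Long getKey(Tuple3<Integer, Long, String> value) {
        return value.f1;
    }
}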
Example #30
Source File: DataSinkITCase.java From flink with Apache License 2.0
@Test
public void testTupleSortingNestedParallelism1_2() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Tuple2<Integer, Integer>, String, Integer>> ds =
        CollectionDataSets.getGroupSortedNestedTupleDataSet2(env);
    ds.writeAsText(resultPath)
        .sortLocalOutput(1, Order.ASCENDING)
        .sortLocalOutput(2, Order.DESCENDING)
        .setParallelism(1);

    env.execute();

    String expected =
        "((2,1),a,3)\n" +
        "((1,3),a,2)\n" +
        "((1,2),a,1)\n" +
        "((2,2),b,4)\n" +
        "((4,9),c,7)\n" +
        "((3,6),c,6)\n" +
        "((3,3),c,5)\n";

    compareResultsByLinesInMemoryWithStrictOrder(expected, resultPath);
}