org.apache.flink.api.java.tuple.Tuple3 Java Examples
The following examples show how to use
org.apache.flink.api.java.tuple.Tuple3.
Each example notes its source file, the project it comes from, and that project's license.
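Before diving into the project examples, a minimal sketch of the basics they all build on may help: a Tuple3 is created with its constructor or the static Tuple3.of factory, exposes its positional fields as the public members f0, f1, and f2, and serves directly as an element type in Flink programs. The class name and sample data below are illustrative, not drawn from any of the projects cited in the examples.

import java.util.List;

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple3;

public class Tuple3Basics {

    public static void main(String[] args) throws Exception {
        // Two equivalent ways to build a Tuple3.
        Tuple3<Integer, Long, String> a = new Tuple3<>(1, 1L, "Hi");
        Tuple3<Integer, Long, String> b = Tuple3.of(2, 2L, "Hello");

        // Fields are public and positional; they can be read and reassigned in place.
        int first = a.f0;
        b.f2 = "Hello world";

        // Tuple3 is a first-class element type in the DataSet API.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<Tuple3<Integer, Long, String>> ds = env.fromElements(a, b);
        List<Tuple3<Integer, Long, String>> collected = ds.collect();
        System.out.println(collected); // e.g. [(1,1,Hi), (2,2,Hello world)]
    }
}

The examples that follow exercise these same mechanics inside unit tests, graph algorithms, and model-data converters.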
Example #1
Source File: WindowTranslationTest.java From Flink-CEPplus with Apache License 2.0
@Test
@SuppressWarnings("rawtypes")
public void testFoldWithCustomTrigger() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window1 = source
            .keyBy(0)
            .window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
            .trigger(CountTrigger.of(1))
            .fold(new Tuple3<>("", "", 1), new DummyFolder());

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
            (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
            (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof CountTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example #2
Source File: PrefixSpanBatchOp.java From Alink with Apache License 2.0
/**
 * Encode the sequence patterns.
 */
private static Tuple3<String, Long, Long> encodeSequence(int[] sequence, String[] indexToString) {
    StringBuilder sbd = new StringBuilder();
    int itemSetSize = 0;
    long chainLength = 1L;
    long itemCount = 0L;
    for (int i = 1; i < sequence.length - 1; i++) {
        if (sequence[i] == 0) {
            sbd.append(ELEMENT_SEPARATOR);
            chainLength++;
            itemSetSize = 0;
        } else {
            if (itemSetSize > 0) {
                sbd.append(ITEM_SEPARATOR);
            }
            sbd.append(indexToString[sequence[i]]);
            itemSetSize++;
            itemCount++;
        }
    }
    return Tuple3.of(sbd.toString(), itemCount, chainLength);
}
Example #3
Source File: FilterITCase.java From flink with Apache License 2.0
@Test
public void testAllRejectingFilter() throws Exception {
    /*
     * Test all-rejecting filter.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> filterDs = ds.filter(new Filter1());

    List<Tuple3<Integer, Long, String>> result = filterDs.collect();

    String expected = "\n";

    compareResultAsTuples(result, expected);
}
Example #4
Source File: ScatterGatherIteration.java From flink with Apache License 2.0
@Override
public void coGroup(Iterable<Edge<K, EV>> edges, Iterable<Vertex<K, Tuple3<VV, LongValue, LongValue>>> state,
        Collector<Tuple2<K, Message>> out) throws Exception {
    final Iterator<Vertex<K, Tuple3<VV, LongValue, LongValue>>> stateIter = state.iterator();

    if (stateIter.hasNext()) {
        Vertex<K, Tuple3<VV, LongValue, LongValue>> vertexWithDegrees = stateIter.next();

        nextVertex.f0 = vertexWithDegrees.f0;
        nextVertex.f1 = vertexWithDegrees.f1.f0;

        scatterFunction.setInDegree(vertexWithDegrees.f1.f1.getValue());
        scatterFunction.setOutDegree(vertexWithDegrees.f1.f2.getValue());

        scatterFunction.set(edges.iterator(), out, vertexWithDegrees.getId());
        scatterFunction.sendMessages(nextVertex);
    }
}
Example #5
Source File: DocCountVectorizerTrainBatchOp.java From Alink with Apache License 2.0
@Override
public void mapPartition(Iterable<Tuple2<Long, Row>> iterable,
        Collector<DocCountVectorizerModelData> collector) throws Exception {
    List<String> data = new ArrayList<>();
    Tuple3<String, Double, Integer> feature = Tuple3.of(null, null, null);
    for (Tuple2<Long, Row> tuple : iterable) {
        Row row = tuple.f1;
        feature.f0 = row.getField(0).toString();
        feature.f1 = ((Number) row.getField(2)).doubleValue();
        feature.f2 = tuple.f0.intValue();
        data.add(JsonConverter.toJson(feature));
    }
    DocCountVectorizerModelData modelData = new DocCountVectorizerModelData();
    modelData.featureType = featureType;
    modelData.minTF = minTF;
    modelData.list = data;
    collector.collect(modelData);
    // new DocCountVectorizerModelDataConverter().save(modelData, collector);
}
Example #6
Source File: AdamicAdar.java From Flink-CEPplus with Apache License 2.0
@Override
public void reduce(Iterable<Tuple3<T, T, FloatValue>> values, Collector<Result<T>> out) throws Exception {
    double sum = 0;
    Tuple3<T, T, FloatValue> edge = null;

    for (Tuple3<T, T, FloatValue> next : values) {
        edge = next;
        sum += next.f2.getValue();
    }

    if (sum >= minimumScore) {
        output.setVertexId0(edge.f0);
        output.setVertexId1(edge.f1);
        output.setAdamicAdarScore((float) sum);
        out.collect(output);
    }
}
Example #7
Source File: SemanticPropertiesTranslationTest.java From flink with Apache License 2.0
@Test
public void testUnaryFunctionForwardedInLine2() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> input = env.fromElements(new Tuple3<Long, Long, Long>(3L, 2L, 1L));
    input.map(new ReadSetMapper<Tuple3<Long, Long, Long>>()).withForwardedFields("0->1; 2")
            .output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());
    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();

    SingleInputSemanticProperties semantics = mapper.getSemanticProperties();

    FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
    FieldSet fw2 = semantics.getForwardingTargetFields(0, 2);
    assertNotNull(fw1);
    assertNotNull(fw2);
    assertTrue(fw1.contains(1));
    assertTrue(fw2.contains(2));
}
Example #8
Source File: WindowOperatorMigrationTest.java From flink with Apache License 2.0
@Override
public int compare(Object o1, Object o2) {
    if (o1 instanceof Watermark || o2 instanceof Watermark) {
        return 0;
    } else {
        StreamRecord<Tuple3<String, Long, Long>> sr0 = (StreamRecord<Tuple3<String, Long, Long>>) o1;
        StreamRecord<Tuple3<String, Long, Long>> sr1 = (StreamRecord<Tuple3<String, Long, Long>>) o2;
        if (sr0.getTimestamp() != sr1.getTimestamp()) {
            return (int) (sr0.getTimestamp() - sr1.getTimestamp());
        }
        // Compare by timestamp first, then by each tuple field in turn.
        int comparison = sr0.getValue().f0.compareTo(sr1.getValue().f0);
        if (comparison != 0) {
            return comparison;
        }
        comparison = (int) (sr0.getValue().f1 - sr1.getValue().f1);
        if (comparison != 0) {
            return comparison;
        }
        return (int) (sr0.getValue().f2 - sr1.getValue().f2);
    }
}
Example #9
Source File: BucketingSinkTestProgram.java From Flink-CEPplus with Apache License 2.0
@Override
public Tuple4<Integer, Long, Integer, String> map(Tuple3<Integer, Long, String> value) throws IOException {
    // update counter
    Integer counterValue = counter.value();
    if (counterValue == null) {
        counterValue = 0;
    }
    counter.update(counterValue + 1);

    // save last value
    Long lastValue = last.value();
    if (lastValue == null) {
        lastValue = initialValue;
    }
    last.update(value.f1);

    return Tuple4.of(value.f0, value.f1 - lastValue, counterValue, value.f2);
}
Example #10
Source File: UnionITCase.java From flink with Apache License 2.0
@Test
public void testUnion2IdenticalDataSets() throws Exception {
    /*
     * Union of 2 identical data sets.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> unionDs = ds.union(CollectionDataSets.get3TupleDataSet(env));

    List<Tuple3<Integer, Long, String>> result = unionDs.collect();

    String expected = FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING;

    compareResultAsTuples(result, expected);
}
Example #11
Source File: GraphLoader.java From OSTMap with Apache License 2.0
private DataSet<Tuple3<String, String, UserEdgeValues>> getUserEdges(DataSet<JSONObject> jsonData) {
    DataSet<Tuple3<String, String, UserEdgeValues>> userEdges =
            jsonData.flatMap(new FlatMapFunction<JSONObject, Tuple3<String, String, UserEdgeValues>>() {
        @Override
        public void flatMap(JSONObject jsonObject, Collector<Tuple3<String, String, UserEdgeValues>> out) throws Exception {
            // count initialized to 1
            int count = 1;

            // from the current node
            JSONObject user = jsonObject.getJSONObject("user");
            String from = user.getString("id_str");

            // to other nodes
            JSONObject entities = jsonObject.getJSONObject("entities");
            JSONArray userMentions = entities.getJSONArray("user_mentions");
            for (int i = 0; i < userMentions.length(); i++) {
                JSONObject current = userMentions.getJSONObject(i);
                String to = current.getString("id_str");
                out.collect(new Tuple3<String, String, UserEdgeValues>(from, to, new UserEdgeValues(count)));
            }
        }
    });
    return userEdges;
}
Example #12
Source File: SelectorFunctionKeysTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testAreCompatible2() throws Keys.IncompatibleKeysException {
    TypeInformation<PojoWithMultiplePojos> t1 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);
    TypeInformation<Tuple3<Long, Pojo1, Integer>> t2 = new TupleTypeInfo<>(
            BasicTypeInfo.LONG_TYPE_INFO,
            TypeExtractor.getForClass(Pojo1.class),
            BasicTypeInfo.INT_TYPE_INFO);
    TypeInformation<Tuple2<Integer, String>> kt = new TupleTypeInfo<>(
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO);

    Keys<PojoWithMultiplePojos> k1 = new Keys.SelectorFunctionKeys<>(new KeySelector3(), t1, kt);
    Keys<Tuple3<Long, Pojo1, Integer>> k2 = new Keys.SelectorFunctionKeys<>(new KeySelector4(), t2, kt);

    Assert.assertTrue(k1.areCompatible(k2));
    Assert.assertTrue(k2.areCompatible(k1));
}
Example #13
Source File: GroupReduceOperator.java From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked") private static <IN, OUT, K1, K2> PlanUnwrappingSortedReduceGroupOperator<IN, OUT, K1, K2> translateSelectorFunctionSortedReducer( SelectorFunctionKeys<IN, ?> rawGroupingKey, SelectorFunctionKeys<IN, ?> rawSortingKey, Ordering groupOrdering, GroupReduceFunction<IN, OUT> function, TypeInformation<OUT> outputType, String name, Operator<IN> input, boolean combinable) { final SelectorFunctionKeys<IN, K1> groupingKey = (SelectorFunctionKeys<IN, K1>) rawGroupingKey; final SelectorFunctionKeys<IN, K2> sortingKey = (SelectorFunctionKeys<IN, K2>) rawSortingKey; TypeInformation<Tuple3<K1, K2, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(groupingKey, sortingKey); Operator<Tuple3<K1, K2, IN>> inputWithKey = KeyFunctions.appendKeyExtractor(input, groupingKey, sortingKey); PlanUnwrappingSortedReduceGroupOperator<IN, OUT, K1, K2> reducer = new PlanUnwrappingSortedReduceGroupOperator<>( function, groupingKey, sortingKey, name, outputType, typeInfoWithKey, combinable); reducer.setInput(inputWithKey); reducer.setGroupOrder(groupOrdering); return reducer; }
Example #14
Source File: EdgeDegreesPairTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testWithSimpleGraph() throws Exception {
    String expectedResult =
            "(0,1,((null),(2,2,0),(3,0,3)))\n" +
            "(0,2,((null),(2,2,0),(3,2,1)))\n" +
            "(2,1,((null),(3,2,1),(3,0,3)))\n" +
            "(2,3,((null),(3,2,1),(4,2,2)))\n" +
            "(3,1,((null),(4,2,2),(3,0,3)))\n" +
            "(3,4,((null),(4,2,2),(1,0,1)))\n" +
            "(5,3,((null),(1,1,0),(4,2,2)))";

    DataSet<Edge<IntValue, Tuple3<NullValue, Degrees, Degrees>>> degreesPair = directedSimpleGraph
            .run(new EdgeDegreesPair<>());

    TestBaseUtils.compareResultAsText(degreesPair.collect(), expectedResult);
}
Example #15
Source File: UnionITCase.java From flink with Apache License 2.0
@Test
public void testUnion5IdenticalDataSets() throws Exception {
    /*
     * Union of 5 identical data sets, with multiple unions.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> unionDs = ds.union(CollectionDataSets.get3TupleDataSet(env))
            .union(CollectionDataSets.get3TupleDataSet(env))
            .union(CollectionDataSets.get3TupleDataSet(env))
            .union(CollectionDataSets.get3TupleDataSet(env));

    List<Tuple3<Integer, Long, String>> result = unionDs.collect();

    String expected = FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING
            + FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING;

    compareResultAsTuples(result, expected);
}
Example #16
Source File: SelectorFunctionKeysTest.java From flink with Apache License 2.0
@Test
public void testAreCompatible2() throws Keys.IncompatibleKeysException {
    TypeInformation<PojoWithMultiplePojos> t1 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);
    TypeInformation<Tuple3<Long, Pojo1, Integer>> t2 = new TupleTypeInfo<>(
            BasicTypeInfo.LONG_TYPE_INFO,
            TypeExtractor.getForClass(Pojo1.class),
            BasicTypeInfo.INT_TYPE_INFO);
    TypeInformation<Tuple2<Integer, String>> kt = new TupleTypeInfo<>(
            BasicTypeInfo.INT_TYPE_INFO,
            BasicTypeInfo.STRING_TYPE_INFO);

    Keys<PojoWithMultiplePojos> k1 = new Keys.SelectorFunctionKeys<>(new KeySelector3(), t1, kt);
    Keys<Tuple3<Long, Pojo1, Integer>> k2 = new Keys.SelectorFunctionKeys<>(new KeySelector4(), t2, kt);

    Assert.assertTrue(k1.areCompatible(k2));
    Assert.assertTrue(k2.areCompatible(k1));
}
Example #17
Source File: ReduceWithCombinerITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testForkingReduceOnKeyedDataset() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);

    // creates the input data and distributes it evenly among the available downstream tasks
    DataSet<Tuple3<String, Integer, Boolean>> input = createKeyedInput(env);
    UnsortedGrouping<Tuple3<String, Integer, Boolean>> counts = input.groupBy(0);

    DataSet<Tuple3<String, Integer, Boolean>> r1 = counts.reduceGroup(new KeyedCombReducer());
    DataSet<Tuple3<String, Integer, Boolean>> r2 = counts.reduceGroup(new KeyedGroupCombReducer());

    List<Tuple3<String, Integer, Boolean>> actual = r1.union(r2).collect();
    String expected = "k1,6,true\n" +
            "k2,4,true\n" +
            "k1,6,true\n" +
            "k2,4,true\n";

    compareResultAsTuples(actual, expected);
}
Example #18
Source File: AnnObjFunc.java From Alink with Apache License 2.0
@Override
protected double calcLoss(Tuple3<Double, Double, Vector> labledVector, DenseVector coefVector) {
    if (topologyModel == null) {
        topologyModel = topology.getModel(coefVector);
    } else {
        topologyModel.resetModel(coefVector);
    }
    Tuple2<DenseMatrix, DenseMatrix> unstacked = stacker.unstack(labledVector);
    return topologyModel.computeGradient(unstacked.f0, unstacked.f1, null);
}
Example #19
Source File: TupleComparatorISD1Test.java From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked") @Override protected TupleSerializer<Tuple3<Integer, String, Double>> createSerializer() { return new TupleSerializer<Tuple3<Integer, String, Double>>( (Class<Tuple3<Integer, String, Double>>) (Class<?>) Tuple3.class, new TypeSerializer[]{ new IntSerializer(), new StringSerializer(), new DoubleSerializer()}); }
Example #20
Source File: MultiStringIndexerModelDataConverter.java From Alink with Apache License 2.0
@Override
public MultiStringIndexerModelData load(List<Row> rows) {
    MultiStringIndexerModelData modelData = new MultiStringIndexerModelData();
    modelData.tokenAndIndex = new ArrayList<>();
    modelData.tokenNumber = new HashMap<>();

    for (Row row : rows) {
        long colIndex = (Long) row.getField(0);
        if (colIndex < 0L) {
            modelData.meta = Params.fromJson((String) row.getField(1));
        } else {
            int columnIndex = ((Long) row.getField(0)).intValue();
            Long tokenIndex = Long.valueOf(String.valueOf(row.getField(2)));
            modelData.tokenAndIndex.add(Tuple3.of(columnIndex, (String) row.getField(1), tokenIndex));
            modelData.tokenNumber.merge(columnIndex, 1L, Long::sum);
        }
    }

    // Ensure that every column has a token count, even if it is zero.
    int numFields = 0;
    if (modelData.meta != null) {
        numFields = modelData.meta.get(HasSelectedCols.SELECTED_COLS).length;
    }
    for (int i = 0; i < numFields; i++) {
        modelData.tokenNumber.merge(i, 0L, Long::sum);
    }
    return modelData;
}
Example #21
Source File: CustomDistributionITCase.java From flink with Apache License 2.0
@Test(expected = IllegalArgumentException.class)
public void testPartitionMoreThanDistribution() throws Exception {
    final TestDataDist2 dist = new TestDataDist2();

    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> input = CollectionDataSets.get3TupleDataSet(env);
    DataSetUtils.partitionByRange(input, dist, 0, 1, 2);
}
Example #22
Source File: EdgeDegreesPairTest.java From flink with Apache License 2.0
@Test
public void testWithEmptyGraphWithVertices() throws Exception {
    DataSet<Edge<LongValue, Tuple3<NullValue, Degrees, Degrees>>> degreesPair = emptyGraphWithVertices
            .run(new EdgeDegreesPair<>());

    assertEquals(0, degreesPair.collect().size());
}
Example #23
Source File: AdamicAdar.java From flink with Apache License 2.0
@Override
public void reduce(Iterable<Tuple4<IntValue, T, T, FloatValue>> values, Collector<Tuple3<T, T, FloatValue>> out) throws Exception {
    int visitedCount = 0;

    for (Tuple4<IntValue, T, T, FloatValue> edge : values) {
        output.f1 = edge.f2;
        output.f2 = edge.f3;

        for (int i = 0; i < visitedCount; i++) {
            output.f0 = visited.get(i);
            out.collect(output);
        }

        if (visitedCount < GROUP_SIZE) {
            if (!initialized) {
                initialized = true;
                for (int i = 0; i < GROUP_SIZE; i++) {
                    visited.add(edge.f2.copy());
                }
            } else {
                edge.f2.copyTo(visited.get(visitedCount));
            }
            visitedCount += 1;
        }
    }
}
Example #24
Source File: TupleComparatorILDXC2Test.java From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked") @Override protected TupleSerializer<Tuple3<Integer, Long, Double>> createSerializer() { return new TupleSerializer<Tuple3<Integer, Long, Double>>( (Class<Tuple3<Integer, Long, Double>>) (Class<?>) Tuple3.class, new TypeSerializer[]{ new IntSerializer(), new LongSerializer(), new DoubleSerializer()}); }
Example #25
Source File: CoGroupITCase.java From flink with Apache License 2.0
@Test
public void testCoGroupWithAtomicType2() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Integer> ds1 = env.fromElements(0, 1, 2);
    DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.getSmall3TupleDataSet(env);

    DataSet<Tuple3<Integer, Long, String>> coGroupDs = ds1.coGroup(ds2).where("*").equalTo(0).with(new CoGroupAtomic2());

    List<Tuple3<Integer, Long, String>> result = coGroupDs.collect();

    String expected = "(1,1,Hi)\n" +
            "(2,2,Hello)";

    compareResultAsText(result, expected);
}
Example #26
Source File: GroupCombineITCase.java From flink with Apache License 2.0
@Test
public void testPartialReduceWithDifferentInputOutputType() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // data
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);

    DataSet<Tuple2<Long, Tuple3<Integer, Long, String>>> dsWrapped = ds
            // wrap values as Kv pairs with the grouping key as key
            .map(new Tuple3KvWrapper());

    List<Tuple2<Integer, Long>> result = dsWrapped
            .groupBy(0)
            // reduce partially
            .combineGroup(new Tuple3toTuple2GroupReduce())
            .groupBy(0)
            // reduce fully to check result
            .reduceGroup(new Tuple2toTuple2GroupReduce())
            // unwrap
            .map(new MapFunction<Tuple2<Long, Tuple2<Integer, Long>>, Tuple2<Integer, Long>>() {
                @Override
                public Tuple2<Integer, Long> map(Tuple2<Long, Tuple2<Integer, Long>> value) throws Exception {
                    return value.f1;
                }
            })
            .collect();

    String expected = "1,3\n" +
            "5,20\n" +
            "15,58\n" +
            "34,52\n" +
            "65,70\n" +
            "111,96\n";

    compareResultAsTuples(result, expected);
}
Example #27
Source File: FlatMapITCase.java From flink with Apache License 2.0
@Test
public void testFlatMapperIfUDFReturnsInputObjectMultipleTimesWhileChangingIt() throws Exception {
    /*
     * Test flatmapper if UDF returns input object
     * multiple times and changes it in between.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> inputObjFlatMapDs = ds.flatMap(new FlatMapper6());

    List<Tuple3<Integer, Long, String>> result = inputObjFlatMapDs.collect();

    String expected = "0,1,Hi\n" +
            "0,2,Hello\n" +
            "1,2,Hello\n" +
            "0,2,Hello world\n" +
            "1,2,Hello world\n" +
            "2,2,Hello world\n" +
            "0,3,I am fine.\n" +
            "0,3,Luke Skywalker\n" +
            "1,3,Luke Skywalker\n" +
            "0,4,Comment#1\n" +
            "1,4,Comment#1\n" +
            "2,4,Comment#1\n" +
            "0,4,Comment#3\n" +
            "0,4,Comment#4\n" +
            "1,4,Comment#4\n" +
            "0,5,Comment#5\n" +
            "1,5,Comment#5\n" +
            "2,5,Comment#5\n" +
            "0,5,Comment#7\n" +
            "0,5,Comment#8\n" +
            "1,5,Comment#8\n" +
            "0,5,Comment#9\n" +
            "1,5,Comment#9\n" +
            "2,5,Comment#9\n" +
            "0,6,Comment#11\n" +
            "0,6,Comment#12\n" +
            "1,6,Comment#12\n" +
            "0,6,Comment#13\n" +
            "1,6,Comment#13\n" +
            "2,6,Comment#13\n" +
            "0,6,Comment#15\n";

    compareResultAsTuples(result, expected);
}
Example #28
Source File: CrossITCase.java From flink with Apache License 2.0
@Test
public void testCorrectnessOfDefaultCross() throws Exception {
    /*
     * check correctness of default cross
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.getSmall3TupleDataSet(env);
    DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.getSmall5TupleDataSet(env);
    DataSet<Tuple2<Tuple3<Integer, Long, String>, Tuple5<Integer, Long, Integer, String, Long>>> crossDs = ds.cross(ds2);

    List<Tuple2<Tuple3<Integer, Long, String>, Tuple5<Integer, Long, Integer, String, Long>>> result = crossDs.collect();

    String expected = "(1,1,Hi),(2,2,1,Hallo Welt,2)\n" +
            "(1,1,Hi),(1,1,0,Hallo,1)\n" +
            "(1,1,Hi),(2,3,2,Hallo Welt wie,1)\n" +
            "(2,2,Hello),(2,2,1,Hallo Welt,2)\n" +
            "(2,2,Hello),(1,1,0,Hallo,1)\n" +
            "(2,2,Hello),(2,3,2,Hallo Welt wie,1)\n" +
            "(3,2,Hello world),(2,2,1,Hallo Welt,2)\n" +
            "(3,2,Hello world),(1,1,0,Hallo,1)\n" +
            "(3,2,Hello world),(2,3,2,Hallo Welt wie,1)\n";

    compareResultAsTuples(result, expected);
}
Example #29
Source File: JoinITCase.java From flink with Apache License 2.0
@Test
public void testProjectOnATuple1Input() throws Exception {
    /*
     * Project join on a tuple input 1
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple3<Integer, Long, String>> ds1 = CollectionDataSets.getSmall3TupleDataSet(env);
    DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.get5TupleDataSet(env);
    DataSet<Tuple6<String, Long, String, Integer, Long, Long>> joinDs = ds1.join(ds2)
            .where(1)
            .equalTo(1)
            .projectFirst(2, 1)
            .projectSecond(3)
            .projectFirst(0)
            .projectSecond(4, 1);

    List<Tuple6<String, Long, String, Integer, Long, Long>> result = joinDs.collect();

    String expected = "Hi,1,Hallo,1,1,1\n" +
            "Hello,2,Hallo Welt,2,2,2\n" +
            "Hello world,2,Hallo Welt,3,2,2\n";

    compareResultAsTuples(result, expected);
}
Example #30
Source File: WordCountNestedPOJOITCase.java From flink with Apache License 2.0
public WC(String t, int c) {
    this.count = c;
    this.complex = new ComplexNestedClass();
    this.complex.word = new Tuple3<Long, Long, String>(0L, 0L, "egal");
    this.complex.date = new Date();
    this.complex.someFloat = 0.0f;
    this.complex.someNumber = 666;
    this.complex.someTest = t;
}