org.apache.flink.api.common.operators.util.FieldSet Java Examples
The following examples show how to use
org.apache.flink.api.common.operators.util.FieldSet.
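Before the individual examples, a minimal sketch of the FieldSet API as it is used below (varargs constructor, addField, contains, size). The field indices and the wrapper class are arbitrary illustrations, not part of any example:

import org.apache.flink.api.common.operators.util.FieldSet;

public class FieldSetBasicsSketch {

    public static void main(String[] args) {
        FieldSet keys = new FieldSet(0, 2);        // a set holding fields 0 and 2
        FieldSet extended = keys.addField(5);      // returns a new set; 'keys' is unchanged

        System.out.println(keys.size());           // 2
        System.out.println(extended.contains(5));  // true
        System.out.println(extended.contains(1));  // false
    }
}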
Example #1
Source File: DualInputSemanticProperties.java (from flink, Apache License 2.0)

/**
 * Adds, to the existing information, a field that is forwarded directly
 * from the source record(s) in the first input to the destination
 * record(s).
 *
 * @param input the input of the source field
 * @param sourceField the position in the source record
 * @param targetField the position in the destination record
 */
public void addForwardedField(int input, int sourceField, int targetField) {
    Map<Integer, FieldSet> fieldMapping;

    if (input != 0 && input != 1) {
        throw new IndexOutOfBoundsException();
    } else if (input == 0) {
        fieldMapping = this.fieldMapping1;
    } else {
        fieldMapping = this.fieldMapping2;
    }

    if (isTargetFieldPresent(targetField, fieldMapping)) {
        throw new InvalidSemanticAnnotationException("Target field " + targetField + " was added twice to input " + input);
    }

    FieldSet targetFields = fieldMapping.get(sourceField);
    if (targetFields != null) {
        fieldMapping.put(sourceField, targetFields.addField(targetField));
    } else {
        fieldMapping.put(sourceField, new FieldSet(targetField));
    }
}
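As a quick orientation for the method above, a hedged usage sketch of addForwardedField together with getForwardingTargetFields (shown in Example #30 below). The no-argument constructor, the import paths, and the concrete field positions are assumptions made for illustration:

import org.apache.flink.api.common.operators.DualInputSemanticProperties;
import org.apache.flink.api.common.operators.util.FieldSet;

public class ForwardedFieldSketch {

    public static void main(String[] args) {
        DualInputSemanticProperties props = new DualInputSemanticProperties();

        // input 0: source field 1 is forwarded to target field 3
        props.addForwardedField(0, 1, 3);
        // input 1: source field 2 keeps its position
        props.addForwardedField(1, 2, 2);

        FieldSet targets = props.getForwardingTargetFields(0, 1);
        System.out.println(targets.contains(3)); // true
    }
}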
Example #2
Source File: FieldSetTest.java (from Flink-CEPplus, Apache License 2.0)

@Test
public void testImmutability() {
    FieldSet s1 = new FieldSet();
    FieldSet s2 = new FieldSet(5);
    FieldSet s3 = new FieldSet(Integer.valueOf(7));
    FieldSet s4 = new FieldSet(5, 4, 7, 6);

    s1.addFields(s2).addFields(s3);
    s2.addFields(s4);
    s4.addFields(s1);

    s1.addField(Integer.valueOf(14));
    s2.addFields(78, 13, 66, 3);

    assertEquals(0, s1.size());
    assertEquals(1, s2.size());
    assertEquals(1, s3.size());
    assertEquals(4, s4.size());
}
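The test above hinges on FieldSet being immutable: addField and addFields return a new set rather than modifying the receiver, so a discarded return value means no change. A minimal sketch of that behavior (the field values are arbitrary):

import org.apache.flink.api.common.operators.util.FieldSet;

public class FieldSetImmutabilitySketch {

    public static void main(String[] args) {
        FieldSet s = new FieldSet(1);
        s.addField(2);               // result discarded; s still has size 1
        FieldSet t = s.addField(2);  // t has size 2, s remains unchanged

        System.out.println(s.size()); // 1
        System.out.println(t.size()); // 2
    }
}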
Example #3
Source File: LocalPropertiesFilteringTest.java (from flink, Apache License 2.0)

@Test
public void testUniqueFieldsPreserved3() {
    SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(sp, new String[]{"0->7;1->6;2->5;3->4;4->3"}, null, null, tupleInfo, tupleInfo);

    LocalProperties lProps = new LocalProperties();
    lProps = lProps.addUniqueFields(new FieldSet(0, 1, 2));
    lProps = lProps.addUniqueFields(new FieldSet(3, 4));
    lProps = lProps.addUniqueFields(new FieldSet(4, 5, 6));

    LocalProperties filtered = lProps.filterBySemanticProperties(sp, 0);
    FieldSet expected1 = new FieldSet(5, 6, 7);
    FieldSet expected2 = new FieldSet(3, 4);

    assertNull(filtered.getGroupedFields());
    assertNull(filtered.getOrdering());
    assertNotNull(filtered.getUniqueFields());
    assertEquals(2, filtered.getUniqueFields().size());
    assertTrue(filtered.getUniqueFields().contains(expected1));
    assertTrue(filtered.getUniqueFields().contains(expected2));
}
Example #4
Source File: SemanticPropertiesTranslationTest.java (from flink, Apache License 2.0)

@Test
public void testUnaryFunctionWildcardForwardedAnnotation() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(new Tuple3<Long, String, Integer>(3L, "test", 42));
    input.map(new WildcardForwardedMapper<Tuple3<Long, String, Integer>>()).output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());
    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();

    SingleInputSemanticProperties semantics = mapper.getSemanticProperties();

    FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
    FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
    FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);
    assertNotNull(fw1);
    assertNotNull(fw2);
    assertNotNull(fw3);
    assertTrue(fw1.contains(0));
    assertTrue(fw2.contains(1));
    assertTrue(fw3.contains(2));
}
Example #5
Source File: GlobalPropertiesFilteringTest.java (from Flink-CEPplus, Apache License 2.0)

@Test
public void testAllErased2() {
    SingleInputSemanticProperties semProps = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(semProps, new String[]{"2"}, null, null, tupleInfo, tupleInfo);

    GlobalProperties gprops = new GlobalProperties();
    gprops.setHashPartitioned(new FieldList(0, 1));
    gprops.addUniqueFieldCombination(new FieldSet(3, 4));
    gprops.addUniqueFieldCombination(new FieldSet(5, 6));

    GlobalProperties result = gprops.filterBySemanticProperties(semProps, 0);

    assertEquals(PartitioningProperty.RANDOM_PARTITIONED, result.getPartitioning());
    assertNull(result.getPartitioningFields());
    assertNull(result.getPartitioningOrdering());
    assertNull(result.getUniqueFieldCombination());
}
Example #6
Source File: SemanticPropertiesTranslationTest.java (from flink, Apache License 2.0)

@Test
public void testUnaryFunctionReadFieldsAnnotation() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> input = env.fromElements(new Tuple3<Long, Long, Long>(3L, 2L, 1L));
    input.map(new ReadSetMapper<Tuple3<Long, Long, Long>>()).output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());
    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();

    SingleInputSemanticProperties semantics = mapper.getSemanticProperties();

    FieldSet read = semantics.getReadFields(0);
    assertNotNull(read);
    assertEquals(2, read.size());
    assertTrue(read.contains(0));
    assertTrue(read.contains(2));
}
Example #7
Source File: GroupReduceWithCombineProperties.java (from Flink-CEPplus, Apache License 2.0)

public GroupReduceWithCombineProperties(FieldSet groupKeys, Ordering additionalOrderKeys, Partitioner<?> customPartitioner) {
    super(groupKeys);

    // if we have an additional ordering, construct the ordering to have primarily the grouping fields
    if (additionalOrderKeys != null) {
        this.ordering = new Ordering();
        for (Integer key : this.keyList) {
            this.ordering.appendOrdering(key, null, Order.ANY);
        }

        // and next the additional order fields
        for (int i = 0; i < additionalOrderKeys.getNumberOfFields(); i++) {
            Integer field = additionalOrderKeys.getFieldNumber(i);
            Order order = additionalOrderKeys.getOrder(i);
            this.ordering.appendOrdering(field, additionalOrderKeys.getType(i), order);
        }
    } else {
        this.ordering = null;
    }

    this.customPartitioner = customPartitioner;
}
Example #8
Source File: RequestedGlobalPropertiesFilteringTest.java (from flink, Apache License 2.0)

@Test
public void testAnyPartitioningPreserved1() {
    SingleInputSemanticProperties sProp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(sProp, new String[]{"0;3;4"}, null, null, tupleInfo, tupleInfo);

    RequestedGlobalProperties rgProps = new RequestedGlobalProperties();
    rgProps.setAnyPartitioning(new FieldSet(0, 3, 4));

    RequestedGlobalProperties filtered = rgProps.filterBySemanticProperties(sProp, 0);

    assertNotNull(filtered);
    assertEquals(PartitioningProperty.ANY_PARTITIONING, filtered.getPartitioning());
    assertNotNull(filtered.getPartitionedFields());
    assertEquals(3, filtered.getPartitionedFields().size());
    assertTrue(filtered.getPartitionedFields().contains(0));
    assertTrue(filtered.getPartitionedFields().contains(3));
    assertTrue(filtered.getPartitionedFields().contains(4));
    assertNull(filtered.getDataDistribution());
    assertNull(filtered.getCustomPartitioner());
    assertNull(filtered.getOrdering());
}
Example #9
Source File: SemanticPropertiesTranslationTest.java (from flink, Apache License 2.0)

@Test
public void testUnaryFunctionForwardedInLine2() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> input = env.fromElements(new Tuple3<Long, Long, Long>(3L, 2L, 1L));
    input.map(new ReadSetMapper<Tuple3<Long, Long, Long>>()).withForwardedFields("0->1; 2")
            .output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());
    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();

    SingleInputSemanticProperties semantics = mapper.getSemanticProperties();

    FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
    FieldSet fw2 = semantics.getForwardingTargetFields(0, 2);
    assertNotNull(fw1);
    assertNotNull(fw2);
    assertTrue(fw1.contains(1));
    assertTrue(fw2.contains(2));
}
Example #10
Source File: SemanticPropertiesTranslationTest.java (from flink, Apache License 2.0)

@Test
public void testUnaryFunctionForwardedInLine3() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> input = env.fromElements(new Tuple3<Long, Long, Long>(3L, 2L, 1L));
    input.map(new ReadSetMapper<Tuple3<Long, Long, Long>>()).withForwardedFields("0->1; 2")
            .output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());
    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();

    SingleInputSemanticProperties semantics = mapper.getSemanticProperties();

    FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
    FieldSet fw2 = semantics.getForwardingTargetFields(0, 2);
    assertNotNull(fw1);
    assertNotNull(fw2);
    assertTrue(fw1.contains(1));
    assertTrue(fw2.contains(2));
}
Example #11
Source File: SemanticPropertiesTranslationTest.java (from Flink-CEPplus, Apache License 2.0)

@Test
public void testUnaryFunctionForwardedInLine1() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> input = env.fromElements(new Tuple3<Long, Long, Long>(3L, 2L, 1L));
    input.map(new NoAnnotationMapper<Tuple3<Long, Long, Long>>()).withForwardedFields("0->1; 2")
            .output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());
    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();

    SingleInputSemanticProperties semantics = mapper.getSemanticProperties();

    FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
    FieldSet fw2 = semantics.getForwardingTargetFields(0, 2);
    assertNotNull(fw1);
    assertNotNull(fw2);
    assertTrue(fw1.contains(1));
    assertTrue(fw2.contains(2));
}
Example #12
Source File: CompilerHints.java (from flink, Apache License 2.0)

protected void copyFrom(CompilerHints source) {
    this.outputSize = source.outputSize;
    this.outputCardinality = source.outputCardinality;
    this.avgOutputRecordSize = source.avgOutputRecordSize;
    this.filterFactor = source.filterFactor;

    if (source.uniqueFields != null && source.uniqueFields.size() > 0) {
        if (this.uniqueFields == null) {
            this.uniqueFields = new HashSet<FieldSet>();
        } else {
            this.uniqueFields.clear();
        }
        this.uniqueFields.addAll(source.uniqueFields);
    }
}
Example #13
Source File: GroupReduceNode.java (from flink, Apache License 2.0)

@Override
protected SemanticProperties getSemanticPropertiesForLocalPropertyFiltering() {
    // Local properties for GroupReduce may only be preserved on key fields.
    SingleInputSemanticProperties origProps = getOperator().getSemanticProperties();
    SingleInputSemanticProperties filteredProps = new SingleInputSemanticProperties();
    FieldSet readSet = origProps.getReadFields(0);
    if (readSet != null) {
        filteredProps.addReadFields(readSet);
    }

    // only add forward field information for key fields
    if (this.keys != null) {
        for (int f : this.keys) {
            FieldSet targets = origProps.getForwardingTargetFields(0, f);
            for (int t : targets) {
                filteredProps.addForwardedField(f, t);
            }
        }
    }
    return filteredProps;
}
Example #14
Source File: RequestedGlobalPropertiesFilteringTest.java (from flink, Apache License 2.0)

@Test
public void testHashPartitioningPreserved1() {
    SingleInputSemanticProperties sProp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(sProp, new String[]{"0;3;4"}, null, null, tupleInfo, tupleInfo);

    RequestedGlobalProperties rgProps = new RequestedGlobalProperties();
    rgProps.setHashPartitioned(new FieldSet(0, 3, 4));

    RequestedGlobalProperties filtered = rgProps.filterBySemanticProperties(sProp, 0);

    assertNotNull(filtered);
    assertEquals(PartitioningProperty.HASH_PARTITIONED, filtered.getPartitioning());
    assertNotNull(filtered.getPartitionedFields());
    assertEquals(3, filtered.getPartitionedFields().size());
    assertTrue(filtered.getPartitionedFields().contains(0));
    assertTrue(filtered.getPartitionedFields().contains(3));
    assertTrue(filtered.getPartitionedFields().contains(4));
    assertNull(filtered.getDataDistribution());
    assertNull(filtered.getCustomPartitioner());
    assertNull(filtered.getOrdering());
}
Example #15
Source File: SemanticPropertiesTranslationTest.java (from Flink-CEPplus, Apache License 2.0)

@Test
public void testUnaryFunctionForwardedInLine2() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> input = env.fromElements(new Tuple3<Long, Long, Long>(3L, 2L, 1L));
    input.map(new ReadSetMapper<Tuple3<Long, Long, Long>>()).withForwardedFields("0->1; 2")
            .output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());
    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();

    SingleInputSemanticProperties semantics = mapper.getSemanticProperties();

    FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
    FieldSet fw2 = semantics.getForwardingTargetFields(0, 2);
    assertNotNull(fw1);
    assertNotNull(fw2);
    assertTrue(fw1.contains(1));
    assertTrue(fw2.contains(2));
}
Example #16
Source File: RequestedGlobalPropertiesFilteringTest.java (from Flink-CEPplus, Apache License 2.0)

@Test
public void testAnyPartitioningPreserved2() {
    SingleInputSemanticProperties sProp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(sProp, new String[]{"2->0;1->3;7->4"}, null, null, tupleInfo, tupleInfo);

    RequestedGlobalProperties rgProps = new RequestedGlobalProperties();
    rgProps.setAnyPartitioning(new FieldSet(0, 3, 4));

    RequestedGlobalProperties filtered = rgProps.filterBySemanticProperties(sProp, 0);

    assertNotNull(filtered);
    assertEquals(PartitioningProperty.ANY_PARTITIONING, filtered.getPartitioning());
    assertNotNull(filtered.getPartitionedFields());
    assertEquals(3, filtered.getPartitionedFields().size());
    assertTrue(filtered.getPartitionedFields().contains(1));
    assertTrue(filtered.getPartitionedFields().contains(2));
    assertTrue(filtered.getPartitionedFields().contains(7));
    assertNull(filtered.getDataDistribution());
    assertNull(filtered.getCustomPartitioner());
    assertNull(filtered.getOrdering());
}
Example #17
Source File: PropertyDataSourceTest.java (from flink, Apache License 2.0)

@Test
public void checkSinglePartitionedGroupedSource8() {
    ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);

    DataSource<Tuple3<Long, SomePojo, String>> data = env.fromCollection(tuple3PojoData, tuple3PojoType);

    data.getSplitDataProperties()
            .splitsPartitionedBy("f1")
            .splitsGroupedBy("f1.stringField");

    data.output(new DiscardingOutputFormat<Tuple3<Long, SomePojo, String>>());

    Plan plan = env.createProgramPlan();

    // submit the plan to the compiler
    OptimizedPlan oPlan = compileNoStats(plan);

    // check the optimized Plan
    SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
    SourcePlanNode sourceNode = (SourcePlanNode) sinkNode.getPredecessor();

    GlobalProperties gprops = sourceNode.getGlobalProperties();
    LocalProperties lprops = sourceNode.getLocalProperties();

    Assert.assertTrue((new FieldSet(gprops.getPartitioningFields().toArray())).equals(new FieldSet(1, 2, 3)));
    Assert.assertTrue(gprops.getPartitioning() == PartitioningProperty.ANY_PARTITIONING);
    Assert.assertTrue(lprops.getGroupedFields() == null);
    Assert.assertTrue(lprops.getOrdering() == null);
}
Example #18
Source File: RequestedGlobalProperties.java (from flink, Apache License 2.0)

/**
 * Sets these properties to request a custom partitioning with the given {@link Partitioner} instance.
 *
 * If the fields are provided as {@link FieldSet}, then any permutation of the fields is a
 * valid partitioning, including subsets. If the fields are given as a {@link FieldList},
 * then only an exact partitioning on the fields matches this requested partitioning.
 *
 * @param partitionedFields The key fields for the partitioning.
 */
public void setCustomPartitioned(FieldSet partitionedFields, Partitioner<?> partitioner) {
    if (partitionedFields == null || partitioner == null) {
        throw new NullPointerException();
    }

    this.partitioning = PartitioningProperty.CUSTOM_PARTITIONING;
    this.partitioningFields = partitionedFields;
    this.ordering = null;
    this.customPartitioner = partitioner;
}
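A hedged sketch of how setCustomPartitioned might be called. The modulo partitioner, the key fields (0 and 2), and the import paths are assumptions for illustration only; the Partitioner interface is assumed to be org.apache.flink.api.common.functions.Partitioner:

import org.apache.flink.api.common.functions.Partitioner;
import org.apache.flink.api.common.operators.util.FieldSet;
import org.apache.flink.optimizer.dataproperties.RequestedGlobalProperties;

public class CustomPartitioningRequestSketch {

    public static void main(String[] args) {
        // Illustrative partitioner: routes records by key modulo the number of partitions.
        Partitioner<Long> modPartitioner = new Partitioner<Long>() {
            @Override
            public int partition(Long key, int numPartitions) {
                return (int) (Math.abs(key) % numPartitions);
            }
        };

        RequestedGlobalProperties req = new RequestedGlobalProperties();
        // Passing the keys as a FieldSet means any permutation or subset of the fields
        // satisfies the request, per the Javadoc above.
        req.setCustomPartitioned(new FieldSet(0, 2), modPartitioner);
    }
}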
Example #19
Source File: SemanticPropUtilTest.java (from flink, Apache License 2.0)

@Test
public void testReadFieldsPojoInTuple() {
    String[] readFields = { "f0; f2.int1; f2.string1" };

    SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(sp, null, null, readFields, pojoInTupleType, pojo2Type);

    FieldSet fs = sp.getReadFields(0);
    assertTrue(fs.size() == 3);
    assertTrue(fs.contains(0));
    assertTrue(fs.contains(2));
    assertTrue(fs.contains(5));
}
Example #20
Source File: BinaryUnionNode.java (from flink, Apache License 2.0)

@Override
public FieldSet getReadFields(int input) {
    if (input != 0 && input != 1) {
        throw new IndexOutOfBoundsException();
    }
    return FieldSet.EMPTY_SET;
}
Example #21
Source File: PlanNode.java (from flink, Apache License 2.0)

public void updatePropertiesWithUniqueSets(Set<FieldSet> uniqueFieldCombinations) {
    if (uniqueFieldCombinations == null || uniqueFieldCombinations.isEmpty()) {
        return;
    }
    for (FieldSet fields : uniqueFieldCombinations) {
        this.globalProps.addUniqueFieldCombination(fields);
        this.localProps = this.localProps.addUniqueFields(fields);
    }
}
Example #22
Source File: GlobalProperties.java (from flink, Apache License 2.0)

@Override
public GlobalProperties clone() {
    final GlobalProperties newProps = new GlobalProperties();
    newProps.partitioning = this.partitioning;
    newProps.partitioningFields = this.partitioningFields;
    newProps.ordering = this.ordering;
    newProps.distribution = this.distribution;
    newProps.customPartitioner = this.customPartitioner;
    newProps.uniqueFieldCombinations = this.uniqueFieldCombinations == null ?
            null : new HashSet<FieldSet>(this.uniqueFieldCombinations);
    return newProps;
}
Example #23
Source File: PartitionNode.java (from flink, Apache License 2.0)

public PartitionDescriptor(PartitionMethod pMethod, FieldSet pKeys, Ordering ordering,
        Partitioner<?> customPartitioner, DataDistribution distribution) {
    super(pKeys);

    Preconditions.checkArgument(pMethod != PartitionMethod.RANGE
            || pKeys.equals(new FieldSet(ordering.getFieldPositions())),
            "Partition keys must match the given ordering.");

    this.pMethod = pMethod;
    this.customPartitioner = customPartitioner;
    this.distribution = distribution;
    this.ordering = ordering;
}
Example #24
Source File: BinaryUnionNode.java (from flink, Apache License 2.0)

@Override
public FieldSet getForwardingTargetFields(int input, int sourceField) {
    if (input != 0 && input != 1) {
        throw new IndexOutOfBoundsException("Invalid input index for binary union node.");
    }
    return new FieldSet(sourceField);
}
Example #25
Source File: GroupReduceNodeTest.java (from flink, Apache License 2.0)

@Test
public void testGetSemanticProperties() {
    SingleInputSemanticProperties origProps = new SingleInputSemanticProperties();
    origProps.addForwardedField(0, 1);
    origProps.addForwardedField(2, 2);
    origProps.addForwardedField(3, 4);
    origProps.addForwardedField(6, 0);
    origProps.addReadFields(new FieldSet(0, 2, 4, 7));

    GroupReduceOperatorBase<?, ?, ?> op = mock(GroupReduceOperatorBase.class);
    when(op.getSemanticProperties()).thenReturn(origProps);
    when(op.getKeyColumns(0)).thenReturn(new int[]{3, 2});
    when(op.getParameters()).thenReturn(new Configuration());

    GroupReduceNode node = new GroupReduceNode(op);

    SemanticProperties filteredProps = node.getSemanticPropertiesForLocalPropertyFiltering();

    assertTrue(filteredProps.getForwardingTargetFields(0, 0).size() == 0);
    assertTrue(filteredProps.getForwardingTargetFields(0, 2).size() == 1);
    assertTrue(filteredProps.getForwardingTargetFields(0, 2).contains(2));
    assertTrue(filteredProps.getForwardingTargetFields(0, 3).size() == 1);
    assertTrue(filteredProps.getForwardingTargetFields(0, 3).contains(4));
    assertTrue(filteredProps.getForwardingTargetFields(0, 6).size() == 0);
    assertTrue(filteredProps.getForwardingSourceField(0, 1) < 0);
    assertTrue(filteredProps.getForwardingSourceField(0, 2) == 2);
    assertTrue(filteredProps.getForwardingSourceField(0, 4) == 3);
    assertTrue(filteredProps.getForwardingSourceField(0, 0) < 0);
    assertTrue(filteredProps.getReadFields(0).size() == 4);
    assertTrue(filteredProps.getReadFields(0).contains(0));
    assertTrue(filteredProps.getReadFields(0).contains(2));
    assertTrue(filteredProps.getReadFields(0).contains(4));
    assertTrue(filteredProps.getReadFields(0).contains(7));
}
Example #26
Source File: SemanticPropUtilTest.java (from flink, Apache License 2.0)

@Test
public void testReadFieldsIndividualStrings() {
    String[] readFields = { "f1", "f2" };

    SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(sp, null, null, readFields, threeIntTupleType, threeIntTupleType);

    FieldSet fs = sp.getReadFields(0);
    assertTrue(fs.size() == 2);
    assertTrue(fs.contains(2));
    assertTrue(fs.contains(1));
}
Example #27
Source File: GlobalProperties.java (from Flink-CEPplus, Apache License 2.0)

public void addUniqueFieldCombination(FieldSet fields) {
    if (fields == null) {
        return;
    }
    if (this.uniqueFieldCombinations == null) {
        this.uniqueFieldCombinations = new HashSet<FieldSet>();
    }
    this.uniqueFieldCombinations.add(fields);
}
Example #28
Source File: PropertyDataSourceTest.java (from flink, Apache License 2.0)

@Test
public void checkSinglePartitionedOrderedSource1() {
    ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);

    DataSource<Tuple2<Long, String>> data = env.readCsvFile("/some/path").types(Long.class, String.class);

    data.getSplitDataProperties()
            .splitsPartitionedBy(1)
            .splitsOrderedBy(new int[]{1}, new Order[]{Order.ASCENDING});

    data.output(new DiscardingOutputFormat<Tuple2<Long, String>>());

    Plan plan = env.createProgramPlan();

    // submit the plan to the compiler
    OptimizedPlan oPlan = compileNoStats(plan);

    // check the optimized Plan
    SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
    SourcePlanNode sourceNode = (SourcePlanNode) sinkNode.getPredecessor();

    GlobalProperties gprops = sourceNode.getGlobalProperties();
    LocalProperties lprops = sourceNode.getLocalProperties();

    Assert.assertTrue((new FieldSet(gprops.getPartitioningFields().toArray())).equals(new FieldSet(1)));
    Assert.assertTrue(gprops.getPartitioning() == PartitioningProperty.ANY_PARTITIONING);
    Assert.assertTrue((new FieldSet(lprops.getGroupedFields().toArray())).equals(new FieldSet(1)));
    Assert.assertTrue(lprops.getOrdering() == null);
}
Example #29
Source File: DualInputSemanticProperties.java (from Flink-CEPplus, Apache License 2.0)

/**
 * Adds, to the existing information, field(s) that are read in
 * the source record(s) from the first input.
 *
 * @param input the input of the read fields
 * @param readFields the position(s) in the source record(s)
 */
public void addReadFields(int input, FieldSet readFields) {
    if (input != 0 && input != 1) {
        throw new IndexOutOfBoundsException();
    } else if (input == 0) {
        this.readFields1 = (this.readFields1 == null) ? readFields.clone() : this.readFields1.addFields(readFields);
    } else {
        this.readFields2 = (this.readFields2 == null) ? readFields.clone() : this.readFields2.addFields(readFields);
    }
}
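For orientation, a hedged sketch of addReadFields on both inputs, read back via getReadFields as used in the tests above. The no-argument constructor, import paths, and field positions are assumptions for illustration:

import org.apache.flink.api.common.operators.DualInputSemanticProperties;
import org.apache.flink.api.common.operators.util.FieldSet;

public class ReadFieldsSketch {

    public static void main(String[] args) {
        DualInputSemanticProperties props = new DualInputSemanticProperties();

        props.addReadFields(0, new FieldSet(0, 2));
        props.addReadFields(1, new FieldSet(1));
        props.addReadFields(0, new FieldSet(4)); // merged into the existing set for input 0

        FieldSet readOfFirstInput = props.getReadFields(0);
        System.out.println(readOfFirstInput.size()); // 3 (fields 0, 2 and 4)
    }
}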
Example #30
Source File: DualInputSemanticProperties.java (from Flink-CEPplus, Apache License 2.0)

@Override
public FieldSet getForwardingTargetFields(int input, int sourceField) {
    if (input != 0 && input != 1) {
        throw new IndexOutOfBoundsException();
    } else if (input == 0) {
        return fieldMapping1.containsKey(sourceField) ? fieldMapping1.get(sourceField) : FieldSet.EMPTY_SET;
    } else {
        return fieldMapping2.containsKey(sourceField) ? fieldMapping2.get(sourceField) : FieldSet.EMPTY_SET;
    }
}