org.apache.flink.optimizer.operators.OperatorDescriptorDual Java Examples
The following examples show how to use
org.apache.flink.optimizer.operators.OperatorDescriptorDual.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Builds the list of candidate execution strategies for a right outer join,
 * based on the user-supplied {@link JoinHint}.
 *
 * @param hint the join hint selecting the strategy; OPTIMIZER_CHOOSES admits both
 *             the sort-merge and the build-first hash variant
 * @return the candidate operator descriptors for this join
 * @throws CompilerException if the hint is not applicable to a right outer join
 *                           (BROADCAST_HASH_SECOND or an unknown hint)
 */
private List<OperatorDescriptorDual> createRightOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> descriptors = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
			descriptors.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, true));
			descriptors.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, true));
			break;
		case REPARTITION_SORT_MERGE:
			descriptors.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, false));
			break;
		case REPARTITION_HASH_FIRST:
			descriptors.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_FIRST:
			descriptors.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, false));
			break;
		case REPARTITION_HASH_SECOND:
			descriptors.add(new HashRightOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_SECOND:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for right outer join");
	}
	return descriptors;
}
Example #2
Source File: WorksetIterationNode.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Creates a new node with a single input for the optimizer plan.
 *
 * @param iteration The iteration operator that the node represents.
 */
public WorksetIterationNode(DeltaIterationBase<?, ?> iteration) {
	super(iteration);

	// The solution set must be keyed, otherwise delta updates cannot be matched against it.
	final int[] solutionSetKeys = iteration.getSolutionSetKeyFields();
	if (solutionSetKeys == null || solutionSetKeys.length == 0) {
		throw new CompilerException("Invalid WorksetIteration: No key fields defined for the solution set.");
	}
	this.solutionSetKeyFields = new FieldList(solutionSetKeys);

	this.partitionedProperties = new GlobalProperties();
	this.partitionedProperties.setHashPartitioned(this.solutionSetKeyFields);

	// Weight the dynamic path by the configured iteration count (default when none is set),
	// capped at the optimizer's maximum dynamic path cost weight.
	final int maxIterations = iteration.getMaximumNumberOfIterations();
	final int uncappedWeight = maxIterations > 0 ? maxIterations : DEFAULT_COST_WEIGHT;
	this.costWeight = Math.min(uncappedWeight, OptimizerNode.MAX_DYNAMIC_PATH_COST_WEIGHT);

	this.dataProperties = Collections.<OperatorDescriptorDual>singletonList(
			new WorksetOpDescriptor(this.solutionSetKeyFields));
}
Example #3
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Builds the list of candidate execution strategies for a full outer join,
 * based on the user-supplied {@link JoinHint}.
 *
 * <p>OPTIMIZER_CHOOSES and REPARTITION_SORT_MERGE produce the same sort-merge
 * descriptor, so the two cases share one branch.
 *
 * @param hint the join hint selecting the strategy
 * @return the candidate operator descriptors for this join
 * @throws CompilerException if the hint is not applicable to a full outer join
 *                           (either broadcast strategy, or an unknown hint)
 */
private List<OperatorDescriptorDual> createFullOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> list = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
		case REPARTITION_SORT_MERGE:
			list.add(new SortMergeFullOuterJoinDescriptor(this.keys1, this.keys2));
			break;
		case REPARTITION_HASH_FIRST:
			list.add(new HashFullOuterJoinBuildFirstDescriptor(this.keys1, this.keys2));
			break;
		case REPARTITION_HASH_SECOND:
			list.add(new HashFullOuterJoinBuildSecondDescriptor(this.keys1, this.keys2));
			break;
		case BROADCAST_HASH_FIRST:
		case BROADCAST_HASH_SECOND:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for full outer join");
	}
	return list;
}
Example #4
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Builds the list of candidate execution strategies for a right outer join,
 * based on the user-supplied {@link JoinHint}.
 *
 * @param hint the join hint selecting the strategy; OPTIMIZER_CHOOSES admits both
 *             the sort-merge and the build-first hash variant
 * @return the candidate operator descriptors for this join
 * @throws CompilerException if the hint is not applicable to a right outer join
 *                           (BROADCAST_HASH_SECOND or an unknown hint)
 */
private List<OperatorDescriptorDual> createRightOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> descriptors = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
			descriptors.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, true));
			descriptors.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, true));
			break;
		case REPARTITION_SORT_MERGE:
			descriptors.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, false));
			break;
		case REPARTITION_HASH_FIRST:
			descriptors.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_FIRST:
			descriptors.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, false));
			break;
		case REPARTITION_HASH_SECOND:
			descriptors.add(new HashRightOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_SECOND:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for right outer join");
	}
	return descriptors;
}
Example #5
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Builds the list of candidate execution strategies for a left outer join,
 * based on the user-supplied {@link JoinHint}.
 *
 * @param hint the join hint selecting the strategy; OPTIMIZER_CHOOSES admits both
 *             the sort-merge and the build-second hash variant
 * @return the candidate operator descriptors for this join
 * @throws CompilerException if the hint is not applicable to a left outer join
 *                           (BROADCAST_HASH_FIRST or an unknown hint)
 */
private List<OperatorDescriptorDual> createLeftOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> descriptors = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
			descriptors.add(new SortMergeLeftOuterJoinDescriptor(this.keys1, this.keys2, true));
			descriptors.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, true, true));
			break;
		case REPARTITION_SORT_MERGE:
			descriptors.add(new SortMergeLeftOuterJoinDescriptor(this.keys1, this.keys2, false));
			break;
		case REPARTITION_HASH_SECOND:
			descriptors.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_SECOND:
			descriptors.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, true, false));
			break;
		case REPARTITION_HASH_FIRST:
			descriptors.add(new HashLeftOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_FIRST:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for left outer join");
	}
	return descriptors;
}
Example #6
Source File: CoGroupNode.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Creates the single co-group descriptor for this node, carrying over the
 * group orders declared on the operator and the optional custom partitioner.
 *
 * @param customPartitioner the custom partitioner set on the operator, or null
 * @return a singleton list holding the co-group descriptor
 */
private List<OperatorDescriptorDual> initializeDataProperties(Partitioner<?> customPartitioner) {
	CoGroupOperatorBase<?, ?, ?, ?> operator = getOperator();

	// Treat an empty group order the same as no group order at all.
	Ordering groupOrder1 = operator.getGroupOrderForInputOne();
	if (groupOrder1 != null && groupOrder1.getNumberOfFields() == 0) {
		groupOrder1 = null;
	}
	Ordering groupOrder2 = operator.getGroupOrderForInputTwo();
	if (groupOrder2 != null && groupOrder2.getNumberOfFields() == 0) {
		groupOrder2 = null;
	}

	CoGroupDescriptor descriptor = new CoGroupDescriptor(this.keys1, this.keys2, groupOrder1, groupOrder2);
	if (customPartitioner != null) {
		descriptor.setCustomPartitioner(customPartitioner);
	}
	return Collections.<OperatorDescriptorDual>singletonList(descriptor);
}
Example #7
Source File: WorksetIterationNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Creates a new node with a single input for the optimizer plan.
 *
 * @param iteration The iteration operator that the node represents.
 */
public WorksetIterationNode(DeltaIterationBase<?, ?> iteration) {
	super(iteration);

	// The solution set must be keyed, otherwise delta updates cannot be matched against it.
	final int[] solutionSetKeys = iteration.getSolutionSetKeyFields();
	if (solutionSetKeys == null || solutionSetKeys.length == 0) {
		throw new CompilerException("Invalid WorksetIteration: No key fields defined for the solution set.");
	}
	this.solutionSetKeyFields = new FieldList(solutionSetKeys);

	this.partitionedProperties = new GlobalProperties();
	this.partitionedProperties.setHashPartitioned(this.solutionSetKeyFields);

	// Weight the dynamic path by the configured iteration count (default when none is set),
	// capped at the optimizer's maximum dynamic path cost weight.
	final int maxIterations = iteration.getMaximumNumberOfIterations();
	final int uncappedWeight = maxIterations > 0 ? maxIterations : DEFAULT_COST_WEIGHT;
	this.costWeight = Math.min(uncappedWeight, OptimizerNode.MAX_DYNAMIC_PATH_COST_WEIGHT);

	this.dataProperties = Collections.<OperatorDescriptorDual>singletonList(
			new WorksetOpDescriptor(this.solutionSetKeyFields));
}
Example #8
Source File: CoGroupNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Creates the single co-group descriptor for this node, carrying over the
 * group orders declared on the operator and the optional custom partitioner.
 *
 * @param customPartitioner the custom partitioner set on the operator, or null
 * @return a singleton list holding the co-group descriptor
 */
private List<OperatorDescriptorDual> initializeDataProperties(Partitioner<?> customPartitioner) {
	CoGroupOperatorBase<?, ?, ?, ?> operator = getOperator();

	// Treat an empty group order the same as no group order at all.
	Ordering groupOrder1 = operator.getGroupOrderForInputOne();
	if (groupOrder1 != null && groupOrder1.getNumberOfFields() == 0) {
		groupOrder1 = null;
	}
	Ordering groupOrder2 = operator.getGroupOrderForInputTwo();
	if (groupOrder2 != null && groupOrder2.getNumberOfFields() == 0) {
		groupOrder2 = null;
	}

	CoGroupDescriptor descriptor = new CoGroupDescriptor(this.keys1, this.keys2, groupOrder1, groupOrder2);
	if (customPartitioner != null) {
		descriptor.setCustomPartitioner(customPartitioner);
	}
	return Collections.<OperatorDescriptorDual>singletonList(descriptor);
}
Example #9
Source File: OuterJoinNode.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Builds the list of candidate execution strategies for a left outer join,
 * based on the user-supplied {@link JoinHint}.
 *
 * @param hint the join hint selecting the strategy; OPTIMIZER_CHOOSES admits both
 *             the sort-merge and the build-second hash variant
 * @return the candidate operator descriptors for this join
 * @throws CompilerException if the hint is not applicable to a left outer join
 *                           (BROADCAST_HASH_FIRST or an unknown hint)
 */
private List<OperatorDescriptorDual> createLeftOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> descriptors = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
			descriptors.add(new SortMergeLeftOuterJoinDescriptor(this.keys1, this.keys2, true));
			descriptors.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, true, true));
			break;
		case REPARTITION_SORT_MERGE:
			descriptors.add(new SortMergeLeftOuterJoinDescriptor(this.keys1, this.keys2, false));
			break;
		case REPARTITION_HASH_SECOND:
			descriptors.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_SECOND:
			descriptors.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, true, false));
			break;
		case REPARTITION_HASH_FIRST:
			descriptors.add(new HashLeftOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_FIRST:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for left outer join");
	}
	return descriptors;
}
Example #10
Source File: OuterJoinNode.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Builds the list of candidate execution strategies for a full outer join,
 * based on the user-supplied {@link JoinHint}.
 *
 * <p>OPTIMIZER_CHOOSES and REPARTITION_SORT_MERGE produce the same sort-merge
 * descriptor, so the two cases share one branch.
 *
 * @param hint the join hint selecting the strategy
 * @return the candidate operator descriptors for this join
 * @throws CompilerException if the hint is not applicable to a full outer join
 *                           (either broadcast strategy, or an unknown hint)
 */
private List<OperatorDescriptorDual> createFullOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> list = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
		case REPARTITION_SORT_MERGE:
			list.add(new SortMergeFullOuterJoinDescriptor(this.keys1, this.keys2));
			break;
		case REPARTITION_HASH_FIRST:
			list.add(new HashFullOuterJoinBuildFirstDescriptor(this.keys1, this.keys2));
			break;
		case REPARTITION_HASH_SECOND:
			list.add(new HashFullOuterJoinBuildSecondDescriptor(this.keys1, this.keys2));
			break;
		case BROADCAST_HASH_FIRST:
		case BROADCAST_HASH_SECOND:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for full outer join");
	}
	return list;
}
Example #11
Source File: CoGroupRawNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Creates the single raw co-group descriptor for this node, carrying over the
 * group orders declared on the operator.
 *
 * @return a singleton list holding the raw co-group descriptor
 */
private List<OperatorDescriptorDual> initializeDataProperties() {
	CoGroupRawOperatorBase<?, ?, ?, ?> operator = getOperator();

	// Treat an empty group order the same as no group order at all.
	Ordering groupOrder1 = operator.getGroupOrderForInputOne();
	if (groupOrder1 != null && groupOrder1.getNumberOfFields() == 0) {
		groupOrder1 = null;
	}
	Ordering groupOrder2 = operator.getGroupOrderForInputTwo();
	if (groupOrder2 != null && groupOrder2.getNumberOfFields() == 0) {
		groupOrder2 = null;
	}

	return Collections.<OperatorDescriptorDual>singletonList(
			new CoGroupRawDescriptor(this.keys1, this.keys2, groupOrder1, groupOrder2));
}
Example #12
Source File: CoGroupRawNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Creates the single raw co-group descriptor for this node, carrying over the
 * group orders declared on the operator.
 *
 * @return a singleton list holding the raw co-group descriptor
 */
private List<OperatorDescriptorDual> initializeDataProperties() {
	CoGroupRawOperatorBase<?, ?, ?, ?> operator = getOperator();

	// Treat an empty group order the same as no group order at all.
	Ordering groupOrder1 = operator.getGroupOrderForInputOne();
	if (groupOrder1 != null && groupOrder1.getNumberOfFields() == 0) {
		groupOrder1 = null;
	}
	Ordering groupOrder2 = operator.getGroupOrderForInputTwo();
	if (groupOrder2 != null && groupOrder2.getNumberOfFields() == 0) {
		groupOrder2 = null;
	}

	return Collections.<OperatorDescriptorDual>singletonList(
			new CoGroupRawDescriptor(this.keys1, this.keys2, groupOrder1, groupOrder2));
}
Example #13
Source File: CoGroupNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Creates the single co-group descriptor for this node, carrying over the
 * group orders declared on the operator and the optional custom partitioner.
 *
 * @param customPartitioner the custom partitioner set on the operator, or null
 * @return a singleton list holding the co-group descriptor
 */
private List<OperatorDescriptorDual> initializeDataProperties(Partitioner<?> customPartitioner) {
	CoGroupOperatorBase<?, ?, ?, ?> operator = getOperator();

	// Treat an empty group order the same as no group order at all.
	Ordering groupOrder1 = operator.getGroupOrderForInputOne();
	if (groupOrder1 != null && groupOrder1.getNumberOfFields() == 0) {
		groupOrder1 = null;
	}
	Ordering groupOrder2 = operator.getGroupOrderForInputTwo();
	if (groupOrder2 != null && groupOrder2.getNumberOfFields() == 0) {
		groupOrder2 = null;
	}

	CoGroupDescriptor descriptor = new CoGroupDescriptor(this.keys1, this.keys2, groupOrder1, groupOrder2);
	if (customPartitioner != null) {
		descriptor.setCustomPartitioner(customPartitioner);
	}
	return Collections.<OperatorDescriptorDual>singletonList(descriptor);
}
Example #14
Source File: WorksetIterationNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Creates a new node with a single input for the optimizer plan.
 *
 * @param iteration The iteration operator that the node represents.
 */
public WorksetIterationNode(DeltaIterationBase<?, ?> iteration) {
	super(iteration);

	// The solution set must be keyed, otherwise delta updates cannot be matched against it.
	final int[] solutionSetKeys = iteration.getSolutionSetKeyFields();
	if (solutionSetKeys == null || solutionSetKeys.length == 0) {
		throw new CompilerException("Invalid WorksetIteration: No key fields defined for the solution set.");
	}
	this.solutionSetKeyFields = new FieldList(solutionSetKeys);

	this.partitionedProperties = new GlobalProperties();
	this.partitionedProperties.setHashPartitioned(this.solutionSetKeyFields);

	// Weight the dynamic path by the configured iteration count (default when none is set),
	// capped at the optimizer's maximum dynamic path cost weight.
	final int maxIterations = iteration.getMaximumNumberOfIterations();
	final int uncappedWeight = maxIterations > 0 ? maxIterations : DEFAULT_COST_WEIGHT;
	this.costWeight = Math.min(uncappedWeight, OptimizerNode.MAX_DYNAMIC_PATH_COST_WEIGHT);

	this.dataProperties = Collections.<OperatorDescriptorDual>singletonList(
			new WorksetOpDescriptor(this.solutionSetKeyFields));
}
Example #15
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Builds the list of candidate execution strategies for a full outer join,
 * based on the user-supplied {@link JoinHint}.
 *
 * <p>OPTIMIZER_CHOOSES and REPARTITION_SORT_MERGE produce the same sort-merge
 * descriptor, so the two cases share one branch.
 *
 * @param hint the join hint selecting the strategy
 * @return the candidate operator descriptors for this join
 * @throws CompilerException if the hint is not applicable to a full outer join
 *                           (either broadcast strategy, or an unknown hint)
 */
private List<OperatorDescriptorDual> createFullOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> list = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
		case REPARTITION_SORT_MERGE:
			list.add(new SortMergeFullOuterJoinDescriptor(this.keys1, this.keys2));
			break;
		case REPARTITION_HASH_FIRST:
			list.add(new HashFullOuterJoinBuildFirstDescriptor(this.keys1, this.keys2));
			break;
		case REPARTITION_HASH_SECOND:
			list.add(new HashFullOuterJoinBuildSecondDescriptor(this.keys1, this.keys2));
			break;
		case BROADCAST_HASH_FIRST:
		case BROADCAST_HASH_SECOND:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for full outer join");
	}
	return list;
}
Example #16
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * Builds the list of candidate execution strategies for a left outer join,
 * based on the user-supplied {@link JoinHint}.
 *
 * @param hint the join hint selecting the strategy; OPTIMIZER_CHOOSES admits both
 *             the sort-merge and the build-second hash variant
 * @return the candidate operator descriptors for this join
 * @throws CompilerException if the hint is not applicable to a left outer join
 *                           (BROADCAST_HASH_FIRST or an unknown hint)
 */
private List<OperatorDescriptorDual> createLeftOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> descriptors = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
			descriptors.add(new SortMergeLeftOuterJoinDescriptor(this.keys1, this.keys2, true));
			descriptors.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, true, true));
			break;
		case REPARTITION_SORT_MERGE:
			descriptors.add(new SortMergeLeftOuterJoinDescriptor(this.keys1, this.keys2, false));
			break;
		case REPARTITION_HASH_SECOND:
			descriptors.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_SECOND:
			descriptors.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, true, false));
			break;
		case REPARTITION_HASH_FIRST:
			descriptors.add(new HashLeftOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_FIRST:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for left outer join");
	}
	return descriptors;
}
Example #17
Source File: OuterJoinNode.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Builds the list of candidate execution strategies for a right outer join,
 * based on the user-supplied {@link JoinHint}.
 *
 * @param hint the join hint selecting the strategy; OPTIMIZER_CHOOSES admits both
 *             the sort-merge and the build-first hash variant
 * @return the candidate operator descriptors for this join
 * @throws CompilerException if the hint is not applicable to a right outer join
 *                           (BROADCAST_HASH_SECOND or an unknown hint)
 */
private List<OperatorDescriptorDual> createRightOuterJoinDescriptors(JoinHint hint) {
	List<OperatorDescriptorDual> descriptors = new ArrayList<>();
	switch (hint) {
		case OPTIMIZER_CHOOSES:
			descriptors.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, true));
			descriptors.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, true));
			break;
		case REPARTITION_SORT_MERGE:
			descriptors.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, false));
			break;
		case REPARTITION_HASH_FIRST:
			descriptors.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_FIRST:
			descriptors.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, false));
			break;
		case REPARTITION_HASH_SECOND:
			descriptors.add(new HashRightOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
			break;
		case BROADCAST_HASH_SECOND:
		default:
			throw new CompilerException("Invalid join hint: " + hint + " for right outer join");
	}
	return descriptors;
}
Example #18
Source File: CoGroupRawNode.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * Creates the single raw co-group descriptor for this node, carrying over the
 * group orders declared on the operator.
 *
 * @return a singleton list holding the raw co-group descriptor
 */
private List<OperatorDescriptorDual> initializeDataProperties() {
	CoGroupRawOperatorBase<?, ?, ?, ?> operator = getOperator();

	// Treat an empty group order the same as no group order at all.
	Ordering groupOrder1 = operator.getGroupOrderForInputOne();
	if (groupOrder1 != null && groupOrder1.getNumberOfFields() == 0) {
		groupOrder1 = null;
	}
	Ordering groupOrder2 = operator.getGroupOrderForInputTwo();
	if (groupOrder2 != null && groupOrder2.getNumberOfFields() == 0) {
		groupOrder2 = null;
	}

	return Collections.<OperatorDescriptorDual>singletonList(
			new CoGroupRawDescriptor(this.keys1, this.keys2, groupOrder1, groupOrder2));
}
Example #19
Source File: OuterJoinNode.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Determines the candidate execution strategies for this outer join by dispatching
 * on the outer join type, then applies the operator's custom partitioner (if any)
 * to every candidate descriptor.
 *
 * @return the candidate operator descriptors
 * @throws CompilerException on an unknown outer join type or an invalid join hint
 */
private List<OperatorDescriptorDual> getDataProperties() {
	OuterJoinOperatorBase<?, ?, ?, ?> operator = getOperator();
	OuterJoinType type = operator.getOuterJoinType();

	// No explicit hint means the optimizer is free to choose.
	JoinHint joinHint = operator.getJoinHint();
	if (joinHint == null) {
		joinHint = JoinHint.OPTIMIZER_CHOOSES;
	}

	final List<OperatorDescriptorDual> candidates;
	switch (type) {
		case LEFT:
			candidates = createLeftOuterJoinDescriptors(joinHint);
			break;
		case RIGHT:
			candidates = createRightOuterJoinDescriptors(joinHint);
			break;
		case FULL:
			candidates = createFullOuterJoinDescriptors(joinHint);
			break;
		default:
			throw new CompilerException("Unknown outer join type: " + type);
	}

	Partitioner<?> customPartitioner = operator.getCustomPartitioner();
	if (customPartitioner != null) {
		for (OperatorDescriptorDual candidate : candidates) {
			((AbstractJoinDescriptor) candidate).setCustomPartitioner(customPartitioner);
		}
	}
	return candidates;
}
Example #20
Source File: TwoInputNode.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Override public void computeInterestingPropertiesForInputs(CostEstimator estimator) { // get what we inherit and what is preserved by our user code final InterestingProperties props1 = getInterestingProperties().filterByCodeAnnotations(this, 0); final InterestingProperties props2 = getInterestingProperties().filterByCodeAnnotations(this, 1); // add all properties relevant to this node for (OperatorDescriptorDual dpd : getProperties()) { for (GlobalPropertiesPair gp : dpd.getPossibleGlobalProperties()) { // input 1 props1.addGlobalProperties(gp.getProperties1()); // input 2 props2.addGlobalProperties(gp.getProperties2()); } for (LocalPropertiesPair lp : dpd.getPossibleLocalProperties()) { // input 1 props1.addLocalProperties(lp.getProperties1()); // input 2 props2.addLocalProperties(lp.getProperties2()); } } this.input1.setInterestingProperties(props1); this.input2.setInterestingProperties(props2); for (DagConnection conn : getBroadcastConnections()) { conn.setInterestingProperties(new InterestingProperties()); } }
Example #21
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 5 votes |
/**
 * Determines the candidate execution strategies for this outer join by dispatching
 * on the outer join type, then applies the operator's custom partitioner (if any)
 * to every candidate descriptor.
 *
 * @return the candidate operator descriptors
 * @throws CompilerException on an unknown outer join type or an invalid join hint
 */
private List<OperatorDescriptorDual> getDataProperties() {
	OuterJoinOperatorBase<?, ?, ?, ?> operator = getOperator();
	OuterJoinType type = operator.getOuterJoinType();

	// No explicit hint means the optimizer is free to choose.
	JoinHint joinHint = operator.getJoinHint();
	if (joinHint == null) {
		joinHint = JoinHint.OPTIMIZER_CHOOSES;
	}

	final List<OperatorDescriptorDual> candidates;
	switch (type) {
		case LEFT:
			candidates = createLeftOuterJoinDescriptors(joinHint);
			break;
		case RIGHT:
			candidates = createRightOuterJoinDescriptors(joinHint);
			break;
		case FULL:
			candidates = createFullOuterJoinDescriptors(joinHint);
			break;
		default:
			throw new CompilerException("Unknown outer join type: " + type);
	}

	Partitioner<?> customPartitioner = operator.getCustomPartitioner();
	if (customPartitioner != null) {
		for (OperatorDescriptorDual candidate : candidates) {
			((AbstractJoinDescriptor) candidate).setCustomPartitioner(customPartitioner);
		}
	}
	return candidates;
}
Example #22
Source File: TwoInputNode.java From flink with Apache License 2.0 | 5 votes |
@Override public void computeInterestingPropertiesForInputs(CostEstimator estimator) { // get what we inherit and what is preserved by our user code final InterestingProperties props1 = getInterestingProperties().filterByCodeAnnotations(this, 0); final InterestingProperties props2 = getInterestingProperties().filterByCodeAnnotations(this, 1); // add all properties relevant to this node for (OperatorDescriptorDual dpd : getProperties()) { for (GlobalPropertiesPair gp : dpd.getPossibleGlobalProperties()) { // input 1 props1.addGlobalProperties(gp.getProperties1()); // input 2 props2.addGlobalProperties(gp.getProperties2()); } for (LocalPropertiesPair lp : dpd.getPossibleLocalProperties()) { // input 1 props1.addLocalProperties(lp.getProperties1()); // input 2 props2.addLocalProperties(lp.getProperties2()); } } this.input1.setInterestingProperties(props1); this.input2.setInterestingProperties(props2); for (DagConnection conn : getBroadcastConnections()) { conn.setInterestingProperties(new InterestingProperties()); } }
Example #23
Source File: TwoInputNode.java From flink with Apache License 2.0 | 5 votes |
@Override public void computeInterestingPropertiesForInputs(CostEstimator estimator) { // get what we inherit and what is preserved by our user code final InterestingProperties props1 = getInterestingProperties().filterByCodeAnnotations(this, 0); final InterestingProperties props2 = getInterestingProperties().filterByCodeAnnotations(this, 1); // add all properties relevant to this node for (OperatorDescriptorDual dpd : getProperties()) { for (GlobalPropertiesPair gp : dpd.getPossibleGlobalProperties()) { // input 1 props1.addGlobalProperties(gp.getProperties1()); // input 2 props2.addGlobalProperties(gp.getProperties2()); } for (LocalPropertiesPair lp : dpd.getPossibleLocalProperties()) { // input 1 props1.addLocalProperties(lp.getProperties1()); // input 2 props2.addLocalProperties(lp.getProperties2()); } } this.input1.setInterestingProperties(props1); this.input2.setInterestingProperties(props2); for (DagConnection conn : getBroadcastConnections()) { conn.setInterestingProperties(new InterestingProperties()); } }
Example #24
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 5 votes |
/**
 * Determines the candidate execution strategies for this outer join by dispatching
 * on the outer join type, then applies the operator's custom partitioner (if any)
 * to every candidate descriptor.
 *
 * @return the candidate operator descriptors
 * @throws CompilerException on an unknown outer join type or an invalid join hint
 */
private List<OperatorDescriptorDual> getDataProperties() {
	OuterJoinOperatorBase<?, ?, ?, ?> operator = getOperator();
	OuterJoinType type = operator.getOuterJoinType();

	// No explicit hint means the optimizer is free to choose.
	JoinHint joinHint = operator.getJoinHint();
	if (joinHint == null) {
		joinHint = JoinHint.OPTIMIZER_CHOOSES;
	}

	final List<OperatorDescriptorDual> candidates;
	switch (type) {
		case LEFT:
			candidates = createLeftOuterJoinDescriptors(joinHint);
			break;
		case RIGHT:
			candidates = createRightOuterJoinDescriptors(joinHint);
			break;
		case FULL:
			candidates = createFullOuterJoinDescriptors(joinHint);
			break;
		default:
			throw new CompilerException("Unknown outer join type: " + type);
	}

	Partitioner<?> customPartitioner = operator.getCustomPartitioner();
	if (customPartitioner != null) {
		for (OperatorDescriptorDual candidate : candidates) {
			((AbstractJoinDescriptor) candidate).setCustomPartitioner(customPartitioner);
		}
	}
	return candidates;
}
Example #25
Source File: CrossNode.java From flink with Apache License 2.0 | 4 votes |
/** Returns this node's pre-computed candidate operator descriptors ({@code dataProperties}). */
@Override protected List<OperatorDescriptorDual> getPossibleProperties() { return this.dataProperties; }
Example #26
Source File: CrossNode.java From flink with Apache License 2.0 | 4 votes |
/** Returns this node's pre-computed candidate operator descriptors ({@code dataProperties}). */
@Override protected List<OperatorDescriptorDual> getPossibleProperties() { return this.dataProperties; }
Example #27
Source File: CoGroupRawNode.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
/** Returns this node's pre-computed candidate operator descriptors ({@code dataProperties}). */
@Override protected List<OperatorDescriptorDual> getPossibleProperties() { return this.dataProperties; }
Example #28
Source File: CoGroupRawNode.java From flink with Apache License 2.0 | 4 votes |
/** Returns this node's pre-computed candidate operator descriptors ({@code dataProperties}). */
@Override protected List<OperatorDescriptorDual> getPossibleProperties() { return this.dataProperties; }
Example #29
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 4 votes |
/** Returns this node's pre-computed candidate operator descriptors ({@code dataProperties}). */
@Override protected List<OperatorDescriptorDual> getPossibleProperties() { return dataProperties; }
Example #30
Source File: OuterJoinNode.java From flink with Apache License 2.0 | 4 votes |
/** Returns this node's pre-computed candidate operator descriptors ({@code dataProperties}). */
@Override protected List<OperatorDescriptorDual> getPossibleProperties() { return dataProperties; }