org.apache.flink.optimizer.CompilerException Java Examples
The following examples show how to use
org.apache.flink.optimizer.CompilerException.
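CompilerException is the unchecked exception that Flink's optimizer throws when a program cannot be turned into a valid execution plan, for example when a plan invariant is violated or a case is left unhandled. Before the examples, here is a minimal sketch of the usage pattern the snippets below share; the class CompilerExceptionSketch and the helper checkKeyFields are invented for illustration, and only CompilerException itself is Flink API.

import org.apache.flink.optimizer.CompilerException;

public class CompilerExceptionSketch {

    // Hypothetical validation helper mirroring the pattern used throughout the
    // optimizer: check a plan invariant and fail fast with a descriptive message.
    static void checkKeyFields(int[] keys1, int[] keys2) {
        if (keys1 == null || keys2 == null) {
            throw new CompilerException("Keys must be set on both inputs.");
        }
        if (keys1.length != keys2.length) {
            throw new CompilerException("Unequal number of key fields on the two inputs.");
        }
    }

    public static void main(String[] args) {
        try {
            checkKeyFields(new int[] {0}, new int[] {0, 1});
        } catch (CompilerException e) {
            // CompilerException extends RuntimeException, so catching it is optional.
            System.err.println("Plan could not be compiled: " + e.getMessage());
        }
    }
}

Because it is a RuntimeException, the snippets below are not required to declare it, although ClusterClient.getOptimizedPlan declares it explicitly.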
Example #1
Source File: BulkIterationPlanNode.java From Flink-CEPplus with Apache License 2.0

private void mergeBranchPlanMaps() {
    for (OptimizerNode.UnclosedBranchDescriptor desc : template.getOpenBranches()) {
        OptimizerNode brancher = desc.getBranchingNode();

        if (branchPlan == null) {
            branchPlan = new HashMap<OptimizerNode, PlanNode>(6);
        }

        if (!branchPlan.containsKey(brancher)) {
            PlanNode selectedCandidate = null;

            if (rootOfStepFunction.branchPlan != null) {
                selectedCandidate = rootOfStepFunction.branchPlan.get(brancher);
            }

            if (selectedCandidate == null) {
                throw new CompilerException(
                        "Candidates for a node with open branches are missing information about the selected candidate ");
            }

            this.branchPlan.put(brancher, selectedCandidate);
        }
    }
}
Example #2
Source File: CoGroupRawDescriptor.java From Flink-CEPplus with Apache License 2.0

@Override
public boolean areCoFulfilled(RequestedLocalProperties requested1, RequestedLocalProperties requested2,
        LocalProperties produced1, LocalProperties produced2) {
    int numRelevantFields = this.keys1.size();

    Ordering prod1 = produced1.getOrdering();
    Ordering prod2 = produced2.getOrdering();

    if (prod1 == null || prod2 == null || prod1.getNumberOfFields() < numRelevantFields
            || prod2.getNumberOfFields() < numRelevantFields) {
        throw new CompilerException("The given properties do not meet this operators requirements.");
    }

    for (int i = 0; i < numRelevantFields; i++) {
        if (prod1.getOrder(i) != prod2.getOrder(i)) {
            return false;
        }
    }
    return true;
}
Example #3
Source File: OperatorDescriptorDual.java From flink with Apache License 2.0

protected boolean checkSameOrdering(LocalProperties produced1, LocalProperties produced2, int numRelevantFields) {
    Ordering prod1 = produced1.getOrdering();
    Ordering prod2 = produced2.getOrdering();

    if (prod1 == null || prod2 == null) {
        throw new CompilerException("The given properties do not meet this operators requirements.");
    }

    // check that order of fields is equivalent
    if (!checkEquivalentFieldPositionsInKeyFields(
            prod1.getInvolvedIndexes(), prod2.getInvolvedIndexes(), numRelevantFields)) {
        return false;
    }

    // check that both inputs have the same directions of order
    for (int i = 0; i < numRelevantFields; i++) {
        if (prod1.getOrder(i) != prod2.getOrder(i)) {
            return false;
        }
    }
    return true;
}
Example #4
Source File: HashJoinBuildFirstProperties.java From Flink-CEPplus with Apache License 2.0

@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    DriverStrategy strategy;

    if (!in1.isOnDynamicPath() && in2.isOnDynamicPath()) {
        // sanity check that the first input is cached and remove that cache
        if (!in1.getTempMode().isCached()) {
            throw new CompilerException("No cache at point where static and dynamic parts meet.");
        }

        in1.setTempMode(in1.getTempMode().makeNonCached());
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED;
    } else {
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST;
    }
    return new DualInputPlanNode(node, "Join (" + node.getOperator().getName() + ")", in1, in2, strategy, this.keys1, this.keys2);
}
Example #5
Source File: OperatorDescriptorDual.java From Flink-CEPplus with Apache License 2.0

protected boolean checkSameOrdering(GlobalProperties produced1, GlobalProperties produced2, int numRelevantFields) {
    Ordering prod1 = produced1.getPartitioningOrdering();
    Ordering prod2 = produced2.getPartitioningOrdering();

    if (prod1 == null || prod2 == null) {
        throw new CompilerException("The given properties do not meet this operators requirements.");
    }

    // check that order of fields is equivalent
    if (!checkEquivalentFieldPositionsInKeyFields(
            prod1.getInvolvedIndexes(), prod2.getInvolvedIndexes(), numRelevantFields)) {
        return false;
    }

    // check that both inputs have the same directions of order
    for (int i = 0; i < numRelevantFields; i++) {
        if (prod1.getOrder(i) != prod2.getOrder(i)) {
            return false;
        }
    }
    return true;
}
Example #6
Source File: HashJoinBuildSecondProperties.java From flink with Apache License 2.0

@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    DriverStrategy strategy;

    if (!in2.isOnDynamicPath() && in1.isOnDynamicPath()) {
        // sanity check that the second input is cached and remove that cache
        if (!in2.getTempMode().isCached()) {
            throw new CompilerException("No cache at point where static and dynamic parts meet.");
        }

        in2.setTempMode(in2.getTempMode().makeNonCached());
        strategy = DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED;
    } else {
        strategy = DriverStrategy.HYBRIDHASH_BUILD_SECOND;
    }
    return new DualInputPlanNode(node, "Join (" + node.getOperator().getName() + ")", in1, in2, strategy, this.keys1, this.keys2);
}
Example #7
Source File: TwoInputNode.java From flink with Apache License 2.0

/**
 * Creates a new two input node for the optimizer plan, representing the given operator.
 *
 * @param operator The operator that the optimizer DAG node should represent.
 */
public TwoInputNode(DualInputOperator<?, ?, ?, ?> operator) {
    super(operator);

    int[] k1 = operator.getKeyColumns(0);
    int[] k2 = operator.getKeyColumns(1);

    this.keys1 = k1 == null || k1.length == 0 ? null : new FieldList(k1);
    this.keys2 = k2 == null || k2.length == 0 ? null : new FieldList(k2);

    if (this.keys1 != null) {
        if (this.keys2 != null) {
            if (this.keys1.size() != this.keys2.size()) {
                throw new CompilerException("Unequal number of key fields on the two inputs.");
            }
        } else {
            throw new CompilerException("Keys are set on first input, but not on second.");
        }
    } else if (this.keys2 != null) {
        throw new CompilerException("Keys are set on second input, but not on first.");
    }
}
Example #8
Source File: JobGraphGenerator.java From Flink-CEPplus with Apache License 2.0

private int getNumberOfSendersPerReceiver(DistributionPattern pattern, int numSenders, int numReceivers) {
    if (pattern == DistributionPattern.ALL_TO_ALL) {
        // every receiver is connected to all senders
        return numSenders;
    } else if (pattern == DistributionPattern.POINTWISE) {
        if (numSenders != numReceivers) {
            if (numReceivers == 1) {
                // fan-in: the single receiver reads from all senders
                return numSenders;
            } else if (numSenders == 1) {
                // fan-out: each receiver reads from the single sender
                return 1;
            } else {
                throw new CompilerException("Error: A changing parallelism is currently " +
                        "not supported between tasks within an iteration.");
            }
        } else {
            // one-to-one wiring
            return 1;
        }
    } else {
        throw new CompilerException("Unknown distribution pattern for channels: " + pattern);
    }
}
Example #9
Source File: OuterJoinNode.java From flink with Apache License 2.0

private List<OperatorDescriptorDual> createLeftOuterJoinDescriptors(JoinHint hint) {
    List<OperatorDescriptorDual> list = new ArrayList<>();

    switch (hint) {
        case OPTIMIZER_CHOOSES:
            list.add(new SortMergeLeftOuterJoinDescriptor(this.keys1, this.keys2, true));
            list.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, true, true));
            break;
        case REPARTITION_SORT_MERGE:
            list.add(new SortMergeLeftOuterJoinDescriptor(this.keys1, this.keys2, false));
            break;
        case REPARTITION_HASH_SECOND:
            list.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
            break;
        case BROADCAST_HASH_SECOND:
            list.add(new HashLeftOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, true, false));
            break;
        case REPARTITION_HASH_FIRST:
            list.add(new HashLeftOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
            break;
        case BROADCAST_HASH_FIRST:
        default:
            throw new CompilerException("Invalid join hint: " + hint + " for left outer join");
    }
    return list;
}
Example #10
Source File: OuterJoinNode.java From flink with Apache License 2.0

private List<OperatorDescriptorDual> createRightOuterJoinDescriptors(JoinHint hint) {
    List<OperatorDescriptorDual> list = new ArrayList<>();

    switch (hint) {
        case OPTIMIZER_CHOOSES:
            list.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, true));
            list.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, true));
            break;
        case REPARTITION_SORT_MERGE:
            list.add(new SortMergeRightOuterJoinDescriptor(this.keys1, this.keys2, false));
            break;
        case REPARTITION_HASH_FIRST:
            list.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, false, true));
            break;
        case BROADCAST_HASH_FIRST:
            list.add(new HashRightOuterJoinBuildFirstDescriptor(this.keys1, this.keys2, true, false));
            break;
        case REPARTITION_HASH_SECOND:
            list.add(new HashRightOuterJoinBuildSecondDescriptor(this.keys1, this.keys2, false, true));
            break;
        case BROADCAST_HASH_SECOND:
        default:
            throw new CompilerException("Invalid join hint: " + hint + " for right outer join");
    }
    return list;
}
Example #11
Source File: SinkJoinerPlanNode.java From flink with Apache License 2.0

public void getDataSinks(List<SinkPlanNode> sinks) {
    final PlanNode in1 = this.input1.getSource();
    final PlanNode in2 = this.input2.getSource();

    if (in1 instanceof SinkPlanNode) {
        sinks.add((SinkPlanNode) in1);
    } else if (in1 instanceof SinkJoinerPlanNode) {
        ((SinkJoinerPlanNode) in1).getDataSinks(sinks);
    } else {
        throw new CompilerException("Illegal child node for a sink joiner utility node: Neither Sink nor Sink Joiner");
    }

    if (in2 instanceof SinkPlanNode) {
        sinks.add((SinkPlanNode) in2);
    } else if (in2 instanceof SinkJoinerPlanNode) {
        ((SinkJoinerPlanNode) in2).getDataSinks(sinks);
    } else {
        throw new CompilerException("Illegal child node for a sink joiner utility node: Neither Sink nor Sink Joiner");
    }
}
Example #12
Source File: BinaryUnionReplacer.java From flink with Apache License 2.0

public void collect(Channel in, List<Channel> inputs) {
    if (in.getSource() instanceof NAryUnionPlanNode) {
        // sanity check
        if (in.getShipStrategy() != ShipStrategyType.FORWARD) {
            throw new CompilerException("Bug: Plan generation for Unions picked a ship strategy between binary plan operators.");
        }

        if (!(in.getLocalStrategy() == null || in.getLocalStrategy() == LocalStrategy.NONE)) {
            throw new CompilerException("Bug: Plan generation for Unions picked a local strategy between binary plan operators.");
        }

        inputs.addAll(((NAryUnionPlanNode) in.getSource()).getListOfInputs());
    } else {
        // is not a collapsed union node, so we take the channel directly
        inputs.add(in);
    }
}
Example #13
Source File: ClusterClient.java From flink with Apache License 2.0

public static FlinkPlan getOptimizedPlan(Optimizer compiler, PackagedProgram prog, int parallelism)
        throws CompilerException, ProgramInvocationException {
    final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(prog.getUserCodeClassLoader());

        if (prog.isUsingProgramEntryPoint()) {
            return getOptimizedPlan(compiler, prog.getPlanWithJars(), parallelism);
        } else if (prog.isUsingInteractiveMode()) {
            // temporary hack to support the optimizer plan preview
            OptimizerPlanEnvironment env = new OptimizerPlanEnvironment(compiler);
            if (parallelism > 0) {
                env.setParallelism(parallelism);
            }
            return env.getOptimizedPlan(prog);
        } else {
            throw new RuntimeException("Couldn't determine program mode.");
        }
    } finally {
        Thread.currentThread().setContextClassLoader(contextClassLoader);
    }
}
Example #14
Source File: Channel.java From Flink-CEPplus with Apache License 2.0

private void computeLocalPropertiesAfterShippingOnly() {
    switch (this.shipStrategy) {
        case BROADCAST:
        case PARTITION_HASH:
        case PARTITION_CUSTOM:
        case PARTITION_RANGE:
        case PARTITION_RANDOM:
        case PARTITION_FORCED_REBALANCE:
            this.localProps = new LocalProperties();
            break;
        case FORWARD:
            this.localProps = this.source.getLocalProperties();
            break;
        case NONE:
            throw new CompilerException("ShipStrategy has not yet been set.");
        default:
            throw new CompilerException("Unknown ShipStrategy.");
    }
}
Example #15
Source File: TwoInputNode.java From Flink-CEPplus with Apache License 2.0

@Override
public void accept(Visitor<OptimizerNode> visitor) {
    if (visitor.preVisit(this)) {
        if (this.input1 == null || this.input2 == null) {
            throw new CompilerException();
        }

        getFirstPredecessorNode().accept(visitor);
        getSecondPredecessorNode().accept(visitor);

        for (DagConnection connection : getBroadcastConnections()) {
            connection.getSource().accept(visitor);
        }

        visitor.postVisit(this);
    }
}
Example #16
Source File: WorksetIterationNode.java From Flink-CEPplus with Apache License 2.0

/**
 * Creates a new node for the workset iteration, representing the given iteration operator.
 *
 * @param iteration The iteration operator that the node represents.
 */
public WorksetIterationNode(DeltaIterationBase<?, ?> iteration) {
    super(iteration);

    final int[] ssKeys = iteration.getSolutionSetKeyFields();
    if (ssKeys == null || ssKeys.length == 0) {
        throw new CompilerException("Invalid WorksetIteration: No key fields defined for the solution set.");
    }
    this.solutionSetKeyFields = new FieldList(ssKeys);
    this.partitionedProperties = new GlobalProperties();
    this.partitionedProperties.setHashPartitioned(this.solutionSetKeyFields);

    int weight = iteration.getMaximumNumberOfIterations() > 0 ?
            iteration.getMaximumNumberOfIterations() : DEFAULT_COST_WEIGHT;

    if (weight > OptimizerNode.MAX_DYNAMIC_PATH_COST_WEIGHT) {
        weight = OptimizerNode.MAX_DYNAMIC_PATH_COST_WEIGHT;
    }
    this.costWeight = weight;

    this.dataProperties = Collections.<OperatorDescriptorDual>singletonList(
            new WorksetOpDescriptor(this.solutionSetKeyFields));
}
Example #17
Source File: OuterJoinNode.java From Flink-CEPplus with Apache License 2.0

private List<OperatorDescriptorDual> createFullOuterJoinDescriptors(JoinHint hint) {
    List<OperatorDescriptorDual> list = new ArrayList<>();

    switch (hint) {
        case OPTIMIZER_CHOOSES:
            list.add(new SortMergeFullOuterJoinDescriptor(this.keys1, this.keys2));
            break;
        case REPARTITION_SORT_MERGE:
            list.add(new SortMergeFullOuterJoinDescriptor(this.keys1, this.keys2));
            break;
        case REPARTITION_HASH_FIRST:
            list.add(new HashFullOuterJoinBuildFirstDescriptor(this.keys1, this.keys2));
            break;
        case REPARTITION_HASH_SECOND:
            list.add(new HashFullOuterJoinBuildSecondDescriptor(this.keys1, this.keys2));
            break;
        case BROADCAST_HASH_FIRST:
        case BROADCAST_HASH_SECOND:
        default:
            throw new CompilerException("Invalid join hint: " + hint + " for full outer join");
    }
    return list;
}
Example #18
Source File: ClusterClient.java From Flink-CEPplus with Apache License 2.0

public static FlinkPlan getOptimizedPlan(Optimizer compiler, PackagedProgram prog, int parallelism)
        throws CompilerException, ProgramInvocationException {
    // note: unlike the variant in Example #13 above, this version does not
    // restore the previous context class loader afterwards
    Thread.currentThread().setContextClassLoader(prog.getUserCodeClassLoader());
    if (prog.isUsingProgramEntryPoint()) {
        return getOptimizedPlan(compiler, prog.getPlanWithJars(), parallelism);
    } else if (prog.isUsingInteractiveMode()) {
        // temporary hack to support the optimizer plan preview
        OptimizerPlanEnvironment env = new OptimizerPlanEnvironment(compiler);
        if (parallelism > 0) {
            env.setParallelism(parallelism);
        }
        return env.getOptimizedPlan(prog);
    } else {
        throw new RuntimeException("Couldn't determine program mode.");
    }
}
Example #19
Source File: RequestedGlobalProperties.java From flink with Apache License 2.0

/**
 * Checks if this set of interesting properties is met by the given produced properties.
 *
 * @param props The produced properties to check against these requested properties.
 * @return True, if the properties are met, false otherwise.
 */
public boolean isMetBy(GlobalProperties props) {
    if (this.partitioning == PartitioningProperty.ANY_DISTRIBUTION) {
        return true;
    } else if (this.partitioning == PartitioningProperty.FULL_REPLICATION) {
        return props.isFullyReplicated();
    } else if (props.isFullyReplicated()) {
        // fully replicated input is only acceptable if full replication was requested
        return false;
    } else if (this.partitioning == PartitioningProperty.RANDOM_PARTITIONED) {
        return true;
    } else if (this.partitioning == PartitioningProperty.ANY_PARTITIONING) {
        return checkCompatiblePartitioningFields(props);
    } else if (this.partitioning == PartitioningProperty.HASH_PARTITIONED) {
        return props.getPartitioning() == PartitioningProperty.HASH_PARTITIONED
                && checkCompatiblePartitioningFields(props);
    } else if (this.partitioning == PartitioningProperty.RANGE_PARTITIONED) {
        return props.getPartitioning() == PartitioningProperty.RANGE_PARTITIONED
                && props.matchesOrderedPartitioning(this.ordering);
    } else if (this.partitioning == PartitioningProperty.FORCED_REBALANCED) {
        return props.getPartitioning() == PartitioningProperty.FORCED_REBALANCED;
    } else if (this.partitioning == PartitioningProperty.CUSTOM_PARTITIONING) {
        return props.getPartitioning() == PartitioningProperty.CUSTOM_PARTITIONING
                && checkCompatiblePartitioningFields(props)
                && props.getCustomPartitioner().equals(this.customPartitioner);
    } else {
        throw new CompilerException("Properties matching logic leaves open cases.");
    }
}
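As a rough usage sketch of isMetBy (assuming RequestedGlobalProperties.setHashPartitioned accepts a FieldList, as GlobalProperties.setHashPartitioned does in the WorksetIterationNode example above; the class name IsMetBySketch is invented):

import org.apache.flink.api.common.operators.util.FieldList;
import org.apache.flink.optimizer.dataproperties.GlobalProperties;
import org.apache.flink.optimizer.dataproperties.RequestedGlobalProperties;

public class IsMetBySketch {
    public static void main(String[] args) {
        // request hash partitioning on field 0
        RequestedGlobalProperties requested = new RequestedGlobalProperties();
        requested.setHashPartitioned(new FieldList(0));

        // produced properties that are hash partitioned on the same field
        GlobalProperties produced = new GlobalProperties();
        produced.setHashPartitioned(new FieldList(0));

        System.out.println(requested.isMetBy(produced));               // expected: true
        System.out.println(requested.isMetBy(new GlobalProperties())); // expected: false (random distribution)
    }
}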
Example #20
Source File: PlanNode.java From Flink-CEPplus with Apache License 2.0

/**
 * Sets the basic cost for this node to the given value, and sets the cumulative costs
 * to those costs plus the cost shares of all inputs (regular and broadcast).
 *
 * @param nodeCosts The already known costs for this node
 *                  (this cost is produced by a concrete {@code OptimizerNode} subclass).
 */
public void setCosts(Costs nodeCosts) {
    // set the node costs
    this.nodeCosts = nodeCosts;

    // the cumulative costs are the node costs plus the costs of all inputs
    this.cumulativeCosts = nodeCosts.clone();

    // add all the normal inputs
    for (PlanNode pred : getPredecessors()) {
        Costs parentCosts = pred.getCumulativeCostsShare();
        if (parentCosts != null) {
            this.cumulativeCosts.addCosts(parentCosts);
        } else {
            throw new CompilerException("Trying to set the costs of an operator before the predecessor costs are computed.");
        }
    }

    // add all broadcast variable inputs
    if (this.broadcastInputs != null) {
        for (NamedChannel nc : this.broadcastInputs) {
            Costs bcInputCost = nc.getSource().getCumulativeCostsShare();
            if (bcInputCost != null) {
                this.cumulativeCosts.addCosts(bcInputCost);
            } else {
                throw new CompilerException("Trying to set the costs of an operator before the broadcast input costs are computed.");
            }
        }
    }
}