org.apache.flink.optimizer.plan.Channel Java Examples
The following examples show how to use
org.apache.flink.optimizer.plan.Channel.
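In these examples, a Channel models the connection between two candidate plan nodes in Flink's batch optimizer and carries the ship strategy, data exchange mode, and local strategy chosen for that connection. As a minimal sketch of the recurring pattern (assembled from the FeedbackPropertiesMatchTest cases further down, not taken verbatim from the Flink sources), a Channel is constructed around its source plan node, parameterized, and then handed to the plan node that consumes it. The helpers getSourceNode() and getMapNode() stand in for optimizer DAG nodes obtained elsewhere.

    // Minimal sketch: wiring two plan nodes with a Channel, using the same calls
    // as the FeedbackPropertiesMatchTest examples below. getSourceNode() and
    // getMapNode() are placeholders for nodes obtained from the optimizer DAG.
    SourcePlanNode source = new SourcePlanNode(getSourceNode(), "Source");

    Channel toMapper = new Channel(source);
    toMapper.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
    toMapper.setLocalStrategy(LocalStrategy.NONE);

    SingleInputPlanNode mapper =
        new SingleInputPlanNode(getMapNode(), "Mapper", toMapper, DriverStrategy.MAP);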
Example #1
Source File: GroupCombineProperties.java From flink with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
    node.setParallelism(in.getSource().getParallelism());

    // sorting key info
    SingleInputPlanNode singleInputPlanNode = new SingleInputPlanNode(
            node,
            "GroupCombine (" + node.getOperator().getName() + ")",
            in, // reuse the combine strategy also used in the group reduce
            DriverStrategy.SORTED_GROUP_COMBINE,
            this.keyList);

    // set sorting comparator key info
    singleInputPlanNode.setDriverKeyInfo(this.ordering.getInvolvedIndexes(), this.ordering.getFieldSortDirections(), 0);

    // set grouping comparator key info
    singleInputPlanNode.setDriverKeyInfo(this.keyList, 1);

    return singleInputPlanNode;
}
Example #2
Source File: HashJoinBuildSecondProperties.java From flink with Apache License 2.0
@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    DriverStrategy strategy;

    if (!in2.isOnDynamicPath() && in1.isOnDynamicPath()) {
        // sanity check that the first input is cached and remove that cache
        if (!in2.getTempMode().isCached()) {
            throw new CompilerException("No cache at point where static and dynamic parts meet.");
        }

        in2.setTempMode(in2.getTempMode().makeNonCached());
        strategy = DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED;
    }
    else {
        strategy = DriverStrategy.HYBRIDHASH_BUILD_SECOND;
    }
    return new DualInputPlanNode(node, "Join (" + node.getOperator().getName() + ")", in1, in2, strategy, this.keys1, this.keys2);
}
Example #3
Source File: HashJoinBuildFirstProperties.java From Flink-CEPplus with Apache License 2.0
@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    DriverStrategy strategy;

    if (!in1.isOnDynamicPath() && in2.isOnDynamicPath()) {
        // sanity check that the first input is cached and remove that cache
        if (!in1.getTempMode().isCached()) {
            throw new CompilerException("No cache at point where static and dynamic parts meet.");
        }

        in1.setTempMode(in1.getTempMode().makeNonCached());
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED;
    }
    else {
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST;
    }
    return new DualInputPlanNode(node, "Join (" + node.getOperator().getName() + ")", in1, in2, strategy, this.keys1, this.keys2);
}
Example #4
Source File: BinaryUnionReplacer.java From Flink-CEPplus with Apache License 2.0
public void collect(Channel in, List<Channel> inputs) {
    if (in.getSource() instanceof NAryUnionPlanNode) {
        // sanity check
        if (in.getShipStrategy() != ShipStrategyType.FORWARD) {
            throw new CompilerException("Bug: Plan generation for Unions picked a ship strategy between binary plan operators.");
        }

        if (!(in.getLocalStrategy() == null || in.getLocalStrategy() == LocalStrategy.NONE)) {
            throw new CompilerException("Bug: Plan generation for Unions picked a local strategy between binary plan operators.");
        }

        inputs.addAll(((NAryUnionPlanNode) in.getSource()).getListOfInputs());
    }
    else {
        // is not a collapsed union node, so we take the channel directly
        inputs.add(in);
    }
}
Example #5
Source File: GroupCombineProperties.java From Flink-CEPplus with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
    node.setParallelism(in.getSource().getParallelism());

    // sorting key info
    SingleInputPlanNode singleInputPlanNode = new SingleInputPlanNode(
            node,
            "GroupCombine (" + node.getOperator().getName() + ")",
            in, // reuse the combine strategy also used in the group reduce
            DriverStrategy.SORTED_GROUP_COMBINE,
            this.keyList);

    // set sorting comparator key info
    singleInputPlanNode.setDriverKeyInfo(this.ordering.getInvolvedIndexes(), this.ordering.getFieldSortDirections(), 0);

    // set grouping comparator key info
    singleInputPlanNode.setDriverKeyInfo(this.keyList, 1);

    return singleInputPlanNode;
}
Example #6
Source File: GroupCombineProperties.java From flink with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
    node.setParallelism(in.getSource().getParallelism());

    // sorting key info
    SingleInputPlanNode singleInputPlanNode = new SingleInputPlanNode(
            node,
            "GroupCombine (" + node.getOperator().getName() + ")",
            in, // reuse the combine strategy also used in the group reduce
            DriverStrategy.SORTED_GROUP_COMBINE,
            this.keyList);

    // set sorting comparator key info
    singleInputPlanNode.setDriverKeyInfo(this.ordering.getInvolvedIndexes(), this.ordering.getFieldSortDirections(), 0);

    // set grouping comparator key info
    singleInputPlanNode.setDriverKeyInfo(this.keyList, 1);

    return singleInputPlanNode;
}
Example #7
Source File: RequestedLocalProperties.java From flink with Apache License 2.0
/**
 * Parametrizes the local strategy fields of a channel such that the channel produces the desired local properties.
 *
 * @param channel The channel to parametrize.
 */
public void parameterizeChannel(Channel channel) {
    LocalProperties current = channel.getLocalProperties();

    if (isMetBy(current)) {
        // we are met, all is good
        channel.setLocalStrategy(LocalStrategy.NONE);
    }
    else if (this.ordering != null) {
        channel.setLocalStrategy(LocalStrategy.SORT, this.ordering.getInvolvedIndexes(), this.ordering.getFieldSortDirections());
    }
    else if (this.groupedFields != null) {
        boolean[] dirs = new boolean[this.groupedFields.size()];
        Arrays.fill(dirs, true);
        channel.setLocalStrategy(LocalStrategy.SORT, Utils.createOrderedFromSet(this.groupedFields), dirs);
    }
    else {
        channel.setLocalStrategy(LocalStrategy.NONE);
    }
}
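A possible call site for the method above, as a sketch only: the requested-properties setup and the field index are illustrative and not taken from the Flink sources. The requirements object decides whether the channel keeps LocalStrategy.NONE or is assigned a SORT local strategy.

    // Hypothetical usage of parameterizeChannel; "channel" is assumed to be an
    // existing Channel instance, and the grouping field is arbitrary.
    RequestedLocalProperties requested = new RequestedLocalProperties();
    requested.setGroupedFields(new FieldSet(0));  // request grouping on field 0
    // If the channel's current local properties already satisfy the request, the
    // local strategy stays NONE; otherwise the channel is switched to SORT.
    requested.parameterizeChannel(channel);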
Example #8
Source File: HashJoinBuildSecondProperties.java From Flink-CEPplus with Apache License 2.0
@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    DriverStrategy strategy;

    if (!in2.isOnDynamicPath() && in1.isOnDynamicPath()) {
        // sanity check that the first input is cached and remove that cache
        if (!in2.getTempMode().isCached()) {
            throw new CompilerException("No cache at point where static and dynamic parts meet.");
        }

        in2.setTempMode(in2.getTempMode().makeNonCached());
        strategy = DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED;
    }
    else {
        strategy = DriverStrategy.HYBRIDHASH_BUILD_SECOND;
    }
    return new DualInputPlanNode(node, "Join (" + node.getOperator().getName() + ")", in1, in2, strategy, this.keys1, this.keys2);
}
Example #9
Source File: TestUtils.java From Flink-CEPplus with Apache License 2.0
/**
 * Verify operator parallelism.
 *
 * @param env the Flink execution environment.
 * @param expectedParallelism expected operator parallelism
 */
public static void verifyParallelism(ExecutionEnvironment env, int expectedParallelism) {
    env.setParallelism(2 * expectedParallelism);

    Optimizer compiler = new Optimizer(null, new DefaultCostEstimator(), new Configuration());
    OptimizedPlan optimizedPlan = compiler.compile(env.createProgramPlan());

    List<PlanNode> queue = new ArrayList<>();
    queue.addAll(optimizedPlan.getDataSinks());

    while (queue.size() > 0) {
        PlanNode node = queue.remove(queue.size() - 1);

        // Data sources may have parallelism of 1, so simply check that the node
        // parallelism has not been increased by setting the default parallelism
        assertTrue("Wrong parallelism for " + node.toString(), node.getParallelism() <= expectedParallelism);

        for (Channel channel : node.getInputs()) {
            queue.add(channel.getSource());
        }
    }
}
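As a usage sketch (the DataSet program built on env is assumed to exist, and the parallelism value is arbitrary), the helper is simply invoked after the job has been defined; it compiles the plan and walks the optimizer DAG from the sinks backwards through each node's input channels.

    // Hypothetical usage of verifyParallelism.
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    // ... define sources, transformations, and sinks on env ...
    TestUtils.verifyParallelism(env, 4);  // fails if any plan node exceeds parallelism 4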
Example #10
Source File: JobGraphGenerator.java From flink with Apache License 2.0
private boolean checkAndConfigurePersistentIntermediateResult(PlanNode node) {
    if (!(node instanceof SinkPlanNode)) {
        return false;
    }

    final Object userCodeObject = node.getProgramOperator().getUserCodeWrapper().getUserCodeObject();
    if (!(userCodeObject instanceof BlockingShuffleOutputFormat)) {
        return false;
    }

    final Iterator<Channel> inputIterator = node.getInputs().iterator();
    checkState(inputIterator.hasNext(), "SinkPlanNode must have a input.");

    final PlanNode predecessorNode = inputIterator.next().getSource();
    final JobVertex predecessorVertex = (vertices.containsKey(predecessorNode)) ?
        vertices.get(predecessorNode) :
        chainedTasks.get(predecessorNode).getContainingVertex();

    checkState(predecessorVertex != null, "Bug: Chained task has not been assigned its containing vertex when connecting.");

    predecessorVertex.createAndAddResultDataSet(
            // use specified intermediateDataSetID
            new IntermediateDataSetID(((BlockingShuffleOutputFormat) userCodeObject).getIntermediateDataSetId()),
            ResultPartitionType.BLOCKING_PERSISTENT);

    // remove this node so the OutputFormatVertex will not shown in the final JobGraph.
    vertices.remove(node);
    return true;
}
Example #11
Source File: PartialGroupProperties.java From flink with Apache License 2.0
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
    // create in input node for combine with the same parallelism as input node
    GroupReduceNode combinerNode = new GroupReduceNode((GroupReduceOperatorBase<?, ?, ?>) node.getOperator());
    combinerNode.setParallelism(in.getSource().getParallelism());

    SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode, "Combine (" + node.getOperator().getName() + ")", in,
            DriverStrategy.SORTED_GROUP_COMBINE);
    // sorting key info
    combiner.setDriverKeyInfo(in.getLocalStrategyKeys(), in.getLocalStrategySortOrder(), 0);
    // set grouping comparator key info
    combiner.setDriverKeyInfo(this.keyList, 1);

    return combiner;
}
Example #12
Source File: CoGroupWithSolutionSetFirstDescriptor.java From Flink-CEPplus with Apache License 2.0
@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    boolean[] inputOrders = in2.getLocalProperties().getOrdering() == null ? null : in2.getLocalProperties().getOrdering().getFieldSortDirections();

    if (inputOrders == null || inputOrders.length < this.keys2.size()) {
        throw new CompilerException("BUG: The input strategy does not sufficiently describe the sort orders for a CoGroup operator.");
    } else if (inputOrders.length > this.keys2.size()) {
        boolean[] tmp = new boolean[this.keys2.size()];
        System.arraycopy(inputOrders, 0, tmp, 0, tmp.length);
        inputOrders = tmp;
    }

    return new DualInputPlanNode(node, "CoGroup (" + node.getOperator().getName() + ")", in1, in2, DriverStrategy.CO_GROUP, this.keys1, this.keys2, inputOrders);
}
Example #13
Source File: Utils.java From flink with Apache License 2.0
public static TypeComparatorFactory<?> getShipComparator(Channel channel, ExecutionConfig executionConfig) {
    PlanNode source = channel.getSource();
    Operator<?> javaOp = source.getProgramOperator();
    TypeInformation<?> type = javaOp.getOperatorInfo().getOutputType();
    return createComparator(type, channel.getShipStrategyKeys(),
        getSortOrders(channel.getShipStrategyKeys(), channel.getShipStrategySortOrder()), executionConfig);
}
Example #14
Source File: FeedbackPropertiesMatchTest.java From flink with Apache License 2.0
@Test
public void testNoPartialSolutionFoundSingleInputOnly() {
    try {
        SourcePlanNode target = new SourcePlanNode(getSourceNode(), "Source");
        SourcePlanNode otherTarget = new SourcePlanNode(getSourceNode(), "Source");

        Channel toMap1 = new Channel(target);
        toMap1.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap1.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map1 = new SingleInputPlanNode(getMapNode(), "Mapper 1", toMap1, DriverStrategy.MAP);

        Channel toMap2 = new Channel(map1);
        toMap2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap2.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map2 = new SingleInputPlanNode(getMapNode(), "Mapper 2", toMap2, DriverStrategy.MAP);

        {
            GlobalProperties gp = new GlobalProperties();
            LocalProperties lp = new LocalProperties();

            FeedbackPropertiesMeetRequirementsReport report = map2.checkPartialSolutionPropertiesMet(otherTarget, gp, lp);
            assertTrue(report == NO_PARTIAL_SOLUTION);
        }
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #15
Source File: PartitionOperatorTest.java From flink with Apache License 2.0
@Test
public void testRangePartitionOperatorPreservesFields() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<Tuple2<Long, Long>> data = env.fromCollection(Collections.singleton(new Tuple2<>(0L, 0L)));

        data.partitionByRange(1)
            .groupBy(1)
            .reduceGroup(new IdentityGroupReducerCombinable<Tuple2<Long, Long>>())
            .output(new DiscardingOutputFormat<Tuple2<Long, Long>>());

        Plan p = env.createProgramPlan();
        OptimizedPlan op = compileNoStats(p);

        SinkPlanNode sink = op.getDataSinks().iterator().next();
        SingleInputPlanNode reducer = (SingleInputPlanNode) sink.getInput().getSource();
        SingleInputPlanNode partitionNode = (SingleInputPlanNode) reducer.getInput().getSource();
        SingleInputPlanNode partitionIDRemover = (SingleInputPlanNode) partitionNode.getInput().getSource();

        assertEquals(ShipStrategyType.FORWARD, reducer.getInput().getShipStrategy());
        assertEquals(ShipStrategyType.FORWARD, partitionNode.getInput().getShipStrategy());
        assertEquals(ShipStrategyType.PARTITION_CUSTOM, partitionIDRemover.getInput().getShipStrategy());

        SourcePlanNode sourcePlanNode = op.getDataSources().iterator().next();
        List<Channel> sourceOutgoingChannels = sourcePlanNode.getOutgoingChannels();
        assertEquals(2, sourceOutgoingChannels.size());
        assertEquals(ShipStrategyType.FORWARD, sourceOutgoingChannels.get(0).getShipStrategy());
        assertEquals(ShipStrategyType.FORWARD, sourceOutgoingChannels.get(1).getShipStrategy());
        assertEquals(DataExchangeMode.PIPELINED, sourceOutgoingChannels.get(0).getDataExchangeMode());
        assertEquals(DataExchangeMode.BATCH, sourceOutgoingChannels.get(1).getDataExchangeMode());
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #16
Source File: BulkPartialSolutionNode.java From flink with Apache License 2.0
public void setCandidateProperties(GlobalProperties gProps, LocalProperties lProps, Channel initialInput) {
    if (this.cachedPlans != null) {
        throw new IllegalStateException();
    } else {
        this.cachedPlans = Collections.<PlanNode>singletonList(new BulkPartialSolutionPlanNode(this,
                "PartialSolution (" + this.getOperator().getName() + ")", gProps, lProps, initialInput));
    }
}
Example #17
Source File: BulkPartialSolutionNode.java From Flink-CEPplus with Apache License 2.0
public void setCandidateProperties(GlobalProperties gProps, LocalProperties lProps, Channel initialInput) {
    if (this.cachedPlans != null) {
        throw new IllegalStateException();
    } else {
        this.cachedPlans = Collections.<PlanNode>singletonList(new BulkPartialSolutionPlanNode(this,
                "PartialSolution (" + this.getOperator().getName() + ")", gProps, lProps, initialInput));
    }
}
Example #18
Source File: FeedbackPropertiesMatchTest.java From flink with Apache License 2.0
@Test
public void testNoPartialSolutionFoundSingleInputOnly() {
    try {
        SourcePlanNode target = new SourcePlanNode(getSourceNode(), "Source");
        SourcePlanNode otherTarget = new SourcePlanNode(getSourceNode(), "Source");

        Channel toMap1 = new Channel(target);
        toMap1.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap1.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map1 = new SingleInputPlanNode(getMapNode(), "Mapper 1", toMap1, DriverStrategy.MAP);

        Channel toMap2 = new Channel(map1);
        toMap2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap2.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map2 = new SingleInputPlanNode(getMapNode(), "Mapper 2", toMap2, DriverStrategy.MAP);

        {
            GlobalProperties gp = new GlobalProperties();
            LocalProperties lp = new LocalProperties();

            FeedbackPropertiesMeetRequirementsReport report = map2.checkPartialSolutionPropertiesMet(otherTarget, gp, lp);
            assertTrue(report == NO_PARTIAL_SOLUTION);
        }
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #19
Source File: IterationsCompilerTest.java From flink with Apache License 2.0
@Test
public void testIterationNotPushingWorkOut() throws Exception {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(8);

        DataSet<Tuple2<Long, Long>> input1 = env.readCsvFile("/some/file/path").types(Long.class).map(new DuplicateValue());

        DataSet<Tuple2<Long, Long>> input2 = env.readCsvFile("/some/file/path").types(Long.class, Long.class);

        // Use input1 as partial solution. Partial solution is used in a single join operation --> it is cheaper
        // to do the hash partitioning between the partial solution node and the join node
        // instead of pushing the partitioning out
        doSimpleBulkIteration(input1, input2).output(new DiscardingOutputFormat<Tuple2<Long, Long>>());

        Plan p = env.createProgramPlan();
        OptimizedPlan op = compileNoStats(p);

        assertEquals(1, op.getDataSinks().size());
        assertTrue(op.getDataSinks().iterator().next().getInput().getSource() instanceof BulkIterationPlanNode);

        BulkIterationPlanNode bipn = (BulkIterationPlanNode) op.getDataSinks().iterator().next().getInput().getSource();

        // check that work has not been pushed out
        for (Channel c : bipn.getPartialSolutionPlanNode().getOutgoingChannels()) {
            assertEquals(ShipStrategyType.PARTITION_HASH, c.getShipStrategy());
        }

        assertEquals(ShipStrategyType.FORWARD, bipn.getInput().getShipStrategy());

        new JobGraphGenerator().compileJobGraph(op);
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #20
Source File: PartitionOperatorTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testRangePartitionOperatorPreservesFields() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<Tuple2<Long, Long>> data = env.fromCollection(Collections.singleton(new Tuple2<>(0L, 0L)));

        data.partitionByRange(1)
            .groupBy(1)
            .reduceGroup(new IdentityGroupReducerCombinable<Tuple2<Long, Long>>())
            .output(new DiscardingOutputFormat<Tuple2<Long, Long>>());

        Plan p = env.createProgramPlan();
        OptimizedPlan op = compileNoStats(p);

        SinkPlanNode sink = op.getDataSinks().iterator().next();
        SingleInputPlanNode reducer = (SingleInputPlanNode) sink.getInput().getSource();
        SingleInputPlanNode partitionNode = (SingleInputPlanNode) reducer.getInput().getSource();
        SingleInputPlanNode partitionIDRemover = (SingleInputPlanNode) partitionNode.getInput().getSource();

        assertEquals(ShipStrategyType.FORWARD, reducer.getInput().getShipStrategy());
        assertEquals(ShipStrategyType.FORWARD, partitionNode.getInput().getShipStrategy());
        assertEquals(ShipStrategyType.PARTITION_CUSTOM, partitionIDRemover.getInput().getShipStrategy());

        SourcePlanNode sourcePlanNode = op.getDataSources().iterator().next();
        List<Channel> sourceOutgoingChannels = sourcePlanNode.getOutgoingChannels();
        assertEquals(2, sourceOutgoingChannels.size());
        assertEquals(ShipStrategyType.FORWARD, sourceOutgoingChannels.get(0).getShipStrategy());
        assertEquals(ShipStrategyType.FORWARD, sourceOutgoingChannels.get(1).getShipStrategy());
        assertEquals(DataExchangeMode.PIPELINED, sourceOutgoingChannels.get(0).getDataExchangeMode());
        assertEquals(DataExchangeMode.BATCH, sourceOutgoingChannels.get(1).getDataExchangeMode());
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #21
Source File: FeedbackPropertiesMatchTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testNoPartialSolutionFoundSingleInputOnly() {
    try {
        SourcePlanNode target = new SourcePlanNode(getSourceNode(), "Source");
        SourcePlanNode otherTarget = new SourcePlanNode(getSourceNode(), "Source");

        Channel toMap1 = new Channel(target);
        toMap1.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap1.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map1 = new SingleInputPlanNode(getMapNode(), "Mapper 1", toMap1, DriverStrategy.MAP);

        Channel toMap2 = new Channel(map1);
        toMap2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap2.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map2 = new SingleInputPlanNode(getMapNode(), "Mapper 2", toMap2, DriverStrategy.MAP);

        {
            GlobalProperties gp = new GlobalProperties();
            LocalProperties lp = new LocalProperties();

            FeedbackPropertiesMeetRequirementsReport report = map2.checkPartialSolutionPropertiesMet(otherTarget, gp, lp);
            assertTrue(report == NO_PARTIAL_SOLUTION);
        }
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #22
Source File: Utils.java From flink with Apache License 2.0
public static TypeComparatorFactory<?> getShipComparator(Channel channel, ExecutionConfig executionConfig) {
    PlanNode source = channel.getSource();
    Operator<?> javaOp = source.getProgramOperator();
    TypeInformation<?> type = javaOp.getOperatorInfo().getOutputType();
    return createComparator(type, channel.getShipStrategyKeys(),
        getSortOrders(channel.getShipStrategyKeys(), channel.getShipStrategySortOrder()), executionConfig);
}
Example #23
Source File: FeedbackPropertiesMatchTest.java From flink with Apache License 2.0
@Test
public void testNoPartialSolutionFoundTwoInputOperator() {
    try {
        SourcePlanNode target = new SourcePlanNode(getSourceNode(), "Partial Solution");

        SourcePlanNode source1 = new SourcePlanNode(getSourceNode(), "Source 1");
        SourcePlanNode source2 = new SourcePlanNode(getSourceNode(), "Source 2");

        Channel toMap1 = new Channel(source1);
        toMap1.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap1.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map1 = new SingleInputPlanNode(getMapNode(), "Mapper 1", toMap1, DriverStrategy.MAP);

        Channel toMap2 = new Channel(source2);
        toMap2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toMap2.setLocalStrategy(LocalStrategy.NONE);
        SingleInputPlanNode map2 = new SingleInputPlanNode(getMapNode(), "Mapper 2", toMap2, DriverStrategy.MAP);

        Channel toJoin1 = new Channel(map1);
        Channel toJoin2 = new Channel(map2);

        toJoin1.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toJoin1.setLocalStrategy(LocalStrategy.NONE);

        toJoin2.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED);
        toJoin2.setLocalStrategy(LocalStrategy.NONE);

        DualInputPlanNode join = new DualInputPlanNode(getJoinNode(), "Join", toJoin1, toJoin2, DriverStrategy.HYBRIDHASH_BUILD_FIRST);

        FeedbackPropertiesMeetRequirementsReport report = join.checkPartialSolutionPropertiesMet(target, new GlobalProperties(), new LocalProperties());
        assertEquals(NO_PARTIAL_SOLUTION, report);
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #24
Source File: CoGroupRawDescriptor.java From flink with Apache License 2.0
@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    boolean[] inputOrders = in1.getLocalProperties().getOrdering() == null ? null : in1.getLocalProperties().getOrdering().getFieldSortDirections();

    if (inputOrders == null || inputOrders.length < this.keys1.size()) {
        throw new CompilerException("BUG: The input strategy does not sufficiently describe the sort orders for a CoGroup operator.");
    } else if (inputOrders.length > this.keys1.size()) {
        boolean[] tmp = new boolean[this.keys1.size()];
        System.arraycopy(inputOrders, 0, tmp, 0, tmp.length);
        inputOrders = tmp;
    }

    return new DualInputPlanNode(node, "CoGroup (" + node.getOperator().getName() + ")", in1, in2,
            DriverStrategy.CO_GROUP_RAW, this.keys1, this.keys2, inputOrders);
}
Example #25
Source File: JobGraphGenerator.java From flink with Apache License 2.0
private void assignLocalStrategyResources(Channel c, TaskConfig config, int inputNum) {
    if (c.getRelativeMemoryLocalStrategy() > 0) {
        config.setRelativeMemoryInput(inputNum, c.getRelativeMemoryLocalStrategy());
        config.setFilehandlesInput(inputNum, this.defaultMaxFan);
        config.setSpillingThresholdInput(inputNum, this.defaultSortSpillingThreshold);
        config.setUseLargeRecordHandler(this.useLargeRecordHandler);
    }
}
Example #26
Source File: JobGraphGenerator.java From flink with Apache License 2.0
private void assignLocalStrategyResources(Channel c, TaskConfig config, int inputNum) {
    if (c.getRelativeMemoryLocalStrategy() > 0) {
        config.setRelativeMemoryInput(inputNum, c.getRelativeMemoryLocalStrategy());
        config.setFilehandlesInput(inputNum, this.defaultMaxFan);
        config.setSpillingThresholdInput(inputNum, this.defaultSortSpillingThreshold);
        config.setUseLargeRecordHandler(this.useLargeRecordHandler);
    }
}
Example #27
Source File: RangePartitionRewriter.java From flink with Apache License 2.0
@Override
public void postVisit(PlanNode node) {
    if (node instanceof IterationPlanNode) {
        IterationPlanNode iNode = (IterationPlanNode) node;
        if (!visitedIterationNodes.contains(iNode)) {
            visitedIterationNodes.add(iNode);
            iNode.acceptForStepFunction(this);
        }
    }

    final Iterable<Channel> inputChannels = node.getInputs();
    for (Channel channel : inputChannels) {
        ShipStrategyType shipStrategy = channel.getShipStrategy();
        // Make sure we only optimize the DAG for range partition, and do not optimize multi times.
        if (shipStrategy == ShipStrategyType.PARTITION_RANGE) {

            if (channel.getDataDistribution() == null) {
                if (node.isOnDynamicPath()) {
                    throw new InvalidProgramException("Range Partitioning not supported within iterations if users do not supply the data distribution.");
                }

                PlanNode channelSource = channel.getSource();
                List<Channel> newSourceOutputChannels = rewriteRangePartitionChannel(channel);
                channelSource.getOutgoingChannels().remove(channel);
                channelSource.getOutgoingChannels().addAll(newSourceOutputChannels);
            }
        }
    }
}
Example #28
Source File: WorksetNode.java From flink with Apache License 2.0
public void setCandidateProperties(GlobalProperties gProps, LocalProperties lProps, Channel initialInput) {
    if (this.cachedPlans != null) {
        throw new IllegalStateException();
    } else {
        WorksetPlanNode wspn = new WorksetPlanNode(this, "Workset (" + this.getOperator().getName() + ")", gProps, lProps, initialInput);
        this.cachedPlans = Collections.<PlanNode>singletonList(wspn);
    }
}
Example #29
Source File: UtilSinkJoinOpDescriptor.java From flink with Apache License 2.0
@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    if (node instanceof SinkJoiner) {
        return new SinkJoinerPlanNode((SinkJoiner) node, in1, in2);
    } else {
        throw new CompilerException();
    }
}
Example #30
Source File: CoGroupWithSolutionSetFirstDescriptor.java From flink with Apache License 2.0
@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    boolean[] inputOrders = in2.getLocalProperties().getOrdering() == null ? null : in2.getLocalProperties().getOrdering().getFieldSortDirections();

    if (inputOrders == null || inputOrders.length < this.keys2.size()) {
        throw new CompilerException("BUG: The input strategy does not sufficiently describe the sort orders for a CoGroup operator.");
    } else if (inputOrders.length > this.keys2.size()) {
        boolean[] tmp = new boolean[this.keys2.size()];
        System.arraycopy(inputOrders, 0, tmp, 0, tmp.length);
        inputOrders = tmp;
    }

    return new DualInputPlanNode(node, "CoGroup (" + node.getOperator().getName() + ")", in1, in2, DriverStrategy.CO_GROUP, this.keys1, this.keys2, inputOrders);
}