org.apache.flink.runtime.operators.DriverStrategy Java Examples
The following examples show how to use
org.apache.flink.runtime.operators.DriverStrategy.
Each example lists its source file, the project it comes from, and that project's license.
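DriverStrategy is the enum through which Flink's batch optimizer tells the runtime how each operator should be executed (for example ALL_GROUP_REDUCE, SORTED_GROUP_REDUCE, or HYBRIDHASH_BUILD_FIRST). For orientation, here is a minimal sketch that uses only members visible in the examples below (values(), getDriverClass(), getNumRequiredComparators()); it is an illustration, not code from any of the listed projects:

import org.apache.flink.runtime.operators.DriverStrategy;

public class ListDriverStrategies {
    public static void main(String[] args) {
        // Each strategy names the driver class that implements it and the number
        // of comparators the runtime must supply, as the plan nodes and the
        // job graph generator in the examples below make use of.
        for (DriverStrategy strategy : DriverStrategy.values()) {
            System.out.printf("%s -> %s (comparators required: %d)%n",
                    strategy, strategy.getDriverClass(), strategy.getNumRequiredComparators());
        }
    }
}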
Example #1
Source File: AllGroupReduceDriverTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testAllReduceDriverImmutableEmpty() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>>();

        List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
        TypeInformation<Tuple2<String, Integer>> typeInfo = TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<String, Integer>> input = EmptyMutableObjectIterator.get();

        context.setDriverStrategy(DriverStrategy.ALL_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setCollector(new DiscardingOutputCollector<Tuple2<String, Integer>>());

        AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>> driver =
                new AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>>();
        driver.setup(context);
        driver.prepare();
        driver.run();
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example #2
Source File: HashJoinBuildFirstProperties.java From flink with Apache License 2.0

@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    DriverStrategy strategy;

    if (!in1.isOnDynamicPath() && in2.isOnDynamicPath()) {
        // sanity check that the first input is cached and remove that cache
        if (!in1.getTempMode().isCached()) {
            throw new CompilerException("No cache at point where static and dynamic parts meet.");
        }

        in1.setTempMode(in1.getTempMode().makeNonCached());
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED;
    }
    else {
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST;
    }
    return new DualInputPlanNode(node, "Join (" + node.getOperator().getName() + ")", in1, in2, strategy, this.keys1, this.keys2);
}
Example #3
Source File: PlanNode.java From flink with Apache License 2.0

public PlanNode(OptimizerNode template, String nodeName, DriverStrategy strategy) {
    this.outChannels = new ArrayList<Channel>(2);
    this.broadcastInputs = new ArrayList<NamedChannel>();
    this.template = template;
    this.nodeName = nodeName;
    this.driverStrategy = strategy;
    this.parallelism = template.getParallelism();

    // check, if there is branch at this node. if yes, this candidate must be associated with
    // the branching template node.
    if (template.isBranching()) {
        this.branchPlan = new HashMap<OptimizerNode, PlanNode>(6);
        this.branchPlan.put(template, this);
    }
}
Example #4
Source File: CachedMatchStrategyCompilerTest.java From flink with Apache License 2.0

/**
 * This tests whether a HYBRIDHASH_BUILD_SECOND is correctly transformed to a HYBRIDHASH_BUILD_SECOND_CACHED
 * when inside of an iteration and on the static path.
 */
@Test
public void testRightSide() {
    try {
        Plan plan = getTestPlanRightStatic(Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_SECOND);

        OptimizedPlan oPlan = compileNoStats(plan);

        OptimizerPlanNodeResolver resolver = getOptimizerPlanNodeResolver(oPlan);
        DualInputPlanNode innerJoin = resolver.getNode("DummyJoiner");

        // verify correct join strategy
        assertEquals(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED, innerJoin.getDriverStrategy());
        assertEquals(TempMode.NONE, innerJoin.getInput1().getTempMode());
        assertEquals(TempMode.NONE, innerJoin.getInput2().getTempMode());

        new JobGraphGenerator().compileJobGraph(oPlan);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        fail("Test errored: " + e.getMessage());
    }
}
Example #5
Source File: CachedMatchStrategyCompilerTest.java From flink with Apache License 2.0

/**
 * This test makes sure that only a HYBRIDHASH on the static path is transformed to the cached variant.
 */
@Test
public void testRightSideCountercheck() {
    try {
        Plan plan = getTestPlanRightStatic(Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_FIRST);

        OptimizedPlan oPlan = compileNoStats(plan);

        OptimizerPlanNodeResolver resolver = getOptimizerPlanNodeResolver(oPlan);
        DualInputPlanNode innerJoin = resolver.getNode("DummyJoiner");

        // verify correct join strategy
        assertEquals(DriverStrategy.HYBRIDHASH_BUILD_FIRST, innerJoin.getDriverStrategy());
        assertEquals(TempMode.NONE, innerJoin.getInput1().getTempMode());
        assertEquals(TempMode.CACHED, innerJoin.getInput2().getTempMode());

        new JobGraphGenerator().compileJobGraph(oPlan);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        fail("Test errored: " + e.getMessage());
    }
}
Example #6
Source File: BulkPartialSolutionPlanNode.java From flink with Apache License 2.0

public BulkPartialSolutionPlanNode(BulkPartialSolutionNode template, String nodeName,
        GlobalProperties gProps, LocalProperties lProps, Channel initialInput) {
    super(template, nodeName, DriverStrategy.NONE);

    this.globalProps = gProps;
    this.localProps = lProps;
    this.initialInput = initialInput;

    // the partial solution does not cost anything
    this.nodeCosts = NO_COSTS;
    this.cumulativeCosts = NO_COSTS;

    if (initialInput.getSource().branchPlan != null && initialInput.getSource().branchPlan.size() > 0) {
        if (this.branchPlan == null) {
            this.branchPlan = new HashMap<OptimizerNode, PlanNode>();
        }
        this.branchPlan.putAll(initialInput.getSource().branchPlan);
    }
}
Example #7
Source File: CachedMatchStrategyCompilerTest.java From flink with Apache License 2.0

/**
 * This test makes sure that only a HYBRIDHASH on the static path is transformed to the cached variant.
 */
@Test
public void testLeftSideCountercheck() {
    try {
        Plan plan = getTestPlanLeftStatic(Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_SECOND);

        OptimizedPlan oPlan = compileNoStats(plan);

        OptimizerPlanNodeResolver resolver = getOptimizerPlanNodeResolver(oPlan);
        DualInputPlanNode innerJoin = resolver.getNode("DummyJoiner");

        // verify correct join strategy
        assertEquals(DriverStrategy.HYBRIDHASH_BUILD_SECOND, innerJoin.getDriverStrategy());
        assertEquals(TempMode.CACHED, innerJoin.getInput1().getTempMode());
        assertEquals(TempMode.NONE, innerJoin.getInput2().getTempMode());

        new JobGraphGenerator().compileJobGraph(oPlan);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        fail("Test errored: " + e.getMessage());
    }
}
Example #8
Source File: HashJoinBuildFirstProperties.java From Flink-CEPplus with Apache License 2.0

@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    DriverStrategy strategy;

    if (!in1.isOnDynamicPath() && in2.isOnDynamicPath()) {
        // sanity check that the first input is cached and remove that cache
        if (!in1.getTempMode().isCached()) {
            throw new CompilerException("No cache at point where static and dynamic parts meet.");
        }

        in1.setTempMode(in1.getTempMode().makeNonCached());
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED;
    }
    else {
        strategy = DriverStrategy.HYBRIDHASH_BUILD_FIRST;
    }
    return new DualInputPlanNode(node, "Join (" + node.getOperator().getName() + ")", in1, in2, strategy, this.keys1, this.keys2);
}
Example #9
Source File: GroupCombineProperties.java From Flink-CEPplus with Apache License 2.0

@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
    node.setParallelism(in.getSource().getParallelism());

    // sorting key info
    SingleInputPlanNode singleInputPlanNode = new SingleInputPlanNode(
            node,
            "GroupCombine (" + node.getOperator().getName() + ")",
            in, // reuse the combine strategy also used in the group reduce
            DriverStrategy.SORTED_GROUP_COMBINE,
            this.keyList);

    // set sorting comparator key info
    singleInputPlanNode.setDriverKeyInfo(this.ordering.getInvolvedIndexes(), this.ordering.getFieldSortDirections(), 0);
    // set grouping comparator key info
    singleInputPlanNode.setDriverKeyInfo(this.keyList, 1);

    return singleInputPlanNode;
}
Example #10
Source File: ChainedOperatorsMetricTest.java From flink with Apache License 2.0

private void addChainedOperator() {
    final TaskConfig chainedConfig = new TaskConfig(new Configuration());

    // input
    chainedConfig.addInputToGroup(0);
    chainedConfig.setInputSerializer(serFact, 0);

    // output
    chainedConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
    chainedConfig.setOutputSerializer(serFact);

    // driver
    chainedConfig.setDriverStrategy(DriverStrategy.FLAT_MAP);

    // udf
    chainedConfig.setStubWrapper(new UserCodeClassWrapper<>(DuplicatingFlatMapFunction.class));

    getTaskConfig().addChainedTask(ChainedFlatMapDriver.class, chainedConfig, CHAINED_OPERATOR_NAME);
}
Example #11
Source File: RelationalQueryCompilerTest.java From flink with Apache License 2.0

private boolean checkHashJoinStrategies(DualInputPlanNode join, SingleInputPlanNode reducer, boolean buildFirst) {
    if ((buildFirst && DriverStrategy.HYBRIDHASH_BUILD_FIRST == join.getDriverStrategy()) ||
            (!buildFirst && DriverStrategy.HYBRIDHASH_BUILD_SECOND == join.getDriverStrategy())) {

        // driver keys
        Assert.assertEquals(set0, join.getKeysForInput1());
        Assert.assertEquals(set0, join.getKeysForInput2());

        // local strategies
        Assert.assertEquals(LocalStrategy.NONE, join.getInput1().getLocalStrategy());
        Assert.assertEquals(LocalStrategy.NONE, join.getInput2().getLocalStrategy());
        Assert.assertEquals(LocalStrategy.COMBININGSORT, reducer.getInput().getLocalStrategy());

        // local strategy keys
        Assert.assertEquals(set01, reducer.getInput().getLocalStrategyKeys());
        Assert.assertEquals(set01, reducer.getKeys(0));
        Assert.assertTrue(Arrays.equals(reducer.getInput().getLocalStrategySortOrder(), reducer.getSortOrders(0)));
        return true;
    } else {
        return false;
    }
}
Example #12
Source File: SourcePlanNode.java From flink with Apache License 2.0

public SourcePlanNode(DataSourceNode template, String nodeName, GlobalProperties gprops, LocalProperties lprops) {
    super(template, nodeName, DriverStrategy.NONE);

    this.globalProps = gprops;
    this.localProps = lprops;
    updatePropertiesWithUniqueSets(template.getUniqueFields());
}
Example #13
Source File: CoGroupRawDescriptor.java From flink with Apache License 2.0

@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    boolean[] inputOrders = in1.getLocalProperties().getOrdering() == null
            ? null
            : in1.getLocalProperties().getOrdering().getFieldSortDirections();

    if (inputOrders == null || inputOrders.length < this.keys1.size()) {
        throw new CompilerException("BUG: The input strategy does not sufficiently describe the sort orders for a CoGroup operator.");
    } else if (inputOrders.length > this.keys1.size()) {
        boolean[] tmp = new boolean[this.keys1.size()];
        System.arraycopy(inputOrders, 0, tmp, 0, tmp.length);
        inputOrders = tmp;
    }

    return new DualInputPlanNode(node, "CoGroup (" + node.getOperator().getName() + ")", in1, in2,
            DriverStrategy.CO_GROUP_RAW, this.keys1, this.keys2, inputOrders);
}
Example #14
Source File: GroupReduceDriverTest.java From flink with Apache License 2.0

@Test
public void testAllReduceDriverImmutable() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>>();

        List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
        TupleTypeInfo<Tuple2<String, Integer>> typeInfo = (TupleTypeInfo<Tuple2<String, Integer>>) TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<String, Integer>> input = new RegularToMutableObjectIterator<Tuple2<String, Integer>>(data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));
        TypeComparator<Tuple2<String, Integer>> comparator = typeInfo.createComparator(new int[]{0}, new boolean[] {true}, 0, new ExecutionConfig());

        GatheringCollector<Tuple2<String, Integer>> result = new GatheringCollector<Tuple2<String, Integer>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setCollector(result);
        context.setComparator1(comparator);
        context.setUdf(new ConcatSumReducer());

        GroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>> driver =
                new GroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Object[] res = result.getList().toArray();
        Object[] expected = DriverTestData.createReduceImmutableDataGroupedResult().toArray();

        DriverTestData.compareTupleArrays(expected, res);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example #15
Source File: AllGroupReduceDriverTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testAllReduceDriverMutable() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>>();

        List<Tuple2<StringValue, IntValue>> data = DriverTestData.createReduceMutableData();
        TypeInformation<Tuple2<StringValue, IntValue>> typeInfo = TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<StringValue, IntValue>> input = new RegularToMutableObjectIterator<Tuple2<StringValue, IntValue>>(data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));

        GatheringCollector<Tuple2<StringValue, IntValue>> result = new GatheringCollector<Tuple2<StringValue, IntValue>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setDriverStrategy(DriverStrategy.ALL_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setCollector(result);
        context.setUdf(new ConcatSumMutableReducer());

        AllGroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>> driver =
                new AllGroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Tuple2<StringValue, IntValue> res = result.getList().get(0);

        char[] foundString = res.f0.getValue().toCharArray();
        Arrays.sort(foundString);

        char[] expectedString = "abcddeeeffff".toCharArray();
        Arrays.sort(expectedString);

        Assert.assertArrayEquals(expectedString, foundString);
        Assert.assertEquals(78, res.f1.getValue());
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example #16
Source File: TaskConfig.java From flink with Apache License 2.0

public DriverStrategy getDriverStrategy() {
    final int ls = this.config.getInteger(DRIVER_STRATEGY, -1);
    if (ls == -1) {
        return DriverStrategy.NONE;
    } else if (ls < 0 || ls >= DriverStrategy.values().length) {
        throw new CorruptConfigurationException("Illegal driver strategy in configuration: " + ls);
    } else {
        return DriverStrategy.values()[ls];
    }
}
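Read together with the config.setDriverStrategy(...) calls elsewhere on this page, this getter implies the strategy is stored by its ordinal, with -1 meaning "not set". A minimal round-trip sketch, assuming only the two TaskConfig methods shown in these examples:

import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.operators.DriverStrategy;
import org.apache.flink.runtime.operators.util.TaskConfig;

public class DriverStrategyRoundTrip {
    public static void main(String[] args) {
        TaskConfig config = new TaskConfig(new Configuration());
        // nothing written yet: the getter above falls back to DriverStrategy.NONE
        System.out.println(config.getDriverStrategy());
        // write a strategy and read it back
        config.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
        System.out.println(config.getDriverStrategy());
    }
}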
Example #17
Source File: GroupReduceDriverTest.java From flink with Apache License 2.0

@Test
public void testAllReduceDriverAccumulatingImmutable() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>, Tuple2<StringValue, IntValue>>();

        List<Tuple2<StringValue, IntValue>> data = DriverTestData.createReduceMutableData();
        TupleTypeInfo<Tuple2<StringValue, IntValue>> typeInfo = (TupleTypeInfo<Tuple2<StringValue, IntValue>>) TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<StringValue, IntValue>> input = new RegularToMutableObjectIterator<Tuple2<StringValue, IntValue>>(data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));
        TypeComparator<Tuple2<StringValue, IntValue>> comparator = typeInfo.createComparator(new int[]{0}, new boolean[] {true}, 0, new ExecutionConfig());

        GatheringCollector<Tuple2<StringValue, IntValue>> result = new GatheringCollector<Tuple2<StringValue, IntValue>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setComparator1(comparator);
        context.setCollector(result);
        context.setUdf(new ConcatSumMutableAccumulatingReducer());
        context.setMutableObjectMode(false);

        GroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>> driver =
                new GroupReduceDriver<Tuple2<StringValue, IntValue>, Tuple2<StringValue, IntValue>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Object[] res = result.getList().toArray();
        Object[] expected = DriverTestData.createReduceMutableDataGroupedResult().toArray();

        DriverTestData.compareTupleArrays(expected, res);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example #18
Source File: AllGroupReduceDriverTest.java From Flink-CEPplus with Apache License 2.0

@Test
public void testAllReduceDriverImmutable() {
    try {
        TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>> context =
                new TestTaskContext<GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>, Tuple2<String, Integer>>();

        List<Tuple2<String, Integer>> data = DriverTestData.createReduceImmutableData();
        TypeInformation<Tuple2<String, Integer>> typeInfo = TypeExtractor.getForObject(data.get(0));
        MutableObjectIterator<Tuple2<String, Integer>> input = new RegularToMutableObjectIterator<Tuple2<String, Integer>>(data.iterator(), typeInfo.createSerializer(new ExecutionConfig()));

        GatheringCollector<Tuple2<String, Integer>> result = new GatheringCollector<Tuple2<String, Integer>>(typeInfo.createSerializer(new ExecutionConfig()));

        context.setDriverStrategy(DriverStrategy.ALL_GROUP_REDUCE);
        context.setInput1(input, typeInfo.createSerializer(new ExecutionConfig()));
        context.setCollector(result);
        context.setUdf(new ConcatSumReducer());

        AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>> driver =
                new AllGroupReduceDriver<Tuple2<String, Integer>, Tuple2<String, Integer>>();
        driver.setup(context);
        driver.prepare();
        driver.run();

        Tuple2<String, Integer> res = result.getList().get(0);

        char[] foundString = res.f0.toCharArray();
        Arrays.sort(foundString);

        char[] expectedString = "abcddeeeffff".toCharArray();
        Arrays.sort(expectedString);

        Assert.assertArrayEquals(expectedString, foundString);
        Assert.assertEquals(78, res.f1.intValue());
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example #19
Source File: SingleInputPlanNode.java From flink with Apache License 2.0

public SingleInputPlanNode(OptimizerNode template, String nodeName, Channel input,
        DriverStrategy driverStrategy, FieldList driverKeyFields, boolean[] driverSortOrders) {
    super(template, nodeName, driverStrategy);

    this.input = input;

    this.comparators = new TypeComparatorFactory<?>[driverStrategy.getNumRequiredComparators()];
    this.driverKeys = new FieldList[driverStrategy.getNumRequiredComparators()];
    this.driverSortOrders = new boolean[driverStrategy.getNumRequiredComparators()][];

    if (driverStrategy.getNumRequiredComparators() > 0) {
        this.driverKeys[0] = driverKeyFields;
        this.driverSortOrders[0] = driverSortOrders;
    }

    if (this.input.getShipStrategy() == ShipStrategyType.BROADCAST) {
        this.input.setReplicationFactor(getParallelism());
    }

    final PlanNode predNode = input.getSource();

    if (predNode.branchPlan != null && !predNode.branchPlan.isEmpty()) {
        if (this.branchPlan == null) {
            this.branchPlan = new HashMap<OptimizerNode, PlanNode>();
        }
        this.branchPlan.putAll(predNode.branchPlan);
    }
}
Example #20
Source File: JoinTranslationTest.java From flink with Apache License 2.0

@Test
public void testBroadcastHashFirstTest() {
    try {
        DualInputPlanNode node = createPlanAndGetJoinNode(JoinHint.BROADCAST_HASH_FIRST);
        assertEquals(ShipStrategyType.BROADCAST, node.getInput1().getShipStrategy());
        assertEquals(ShipStrategyType.FORWARD, node.getInput2().getShipStrategy());
        assertEquals(DriverStrategy.HYBRIDHASH_BUILD_FIRST, node.getDriverStrategy());
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getClass().getSimpleName() + ": " + e.getMessage());
    }
}
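For context, the JoinHint under test here normally comes from user code in the DataSet API. A hypothetical sketch of supplying it (the dataset names and contents are illustrative only; the resulting DriverStrategy is the one the test above asserts):

import org.apache.flink.api.common.operators.base.JoinOperatorBase.JoinHint;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class BroadcastHashFirstSketch {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<Tuple2<Long, String>> small = env.fromElements(Tuple2.of(1L, "a"));
        DataSet<Tuple2<Long, String>> big = env.fromElements(Tuple2.of(1L, "b"));
        // BROADCAST_HASH_FIRST broadcasts the first input and builds the hash
        // table from it, which the optimizer translates to
        // DriverStrategy.HYBRIDHASH_BUILD_FIRST (as asserted above).
        small.join(big, JoinHint.BROADCAST_HASH_FIRST)
                .where(0)
                .equalTo(0)
                .print();
    }
}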
Example #21
Source File: JobGraphGenerator.java From Flink-CEPplus with Apache License 2.0

private JobVertex createDualInputVertex(DualInputPlanNode node) throws CompilerException {
    final String taskName = node.getNodeName();
    final DriverStrategy ds = node.getDriverStrategy();
    final JobVertex vertex = new JobVertex(taskName);
    final TaskConfig config = new TaskConfig(vertex.getConfiguration());

    vertex.setResources(node.getMinResources(), node.getPreferredResources());
    vertex.setInvokableClass((this.currentIteration != null && node.isOnDynamicPath()) ? IterationIntermediateTask.class : BatchTask.class);

    // set user code
    config.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
    config.setStubParameters(node.getProgramOperator().getParameters());

    // set the driver strategy
    config.setDriver(ds.getDriverClass());
    config.setDriverStrategy(ds);

    if (node.getComparator1() != null) {
        config.setDriverComparator(node.getComparator1(), 0);
    }
    if (node.getComparator2() != null) {
        config.setDriverComparator(node.getComparator2(), 1);
    }
    if (node.getPairComparator() != null) {
        config.setDriverPairComparator(node.getPairComparator());
    }

    // assign memory, file-handles, etc.
    assignDriverResources(node, config);
    return vertex;
}
Example #22
Source File: JoinTranslationTest.java From flink with Apache License 2.0

@Test
public void testOptimizerChoosesTest() {
    try {
        DualInputPlanNode node = createPlanAndGetJoinNode(JoinHint.OPTIMIZER_CHOOSES);
        assertEquals(ShipStrategyType.PARTITION_HASH, node.getInput1().getShipStrategy());
        assertEquals(ShipStrategyType.PARTITION_HASH, node.getInput2().getShipStrategy());
        assertTrue(DriverStrategy.HYBRIDHASH_BUILD_FIRST == node.getDriverStrategy() ||
                DriverStrategy.HYBRIDHASH_BUILD_SECOND == node.getDriverStrategy());
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getClass().getSimpleName() + ": " + e.getMessage());
    }
}
Example #23
Source File: BulkIterationPlanNode.java From flink with Apache License 2.0

public BulkIterationPlanNode(BulkIterationNode template, String nodeName, Channel input,
        BulkPartialSolutionPlanNode pspn, PlanNode rootOfStepFunction) {
    super(template, nodeName, input, DriverStrategy.NONE);
    this.partialSolutionPlanNode = pspn;
    this.rootOfStepFunction = rootOfStepFunction;

    mergeBranchPlanMaps();
}
Example #24
Source File: WorksetIterationPlanNode.java From flink with Apache License 2.0

public WorksetIterationPlanNode(WorksetIterationNode template, String nodeName,
        Channel initialSolutionSet, Channel initialWorkset,
        SolutionSetPlanNode solutionSetPlanNode, WorksetPlanNode worksetPlanNode,
        PlanNode nextWorkSetPlanNode, PlanNode solutionSetDeltaPlanNode) {
    super(template, nodeName, initialSolutionSet, initialWorkset, DriverStrategy.BINARY_NO_OP);
    this.solutionSetPlanNode = solutionSetPlanNode;
    this.worksetPlanNode = worksetPlanNode;
    this.solutionSetDeltaPlanNode = solutionSetDeltaPlanNode;
    this.nextWorkSetPlanNode = nextWorkSetPlanNode;

    mergeBranchPlanMaps();
}
Example #25
Source File: JobGraphGenerator.java From flink with Apache License 2.0

private JobVertex createDualInputVertex(DualInputPlanNode node) throws CompilerException {
    final String taskName = node.getNodeName();
    final DriverStrategy ds = node.getDriverStrategy();
    final JobVertex vertex = new JobVertex(taskName);
    final TaskConfig config = new TaskConfig(vertex.getConfiguration());

    vertex.setResources(node.getMinResources(), node.getPreferredResources());
    vertex.setInvokableClass((this.currentIteration != null && node.isOnDynamicPath()) ? IterationIntermediateTask.class : BatchTask.class);

    // set user code
    config.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
    config.setStubParameters(node.getProgramOperator().getParameters());

    // set the driver strategy
    config.setDriver(ds.getDriverClass());
    config.setDriverStrategy(ds);

    if (node.getComparator1() != null) {
        config.setDriverComparator(node.getComparator1(), 0);
    }
    if (node.getComparator2() != null) {
        config.setDriverComparator(node.getComparator2(), 1);
    }
    if (node.getPairComparator() != null) {
        config.setDriverPairComparator(node.getPairComparator());
    }

    // assign memory, file-handles, etc.
    assignDriverResources(node, config);
    return vertex;
}
Example #26
Source File: NAryUnionPlanNode.java From flink with Apache License 2.0

/**
 * @param template
 */
public NAryUnionPlanNode(BinaryUnionNode template, List<Channel> inputs, GlobalProperties gProps,
        Costs cumulativeCosts) {
    super(template, "Union", DriverStrategy.NONE);

    this.inputs = inputs;
    this.globalProps = gProps;
    this.localProps = new LocalProperties();
    this.nodeCosts = new Costs();
    this.cumulativeCosts = cumulativeCosts;
}
Example #27
Source File: WorksetIterationPlanNode.java From Flink-CEPplus with Apache License 2.0

public WorksetIterationPlanNode(WorksetIterationNode template, String nodeName,
        Channel initialSolutionSet, Channel initialWorkset,
        SolutionSetPlanNode solutionSetPlanNode, WorksetPlanNode worksetPlanNode,
        PlanNode nextWorkSetPlanNode, PlanNode solutionSetDeltaPlanNode) {
    super(template, nodeName, initialSolutionSet, initialWorkset, DriverStrategy.BINARY_NO_OP);
    this.solutionSetPlanNode = solutionSetPlanNode;
    this.worksetPlanNode = worksetPlanNode;
    this.solutionSetDeltaPlanNode = solutionSetDeltaPlanNode;
    this.nextWorkSetPlanNode = nextWorkSetPlanNode;

    mergeBranchPlanMaps();
}
Example #28
Source File: CachedMatchStrategyCompilerTest.java From flink with Apache License 2.0

/**
 * This test simulates a join of a big left side with a small right side inside of an iteration, where the small side is on a static path.
 * Currently the best execution plan is a HYBRIDHASH_BUILD_SECOND_CACHED, where the small side is hashed and cached.
 * This test also makes sure that all relevant plans are correctly enumerated by the optimizer.
 */
@Test
public void testCorrectChoosing() {
    try {
        Plan plan = getTestPlanRightStatic("");

        SourceCollectorVisitor sourceCollector = new SourceCollectorVisitor();
        plan.accept(sourceCollector);

        for (GenericDataSourceBase<?, ?> s : sourceCollector.getSources()) {
            if (s.getName().equals("bigFile")) {
                this.setSourceStatistics(s, 10000000, 1000);
            }
            else if (s.getName().equals("smallFile")) {
                this.setSourceStatistics(s, 100, 100);
            }
        }

        OptimizedPlan oPlan = compileNoStats(plan);

        OptimizerPlanNodeResolver resolver = getOptimizerPlanNodeResolver(oPlan);
        DualInputPlanNode innerJoin = resolver.getNode("DummyJoiner");

        // verify correct join strategy
        assertEquals(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED, innerJoin.getDriverStrategy());
        assertEquals(TempMode.NONE, innerJoin.getInput1().getTempMode());
        assertEquals(TempMode.NONE, innerJoin.getInput2().getTempMode());

        new JobGraphGenerator().compileJobGraph(oPlan);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        fail("Test errored: " + e.getMessage());
    }
}
Example #29
Source File: CoGroupWithSolutionSetFirstDescriptor.java From flink with Apache License 2.0

@Override
public DualInputPlanNode instantiate(Channel in1, Channel in2, TwoInputNode node) {
    boolean[] inputOrders = in2.getLocalProperties().getOrdering() == null
            ? null
            : in2.getLocalProperties().getOrdering().getFieldSortDirections();

    if (inputOrders == null || inputOrders.length < this.keys2.size()) {
        throw new CompilerException("BUG: The input strategy does not sufficiently describe the sort orders for a CoGroup operator.");
    } else if (inputOrders.length > this.keys2.size()) {
        boolean[] tmp = new boolean[this.keys2.size()];
        System.arraycopy(inputOrders, 0, tmp, 0, tmp.length);
        inputOrders = tmp;
    }

    return new DualInputPlanNode(node, "CoGroup (" + node.getOperator().getName() + ")", in1, in2,
            DriverStrategy.CO_GROUP, this.keys1, this.keys2, inputOrders);
}