org.apache.flink.api.common.operators.Operator Java Examples
The following examples show how to use
org.apache.flink.api.common.operators.Operator.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
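Operator is the common base class of the operators in a translated batch program: the DataSet API builds Operators in its translateToDataFlow methods, and the optimizer and various utilities walk them through the Visitor callbacks shown below. As a minimal, hypothetical sketch of where such an Operator DAG comes from (not taken from any of the listed files; it only assumes the public DataSet API), the following builds a tiny program, obtains its Plan via ExecutionEnvironment#createProgramPlan, and prints every Operator it visits:

import org.apache.flink.api.common.Plan;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.operators.Operator;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.DiscardingOutputFormat;
import org.apache.flink.util.Visitor;

public class OperatorDagSketch {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // a tiny program: source -> map -> sink
        env.fromElements(1, 2, 3)
            .map(new MapFunction<Integer, Integer>() {
                @Override
                public Integer map(Integer value) {
                    return value * 2;
                }
            })
            .output(new DiscardingOutputFormat<Integer>());

        // the DataSet program is translated into a DAG of common-API Operators
        Plan plan = env.createProgramPlan();

        // traverse from the sinks towards the sources, as the Plan#accept examples below do
        plan.accept(new Visitor<Operator<?>>() {
            @Override
            public boolean preVisit(Operator<?> visitable) {
                System.out.println(visitable.getName() + " (parallelism " + visitable.getParallelism() + ")");
                return true; // descend into this operator's inputs
            }

            @Override
            public void postVisit(Operator<?> visitable) {
                // nothing to do on the way back up
            }
        });
    }
}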
Example #1
Source File: PartitionOperator.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") private static <T, K> org.apache.flink.api.common.operators.SingleInputOperator<?, T, ?> translateSelectorFunctionPartitioner( SelectorFunctionKeys<T, ?> rawKeys, PartitionMethod pMethod, String name, Operator<T> input, int partitionDop, Partitioner<?> customPartitioner, Order[] orders) { final SelectorFunctionKeys<T, K> keys = (SelectorFunctionKeys<T, K>) rawKeys; TypeInformation<Tuple2<K, T>> typeInfoWithKey = KeyFunctions.createTypeWithKey(keys); Operator<Tuple2<K, T>> keyedInput = KeyFunctions.appendKeyExtractor(input, keys); PartitionOperatorBase<Tuple2<K, T>> keyedPartitionedInput = new PartitionOperatorBase<>(new UnaryOperatorInformation<>(typeInfoWithKey, typeInfoWithKey), pMethod, new int[]{0}, name); keyedPartitionedInput.setInput(keyedInput); keyedPartitionedInput.setCustomPartitioner(customPartitioner); keyedPartitionedInput.setParallelism(partitionDop); keyedPartitionedInput.setOrdering(new Ordering(0, null, orders != null ? orders[0] : Order.ASCENDING)); return KeyFunctions.appendKeyRemover(keyedPartitionedInput, keys); }
Example #2
Source File: OptimizerNode.java From flink with Apache License 2.0 | 6 votes |
/**
 * This function connects the operators that produce the broadcast inputs to this operator.
 *
 * @param operatorToNode The map from program operators to optimizer nodes.
 * @param defaultExchangeMode The data exchange mode to use, if the operator does not
 *                            specify one.
 *
 * @throws CompilerException
 */
public void setBroadcastInputs(Map<Operator<?>, OptimizerNode> operatorToNode, ExecutionMode defaultExchangeMode) {
    // skip for Operators that don't support broadcast variables
    if (!(getOperator() instanceof AbstractUdfOperator<?, ?>)) {
        return;
    }

    // get all broadcast inputs
    AbstractUdfOperator<?, ?> operator = ((AbstractUdfOperator<?, ?>) getOperator());

    // create connections and add them
    for (Map.Entry<String, Operator<?>> input : operator.getBroadcastInputs().entrySet()) {
        OptimizerNode predecessor = operatorToNode.get(input.getValue());
        DagConnection connection = new DagConnection(predecessor, this, ShipStrategyType.BROADCAST, defaultExchangeMode);
        addBroadcastConnection(input.getKey(), connection);
        predecessor.addOutgoingConnection(connection);
    }
}
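The broadcast inputs wired up here originate from withBroadcastSet calls in the DataSet API. A hypothetical fragment showing that caller side (assuming an ExecutionEnvironment named env and the usual imports; the name "bcName" is made up for the example):

DataSet<Integer> toBroadcast = env.fromElements(1, 2, 3);

DataSet<String> result = env.fromElements("a", "b")
    .map(new RichMapFunction<String, String>() {
        @Override
        public String map(String value) throws Exception {
            // the broadcast variable registered below is available under the same name here
            List<Integer> bc = getRuntimeContext().getBroadcastVariable("bcName");
            return value + "/" + bc.size();
        }
    })
    .withBroadcastSet(toBroadcast, "bcName");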
Example #3
Source File: GroupCombineOperator.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") private static <IN, OUT, K1, K2> PlanUnwrappingSortedGroupCombineOperator<IN, OUT, K1, K2> translateSelectorFunctionSortedReducer( SelectorFunctionKeys<IN, ?> rawGroupingKey, SelectorFunctionKeys<IN, ?> rawSortingKeys, Ordering groupOrder, GroupCombineFunction<IN, OUT> function, TypeInformation<OUT> outputType, String name, Operator<IN> input) { final SelectorFunctionKeys<IN, K1> groupingKey = (SelectorFunctionKeys<IN, K1>) rawGroupingKey; final SelectorFunctionKeys<IN, K2> sortingKey = (SelectorFunctionKeys<IN, K2>) rawSortingKeys; TypeInformation<Tuple3<K1, K2, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(groupingKey, sortingKey); Operator<Tuple3<K1, K2, IN>> inputWithKey = KeyFunctions.appendKeyExtractor(input, groupingKey, sortingKey); PlanUnwrappingSortedGroupCombineOperator<IN, OUT, K1, K2> reducer = new PlanUnwrappingSortedGroupCombineOperator<>(function, groupingKey, sortingKey, name, outputType, typeInfoWithKey); reducer.setInput(inputWithKey); reducer.setGroupOrder(groupOrder); return reducer; }
Example #4
Source File: MapPartitionOperator.java From flink with Apache License 2.0 | 6 votes |
@Override
protected MapPartitionOperatorBase<IN, OUT, MapPartitionFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
    String name = getName() != null ? getName() : "MapPartition at " + defaultName;
    // create operator
    MapPartitionOperatorBase<IN, OUT, MapPartitionFunction<IN, OUT>> po =
        new MapPartitionOperatorBase<IN, OUT, MapPartitionFunction<IN, OUT>>(function, new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
    // set input
    po.setInput(input);
    // set parallelism
    if (this.getParallelism() > 0) {
        // use specified parallelism
        po.setParallelism(this.getParallelism());
    } else {
        // if no parallelism has been specified, use parallelism of input operator to enable chaining
        po.setParallelism(input.getParallelism());
    }
    return po;
}
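The parallelism fall-through above (use the configured value if set, otherwise inherit from the input) is common to most translateToDataFlow implementations in this listing. A hypothetical usage fragment (assuming an ExecutionEnvironment named env and the usual imports), where the explicit setParallelism call is what later shows up as getParallelism() during translation:

DataSet<String> lines = env.fromElements("a", "b", "c");

DataSet<Integer> counts = lines
    .mapPartition(new MapPartitionFunction<String, Integer>() {
        @Override
        public void mapPartition(Iterable<String> values, Collector<Integer> out) {
            int n = 0;
            for (String ignored : values) {
                n++;
            }
            out.collect(n); // one count per parallel partition
        }
    })
    .setParallelism(4); // without this call, the translated operator inherits the input's parallelism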
Example #5
Source File: FlatMapOperator.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Override
protected FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
    String name = getName() != null ? getName() : "FlatMap at " + defaultName;
    // create operator
    FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> po =
        new FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>>(function, new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
    // set input
    po.setInput(input);
    // set parallelism
    if (this.getParallelism() > 0) {
        // use specified parallelism
        po.setParallelism(this.getParallelism());
    } else {
        // if no parallelism has been specified, use parallelism of input operator to enable chaining
        po.setParallelism(input.getParallelism());
    }
    return po;
}
Example #6
Source File: PartitionOperator.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") private static <T, K> org.apache.flink.api.common.operators.SingleInputOperator<?, T, ?> translateSelectorFunctionPartitioner( SelectorFunctionKeys<T, ?> rawKeys, PartitionMethod pMethod, String name, Operator<T> input, int partitionDop, Partitioner<?> customPartitioner, Order[] orders) { final SelectorFunctionKeys<T, K> keys = (SelectorFunctionKeys<T, K>) rawKeys; TypeInformation<Tuple2<K, T>> typeInfoWithKey = KeyFunctions.createTypeWithKey(keys); Operator<Tuple2<K, T>> keyedInput = KeyFunctions.appendKeyExtractor(input, keys); PartitionOperatorBase<Tuple2<K, T>> keyedPartitionedInput = new PartitionOperatorBase<>(new UnaryOperatorInformation<>(typeInfoWithKey, typeInfoWithKey), pMethod, new int[]{0}, name); keyedPartitionedInput.setInput(keyedInput); keyedPartitionedInput.setCustomPartitioner(customPartitioner); keyedPartitionedInput.setParallelism(partitionDop); keyedPartitionedInput.setOrdering(new Ordering(0, null, orders != null ? orders[0] : Order.ASCENDING)); return KeyFunctions.appendKeyRemover(keyedPartitionedInput, keys); }
Example #7
Source File: OperatorResolver.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") public <T extends Operator<?>> T getNode(String name, Class<? extends RichFunction> stubClass) { List<Operator<?>> nodes = this.map.get(name); if (nodes == null || nodes.isEmpty()) { throw new RuntimeException("No node found with the given name and stub class."); } else { Operator<?> found = null; for (Operator<?> node : nodes) { if (node.getClass() == stubClass) { if (found == null) { found = node; } else { throw new RuntimeException("Multiple nodes found with the given name and stub class."); } } } if (found == null) { throw new RuntimeException("No node found with the given name and stub class."); } else { return (T) found; } } }
Example #8
Source File: CoGroupOperator.java From flink with Apache License 2.0 | 6 votes |
private static <I1, I2, K, OUT> PlanBothUnwrappingCoGroupOperator<I1, I2, OUT, K> translateSelectorFunctionCoGroup(
        SelectorFunctionKeys<I1, ?> rawKeys1, SelectorFunctionKeys<I2, ?> rawKeys2,
        CoGroupFunction<I1, I2, OUT> function,
        TypeInformation<OUT> outputType, String name,
        Operator<I1> input1, Operator<I2> input2) {

    @SuppressWarnings("unchecked")
    final SelectorFunctionKeys<I1, K> keys1 = (SelectorFunctionKeys<I1, K>) rawKeys1;
    @SuppressWarnings("unchecked")
    final SelectorFunctionKeys<I2, K> keys2 = (SelectorFunctionKeys<I2, K>) rawKeys2;

    final TypeInformation<Tuple2<K, I1>> typeInfoWithKey1 = KeyFunctions.createTypeWithKey(keys1);
    final TypeInformation<Tuple2<K, I2>> typeInfoWithKey2 = KeyFunctions.createTypeWithKey(keys2);

    final Operator<Tuple2<K, I1>> keyedInput1 = KeyFunctions.appendKeyExtractor(input1, keys1);
    final Operator<Tuple2<K, I2>> keyedInput2 = KeyFunctions.appendKeyExtractor(input2, keys2);

    final PlanBothUnwrappingCoGroupOperator<I1, I2, OUT, K> cogroup =
        new PlanBothUnwrappingCoGroupOperator<>(function, keys1, keys2, name, outputType, typeInfoWithKey1, typeInfoWithKey2);
    cogroup.setFirstInput(keyedInput1);
    cogroup.setSecondInput(keyedInput2);

    return cogroup;
}
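A selector-function CoGroup like the one above is produced when both sides of a coGroup are keyed with KeySelectors. A hypothetical caller-side fragment (assuming an ExecutionEnvironment named env and the usual imports; not taken from the file above):

DataSet<Tuple2<String, Integer>> left = env.fromElements(Tuple2.of("a", 1));
DataSet<Tuple2<String, Long>> right = env.fromElements(Tuple2.of("a", 10L));

DataSet<String> grouped = left.coGroup(right)
    .where(new KeySelector<Tuple2<String, Integer>, String>() {
        @Override
        public String getKey(Tuple2<String, Integer> value) {
            return value.f0;
        }
    })
    .equalTo(new KeySelector<Tuple2<String, Long>, String>() {
        @Override
        public String getKey(Tuple2<String, Long> value) {
            return value.f0;
        }
    })
    .with(new CoGroupFunction<Tuple2<String, Integer>, Tuple2<String, Long>, String>() {
        @Override
        public void coGroup(Iterable<Tuple2<String, Integer>> first,
                Iterable<Tuple2<String, Long>> second,
                Collector<String> out) {
            out.collect("group"); // emit one record per key group
        }
    });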
Example #9
Source File: OptimizerNode.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
/**
 * This function connects the operators that produce the broadcast inputs to this operator.
 *
 * @param operatorToNode The map from program operators to optimizer nodes.
 * @param defaultExchangeMode The data exchange mode to use, if the operator does not
 *                            specify one.
 *
 * @throws CompilerException
 */
public void setBroadcastInputs(Map<Operator<?>, OptimizerNode> operatorToNode, ExecutionMode defaultExchangeMode) {
    // skip for Operators that don't support broadcast variables
    if (!(getOperator() instanceof AbstractUdfOperator<?, ?>)) {
        return;
    }

    // get all broadcast inputs
    AbstractUdfOperator<?, ?> operator = ((AbstractUdfOperator<?, ?>) getOperator());

    // create connections and add them
    for (Map.Entry<String, Operator<?>> input : operator.getBroadcastInputs().entrySet()) {
        OptimizerNode predecessor = operatorToNode.get(input.getValue());
        DagConnection connection = new DagConnection(predecessor, this, ShipStrategyType.BROADCAST, defaultExchangeMode);
        addBroadcastConnection(input.getKey(), connection);
        predecessor.addOutgoingConnection(connection);
    }
}
Example #10
Source File: DistinctOperator.java From flink with Apache License 2.0 | 6 votes |
private static <IN, K> org.apache.flink.api.common.operators.SingleInputOperator<?, IN, ?> translateSelectorFunctionDistinct(
        SelectorFunctionKeys<IN, ?> rawKeys,
        ReduceFunction<IN> function,
        TypeInformation<IN> outputType,
        String name,
        Operator<IN> input,
        int parallelism,
        CombineHint hint) {

    @SuppressWarnings("unchecked")
    final SelectorFunctionKeys<IN, K> keys = (SelectorFunctionKeys<IN, K>) rawKeys;

    TypeInformation<Tuple2<K, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(keys);
    Operator<Tuple2<K, IN>> keyedInput = KeyFunctions.appendKeyExtractor(input, keys);

    PlanUnwrappingReduceOperator<IN, K> reducer =
        new PlanUnwrappingReduceOperator<>(function, keys, name, outputType, typeInfoWithKey);
    reducer.setInput(keyedInput);
    reducer.setCombineHint(hint);
    reducer.setParallelism(parallelism);

    return KeyFunctions.appendKeyRemover(reducer, keys);
}
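The same append-key, reduce, remove-key pattern backs distinct(KeySelector) in the DataSet API. A hypothetical fragment (assuming an ExecutionEnvironment named env and the usual imports):

DataSet<Tuple2<String, Integer>> data = env.fromElements(
    Tuple2.of("a", 1), Tuple2.of("a", 2), Tuple2.of("b", 3));

// deduplicate on the String field via a KeySelector; the selector path is translated
// through a method like translateSelectorFunctionDistinct above
DataSet<Tuple2<String, Integer>> distinctByKey = data.distinct(
    new KeySelector<Tuple2<String, Integer>, String>() {
        @Override
        public String getKey(Tuple2<String, Integer> value) {
            return value.f0;
        }
    });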
Example #11
Source File: FlatMapOperator.java From flink with Apache License 2.0 | 6 votes |
@Override
protected FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
    String name = getName() != null ? getName() : "FlatMap at " + defaultName;
    // create operator
    FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> po =
        new FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>>(function, new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
    // set input
    po.setInput(input);
    // set parallelism
    if (this.getParallelism() > 0) {
        // use specified parallelism
        po.setParallelism(this.getParallelism());
    } else {
        // if no parallelism has been specified, use parallelism of input operator to enable chaining
        po.setParallelism(input.getParallelism());
    }
    return po;
}
Example #12
Source File: MapOperator.java From flink with Apache License 2.0 | 6 votes |
@Override
protected MapOperatorBase<IN, OUT, MapFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
    String name = getName() != null ? getName() : "Map at " + defaultName;
    // create operator
    MapOperatorBase<IN, OUT, MapFunction<IN, OUT>> po =
        new MapOperatorBase<IN, OUT, MapFunction<IN, OUT>>(function, new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
    // set input
    po.setInput(input);
    // set parallelism
    if (this.getParallelism() > 0) {
        // use specified parallelism
        po.setParallelism(this.getParallelism());
    } else {
        // if no parallelism has been specified, use parallelism of input operator to enable chaining
        po.setParallelism(input.getParallelism());
    }
    return po;
}
Example #13
Source File: GroupCombineOperator.java From flink with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") private static <IN, OUT, K> PlanUnwrappingGroupCombineOperator<IN, OUT, K> translateSelectorFunctionReducer( SelectorFunctionKeys<IN, ?> rawKeys, GroupCombineFunction<IN, OUT> function, TypeInformation<OUT> outputType, String name, Operator<IN> input) { final SelectorFunctionKeys<IN, K> keys = (SelectorFunctionKeys<IN, K>) rawKeys; TypeInformation<Tuple2<K, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(keys); Operator<Tuple2<K, IN>> keyedInput = KeyFunctions.appendKeyExtractor(input, keys); PlanUnwrappingGroupCombineOperator<IN, OUT, K> reducer = new PlanUnwrappingGroupCombineOperator<>(function, keys, name, outputType, typeInfoWithKey); reducer.setInput(keyedInput); return reducer; }
Example #14
Source File: GroupReduceOperator.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") private static <IN, OUT, K1, K2> PlanUnwrappingSortedReduceGroupOperator<IN, OUT, K1, K2> translateSelectorFunctionSortedReducer( SelectorFunctionKeys<IN, ?> rawGroupingKey, SelectorFunctionKeys<IN, ?> rawSortingKey, Ordering groupOrdering, GroupReduceFunction<IN, OUT> function, TypeInformation<OUT> outputType, String name, Operator<IN> input, boolean combinable) { final SelectorFunctionKeys<IN, K1> groupingKey = (SelectorFunctionKeys<IN, K1>) rawGroupingKey; final SelectorFunctionKeys<IN, K2> sortingKey = (SelectorFunctionKeys<IN, K2>) rawSortingKey; TypeInformation<Tuple3<K1, K2, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(groupingKey, sortingKey); Operator<Tuple3<K1, K2, IN>> inputWithKey = KeyFunctions.appendKeyExtractor(input, groupingKey, sortingKey); PlanUnwrappingSortedReduceGroupOperator<IN, OUT, K1, K2> reducer = new PlanUnwrappingSortedReduceGroupOperator<>( function, groupingKey, sortingKey, name, outputType, typeInfoWithKey, combinable); reducer.setInput(inputWithKey); reducer.setGroupOrder(groupOrdering); return reducer; }
Example #15
Source File: ProjectOperator.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Override
protected org.apache.flink.api.common.operators.base.MapOperatorBase<IN, OUT, MapFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
    String name = getName() != null ? getName() : "Projection " + Arrays.toString(fields);
    // create operator
    PlanProjectOperator<IN, OUT> ppo =
        new PlanProjectOperator<IN, OUT>(fields, name, getInputType(), getResultType(), context.getConfig());
    // set input
    ppo.setInput(input);
    // set parallelism
    ppo.setParallelism(this.getParallelism());
    ppo.setSemanticProperties(SemanticPropUtil.createProjectionPropertiesSingle(fields, (CompositeType<?>) getInputType()));
    return ppo;
}
Example #16
Source File: JoinOperator.java From flink with Apache License 2.0 | 5 votes |
public <I2, K> JoinOperatorBaseBuilder<OUT> withWrappedInput2(
        Operator<I2> input2,
        SelectorFunctionKeys<I2, ?> rawKeys2) {

    @SuppressWarnings("unchecked")
    SelectorFunctionKeys<I2, K> keys2 = (SelectorFunctionKeys<I2, K>) rawKeys2;
    TypeInformation<Tuple2<K, I2>> typeInfoWithKey2 = KeyFunctions.createTypeWithKey(keys2);
    Operator<Tuple2<K, I2>> keyMapper2 = KeyFunctions.appendKeyExtractor(input2, keys2);

    return withInput2(keyMapper2, typeInfoWithKey2, rawKeys2);
}
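The builder method above wraps one join input with its extracted key; at the user-facing level this corresponds to a join whose keys are given as KeySelectors. A hypothetical fragment (assuming an ExecutionEnvironment named env and the usual imports; not taken from the file above):

DataSet<Tuple2<String, Integer>> left = env.fromElements(Tuple2.of("a", 1));
DataSet<Tuple2<String, Long>> right = env.fromElements(Tuple2.of("a", 10L));

DataSet<Tuple2<Integer, Long>> joined = left.join(right)
    .where(new KeySelector<Tuple2<String, Integer>, String>() {
        @Override
        public String getKey(Tuple2<String, Integer> value) {
            return value.f0;
        }
    })
    .equalTo(new KeySelector<Tuple2<String, Long>, String>() {
        @Override
        public String getKey(Tuple2<String, Long> value) {
            return value.f0;
        }
    })
    .with(new JoinFunction<Tuple2<String, Integer>, Tuple2<String, Long>, Tuple2<Integer, Long>>() {
        @Override
        public Tuple2<Integer, Long> join(Tuple2<String, Integer> first, Tuple2<String, Long> second) {
            return Tuple2.of(first.f1, second.f1); // combine the matched pair
        }
    });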
Example #17
Source File: Plan.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
/**
 * Traverses the job depth first from all data sinks on towards the sources.
 *
 * @see Visitable#accept(Visitor)
 */
@Override
public void accept(Visitor<Operator<?>> visitor) {
    for (GenericDataSinkBase<?> sink : this.sinks) {
        sink.accept(visitor);
    }
}
Example #18
Source File: Plan.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Override
public boolean preVisit(Operator<?> visitable) {
    if (!visitedOperators.add(visitable)) {
        return false;
    }
    this.maxDop = Math.max(this.maxDop, visitable.getParallelism());
    return true;
}
Example #19
Source File: DataSink.java From flink with Apache License 2.0 | 5 votes |
protected GenericDataSinkBase<T> translateToDataFlow(Operator<T> input) {
    // select the name (or create a default one)
    String name = this.name != null ? this.name : this.format.toString();
    GenericDataSinkBase<T> sink = new GenericDataSinkBase<>(this.format, new UnaryOperatorInformation<>(this.type, new NothingTypeInfo()), name);
    // set input
    sink.setInput(input);
    // set parameters
    if (this.parameters != null) {
        sink.getParameters().addAll(this.parameters);
    }
    // set parallelism
    if (this.parallelism > 0) {
        // use specified parallelism
        sink.setParallelism(this.parallelism);
    } else {
        // if no parallelism has been specified, use parallelism of input operator to enable chaining
        sink.setParallelism(input.getParallelism());
    }

    if (this.sortKeyPositions != null) {
        // configure output sorting
        Ordering ordering = new Ordering();
        for (int i = 0; i < this.sortKeyPositions.length; i++) {
            ordering.appendOrdering(this.sortKeyPositions[i], null, this.sortOrders[i]);
        }
        sink.setLocalOrder(ordering);
    }

    return sink;
}
Example #20
Source File: CompilerTestBase.java From flink with Apache License 2.0 | 5 votes |
@Override
public boolean preVisit(Operator<?> visitable) {
    if (visitable instanceof GenericDataSourceBase) {
        sources.add((GenericDataSourceBase<?, ?>) visitable);
    } else if (visitable instanceof BulkIterationBase) {
        ((BulkIterationBase<?>) visitable).getNextPartialSolution().accept(this);
    }
    return true;
}
Example #21
Source File: OperatorResolver.java From flink with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") public <T extends Operator<?>> T getNode(String name) { List<Operator<?>> nodes = this.map.get(name); if (nodes == null || nodes.isEmpty()) { throw new RuntimeException("No nodes found with the given name."); } else if (nodes.size() != 1) { throw new RuntimeException("Multiple nodes found with the given name."); } else { return (T) nodes.get(0); } }
Example #22
Source File: Plan.java From flink with Apache License 2.0 | 5 votes |
/**
 * Traverses the job depth first from all data sinks on towards the sources.
 *
 * @see Visitable#accept(Visitor)
 */
@Override
public void accept(Visitor<Operator<?>> visitor) {
    for (GenericDataSinkBase<?> sink : this.sinks) {
        sink.accept(visitor);
    }
}
Example #23
Source File: OperatorResolver.java From flink with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") public <T extends Operator<?>> T getNode(String name) { List<Operator<?>> nodes = this.map.get(name); if (nodes == null || nodes.isEmpty()) { throw new RuntimeException("No nodes found with the given name."); } else if (nodes.size() != 1) { throw new RuntimeException("Multiple nodes found with the given name."); } else { return (T) nodes.get(0); } }
Example #24
Source File: Plan.java From flink with Apache License 2.0 | 5 votes |
/**
 * Traverses the job depth first from all data sinks on towards the sources.
 *
 * @see Visitable#accept(Visitor)
 */
@Override
public void accept(Visitor<Operator<?>> visitor) {
    for (GenericDataSinkBase<?> sink : this.sinks) {
        sink.accept(visitor);
    }
}
Example #25
Source File: JavaApiPostPass.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") private static <T> TypeInformation<T> getTypeInfoFromSource(SourcePlanNode node) { Operator<?> op = node.getOptimizerNode().getOperator(); if (op instanceof GenericDataSourceBase) { return ((GenericDataSourceBase<T, ?>) op).getOperatorInfo().getOutputType(); } else { throw new RuntimeException("Wrong operator type found in post pass."); } }
Example #26
Source File: BulkIterationBase.java From flink with Apache License 2.0 | 5 votes |
/**
 * Sets the operator that produces the next partial solution of the iteration.
 *
 * @param result The operator producing the next partial solution.
 */
public void setNextPartialSolution(Operator<T> result) {
    if (result == null) {
        throw new NullPointerException("Operator producing the next partial solution must not be null.");
    }
    this.iterationResult = result;
}
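In the DataSet API, the next partial solution is supplied through IterativeDataSet#closeWith, which during plan translation ends up on the underlying BulkIterationBase via a setter like the one above. A hypothetical fragment (assuming an ExecutionEnvironment named env and the usual imports):

// a bulk iteration: ten rounds of incrementing the single partial-solution value
DataSet<Long> initial = env.fromElements(0L);

IterativeDataSet<Long> iteration = initial.iterate(10);

DataSet<Long> nextPartialSolution = iteration.map(new MapFunction<Long, Long>() {
    @Override
    public Long map(Long value) {
        return value + 1;
    }
});

// closeWith defines the result of each round, i.e. the next partial solution
DataSet<Long> result = iteration.closeWith(nextPartialSolution);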
Example #27
Source File: OperatorResolver.java From flink with Apache License 2.0 | 5 votes |
public List<Operator<?>> getNodes(String name) {
    List<Operator<?>> nodes = this.map.get(name);
    if (nodes == null || nodes.isEmpty()) {
        throw new RuntimeException("No node found with the given name.");
    } else {
        return new ArrayList<Operator<?>>(nodes);
    }
}
Example #28
Source File: OperatorResolver.java From flink with Apache License 2.0 | 5 votes |
public OperatorResolver(Plan p) {
    this.map = new HashMap<String, List<Operator<?>>>();
    this.seen = new HashSet<Operator<?>>();

    p.accept(this);
    this.seen = null;
}
Example #29
Source File: JoinOperator.java From flink with Apache License 2.0 | 5 votes |
public <I1, K> JoinOperatorBaseBuilder<OUT> withWrappedInput1(
        Operator<I1> input1,
        SelectorFunctionKeys<I1, ?> rawKeys1) {

    @SuppressWarnings("unchecked")
    SelectorFunctionKeys<I1, K> keys1 = (SelectorFunctionKeys<I1, K>) rawKeys1;
    TypeInformation<Tuple2<K, I1>> typeInfoWithKey1 = KeyFunctions.createTypeWithKey(keys1);
    Operator<Tuple2<K, I1>> keyMapper1 = KeyFunctions.appendKeyExtractor(input1, keys1);

    return this.withInput1(keyMapper1, typeInfoWithKey1, rawKeys1);
}
Example #30
Source File: StepFunctionValidator.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Override
public boolean preVisit(Operator<?> visitable) {
    if (visitable instanceof DeltaIterationBase.WorksetPlaceHolder) {
        foundWorkset = true;
    }

    return (!foundWorkset) && seenBefore.add(visitable);
}