Java Code Examples for org.apache.flink.table.expressions.Expression#accept()
The following examples show how to use org.apache.flink.table.expressions.Expression#accept(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.
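Expression#accept(ExpressionVisitor) is the entry point of the visitor pattern used throughout the Table API: the expression dispatches itself to the visit overload of the supplied visitor that matches its concrete type. Before the project examples, here is a minimal, hypothetical sketch (not taken from any of the projects below) of such a visitor that collects all referenced field names. It assumes the ExpressionVisitor interface from flink-table-common, with visit overloads for CallExpression, ValueLiteralExpression, FieldReferenceExpression, TypeLiteralExpression and a generic Expression fallback; the class name FieldNameCollector is made up for illustration.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.flink.table.expressions.CallExpression;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.expressions.ExpressionVisitor;
import org.apache.flink.table.expressions.FieldReferenceExpression;
import org.apache.flink.table.expressions.TypeLiteralExpression;
import org.apache.flink.table.expressions.ValueLiteralExpression;

// Hypothetical visitor (not part of Flink) that collects every referenced field name.
// Expression#accept() dispatches each node to the matching visit overload below.
public class FieldNameCollector implements ExpressionVisitor<List<String>> {

    @Override
    public List<String> visit(CallExpression call) {
        // Recurse into the call's operands and merge their field names.
        List<String> names = new ArrayList<>();
        for (Expression child : call.getChildren()) {
            names.addAll(child.accept(this));
        }
        return names;
    }

    @Override
    public List<String> visit(FieldReferenceExpression fieldReference) {
        return Collections.singletonList(fieldReference.getName());
    }

    @Override
    public List<String> visit(ValueLiteralExpression valueLiteral) {
        return Collections.emptyList();
    }

    @Override
    public List<String> visit(TypeLiteralExpression typeLiteral) {
        return Collections.emptyList();
    }

    @Override
    public List<String> visit(Expression other) {
        // Fallback for expression kinds without a dedicated overload
        // (e.g. unresolved API expressions); descend into their children.
        List<String> names = new ArrayList<>();
        for (Expression child : other.getChildren()) {
            names.addAll(child.accept(this));
        }
        return names;
    }
}

The project examples below show how this dispatch is used inside Flink itself.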
Example 1
Source File: HiveTableUtil.java From flink with Apache License 2.0 | 6 votes |
@Override
public String visit(CallExpression call) {
    FunctionDefinition funcDef = call.getFunctionDefinition();
    if (FUNC_TO_STR.containsKey(funcDef)) {
        List<String> operands = new ArrayList<>();
        for (Expression child : call.getChildren()) {
            String operand = child.accept(this);
            if (operand == null) {
                return null;
            }
            operands.add(operand);
        }
        return "(" + String.join(" " + FUNC_TO_STR.get(funcDef) + " ", operands) + ")";
    }
    return null;
}
Example 2
Source File: QueryOperationConverter.java From flink with Apache License 2.0 | 6 votes |
@Override
public RelBuilder.AggCall visit(CallExpression call) {
    FunctionDefinition def = call.getFunctionDefinition();
    if (BuiltInFunctionDefinitions.DISTINCT == def) {
        Expression innerAgg = call.getChildren().get(0);
        return innerAgg.accept(new AggCallVisitor(relBuilder, expressionConverter, name, true));
    } else {
        SqlAggFunction sqlAggFunction = call.accept(sqlAggFunctionVisitor);
        return relBuilder.aggregateCall(
            sqlAggFunction,
            isDistinct,
            false,
            null,
            name,
            call.getChildren().stream()
                .map(expr -> expr.accept(expressionConverter))
                .collect(Collectors.toList()));
    }
}
Example 3
Source File: ExpressionConverter.java From flink with Apache License 2.0 | 6 votes |
private ConvertContext newFunctionContext() {
    return new ConvertContext() {
        @Override
        public RexNode toRexNode(Expression expr) {
            return expr.accept(ExpressionConverter.this);
        }

        @Override
        public RelBuilder getRelBuilder() {
            return relBuilder;
        }

        @Override
        public FlinkTypeFactory getTypeFactory() {
            return typeFactory;
        }

        @Override
        public DataTypeFactory getDataTypeFactory() {
            return dataTypeFactory;
        }
    };
}
Example 4
Source File: OperationTreeBuilder.java From flink with Apache License 2.0 | 5 votes |
public QueryOperation map(Expression mapFunction, QueryOperation child) {
    Expression resolvedMapFunction = mapFunction.accept(lookupResolver);

    if (!ApiExpressionUtils.isFunctionOfKind(resolvedMapFunction, FunctionKind.SCALAR)) {
        throw new ValidationException("Only a scalar function can be used in the map operator.");
    }

    Expression expandedFields = unresolvedCall(BuiltInFunctionDefinitions.FLATTEN, resolvedMapFunction);
    return project(Collections.singletonList(expandedFields), child, false);
}
Example 5
Source File: OperationTreeBuilder.java From flink with Apache License 2.0 | 5 votes |
public QueryOperation flatMap(Expression tableFunction, QueryOperation child) {
    Expression resolvedTableFunction = tableFunction.accept(lookupResolver);

    if (!ApiExpressionUtils.isFunctionOfKind(resolvedTableFunction, FunctionKind.TABLE)) {
        throw new ValidationException("Only a table function can be used in the flatMap operator.");
    }

    FunctionDefinition functionDefinition =
        ((UnresolvedCallExpression) resolvedTableFunction).getFunctionDefinition();
    if (!(functionDefinition instanceof TableFunctionDefinition)) {
        throw new ValidationException(
            "The new type inference for functions is not supported in the flatMap yet.");
    }

    TypeInformation<?> resultType = ((TableFunctionDefinition) functionDefinition).getResultType();
    List<String> originFieldNames = Arrays.asList(FieldInfoUtils.getFieldNames(resultType));

    List<String> childFields = Arrays.asList(child.getTableSchema().getFieldNames());
    Set<String> usedFieldNames = new HashSet<>(childFields);

    List<Expression> args = new ArrayList<>();
    for (String originFieldName : originFieldNames) {
        String resultName = getUniqueName(originFieldName, usedFieldNames);
        usedFieldNames.add(resultName);
        args.add(valueLiteral(resultName));
    }

    args.add(0, resolvedTableFunction);
    Expression renamedTableFunction = unresolvedCall(
        BuiltInFunctionDefinitions.AS,
        args.toArray(new Expression[0]));
    QueryOperation joinNode = joinLateral(child, renamedTableFunction, JoinType.INNER, Optional.empty());
    QueryOperation rightNode = dropColumns(
        childFields.stream().map(ApiExpressionUtils::unresolvedRef).collect(Collectors.toList()),
        joinNode);
    return alias(
        originFieldNames.stream().map(ApiExpressionUtils::unresolvedRef).collect(Collectors.toList()),
        rightNode);
}
Example 6
Source File: QueryOperationConverter.java From flink with Apache License 2.0 | 5 votes |
/**
 * Get the {@link AggCall} correspond to the aggregate expression.
 */
private AggCall getAggCall(Expression aggregateExpression) {
    if (isFunctionOfKind(aggregateExpression, TABLE_AGGREGATE)) {
        return aggregateExpression.accept(tableAggregateVisitor);
    } else {
        return aggregateExpression.accept(aggregateVisitor);
    }
}
Example 7
Source File: HiveTableUtil.java From flink with Apache License 2.0 | 5 votes |
/**
 * Generates a filter string for partition columns from the given filter expressions.
 *
 * @param partColOffset The number of non-partition columns -- used to shift field reference index
 * @param partColNames  The names of all partition columns
 * @param expressions   The filter expressions in CNF form
 * @return an Optional filter string equivalent to the expressions, which is empty if the expressions can't be handled
 */
public static Optional<String> makePartitionFilter(
        int partColOffset, List<String> partColNames, List<Expression> expressions, HiveShim hiveShim) {
    List<String> filters = new ArrayList<>(expressions.size());
    ExpressionExtractor extractor = new ExpressionExtractor(partColOffset, partColNames, hiveShim);
    for (Expression expression : expressions) {
        String str = expression.accept(extractor);
        if (str == null) {
            return Optional.empty();
        }
        filters.add(str);
    }
    return Optional.of(String.join(" and ", filters));
}
Example 8
Source File: QueryOperationConverter.java From flink with Apache License 2.0 | 5 votes |
/**
 * Get the {@link AggCall} correspond to the aggregate or table aggregate expression.
 */
private AggCall getAggCall(Expression aggregateExpression) {
    if (isFunctionOfKind(aggregateExpression, TABLE_AGGREGATE)) {
        return aggregateExpression.accept(tableAggregateVisitor);
    } else {
        return aggregateExpression.accept(aggregateVisitor);
    }
}
Example 9
Source File: OperationTreeBuilder.java From flink with Apache License 2.0 | 5 votes |
public QueryOperation flatMap(Expression tableFunction, QueryOperation child) {
    Expression resolvedTableFunction = tableFunction.accept(lookupResolver);

    if (!ApiExpressionUtils.isFunctionOfKind(resolvedTableFunction, FunctionKind.TABLE)) {
        throw new ValidationException("Only a table function can be used in the flatMap operator.");
    }

    TypeInformation<?> resultType = ((TableFunctionDefinition) ((UnresolvedCallExpression) resolvedTableFunction)
            .getFunctionDefinition())
        .getResultType();
    List<String> originFieldNames = Arrays.asList(FieldInfoUtils.getFieldNames(resultType));

    List<String> childFields = Arrays.asList(child.getTableSchema().getFieldNames());
    Set<String> usedFieldNames = new HashSet<>(childFields);

    List<Expression> args = new ArrayList<>();
    for (String originFieldName : originFieldNames) {
        String resultName = getUniqueName(originFieldName, usedFieldNames);
        usedFieldNames.add(resultName);
        args.add(valueLiteral(resultName));
    }

    args.add(0, resolvedTableFunction);
    Expression renamedTableFunction = unresolvedCall(
        BuiltInFunctionDefinitions.AS,
        args.toArray(new Expression[0]));
    QueryOperation joinNode = joinLateral(child, renamedTableFunction, JoinType.INNER, Optional.empty());
    QueryOperation rightNode = dropColumns(
        childFields.stream().map(UnresolvedReferenceExpression::new).collect(Collectors.toList()),
        joinNode);
    return alias(
        originFieldNames.stream().map(UnresolvedReferenceExpression::new).collect(Collectors.toList()),
        rightNode);
}
Example 10
Source File: QueryOperationConverter.java From flink with Apache License 2.0 | 5 votes |
@Override
public AggCall visit(CallExpression unresolvedCall) {
    if (unresolvedCall.getFunctionDefinition() == AS) {
        String aggregateName = extractValue(unresolvedCall.getChildren().get(1), String.class)
            .orElseThrow(() -> new TableException("Unexpected name."));

        Expression aggregate = unresolvedCall.getChildren().get(0);
        if (isFunctionOfKind(aggregate, AGGREGATE)) {
            return aggregate.accept(
                new AggCallVisitor(relBuilder, expressionConverter, aggregateName, false));
        }
    }
    throw new TableException("Expected named aggregate. Got: " + unresolvedCall);
}
Example 11
Source File: QueryOperationConverter.java From flink with Apache License 2.0 | 4 votes |
private RexNode convertExprToRexNode(Expression expr) {
    return expr.accept(expressionConverter);
}
Example 12
Source File: RexNodeConverter.java From flink with Apache License 2.0 | 4 votes |
private RexNode convertOver(List<Expression> children) {
    List<Expression> args = children;
    Expression agg = args.get(0);
    SqlAggFunction aggFunc = agg.accept(new SqlAggFunctionVisitor(typeFactory));
    RelDataType aggResultType = typeFactory.createFieldTypeFromLogicalType(
        fromDataTypeToLogicalType(((ResolvedExpression) agg).getOutputDataType()));

    // assemble exprs by agg children
    List<RexNode> aggExprs = agg.getChildren().stream()
        .map(expr -> expr.accept(this))
        .collect(Collectors.toList());

    // assemble order by key
    Expression orderKeyExpr = args.get(1);
    Set<SqlKind> kinds = new HashSet<>();
    RexNode collationRexNode = createCollation(
        orderKeyExpr.accept(this), RelFieldCollation.Direction.ASCENDING, null, kinds);
    ImmutableList<RexFieldCollation> orderKey = ImmutableList
        .of(new RexFieldCollation(collationRexNode, kinds));

    // assemble partition by keys
    List<RexNode> partitionKeys = args.subList(4, args.size()).stream()
        .map(expr -> expr.accept(this))
        .collect(Collectors.toList());

    // assemble bounds
    Expression preceding = args.get(2);
    boolean isPhysical = LogicalTypeChecks.hasRoot(
        fromDataTypeToLogicalType(((ResolvedExpression) preceding).getOutputDataType()),
        LogicalTypeRoot.BIGINT);
    Expression following = args.get(3);
    RexWindowBound lowerBound = createBound(preceding, SqlKind.PRECEDING);
    RexWindowBound upperBound = createBound(following, SqlKind.FOLLOWING);

    // build RexOver
    return relBuilder.getRexBuilder().makeOver(
        aggResultType,
        aggFunc,
        aggExprs,
        partitionKeys,
        orderKey,
        lowerBound,
        upperBound,
        isPhysical,
        true,
        false,
        false);
}
Example 13
Source File: TableImpl.java From flink with Apache License 2.0 | 4 votes |
@Override
public Table filter(Expression predicate) {
    Expression resolvedCallPredicate = predicate.accept(lookupResolver);
    return createTable(operationTreeBuilder.filter(resolvedCallPredicate, operationTree));
}
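For context, a predicate like the one resolved above normally originates from the Table API expression DSL. The following is a hedged usage sketch, not taken from the project source, assuming a newer Flink version (roughly 1.11+) where Expressions.$, lit, TableEnvironment#fromValues and Table#execute are available; the table and column names are made up for illustration.

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.lit;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

public class FilterUsageSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.newInstance().build());
        // Hypothetical single-column table renamed to 'amount'.
        Table orders = tEnv.fromValues(42, 101, 7).as("amount");
        // Table#filter(Expression) hands the predicate to the implementation above,
        // which resolves it via predicate.accept(lookupResolver).
        Table filtered = orders.filter($("amount").isGreater(lit(100)));
        filtered.execute().print();
    }
}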
Example 14
Source File: OperationTreeBuilder.java From flink with Apache License 2.0 | 4 votes |
public QueryOperation windowAggregate(
        List<Expression> groupingExpressions,
        GroupWindow window,
        List<Expression> windowProperties,
        Expression aggregateFunction,
        QueryOperation child) {
    ExpressionResolver resolver = getResolver(child);
    Expression resolvedAggregate = aggregateFunction.accept(lookupResolver);
    AggregateWithAlias aggregateWithAlias =
        resolvedAggregate.accept(new ExtractAliasAndAggregate(true, resolver));

    List<Expression> groupsAndAggregate = new ArrayList<>(groupingExpressions);
    groupsAndAggregate.add(aggregateWithAlias.aggregate);
    List<Expression> namedGroupsAndAggregate = addAliasToTheCallInAggregate(
        Arrays.asList(child.getTableSchema().getFieldNames()),
        groupsAndAggregate);

    // Step1: add a default name to the call in the grouping expressions, e.g., groupBy(a % 5) to
    // groupBy(a % 5 as TMP_0). We need a name for every column so that to perform alias for the
    // table aggregate function in Step6.
    List<Expression> newGroupingExpressions = namedGroupsAndAggregate.subList(0, groupingExpressions.size());

    // Step2: turn agg to a named agg, because it will be verified later.
    Expression aggregateRenamed = namedGroupsAndAggregate.get(groupingExpressions.size());

    // Step3: resolve expressions, including grouping, aggregates and window properties.
    ResolvedGroupWindow resolvedWindow = aggregateOperationFactory.createResolvedWindow(window, resolver);
    ExpressionResolver resolverWithWindowReferences = ExpressionResolver.resolverFor(
            config,
            tableReferenceLookup,
            functionCatalog,
            typeFactory,
            child)
        .withLocalReferences(
            localRef(
                resolvedWindow.getAlias(),
                resolvedWindow.getTimeAttribute().getOutputDataType()))
        .build();

    List<ResolvedExpression> convertedGroupings = resolverWithWindowReferences.resolve(newGroupingExpressions);
    List<ResolvedExpression> convertedAggregates = resolverWithWindowReferences.resolve(
        Collections.singletonList(aggregateRenamed));
    List<ResolvedExpression> convertedProperties = resolverWithWindowReferences.resolve(windowProperties);

    // Step4: create window agg operation
    QueryOperation aggregateOperation = aggregateOperationFactory.createWindowAggregate(
        convertedGroupings,
        Collections.singletonList(convertedAggregates.get(0)),
        convertedProperties,
        resolvedWindow,
        child);

    // Step5: flatten the aggregate function
    String[] aggNames = aggregateOperation.getTableSchema().getFieldNames();
    List<Expression> flattenedExpressions = Arrays.stream(aggNames)
        .map(ApiExpressionUtils::unresolvedRef)
        .collect(Collectors.toCollection(ArrayList::new));

    flattenedExpressions.set(
        groupingExpressions.size(),
        unresolvedCall(
            BuiltInFunctionDefinitions.FLATTEN,
            unresolvedRef(aggNames[groupingExpressions.size()])));

    QueryOperation flattenedProjection = this.project(flattenedExpressions, aggregateOperation);

    // Step6: add a top project to alias the output fields of the aggregate. Also, project the
    // window attribute.
    return aliasBackwardFields(flattenedProjection, aggregateWithAlias.aliases, groupingExpressions.size());
}
Example 15
Source File: OverConvertRule.java From flink with Apache License 2.0 | 4 votes |
@Override
public Optional<RexNode> convert(CallExpression call, ConvertContext context) {
    List<Expression> children = call.getChildren();
    if (call.getFunctionDefinition() == BuiltInFunctionDefinitions.OVER) {
        FlinkTypeFactory typeFactory = context.getTypeFactory();
        Expression agg = children.get(0);
        SqlAggFunction aggFunc = agg.accept(new SqlAggFunctionVisitor(typeFactory));
        RelDataType aggResultType = typeFactory.createFieldTypeFromLogicalType(
            fromDataTypeToLogicalType(((ResolvedExpression) agg).getOutputDataType()));

        // assemble exprs by agg children
        List<RexNode> aggExprs = agg.getChildren().stream()
            .map(context::toRexNode)
            .collect(Collectors.toList());

        // assemble order by key
        Expression orderKeyExpr = children.get(1);
        Set<SqlKind> kinds = new HashSet<>();
        RexNode collationRexNode = createCollation(
            context.toRexNode(orderKeyExpr),
            RelFieldCollation.Direction.ASCENDING,
            null,
            kinds);
        ImmutableList<RexFieldCollation> orderKey = ImmutableList
            .of(new RexFieldCollation(collationRexNode, kinds));

        // assemble partition by keys
        List<RexNode> partitionKeys = children.subList(4, children.size()).stream()
            .map(context::toRexNode)
            .collect(Collectors.toList());

        // assemble bounds
        Expression preceding = children.get(2);
        boolean isPhysical = LogicalTypeChecks.hasRoot(
            fromDataTypeToLogicalType(((ResolvedExpression) preceding).getOutputDataType()),
            LogicalTypeRoot.BIGINT);
        Expression following = children.get(3);
        RexWindowBound lowerBound = createBound(context, preceding, SqlKind.PRECEDING);
        RexWindowBound upperBound = createBound(context, following, SqlKind.FOLLOWING);

        // build RexOver
        return Optional.of(context.getRelBuilder().getRexBuilder().makeOver(
            aggResultType,
            aggFunc,
            aggExprs,
            partitionKeys,
            orderKey,
            lowerBound,
            upperBound,
            isPhysical,
            true,
            false,
            false));
    }
    return Optional.empty();
}
Example 16
Source File: OperationExpressionsUtils.java From flink with Apache License 2.0 | 2 votes |
/**
 * Extracts name from given expression if it has one. Expressions that have names are:
 * <ul>
 *     <li>{@link FieldReferenceExpression}</li>
 *     <li>{@link TableReferenceExpression}</li>
 *     <li>{@link LocalReferenceExpression}</li>
 *     <li>{@link BuiltInFunctionDefinitions#AS}</li>
 * </ul>
 *
 * @param expression expression to extract name from
 * @return optional name of given expression
 */
public static Optional<String> extractName(Expression expression) {
    return expression.accept(extractNameVisitor);
}