org.apache.flink.table.functions.TableFunction Java Examples
The following examples show how to use
org.apache.flink.table.functions.TableFunction.
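Before diving into the examples, it helps to recall the contract they all build on: a user-defined table function extends TableFunction<T>, defines one or more public eval(...) methods, and emits zero or more rows per call via collect(). The following minimal sketch illustrates that contract; SplitFunction and its comma-splitting logic are illustrative and not taken from any of the projects below.

import org.apache.flink.table.functions.TableFunction;

// A minimal, hypothetical table function: splits a comma-separated
// string and emits one row per token. Once registered under a name,
// it can be applied with LATERAL TABLE in SQL or joinLateral in the
// Table API.
public class SplitFunction extends TableFunction<String> {

    // Flink discovers eval methods reflectively; each collect() call
    // emits one row to the surrounding query.
    public void eval(String str) {
        for (String token : str.split(",")) {
            collect(token);
        }
    }
}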
Example #1
Source File: ExecutionContext.java From Flink-CEPplus with Apache License 2.0
private void registerTemporalTable(TemporalTableEntry temporalTableEntry) {
    try {
        final Table table = tableEnv.scan(temporalTableEntry.getHistoryTable());
        final TableFunction<?> function = table.createTemporalTableFunction(
            temporalTableEntry.getTimeAttribute(),
            String.join(",", temporalTableEntry.getPrimaryKeyFields()));
        if (tableEnv instanceof StreamTableEnvironment) {
            StreamTableEnvironment streamTableEnvironment = (StreamTableEnvironment) tableEnv;
            streamTableEnvironment.registerFunction(temporalTableEntry.getName(), function);
        } else {
            BatchTableEnvironment batchTableEnvironment = (BatchTableEnvironment) tableEnv;
            batchTableEnvironment.registerFunction(temporalTableEntry.getName(), function);
        }
    } catch (Exception e) {
        throw new SqlExecutionException(
            "Invalid temporal table '" + temporalTableEntry.getName() + "' over table '" +
                temporalTableEntry.getHistoryTable() + "'.\nCause: " + e.getMessage());
    }
}
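For context, a temporal table function registered this way is typically applied in a join against a probe-side time attribute. A hedged usage sketch, assuming hypothetical Orders and Rates tables registered in the same environment:

// Hypothetical usage of a temporal table function named "Rates",
// joined against each order's processing-time attribute.
Table result = tableEnv.sqlQuery(
    "SELECT o.amount * r.rate " +
    "FROM Orders AS o, " +
    "LATERAL TABLE (Rates(o.proctime)) AS r " +
    "WHERE o.currency = r.currency");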
Example #2
Source File: ExecutionContext.java From flink with Apache License 2.0
private void registerTemporalTable(TemporalTableEntry temporalTableEntry) {
    try {
        final Table table = tableEnv.from(temporalTableEntry.getHistoryTable());
        List<String> primaryKeyFields = temporalTableEntry.getPrimaryKeyFields();
        if (primaryKeyFields.size() > 1) {
            throw new ValidationException("Temporal tables over a composite primary key are not supported yet.");
        }
        final TableFunction<?> function = table.createTemporalTableFunction(
            $(temporalTableEntry.getTimeAttribute()),
            $(primaryKeyFields.get(0)));
        if (tableEnv instanceof StreamTableEnvironment) {
            StreamTableEnvironment streamTableEnvironment = (StreamTableEnvironment) tableEnv;
            streamTableEnvironment.registerFunction(temporalTableEntry.getName(), function);
        } else {
            BatchTableEnvironment batchTableEnvironment = (BatchTableEnvironment) tableEnv;
            batchTableEnvironment.registerFunction(temporalTableEntry.getName(), function);
        }
    } catch (Exception e) {
        throw new SqlExecutionException(
            "Invalid temporal table '" + temporalTableEntry.getName() + "' over table '" +
                temporalTableEntry.getHistoryTable() + "'.\nCause: " + e.getMessage());
    }
}
Example #3
Source File: FunctionCatalog.java From flink with Apache License 2.0
/**
 * @deprecated Use {@link #registerTemporarySystemFunction(String, FunctionDefinition, boolean)} instead.
 */
@Deprecated
public <T> void registerTempSystemTableFunction(
        String name,
        TableFunction<T> function,
        TypeInformation<T> resultType) {
    UserDefinedFunctionHelper.prepareInstance(config, function);

    registerTempSystemFunction(
        name,
        new TableFunctionDefinition(name, function, resultType));
}
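The deprecation note points at the newer registration path. A minimal sketch of that path from the caller's perspective, assuming a newer TableEnvironment and the hypothetical SplitFunction sketched in the introduction:

// Non-deprecated, caller-side registration of a temporary system
// function via the TableEnvironment API.
tableEnv.createTemporarySystemFunction("split", SplitFunction.class);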
Example #4
Source File: HBaseTableSource.java From flink with Apache License 2.0
@Override
public TableFunction<Row> getLookupFunction(String[] lookupKeys) {
    Preconditions.checkArgument(
        null != lookupKeys && lookupKeys.length == 1,
        "HBase table can only be retrieved by rowKey for now.");
    Preconditions.checkState(
        hbaseSchema.getRowKeyName().isPresent(),
        "HBase schema must have a row key when used in lookup mode.");
    Preconditions.checkState(
        hbaseSchema.getRowKeyName().get().equals(lookupKeys[0]),
        "The lookup key is not row key of HBase.");

    return new HBaseLookupFunction(
        this.conf,
        this.tableName,
        hbaseSchema.getProjectedHBaseTableSchema(projectFields));
}
Example #5
Source File: ExecutionContext.java From flink with Apache License 2.0
private void registerTemporalTable(TemporalTableEntry temporalTableEntry) {
    try {
        final Table table = tableEnv.scan(temporalTableEntry.getHistoryTable());
        final TableFunction<?> function = table.createTemporalTableFunction(
            temporalTableEntry.getTimeAttribute(),
            String.join(",", temporalTableEntry.getPrimaryKeyFields()));
        if (tableEnv instanceof StreamTableEnvironment) {
            StreamTableEnvironment streamTableEnvironment = (StreamTableEnvironment) tableEnv;
            streamTableEnvironment.registerFunction(temporalTableEntry.getName(), function);
        } else {
            BatchTableEnvironment batchTableEnvironment = (BatchTableEnvironment) tableEnv;
            batchTableEnvironment.registerFunction(temporalTableEntry.getName(), function);
        }
    } catch (Exception e) {
        throw new SqlExecutionException(
            "Invalid temporal table '" + temporalTableEntry.getName() + "' over table '" +
                temporalTableEntry.getHistoryTable() + "'.\nCause: " + e.getMessage());
    }
}
Example #6
Source File: FunctionCatalog.java From flink with Apache License 2.0
public <T> void registerTableFunction(
        String name,
        TableFunction<T> function,
        TypeInformation<T> resultType) {
    // check if class not Scala object
    UserFunctionsTypeHelper.validateNotSingleton(function.getClass());
    // check if class could be instantiated
    UserFunctionsTypeHelper.validateInstantiation(function.getClass());

    registerFunction(
        name,
        new TableFunctionDefinition(name, function, resultType));
}
Example #7
Source File: HBaseTableSource.java From flink with Apache License 2.0
@Override
public TableFunction<Row> getLookupFunction(String[] lookupKeys) {
    Preconditions.checkArgument(
        null != lookupKeys && lookupKeys.length == 1,
        "HBase table can only be retrieved by rowKey for now.");
    Preconditions.checkState(
        hbaseSchema.getRowKeyName().isPresent(),
        "HBase schema must have a row key when used in lookup mode.");
    Preconditions.checkState(
        hbaseSchema.getRowKeyName().get().equals(lookupKeys[0]),
        "The lookup key is not row key of HBase.");

    return new HBaseLookupFunction(
        this.conf,
        this.tableName,
        hbaseSchema.getProjectedHBaseTableSchema(projectFields));
}
Example #8
Source File: QueryOperationConverter.java From flink with Apache License 2.0
@Override
public <U> RelNode visit(CalculatedQueryOperation<U> calculatedTable) {
    String[] fieldNames = calculatedTable.getTableSchema().getFieldNames();
    int[] fieldIndices = IntStream.range(0, fieldNames.length).toArray();
    TypeInformation<U> resultType = calculatedTable.getResultType();

    FlinkTableFunctionImpl<?> function = new FlinkTableFunctionImpl<>(
        resultType,
        fieldIndices,
        fieldNames);

    TableFunction<?> tableFunction = calculatedTable.getTableFunction();
    FlinkTypeFactory typeFactory = relBuilder.getTypeFactory();
    TableSqlFunction sqlFunction = new TableSqlFunction(
        tableFunction.functionIdentifier(),
        tableFunction.toString(),
        tableFunction,
        resultType,
        typeFactory,
        function);

    List<RexNode> parameters = convertToRexNodes(calculatedTable.getParameters());

    return LogicalTableFunctionScan.create(
        relBuilder.peek().getCluster(),
        Collections.emptyList(),
        relBuilder.call(sqlFunction, parameters),
        function.getElementType(null),
        function.getRowType(typeFactory, null),
        null);
}
Example #9
Source File: ExecutionContext.java From Flink-CEPplus with Apache License 2.0
private void registerFunctions() {
    if (tableEnv instanceof StreamTableEnvironment) {
        StreamTableEnvironment streamTableEnvironment = (StreamTableEnvironment) tableEnv;
        functions.forEach((k, v) -> {
            if (v instanceof ScalarFunction) {
                streamTableEnvironment.registerFunction(k, (ScalarFunction) v);
            } else if (v instanceof AggregateFunction) {
                streamTableEnvironment.registerFunction(k, (AggregateFunction<?, ?>) v);
            } else if (v instanceof TableFunction) {
                streamTableEnvironment.registerFunction(k, (TableFunction<?>) v);
            } else {
                throw new SqlExecutionException("Unsupported function type: " + v.getClass().getName());
            }
        });
    } else {
        BatchTableEnvironment batchTableEnvironment = (BatchTableEnvironment) tableEnv;
        functions.forEach((k, v) -> {
            if (v instanceof ScalarFunction) {
                batchTableEnvironment.registerFunction(k, (ScalarFunction) v);
            } else if (v instanceof AggregateFunction) {
                batchTableEnvironment.registerFunction(k, (AggregateFunction<?, ?>) v);
            } else if (v instanceof TableFunction) {
                batchTableEnvironment.registerFunction(k, (TableFunction<?>) v);
            } else {
                throw new SqlExecutionException("Unsupported function type: " + v.getClass().getName());
            }
        });
    }
}
Example #10
Source File: QueryOperationConverter.java From flink with Apache License 2.0
private RelNode convertLegacyTableFunction(
        CalculatedQueryOperation calculatedTable,
        TableFunctionDefinition functionDefinition,
        List<RexNode> parameters,
        FlinkTypeFactory typeFactory) {
    String[] fieldNames = calculatedTable.getTableSchema().getFieldNames();

    TableFunction<?> tableFunction = functionDefinition.getTableFunction();
    DataType resultType = fromLegacyInfoToDataType(functionDefinition.getResultType());
    TypedFlinkTableFunction function = new TypedFlinkTableFunction(
        tableFunction, fieldNames, resultType);

    final TableSqlFunction sqlFunction = new TableSqlFunction(
        calculatedTable.getFunctionIdentifier().orElse(null),
        tableFunction.toString(),
        tableFunction,
        resultType,
        typeFactory,
        function,
        scala.Option.empty());
    return LogicalTableFunctionScan.create(
        relBuilder.peek().getCluster(),
        Collections.emptyList(),
        relBuilder.getRexBuilder()
            .makeCall(function.getRowType(typeFactory), sqlFunction, parameters),
        function.getElementType(null),
        function.getRowType(typeFactory),
        null);
}
Example #11
Source File: HiveTableSqlFunction.java From flink with Apache License 2.0
@Override
public TableFunction makeFunction(Object[] constantArguments, LogicalType[] argTypes) {
    TableFunction clone;
    try {
        clone = InstantiationUtil.clone(hiveUdtf);
    } catch (IOException | ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
    return (TableFunction) invokeSetArgs(clone, constantArguments, argTypes);
}
Example #12
Source File: HiveTableSqlFunction.java From flink with Apache License 2.0
public HiveTableSqlFunction(
        FunctionIdentifier identifier,
        TableFunction hiveUdtf,
        DataType implicitResultType,
        FlinkTypeFactory typeFactory,
        FlinkTableFunction functionImpl,
        HiveOperandTypeChecker operandTypeChecker) {
    super(identifier, identifier.toString(), hiveUdtf, implicitResultType,
        typeFactory, functionImpl, scala.Option.apply(operandTypeChecker));
    this.hiveUdtf = hiveUdtf;
    this.operandTypeChecker = operandTypeChecker;
}
Example #13
Source File: StreamTableEnvironmentImpl.java From flink with Apache License 2.0
@Override
public <T> void registerFunction(String name, TableFunction<T> tableFunction) {
    TypeInformation<T> typeInfo = UserDefinedFunctionHelper.getReturnTypeOfTableFunction(tableFunction);

    functionCatalog.registerTempSystemTableFunction(
        name,
        tableFunction,
        typeInfo);
}
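Seen from user code, the method above is what runs when a table function is registered on a StreamTableEnvironment. A hedged usage sketch, reusing the hypothetical SplitFunction from the introduction and assuming a registered table MyTable with a STRING column named line:

// Register the function, then expand each line into tokens via a
// lateral join; T(word) names the single output column.
tableEnv.registerFunction("split", new SplitFunction());
Table result = tableEnv.sqlQuery(
    "SELECT word FROM MyTable, LATERAL TABLE(split(line)) AS T(word)");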
Example #14
Source File: TypeInferenceExtractor.java From flink with Apache License 2.0
/**
 * Extracts a type inference from a {@link TableFunction}.
 */
public static TypeInference forTableFunction(
        DataTypeFactory typeFactory,
        Class<? extends TableFunction<?>> function) {
    final FunctionMappingExtractor mappingExtractor = new FunctionMappingExtractor(
        typeFactory,
        function,
        UserDefinedFunctionHelper.TABLE_EVAL,
        createParameterSignatureExtraction(0),
        null,
        createGenericResultExtraction(TableFunction.class, 0),
        createParameterVerification());
    return extractTypeInference(mappingExtractor);
}
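The extractor above backs the newer, annotation-driven type inference. A sketch of the kind of function it analyzes, assuming Flink's @FunctionHint/@DataTypeHint annotations from flink-table-common; the class and column names are made up:

import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

// The output row type is declared via the hint rather than derived
// from TypeInformation, which is what forTableFunction() extracts.
@FunctionHint(output = @DataTypeHint("ROW<word STRING, length INT>"))
public class SplitRowFunction extends TableFunction<Row> {
    public void eval(String str) {
        for (String s : str.split(" ")) {
            collect(Row.of(s, s.length()));
        }
    }
}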
Example #15
Source File: QueryOperationConverter.java From flink with Apache License 2.0
@Override
public RelNode visit(CalculatedQueryOperation calculatedTable) {
    FlinkTypeFactory typeFactory = relBuilder.getTypeFactory();
    if (calculatedTable.getFunctionDefinition() instanceof TableFunctionDefinition) {
        TableFunctionDefinition functionDefinition =
            (TableFunctionDefinition) calculatedTable.getFunctionDefinition();

        String[] fieldNames = calculatedTable.getTableSchema().getFieldNames();
        int[] fieldIndices = IntStream.range(0, fieldNames.length).toArray();

        TableFunction<?> tableFunction = functionDefinition.getTableFunction();
        TypeInformation<?> rowType = functionDefinition.getResultType();
        FlinkTableFunctionImpl<?> function = new FlinkTableFunctionImpl<>(
            rowType,
            fieldIndices,
            fieldNames);

        final TableSqlFunction sqlFunction = new TableSqlFunction(
            tableFunction.functionIdentifier(),
            tableFunction.toString(),
            tableFunction,
            rowType,
            typeFactory,
            function);

        List<RexNode> parameters = convertToRexNodes(calculatedTable.getArguments());

        return LogicalTableFunctionScan.create(
            relBuilder.peek().getCluster(),
            Collections.emptyList(),
            relBuilder.call(sqlFunction, parameters),
            function.getElementType(null),
            function.getRowType(typeFactory, null),
            null);
    }
    throw new ValidationException(
        "The new type inference for functions is only supported in the Blink planner.");
}
Example #16
Source File: HiveTableSource.java From flink with Apache License 2.0
@Override
public TableFunction<RowData> getLookupFunction(String[] lookupKeys) {
    List<HiveTablePartition> allPartitions = initAllPartitions();
    TableSchema producedSchema = getProducedTableSchema();
    return new FileSystemLookupFunction<>(
        getInputFormat(allPartitions, flinkConf.get(HiveOptions.TABLE_EXEC_HIVE_FALLBACK_MAPRED_READER)),
        lookupKeys,
        producedSchema.getFieldNames(),
        producedSchema.getFieldDataTypes(),
        hiveTableCacheTTL);
}
Example #17
Source File: HBaseTableFactoryTest.java From flink with Apache License 2.0
@Test
public void testTableSourceFactory() {
    String[] columnNames = {FAMILY1, FAMILY2, ROWKEY, FAMILY3};
    TypeInformation<Row> f1 = Types.ROW_NAMED(new String[]{COL1}, Types.INT);
    TypeInformation<Row> f2 = Types.ROW_NAMED(new String[]{COL1, COL2}, Types.INT, Types.LONG);
    TypeInformation<Row> f3 = Types.ROW_NAMED(new String[]{COL1, COL2, COL3}, Types.DOUBLE, Types.BOOLEAN, Types.STRING);
    TypeInformation[] columnTypes = new TypeInformation[]{f1, f2, Types.LONG, f3};

    DescriptorProperties descriptorProperties = createDescriptor(columnNames, columnTypes);
    TableSource source = TableFactoryService.find(HBaseTableFactory.class,
        descriptorProperties.asMap()).createTableSource(descriptorProperties.asMap());
    Assert.assertTrue(source instanceof HBaseTableSource);
    TableFunction<Row> tableFunction = ((HBaseTableSource) source).getLookupFunction(new String[]{ROWKEY});
    Assert.assertTrue(tableFunction instanceof HBaseLookupFunction);
    Assert.assertEquals("testHBastTable", ((HBaseLookupFunction) tableFunction).getHTableName());

    HBaseTableSchema hbaseSchema = ((HBaseTableSource) source).getHBaseTableSchema();
    Assert.assertEquals(2, hbaseSchema.getRowKeyIndex());
    Assert.assertEquals(Optional.of(Types.LONG), hbaseSchema.getRowKeyTypeInfo());

    Assert.assertArrayEquals(new String[]{"f1", "f2", "f3"}, hbaseSchema.getFamilyNames());
    Assert.assertArrayEquals(new String[]{"c1"}, hbaseSchema.getQualifierNames("f1"));
    Assert.assertArrayEquals(new String[]{"c1", "c2"}, hbaseSchema.getQualifierNames("f2"));
    Assert.assertArrayEquals(new String[]{"c1", "c2", "c3"}, hbaseSchema.getQualifierNames("f3"));

    Assert.assertArrayEquals(new TypeInformation[]{Types.INT}, hbaseSchema.getQualifierTypes("f1"));
    Assert.assertArrayEquals(new TypeInformation[]{Types.INT, Types.LONG}, hbaseSchema.getQualifierTypes("f2"));
    Assert.assertArrayEquals(new TypeInformation[]{Types.DOUBLE, Types.BOOLEAN, Types.STRING}, hbaseSchema.getQualifierTypes("f3"));
}
Example #18
Source File: JdbcTableSource.java From flink with Apache License 2.0
@Override
public TableFunction<Row> getLookupFunction(String[] lookupKeys) {
    final RowTypeInfo rowTypeInfo = (RowTypeInfo) fromDataTypeToLegacyInfo(producedDataType);
    return JdbcLookupFunction.builder()
        .setOptions(options)
        .setLookupOptions(lookupOptions)
        .setFieldTypes(rowTypeInfo.getFieldTypes())
        .setFieldNames(rowTypeInfo.getFieldNames())
        .setKeyNames(lookupKeys)
        .build();
}
Example #19
Source File: BatchOperator.java From Alink with Apache License 2.0
public BatchOperator udtf(
        String selectedColName,
        String[] outputColNames,
        TableFunction tableFunction,
        String[] reservedColNames) {
    return linkTo(
        new UDTFBatchOp()
            .setSelectedCols(selectedColName)
            .setOutputCols(outputColNames)
            .setFunc(tableFunction)
            .setReservedCols(reservedColNames)
            .setMLEnvironmentId(getMLEnvironmentId()));
}
Example #20
Source File: BatchOperator.java From Alink with Apache License 2.0
public BatchOperator udtf(
        String selectedColName,
        String[] outputColNames,
        TableFunction tableFunction) {
    return linkTo(
        new UDTFBatchOp()
            .setSelectedCols(selectedColName)
            .setOutputCols(outputColNames)
            .setFunc(tableFunction)
            .setMLEnvironmentId(getMLEnvironmentId()));
}
Example #21
Source File: StreamOperator.java From Alink with Apache License 2.0
public StreamOperator udtf(
        String selectedColName,
        String[] outputColNames,
        TableFunction tableFunction,
        String[] reservedColNames) {
    return linkTo(
        new UDTFStreamOp()
            .setSelectedCols(selectedColName)
            .setOutputCols(outputColNames)
            .setFunc(tableFunction)
            .setReservedCols(reservedColNames)
            .setMLEnvironmentId(getMLEnvironmentId()));
}
Example #22
Source File: StreamOperator.java From Alink with Apache License 2.0
public StreamOperator udtf(
        String selectedColName,
        String[] outputColNames,
        TableFunction tableFunction) {
    return linkTo(
        new UDTFStreamOp()
            .setSelectedCols(selectedColName)
            .setOutputCols(outputColNames)
            .setFunc(tableFunction)
            .setMLEnvironmentId(getMLEnvironmentId()));
}
Example #23
Source File: QueryOperationConverter.java From flink with Apache License 2.0
@Override
public <U> RelNode visit(CalculatedQueryOperation<U> calculatedTable) {
    DataType resultType = fromLegacyInfoToDataType(calculatedTable.getResultType());
    TableFunction<?> tableFunction = calculatedTable.getTableFunction();

    String[] fieldNames = calculatedTable.getTableSchema().getFieldNames();
    TypedFlinkTableFunction function = new TypedFlinkTableFunction(
        tableFunction, fieldNames, resultType);

    FlinkTypeFactory typeFactory = relBuilder.getTypeFactory();
    TableSqlFunction sqlFunction = new TableSqlFunction(
        tableFunction.functionIdentifier(),
        tableFunction.toString(),
        tableFunction,
        resultType,
        typeFactory,
        function,
        scala.Option.empty());

    List<RexNode> parameters = convertToRexNodes(calculatedTable.getParameters());

    return LogicalTableFunctionScan.create(
        relBuilder.peek().getCluster(),
        Collections.emptyList(),
        relBuilder.call(sqlFunction, parameters),
        function.getElementType(null),
        function.getRowType(typeFactory, null, null),
        null);
}
Example #24
Source File: HiveTableSqlFunction.java From flink with Apache License 2.0
public HiveTableSqlFunction(
        String name,
        String displayName,
        TableFunction hiveUdtf,
        DataType implicitResultType,
        FlinkTypeFactory typeFactory,
        FlinkTableFunction functionImpl,
        HiveOperandTypeChecker operandTypeChecker) {
    super(name, displayName, hiveUdtf, implicitResultType, typeFactory,
        functionImpl, scala.Option.apply(operandTypeChecker));
    this.hiveUdtf = hiveUdtf;
    this.operandTypeChecker = operandTypeChecker;
}
Example #25
Source File: HiveTableSqlFunction.java From flink with Apache License 2.0
@Override
public TableFunction makeFunction(Object[] constantArguments, LogicalType[] argTypes) {
    TableFunction clone;
    try {
        clone = InstantiationUtil.clone(hiveUdtf);
    } catch (IOException | ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
    return (TableFunction) invokeSetArgs(clone, constantArguments, argTypes);
}
Example #26
Source File: StreamTableEnvironmentImpl.java From flink with Apache License 2.0
@Override
public <T> void registerFunction(String name, TableFunction<T> tableFunction) {
    TypeInformation<T> typeInfo = UserFunctionsTypeHelper.getReturnTypeOfTableFunction(tableFunction);

    functionCatalog.registerTableFunction(
        name,
        tableFunction,
        typeInfo);
}
Example #27
Source File: ExecutionContext.java From flink with Apache License 2.0
private void registerFunctions() {
    if (tableEnv instanceof StreamTableEnvironment) {
        StreamTableEnvironment streamTableEnvironment = (StreamTableEnvironment) tableEnv;
        functions.forEach((k, v) -> {
            if (v instanceof ScalarFunction) {
                streamTableEnvironment.registerFunction(k, (ScalarFunction) v);
            } else if (v instanceof AggregateFunction) {
                streamTableEnvironment.registerFunction(k, (AggregateFunction<?, ?>) v);
            } else if (v instanceof TableFunction) {
                streamTableEnvironment.registerFunction(k, (TableFunction<?>) v);
            } else {
                throw new SqlExecutionException("Unsupported function type: " + v.getClass().getName());
            }
        });
    } else {
        BatchTableEnvironment batchTableEnvironment = (BatchTableEnvironment) tableEnv;
        functions.forEach((k, v) -> {
            if (v instanceof ScalarFunction) {
                batchTableEnvironment.registerFunction(k, (ScalarFunction) v);
            } else if (v instanceof AggregateFunction) {
                batchTableEnvironment.registerFunction(k, (AggregateFunction<?, ?>) v);
            } else if (v instanceof TableFunction) {
                batchTableEnvironment.registerFunction(k, (TableFunction<?>) v);
            } else {
                throw new SqlExecutionException("Unsupported function type: " + v.getClass().getName());
            }
        });
    }
}
Example #28
Source File: CalculatedQueryOperation.java From flink with Apache License 2.0
public CalculatedQueryOperation(
        TableFunction<T> tableFunction,
        List<ResolvedExpression> parameters,
        TypeInformation<T> resultType,
        TableSchema tableSchema) {
    this.tableFunction = tableFunction;
    this.parameters = parameters;
    this.resultType = resultType;
    this.tableSchema = tableSchema;
}
Example #29
Source File: JDBCTableSource.java From flink with Apache License 2.0
@Override
public TableFunction<Row> getLookupFunction(String[] lookupKeys) {
    return JDBCLookupFunction.builder()
        .setOptions(options)
        .setLookupOptions(lookupOptions)
        .setFieldTypes(returnType.getFieldTypes())
        .setFieldNames(returnType.getFieldNames())
        .setKeyNames(lookupKeys)
        .build();
}
Example #30
Source File: AbstractFlinkClient.java From alchemy with Apache License 2.0
private void register(StreamTableEnvironment env, String name, Object function) {
    if (function instanceof TableFunction) {
        env.registerFunction(name, (TableFunction) function);
    } else if (function instanceof AggregateFunction) {
        env.registerFunction(name, (AggregateFunction) function);
    } else if (function instanceof ScalarFunction) {
        env.registerFunction(name, (ScalarFunction) function);
    } else {
        throw new RuntimeException("Unknown UDF " + name + " was found.");
    }
    LOGGER.info("register udf, name:{}, class:{}", name, function.getClass());
}