Java Code Examples for org.apache.flink.table.api.TableSchema#toRowDataType()
The following examples show how to use org.apache.flink.table.api.TableSchema#toRowDataType().
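Before the project examples, here is a minimal, hypothetical sketch of the call in isolation: it builds a small TableSchema (the field names and types are made up for illustration) and converts it into a single ROW DataType with toRowDataType().

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.types.DataType;

public class ToRowDataTypeSketch {
    public static void main(String[] args) {
        // Hypothetical two-column schema, purely for illustration.
        TableSchema schema = TableSchema.builder()
            .field("id", DataTypes.BIGINT())
            .field("name", DataTypes.STRING())
            .build();

        // toRowDataType() groups all physical columns into one ROW data type,
        // e.g. ROW<`id` BIGINT, `name` STRING>.
        DataType rowType = schema.toRowDataType();
        System.out.println(rowType);
    }
}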
Example 1
Source File: HiveBatchSource.java From Alink with Apache License 2.0
@Override
public DataType getProducedDataType() {
    TableSchema fullSchema = getTableSchema();
    DataType type;
    if (projectedFields == null) {
        // No projection pushed down: expose the full schema as a row type.
        type = fullSchema.toRowDataType();
    } else {
        // Build a reduced schema containing only the projected fields.
        String[] fullNames = fullSchema.getFieldNames();
        DataType[] fullTypes = fullSchema.getFieldDataTypes();
        type = TableSchema.builder()
            .fields(
                Arrays.stream(projectedFields).mapToObj(i -> fullNames[i]).toArray(String[]::new),
                Arrays.stream(projectedFields).mapToObj(i -> fullTypes[i]).toArray(DataType[]::new))
            .build()
            .toRowDataType();
    }
    return type.bridgedTo(BaseRow.class);
}
Example 2
Source File: JdbcTableSource.java From flink with Apache License 2.0
private JdbcTableSource(
        JdbcOptions options,
        JdbcReadOptions readOptions,
        JdbcLookupOptions lookupOptions,
        TableSchema schema,
        int[] selectFields) {
    this.options = options;
    this.readOptions = readOptions;
    this.lookupOptions = lookupOptions;
    this.schema = schema;
    this.selectFields = selectFields;

    final DataType[] schemaDataTypes = schema.getFieldDataTypes();
    final String[] schemaFieldNames = schema.getFieldNames();
    if (selectFields != null) {
        // Restrict the produced row type to the selected fields.
        DataType[] dataTypes = new DataType[selectFields.length];
        String[] fieldNames = new String[selectFields.length];
        for (int i = 0; i < selectFields.length; i++) {
            dataTypes[i] = schemaDataTypes[selectFields[i]];
            fieldNames[i] = schemaFieldNames[selectFields[i]];
        }
        this.producedDataType =
            TableSchema.builder().fields(fieldNames, dataTypes).build().toRowDataType();
    } else {
        this.producedDataType = schema.toRowDataType();
    }
}
Example 3
Source File: TableSourceFactoryMock.java From flink with Apache License 2.0
@Override
public TableSource<Row> createTableSource(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = new DescriptorProperties();
    descriptorProperties.putProperties(properties);
    final TableSchema schema = descriptorProperties.getTableSchema(Schema.SCHEMA);
    return new TableSourceMock(schema.toRowDataType(), schema);
}
Example 4
Source File: FlinkSchemaUtil.java From iceberg with Apache License 2.0
/**
 * Convert the flink table schema to apache iceberg schema.
 */
public static Schema convert(TableSchema schema) {
    Preconditions.checkArgument(schema.toRowDataType() instanceof FieldsDataType, "Should be FieldsDataType");

    FieldsDataType root = (FieldsDataType) schema.toRowDataType();
    Type converted = FlinkTypeVisitor.visit(root, new FlinkTypeToType(root));

    return new Schema(converted.asStructType().fields());
}
Example 5
Source File: CatalogStructureBuilder.java From flink with Apache License 2.0
private TestTable(
        String fullyQualifiedPath,
        TableSchema tableSchema,
        boolean isTemporary) {
    super(new StreamTableSource<Row>() {
        @Override
        public DataStream<Row> getDataStream(StreamExecutionEnvironment execEnv) {
            return null;
        }

        @Override
        public DataType getProducedDataType() {
            return tableSchema.toRowDataType();
        }

        @Override
        public TableSchema getTableSchema() {
            throw new UnsupportedOperationException("Should not be called");
        }

        @Override
        public String explainSource() {
            return String.format("isTemporary=[%s]", isTemporary);
        }
    }, null, tableSchema, false);

    this.fullyQualifiedPath = fullyQualifiedPath;
    this.isTemporary = isTemporary;
}
Example 6
Source File: TableSourceMock.java From flink with Apache License 2.0
public TableSourceMock(TableSchema tableSchema) {
    this.tableSchema = TableSchemaUtils.checkNoGeneratedColumns(tableSchema);
    this.producedDataType = tableSchema.toRowDataType();
}