Java Code Examples for org.apache.flink.table.types.FieldsDataType#getLogicalType()
The following examples show how to use
org.apache.flink.table.types.FieldsDataType#getLogicalType().
You can vote up the examples you find useful or vote down those you don't,
and follow the links above each example to the original project or source file. You may also check out the related API usage on the sidebar.
Example 1
Source File: SchemaUtils.java From pulsar-flink with Apache License 2.0 | 6 votes |
/**
 * Derives the Flink table schema for a Pulsar source from the Pulsar {@link SchemaInfo}.
 *
 * <p>A structured (row) schema contributes one column per row field; any other schema is
 * exposed as a single "value" column. The Pulsar metadata columns are always appended.
 *
 * @param si the Pulsar schema descriptor to translate
 * @return a row data type covering the payload columns plus {@code METADATA_FIELDS}
 * @throws IncompatibleSchemaException if the Pulsar schema cannot be mapped to a SQL type
 */
public static FieldsDataType pulsarSourceSchema(SchemaInfo si) throws IncompatibleSchemaException {
    List<DataTypes.Field> fields = new ArrayList<>();
    DataType dataType = si2SqlType(si);
    if (dataType instanceof FieldsDataType) {
        // Structured payload: expose each row field under its own name.
        FieldsDataType fieldsType = (FieldsDataType) dataType;
        RowType rowType = (RowType) fieldsType.getLogicalType();
        for (String fieldName : rowType.getFieldNames()) {
            fields.add(DataTypes.FIELD(fieldName, fieldsType.getFieldDataTypes().get(fieldName)));
        }
    } else {
        // Primitive payload: wrap the single value in a "value" column.
        fields.add(DataTypes.FIELD("value", dataType));
    }
    fields.addAll(METADATA_FIELDS);
    return (FieldsDataType) DataTypes.ROW(fields.toArray(new DataTypes.Field[0]));
}
Example 2
Source File: SchemaUtils.java From pulsar-flink with Apache License 2.0 | 5 votes |
/**
 * Converts a row-typed {@link FieldsDataType} into a {@link TableSchema}, preserving the
 * field order reported by the underlying {@link RowType}.
 *
 * @param schema the row data type to convert
 * @return a table schema with one column per row field
 */
public static TableSchema toTableSchema(FieldsDataType schema) {
    RowType rowType = (RowType) schema.getLogicalType();
    List<String> names = rowType.getFieldNames();
    // Look up each field's data type by name, keeping the row's field order.
    DataType[] types = names.stream()
            .map(schema.getFieldDataTypes()::get)
            .toArray(DataType[]::new);
    return TableSchema.builder()
            .fields(names.toArray(new String[0]), types)
            .build();
}
Example 3
Source File: JacksonRecordParser.java From pulsar-flink with Apache License 2.0 | 5 votes |
/**
 * Populates {@code row} from the JSON object currently being read by {@code parser}.
 *
 * <p>Fields are matched by name against the row type's field list; a JSON field that is
 * not part of the row type is skipped entirely (including any nested structure).
 *
 * @param parser          the Jackson parser positioned inside a JSON object
 * @param fdt             the row data type describing the expected fields
 * @param fieldConverters per-field converters, indexed like the row type's fields
 * @param row             the row instance to fill in place
 * @return the same {@code row}, with matched fields set
 * @throws IOException if the parser fails while reading the JSON input
 */
private Row convertObject(JsonParser parser, FieldsDataType fdt,
        List<Function<JsonParser, Object>> fieldConverters, Row row) throws IOException {
    RowType rowType = (RowType) fdt.getLogicalType();
    List<String> fieldNames = rowType.getFieldNames();
    // Walk tokens until the enclosing object closes, dispatching each known field.
    while (nextUntil(parser, JsonToken.END_OBJECT)) {
        int pos = fieldNames.indexOf(parser.getCurrentName());
        if (pos < 0) {
            // Field not declared in the schema: drop its whole subtree.
            parser.skipChildren();
        } else {
            row.setField(pos, fieldConverters.get(pos).apply(parser));
        }
    }
    return row;
}
Example 4
Source File: LegacyTypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 5 votes |
/**
 * Converts a row-typed {@link FieldsDataType} into legacy {@link TypeInformation},
 * mapping each field's data type through {@code toLegacyTypeInfo} and keeping the
 * row's field names and order.
 *
 * @param fieldsDataType the row data type to convert
 * @return a named-row {@code TypeInformation} mirroring the input's fields
 */
private static TypeInformation<?> convertToRowTypeInfo(FieldsDataType fieldsDataType) {
    final RowType rowType = (RowType) fieldsDataType.getLogicalType();
    final List<String> names = rowType.getFieldNames();
    // Resolve each field's legacy type info by name, in declaration order.
    final TypeInformation<?>[] fieldTypes = new TypeInformation[names.size()];
    for (int i = 0; i < names.size(); i++) {
        fieldTypes[i] = toLegacyTypeInfo(fieldsDataType.getFieldDataTypes().get(names.get(i)));
    }
    return Types.ROW_NAMED(names.toArray(new String[0]), fieldTypes);
}
Example 5
Source File: LegacyTypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 5 votes |
/**
 * Converts a row-typed {@link FieldsDataType} into legacy {@link TypeInformation}.
 *
 * <p>Field names come from the logical {@link RowType}; field types come positionally
 * from {@code getChildren()} — NOTE(review): this assumes the children list is ordered
 * like the row fields, which matches the original positional mapping.
 *
 * @param fieldsDataType the row data type to convert
 * @return a named-row {@code TypeInformation} mirroring the input's fields
 */
private static TypeInformation<?> convertToRowTypeInfo(FieldsDataType fieldsDataType) {
    final RowType rowType = (RowType) fieldsDataType.getLogicalType();
    final String[] fieldNames = rowType.getFields().stream()
            .map(RowType.RowField::getName)
            .toArray(String[]::new);
    final List<DataType> children = fieldsDataType.getChildren();
    final TypeInformation<?>[] fieldTypes = new TypeInformation[children.size()];
    for (int i = 0; i < children.size(); i++) {
        fieldTypes[i] = toLegacyTypeInfo(children.get(i));
    }
    return Types.ROW_NAMED(fieldNames, fieldTypes);
}
Example 6
Source File: DataTypeUtilsTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testExpandDistinctType() {
    // Build a plain ROW type, wrap its logical type in a DISTINCT type, and verify
    // that expanding the composite still yields the original columns.
    FieldsDataType dataType = (FieldsDataType) ROW(
            FIELD("f0", INT()),
            FIELD("f1", STRING()),
            FIELD("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)),
            FIELD("f3", TIMESTAMP(3)));
    DistinctType distinctLogicalType = DistinctType.newBuilder(
                    ObjectIdentifier.of("catalog", "database", "type"),
                    dataType.getLogicalType())
            .build();
    DataType distinctDataType = new FieldsDataType(distinctLogicalType, dataType.getChildren());

    TableSchema expanded = DataTypeUtils.expandCompositeTypeToSchema(distinctDataType);

    TableSchema expected = TableSchema.builder()
            .field("f0", INT())
            .field("f1", STRING())
            .field("f2", TIMESTAMP(5).bridgedTo(Timestamp.class))
            .field("f3", TIMESTAMP(3).bridgedTo(LocalDateTime.class))
            .build();
    assertThat(expanded, equalTo(expected));
}
Example 7
Source File: SchemaUtils.java From pulsar-flink with Apache License 2.0 | 4 votes |
private static Schema sqlType2AvroSchema(DataType flinkType, boolean nullable, String recordName, String namespace) throws IncompatibleSchemaException { SchemaBuilder.TypeBuilder<Schema> builder = SchemaBuilder.builder(); LogicalTypeRoot type = flinkType.getLogicalType().getTypeRoot(); Schema schema = null; if (flinkType instanceof AtomicDataType) { switch (type) { case BOOLEAN: schema = builder.booleanType(); break; case TINYINT: case SMALLINT: case INTEGER: schema = builder.intType(); break; case BIGINT: schema = builder.longType(); break; case DATE: schema = LogicalTypes.date().addToSchema(builder.intType()); break; case TIMESTAMP_WITHOUT_TIME_ZONE: schema = LogicalTypes.timestampMicros().addToSchema(builder.longType()); break; case FLOAT: schema = builder.floatType(); break; case DOUBLE: schema = builder.doubleType(); break; case VARCHAR: schema = builder.stringType(); break; case BINARY: case VARBINARY: schema = builder.bytesType(); break; case DECIMAL: DecimalType dt = (DecimalType) flinkType.getLogicalType(); LogicalTypes.Decimal avroType = LogicalTypes.decimal(dt.getPrecision(), dt.getScale()); int fixedSize = minBytesForPrecision[dt.getPrecision()]; // Need to avoid naming conflict for the fixed fields String name; if (namespace.equals("")) { name = recordName + ".fixed"; } else { name = namespace + recordName + ".fixed"; } schema = avroType.addToSchema(SchemaBuilder.fixed(name).size(fixedSize)); break; default: throw new IncompatibleSchemaException(String.format("Unsupported type %s", flinkType.toString()), null); } } else if (flinkType instanceof CollectionDataType) { if (type == LogicalTypeRoot.ARRAY) { CollectionDataType cdt = (CollectionDataType) flinkType; DataType elementType = cdt.getElementDataType(); schema = builder.array().items(sqlType2AvroSchema(elementType, elementType.getLogicalType().isNullable(), recordName, namespace)); } else { throw new IncompatibleSchemaException("Pulsar only support collection as array", null); } } else if (flinkType 
instanceof KeyValueDataType) { KeyValueDataType kvType = (KeyValueDataType) flinkType; DataType keyType = kvType.getKeyDataType(); DataType valueType = kvType.getValueDataType(); if (!(keyType instanceof AtomicDataType) || keyType.getLogicalType().getTypeRoot() != LogicalTypeRoot.VARCHAR) { throw new IncompatibleSchemaException("Pulsar only support string key map", null); } schema = builder.map().values(sqlType2AvroSchema(valueType, valueType.getLogicalType().isNullable(), recordName, namespace)); } else if (flinkType instanceof FieldsDataType) { FieldsDataType fieldsDataType = (FieldsDataType) flinkType; String childNamespace = namespace.equals("") ? recordName : namespace + "." + recordName; SchemaBuilder.FieldAssembler<Schema> fieldsAssembler = builder.record(recordName).namespace(namespace).fields(); RowType rowType = (RowType) fieldsDataType.getLogicalType(); for (String fieldName : rowType.getFieldNames()) { DataType ftype = fieldsDataType.getFieldDataTypes().get(fieldName); Schema fieldAvroSchema = sqlType2AvroSchema(ftype, ftype.getLogicalType().isNullable(), fieldName, childNamespace); fieldsAssembler.name(fieldName).type(fieldAvroSchema).noDefault(); } schema = fieldsAssembler.endRecord(); } else { throw new IncompatibleSchemaException(String.format("Unexpected type %s", flinkType.toString()), null); } if (nullable) { return Schema.createUnion(schema, NULL_SCHEMA); } else { return schema; } }