Java Code Examples for org.apache.flink.table.types.logical.DecimalType#getPrecision()
The following examples show how to use
org.apache.flink.table.types.logical.DecimalType#getPrecision() .
You can vote up the ones you like or vote down the ones you don't like,
and navigate to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: AvroRowDataDeserializationSchema.java From flink with Apache License 2.0 | 6 votes |
private static DeserializationRuntimeConverter createDecimalConverter(DecimalType decimalType) { final int precision = decimalType.getPrecision(); final int scale = decimalType.getScale(); return avroObject -> { final byte[] bytes; if (avroObject instanceof GenericFixed) { bytes = ((GenericFixed) avroObject).bytes(); } else if (avroObject instanceof ByteBuffer) { ByteBuffer byteBuffer = (ByteBuffer) avroObject; bytes = new byte[byteBuffer.remaining()]; byteBuffer.get(bytes); } else { bytes = (byte[]) avroObject; } return DecimalData.fromUnscaledBytes(bytes, precision, scale); }; }
Example 2
Source File: HBaseSerde.java From flink with Apache License 2.0 | 5 votes |
private static FieldEncoder createDecimalEncoder(DecimalType decimalType) { final int precision = decimalType.getPrecision(); final int scale = decimalType.getScale(); return (row, pos) -> { BigDecimal decimal = row.getDecimal(pos, precision, scale).toBigDecimal(); return Bytes.toBytes(decimal); }; }
Example 3
Source File: HBaseSerde.java From flink with Apache License 2.0 | 5 votes |
private static FieldDecoder createDecimalDecoder(DecimalType decimalType) { final int precision = decimalType.getPrecision(); final int scale = decimalType.getScale(); return value -> { BigDecimal decimal = Bytes.toBigDecimal(value); return DecimalData.fromBigDecimal(decimal, precision, scale); }; }
Example 4
Source File: CsvRowDataSerializationSchema.java From flink with Apache License 2.0 | 5 votes |
private RowFieldConverter createDecimalRowFieldConverter(DecimalType decimalType) { final int precision = decimalType.getPrecision(); final int scale = decimalType.getScale(); return (csvMapper, container, row, pos) -> { DecimalData decimal = row.getDecimal(pos, precision, scale); return convertDecimal(decimal, container); }; }
Example 5
Source File: CsvRowDataSerializationSchema.java From flink with Apache License 2.0 | 5 votes |
private ArrayElementConverter createDecimalArrayElementConverter(DecimalType decimalType) { final int precision = decimalType.getPrecision(); final int scale = decimalType.getScale(); return (csvMapper, container, array, pos) -> { DecimalData decimal = array.getDecimal(pos, precision, scale); return convertDecimal(decimal, container); }; }
Example 6
Source File: SchemaUtils.java From pulsar-flink with Apache License 2.0 | 4 votes |
/**
 * Converts a Flink SQL {@code DataType} into an equivalent Avro {@code Schema}.
 *
 * <p>Atomic types map to Avro primitives (with Avro logical types for DATE,
 * TIMESTAMP and DECIMAL), arrays map to Avro arrays, string-keyed maps to Avro
 * maps, and row types to Avro records. When {@code nullable} is true the
 * resulting schema is wrapped in a union with null.
 *
 * @param flinkType  the Flink data type to convert
 * @param nullable   whether the resulting schema should accept null
 * @param recordName name used for generated records / fixed types
 * @param namespace  Avro namespace for generated records ("" for top level)
 * @throws IncompatibleSchemaException if the type has no Avro counterpart
 */
private static Schema sqlType2AvroSchema(DataType flinkType, boolean nullable, String recordName, String namespace) throws IncompatibleSchemaException {
    SchemaBuilder.TypeBuilder<Schema> builder = SchemaBuilder.builder();
    LogicalTypeRoot type = flinkType.getLogicalType().getTypeRoot();
    Schema schema = null;
    if (flinkType instanceof AtomicDataType) {
        switch (type) {
            case BOOLEAN:
                schema = builder.booleanType();
                break;
            case TINYINT:
            case SMALLINT:
            case INTEGER:
                // All sub-int widths collapse to Avro's 32-bit int.
                schema = builder.intType();
                break;
            case BIGINT:
                schema = builder.longType();
                break;
            case DATE:
                // Avro "date" logical type over int (days since epoch).
                schema = LogicalTypes.date().addToSchema(builder.intType());
                break;
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                // Microsecond-precision timestamp over long.
                schema = LogicalTypes.timestampMicros().addToSchema(builder.longType());
                break;
            case FLOAT:
                schema = builder.floatType();
                break;
            case DOUBLE:
                schema = builder.doubleType();
                break;
            case VARCHAR:
                schema = builder.stringType();
                break;
            case BINARY:
            case VARBINARY:
                schema = builder.bytesType();
                break;
            case DECIMAL:
                DecimalType dt = (DecimalType) flinkType.getLogicalType();
                LogicalTypes.Decimal avroType = LogicalTypes.decimal(dt.getPrecision(), dt.getScale());
                // Smallest fixed-size byte array that can hold this precision.
                int fixedSize = minBytesForPrecision[dt.getPrecision()];
                // Need to avoid naming conflict for the fixed fields
                String name;
                if (namespace.equals("")) {
                    name = recordName + ".fixed";
                } else {
                    // NOTE(review): no "." separator between namespace and recordName here,
                    // unlike the childNamespace construction below — confirm this is intentional.
                    name = namespace + recordName + ".fixed";
                }
                schema = avroType.addToSchema(SchemaBuilder.fixed(name).size(fixedSize));
                break;
            default:
                throw new IncompatibleSchemaException(String.format("Unsupported type %s", flinkType.toString()), null);
        }
    } else if (flinkType instanceof CollectionDataType) {
        if (type == LogicalTypeRoot.ARRAY) {
            CollectionDataType cdt = (CollectionDataType) flinkType;
            DataType elementType = cdt.getElementDataType();
            // Element nullability is taken from the element's own logical type.
            schema = builder.array().items(sqlType2AvroSchema(elementType, elementType.getLogicalType().isNullable(), recordName, namespace));
        } else {
            throw new IncompatibleSchemaException("Pulsar only support collection as array", null);
        }
    } else if (flinkType instanceof KeyValueDataType) {
        KeyValueDataType kvType = (KeyValueDataType) flinkType;
        DataType keyType = kvType.getKeyDataType();
        DataType valueType = kvType.getValueDataType();
        // Avro maps are keyed by string, so only VARCHAR keys are representable.
        if (!(keyType instanceof AtomicDataType) || keyType.getLogicalType().getTypeRoot() != LogicalTypeRoot.VARCHAR) {
            throw new IncompatibleSchemaException("Pulsar only support string key map", null);
        }
        schema = builder.map().values(sqlType2AvroSchema(valueType, valueType.getLogicalType().isNullable(), recordName, namespace));
    } else if (flinkType instanceof FieldsDataType) {
        FieldsDataType fieldsDataType = (FieldsDataType) flinkType;
        // Nested records get "<namespace>.<recordName>" as their namespace.
        String childNamespace = namespace.equals("") ? recordName : namespace + "." + recordName;
        SchemaBuilder.FieldAssembler<Schema> fieldsAssembler = builder.record(recordName).namespace(namespace).fields();
        RowType rowType = (RowType) fieldsDataType.getLogicalType();
        for (String fieldName : rowType.getFieldNames()) {
            DataType ftype = fieldsDataType.getFieldDataTypes().get(fieldName);
            Schema fieldAvroSchema = sqlType2AvroSchema(ftype, ftype.getLogicalType().isNullable(), fieldName, childNamespace);
            fieldsAssembler.name(fieldName).type(fieldAvroSchema).noDefault();
        }
        schema = fieldsAssembler.endRecord();
    } else {
        throw new IncompatibleSchemaException(String.format("Unexpected type %s", flinkType.toString()), null);
    }
    if (nullable) {
        // Nullable columns become a union of the schema with null.
        return Schema.createUnion(schema, NULL_SCHEMA);
    } else {
        return schema;
    }
}
Example 7
Source File: TypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 4 votes |
/**
 * Converts a table {@code DataType} to a {@code TypeInformation}, preferring
 * the planner's internal type infos when the conversion class is an internal
 * format class (e.g. {@code Decimal}, {@code BinaryString}, {@code BaseRow}).
 *
 * <p>Falls back to {@code TypeConversions.fromDataTypeToLegacyInfo} for any
 * type not handled explicitly.
 */
public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
    Class<?> clazz = dataType.getConversionClass();
    if (clazz.isPrimitive()) {
        // Primitive conversion classes have a direct, pre-registered mapping.
        final TypeInformation<?> foundTypeInfo = primitiveDataTypeTypeInfoMap.get(clazz.getName());
        if (foundTypeInfo != null) {
            return foundTypeInfo;
        }
    }
    LogicalType logicalType = fromDataTypeToLogicalType(dataType);
    switch (logicalType.getTypeRoot()) {
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            // Internal Decimal conversion class -> internal type info;
            // otherwise the external BigDecimal-based type info.
            return clazz == Decimal.class ?
                    new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()) :
                    new BigDecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
        case CHAR:
        case VARCHAR: // ignore precision
            return clazz == BinaryString.class ?
                    BinaryStringTypeInfo.INSTANCE :
                    BasicTypeInfo.STRING_TYPE_INFO;
        case BINARY:
        case VARBINARY: // ignore precision
            return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
        case INTERVAL_YEAR_MONTH:
            return TimeIntervalTypeInfo.INTERVAL_MONTHS;
        case INTERVAL_DAY_TIME:
            return TimeIntervalTypeInfo.INTERVAL_MILLIS;
        case ARRAY:
            // Object arrays only for non-primitive element types; primitive
            // arrays go through the legacy conversion path.
            if (dataType instanceof CollectionDataType &&
                    !isPrimitive(((CollectionDataType) dataType).getElementDataType().getLogicalType())) {
                return ObjectArrayTypeInfo.getInfoFor(
                        fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case MAP:
            KeyValueDataType mapType = (KeyValueDataType) dataType;
            return new MapTypeInfo(
                    fromDataTypeToTypeInfo(mapType.getKeyDataType()),
                    fromDataTypeToTypeInfo(mapType.getValueDataType()));
        case MULTISET:
            return MultisetTypeInfo.getInfoFor(
                    fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
        case ROW:
            if (BaseRow.class.isAssignableFrom(dataType.getConversionClass())) {
                // Internal row representation.
                return BaseRowTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
            } else if (Row.class == dataType.getConversionClass()) {
                // External Row: build RowTypeInfo from per-field data types,
                // preserving the logical field-name order.
                FieldsDataType rowType = (FieldsDataType) dataType;
                RowType logicalRowType = (RowType) logicalType;
                return new RowTypeInfo(
                        logicalRowType.getFieldNames().stream()
                                .map(name -> rowType.getFieldDataTypes().get(name))
                                .map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo)
                                .toArray(TypeInformation[]::new),
                        logicalRowType.getFieldNames().toArray(new String[0]));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        default:
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
    }
}
Example 8
Source File: InternalSerializers.java From flink with Apache License 2.0 | 4 votes |
public static TypeSerializer create(LogicalType type, ExecutionConfig config) { switch (type.getTypeRoot()) { case BOOLEAN: return BooleanSerializer.INSTANCE; case TINYINT: return ByteSerializer.INSTANCE; case SMALLINT: return ShortSerializer.INSTANCE; case INTEGER: case DATE: case TIME_WITHOUT_TIME_ZONE: case INTERVAL_YEAR_MONTH: return IntSerializer.INSTANCE; case BIGINT: case TIMESTAMP_WITHOUT_TIME_ZONE: case TIMESTAMP_WITH_LOCAL_TIME_ZONE: case INTERVAL_DAY_TIME: return LongSerializer.INSTANCE; case FLOAT: return FloatSerializer.INSTANCE; case DOUBLE: return DoubleSerializer.INSTANCE; case CHAR: case VARCHAR: return BinaryStringSerializer.INSTANCE; case DECIMAL: DecimalType decimalType = (DecimalType) type; return new DecimalSerializer(decimalType.getPrecision(), decimalType.getScale()); case ARRAY: return new BaseArraySerializer(((ArrayType) type).getElementType(), config); case MAP: MapType mapType = (MapType) type; return new BaseMapSerializer(mapType.getKeyType(), mapType.getValueType(), config); case MULTISET: return new BaseMapSerializer(((MultisetType) type).getElementType(), new IntType(), config); case ROW: RowType rowType = (RowType) type; return new BaseRowSerializer(config, rowType); case BINARY: case VARBINARY: return BytePrimitiveArraySerializer.INSTANCE; case ANY: return new BinaryGenericSerializer( ((TypeInformationAnyType) type).getTypeInformation().createSerializer(config)); default: throw new RuntimeException("Not support type: " + type); } }
Example 9
Source File: LogicalTypeChecks.java From flink with Apache License 2.0 | 4 votes |
@Override public Integer visit(DecimalType decimalType) { return decimalType.getPrecision(); }
Example 10
Source File: DecimalBigDecimalConverter.java From flink with Apache License 2.0 | 4 votes |
static DecimalBigDecimalConverter create(DataType dataType) { final DecimalType decimalType = (DecimalType) dataType.getLogicalType(); return new DecimalBigDecimalConverter(decimalType.getPrecision(), decimalType.getScale()); }
Example 11
Source File: TypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 4 votes |
/**
 * Converts a table {@code DataType} to a {@code TypeInformation}, preferring
 * the internal data-structure type infos when the conversion class is an
 * internal format class (e.g. {@code TimestampData}, {@code DecimalData},
 * {@code StringData}, {@code RowData}).
 *
 * <p>Falls back to {@code TypeConversions.fromDataTypeToLegacyInfo} for any
 * type not handled explicitly.
 */
public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
    Class<?> clazz = dataType.getConversionClass();
    if (clazz.isPrimitive()) {
        // Primitive conversion classes have a direct, pre-registered mapping.
        final TypeInformation<?> foundTypeInfo = primitiveDataTypeTypeInfoMap.get(clazz.getName());
        if (foundTypeInfo != null) {
            return foundTypeInfo;
        }
    }
    LogicalType logicalType = fromDataTypeToLogicalType(dataType);
    switch (logicalType.getTypeRoot()) {
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            TimestampType timestampType = (TimestampType) logicalType;
            int precision = timestampType.getPrecision();
            if (timestampType.getKind() == TimestampKind.REGULAR) {
                // Choose by conversion class; precision 3 maps to the
                // pre-existing millisecond type infos, other precisions need
                // the "legacy" precision-carrying wrappers.
                return clazz == TimestampData.class ?
                        new TimestampDataTypeInfo(precision) :
                        (clazz == LocalDateTime.class ?
                                ((3 == precision) ?
                                        Types.LOCAL_DATE_TIME :
                                        new LegacyLocalDateTimeTypeInfo(precision)) :
                                ((3 == precision) ?
                                        Types.SQL_TIMESTAMP :
                                        new LegacyTimestampTypeInfo(precision)));
            } else {
                // Rowtime/proctime attributes go through the legacy conversion.
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            LocalZonedTimestampType lzTs = (LocalZonedTimestampType) logicalType;
            int precisionLzTs = lzTs.getPrecision();
            return clazz == TimestampData.class ?
                    new TimestampDataTypeInfo(precisionLzTs) :
                    (clazz == Instant.class ?
                            ((3 == precisionLzTs) ?
                                    Types.INSTANT :
                                    new LegacyInstantTypeInfo(precisionLzTs)) :
                            TypeConversions.fromDataTypeToLegacyInfo(dataType));
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            // Internal DecimalData conversion class -> internal type info;
            // otherwise the external BigDecimal-based type info.
            return clazz == DecimalData.class ?
                    new DecimalDataTypeInfo(decimalType.getPrecision(), decimalType.getScale()) :
                    new BigDecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
        case CHAR:
        case VARCHAR: // ignore precision
            return clazz == StringData.class ?
                    StringDataTypeInfo.INSTANCE :
                    BasicTypeInfo.STRING_TYPE_INFO;
        case BINARY:
        case VARBINARY: // ignore precision
            return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
        case INTERVAL_YEAR_MONTH:
            return TimeIntervalTypeInfo.INTERVAL_MONTHS;
        case INTERVAL_DAY_TIME:
            return TimeIntervalTypeInfo.INTERVAL_MILLIS;
        case ARRAY:
            // Object arrays only for non-primitive element types; primitive
            // arrays go through the legacy conversion path.
            if (dataType instanceof CollectionDataType &&
                    !isPrimitive(((CollectionDataType) dataType).getElementDataType().getLogicalType())) {
                return ObjectArrayTypeInfo.getInfoFor(
                        fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case MAP:
            KeyValueDataType mapType = (KeyValueDataType) dataType;
            return new MapTypeInfo(
                    fromDataTypeToTypeInfo(mapType.getKeyDataType()),
                    fromDataTypeToTypeInfo(mapType.getValueDataType()));
        case MULTISET:
            return MultisetTypeInfo.getInfoFor(
                    fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
        case ROW:
            if (RowData.class.isAssignableFrom(dataType.getConversionClass())) {
                // Internal row representation.
                return RowDataTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
            } else if (Row.class == dataType.getConversionClass()) {
                // External Row: build RowTypeInfo from the children data types.
                RowType logicalRowType = (RowType) logicalType;
                return new RowTypeInfo(
                        dataType.getChildren()
                                .stream()
                                .map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo)
                                .toArray(TypeInformation[]::new),
                        logicalRowType.getFieldNames().toArray(new String[0]));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case RAW:
            if (logicalType instanceof RawType) {
                final RawType<?> rawType = (RawType<?>) logicalType;
                return createWrapperTypeInfo(rawType);
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        default:
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
    }
}
Example 12
Source File: LogicalTypeChecks.java From flink with Apache License 2.0 | 4 votes |
@Override public Integer visit(DecimalType decimalType) { return decimalType.getPrecision(); }
Example 13
Source File: PythonTypeUtils.java From flink with Apache License 2.0 | 4 votes |
@Override public TypeSerializer visit(DecimalType decimalType) { return new DecimalDataSerializer(decimalType.getPrecision(), decimalType.getScale()); }
Example 14
Source File: ArrowUtils.java From flink with Apache License 2.0 | 4 votes |
@Override public ArrowType visit(DecimalType decimalType) { return new ArrowType.Decimal(decimalType.getPrecision(), decimalType.getScale()); }