org.apache.flink.api.java.typeutils.MultisetTypeInfo Java Examples
The following examples show how to use
org.apache.flink.api.java.typeutils.MultisetTypeInfo.
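As a starting point, here is a minimal, hedged sketch of the class itself: a MultisetTypeInfo describes a multiset (bag) of elements that is represented at runtime as a map from element to count. Only the constructor and getElementTypeInfo(), both of which appear in the examples below, are exercised; the exact printed toString form is an assumption.

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.MultisetTypeInfo;

public class MultisetTypeInfoBasics {

    public static void main(String[] args) {
        // A multiset of strings; at runtime it behaves like a Map<String, Integer> of element counts.
        MultisetTypeInfo<String> multisetInfo = new MultisetTypeInfo<>(Types.STRING);

        // The element type is what the converters in the examples below extract via getElementTypeInfo().
        TypeInformation<String> elementInfo = multisetInfo.getElementTypeInfo();

        System.out.println(elementInfo);  // String
        System.out.println(multisetInfo); // expected to print something like Multiset<String>
    }
}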
Example #1
Source File: LegacyTypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 5 votes |
public static DataType toDataType(TypeInformation<?> typeInfo) {
    // time indicators first as their hashCode/equals is shared with those of regular timestamps
    if (typeInfo instanceof TimeIndicatorTypeInfo) {
        return convertToTimeAttributeType((TimeIndicatorTypeInfo) typeInfo);
    }

    final DataType foundDataType = typeInfoDataTypeMap.get(typeInfo);
    if (foundDataType != null) {
        return foundDataType;
    }

    if (typeInfo instanceof RowTypeInfo) {
        return convertToRowType((RowTypeInfo) typeInfo);
    } else if (typeInfo instanceof ObjectArrayTypeInfo) {
        return convertToArrayType(
            typeInfo.getTypeClass(),
            ((ObjectArrayTypeInfo) typeInfo).getComponentInfo());
    } else if (typeInfo instanceof BasicArrayTypeInfo) {
        return createLegacyType(LogicalTypeRoot.ARRAY, typeInfo);
    } else if (typeInfo instanceof MultisetTypeInfo) {
        return convertToMultisetType(((MultisetTypeInfo) typeInfo).getElementTypeInfo());
    } else if (typeInfo instanceof MapTypeInfo) {
        return convertToMapType((MapTypeInfo) typeInfo);
    } else if (typeInfo instanceof CompositeType) {
        return createLegacyType(LogicalTypeRoot.STRUCTURED_TYPE, typeInfo);
    }

    return createLegacyType(LogicalTypeRoot.ANY, typeInfo);
}
Example #2
Source File: LegacyTypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 5 votes |
public static DataType toDataType(TypeInformation<?> typeInfo) {
    // time indicators first as their hashCode/equals is shared with those of regular timestamps
    if (typeInfo instanceof TimeIndicatorTypeInfo) {
        return convertToTimeAttributeType((TimeIndicatorTypeInfo) typeInfo);
    }

    final DataType foundDataType = typeInfoDataTypeMap.get(typeInfo);
    if (foundDataType != null) {
        return foundDataType;
    }

    if (typeInfo instanceof RowTypeInfo) {
        return convertToRowType((RowTypeInfo) typeInfo);
    } else if (typeInfo instanceof ObjectArrayTypeInfo) {
        return convertToArrayType(
            typeInfo.getTypeClass(),
            ((ObjectArrayTypeInfo) typeInfo).getComponentInfo());
    } else if (typeInfo instanceof BasicArrayTypeInfo) {
        return createLegacyType(LogicalTypeRoot.ARRAY, typeInfo);
    } else if (typeInfo instanceof MultisetTypeInfo) {
        return convertToMultisetType(((MultisetTypeInfo) typeInfo).getElementTypeInfo());
    } else if (typeInfo instanceof MapTypeInfo) {
        return convertToMapType((MapTypeInfo) typeInfo);
    } else if (typeInfo instanceof CompositeType) {
        return createLegacyType(LogicalTypeRoot.STRUCTURED_TYPE, typeInfo);
    }

    return createLegacyType(LogicalTypeRoot.RAW, typeInfo);
}
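The two converter variants in Examples #1 and #2 differ only in the legacy type root used as the final fallback (ANY vs. RAW). As a hedged illustration of how the MultisetTypeInfo branch is reached, the sketch below calls toDataType with a multiset of strings. The import path org.apache.flink.table.types.utils.LegacyTypeInfoDataTypeConverter and the printed result are assumptions; the converter is an internal utility whose exact location may differ between Flink versions.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.MultisetTypeInfo;
import org.apache.flink.table.types.DataType;
// Assumed location of the converter shown above; it may differ between Flink versions.
import org.apache.flink.table.types.utils.LegacyTypeInfoDataTypeConverter;

public class MultisetToDataTypeSketch {

    public static void main(String[] args) {
        // Hits the "typeInfo instanceof MultisetTypeInfo" branch of toDataType above,
        // which delegates to convertToMultisetType(elementTypeInfo).
        DataType dataType = LegacyTypeInfoDataTypeConverter.toDataType(
            new MultisetTypeInfo<>(Types.STRING));

        // Expected (not verified here) to be a MULTISET data type over a string element type.
        System.out.println(dataType);
    }
}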
Example #3
Source File: TypeStringUtilsTest.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
@Test
public void testWriteComplexTypes() {
    testReadAndWrite(
        "ROW<f0 DECIMAL, f1 TINYINT>",
        Types.ROW(Types.BIG_DEC, Types.BYTE));

    testReadAndWrite(
        "ROW<hello DECIMAL, world TINYINT>",
        Types.ROW_NAMED(
            new String[]{"hello", "world"},
            Types.BIG_DEC, Types.BYTE));

    testReadAndWrite(
        "POJO<org.apache.flink.table.utils.TypeStringUtilsTest$TestPojo>",
        TypeExtractor.createTypeInfo(TestPojo.class));

    testReadAndWrite(
        "ANY<org.apache.flink.table.utils.TypeStringUtilsTest$TestNoPojo>",
        TypeExtractor.createTypeInfo(TestNoPojo.class));

    testReadAndWrite(
        "MAP<VARCHAR, ROW<f0 DECIMAL, f1 TINYINT>>",
        Types.MAP(Types.STRING, Types.ROW(Types.BIG_DEC, Types.BYTE)));

    testReadAndWrite(
        "MULTISET<ROW<f0 DECIMAL, f1 TINYINT>>",
        new MultisetTypeInfo<>(Types.ROW(Types.BIG_DEC, Types.BYTE)));

    testReadAndWrite(
        "PRIMITIVE_ARRAY<TINYINT>",
        Types.PRIMITIVE_ARRAY(Types.BYTE));

    testReadAndWrite(
        "OBJECT_ARRAY<POJO<org.apache.flink.table.utils.TypeStringUtilsTest$TestPojo>>",
        Types.OBJECT_ARRAY(TypeExtractor.createTypeInfo(TestPojo.class)));

    // test escaping
    assertEquals(
        Types.ROW_NAMED(
            new String[] {"he \nllo", "world"},
            Types.BIG_DEC, Types.BYTE),
        TypeStringUtils.readTypeInfo("ROW<`he \nllo` DECIMAL, world TINYINT>"));

    assertEquals(
        Types.ROW_NAMED(
            new String[] {"he`llo", "world"},
            Types.BIG_DEC, Types.BYTE),
        TypeStringUtils.readTypeInfo("ROW<`he``llo` DECIMAL, world TINYINT>"));

    // test backward compatibility with brackets ()
    assertEquals(
        Types.ROW_NAMED(
            new String[] {"he \nllo", "world"},
            Types.BIG_DEC, Types.BYTE),
        TypeStringUtils.readTypeInfo("ROW(`he \nllo` DECIMAL, world TINYINT)"));

    // test nesting
    testReadAndWrite(
        "ROW<singleton ROW<f0 INT>, twoField ROW<`Field 1` ROW<f0 DECIMAL>, `Field``s 2` VARCHAR>>",
        Types.ROW_NAMED(
            new String[] {"singleton", "twoField"},
            Types.ROW(Types.INT),
            Types.ROW_NAMED(
                new String[] {"Field 1", "Field`s 2"},
                Types.ROW(Types.BIG_DEC),
                Types.STRING)));
}
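The test above exercises the full round trip through TypeStringUtils. As a smaller, hedged sketch of just the multiset case (assuming the static readTypeInfo/writeTypeInfo methods of org.apache.flink.table.utils.TypeStringUtils), parsing a MULTISET type string should yield a MultisetTypeInfo, and writing the type info back should reproduce the same string form:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.MultisetTypeInfo;
import org.apache.flink.table.utils.TypeStringUtils;

public class MultisetTypeStringSketch {

    public static void main(String[] args) {
        // Parsing should produce a MultisetTypeInfo, as the MULTISET<...> case in the test expects.
        TypeInformation<?> parsed = TypeStringUtils.readTypeInfo("MULTISET<VARCHAR>");
        System.out.println(parsed instanceof MultisetTypeInfo); // expected: true

        // Serializing the type info is expected to round-trip to the same string form.
        String written = TypeStringUtils.writeTypeInfo(new MultisetTypeInfo<>(Types.STRING));
        System.out.println(written); // expected: MULTISET<VARCHAR>
    }
}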
Example #4
Source File: TypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 4 votes |
public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
    Class<?> clazz = dataType.getConversionClass();
    if (clazz.isPrimitive()) {
        final TypeInformation<?> foundTypeInfo = primitiveDataTypeTypeInfoMap.get(clazz.getName());
        if (foundTypeInfo != null) {
            return foundTypeInfo;
        }
    }
    LogicalType logicalType = fromDataTypeToLogicalType(dataType);
    switch (logicalType.getTypeRoot()) {
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            return clazz == Decimal.class ?
                new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()) :
                new BigDecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
        case CHAR:
        case VARCHAR: // ignore precision
            return clazz == BinaryString.class ?
                BinaryStringTypeInfo.INSTANCE :
                BasicTypeInfo.STRING_TYPE_INFO;
        case BINARY:
        case VARBINARY: // ignore precision
            return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
        case INTERVAL_YEAR_MONTH:
            return TimeIntervalTypeInfo.INTERVAL_MONTHS;
        case INTERVAL_DAY_TIME:
            return TimeIntervalTypeInfo.INTERVAL_MILLIS;
        case ARRAY:
            if (dataType instanceof CollectionDataType &&
                    !isPrimitive(((CollectionDataType) dataType).getElementDataType().getLogicalType())) {
                return ObjectArrayTypeInfo.getInfoFor(
                    fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case MAP:
            KeyValueDataType mapType = (KeyValueDataType) dataType;
            return new MapTypeInfo(
                fromDataTypeToTypeInfo(mapType.getKeyDataType()),
                fromDataTypeToTypeInfo(mapType.getValueDataType()));
        case MULTISET:
            return MultisetTypeInfo.getInfoFor(
                fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
        case ROW:
            if (BaseRow.class.isAssignableFrom(dataType.getConversionClass())) {
                return BaseRowTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
            } else if (Row.class == dataType.getConversionClass()) {
                FieldsDataType rowType = (FieldsDataType) dataType;
                RowType logicalRowType = (RowType) logicalType;
                return new RowTypeInfo(
                    logicalRowType.getFieldNames().stream()
                        .map(name -> rowType.getFieldDataTypes().get(name))
                        .map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo)
                        .toArray(TypeInformation[]::new),
                    logicalRowType.getFieldNames().toArray(new String[0]));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        default:
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
    }
}
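Note that the MULTISET branch above builds its result through the static factory MultisetTypeInfo.getInfoFor rather than the constructor. A minimal sketch of the equivalence of the two forms (the equality check relies on the standard TypeInformation equals contract):

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.MultisetTypeInfo;

public class MultisetFactorySketch {

    public static void main(String[] args) {
        // Same element type, built two ways.
        MultisetTypeInfo<Long> viaFactory = MultisetTypeInfo.getInfoFor(Types.LONG);
        MultisetTypeInfo<Long> viaConstructor = new MultisetTypeInfo<>(Types.LONG);

        // Both describe a multiset of longs, so they are expected to compare equal.
        System.out.println(viaFactory.equals(viaConstructor)); // expected: true
    }
}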
Example #5
Source File: LegacyTypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 4 votes |
private static TypeInformation<?> convertToMultisetTypeInfo(CollectionDataType collectionDataType) {
    return new MultisetTypeInfo<>(
        toLegacyTypeInfo(collectionDataType.getElementDataType()));
}
Example #6
Source File: TypeStringUtilsTest.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testWriteComplexTypes() {
    testReadAndWrite(
        "ROW<f0 DECIMAL, f1 TINYINT>",
        Types.ROW(Types.BIG_DEC, Types.BYTE));

    testReadAndWrite(
        "ROW<hello DECIMAL, world TINYINT>",
        Types.ROW_NAMED(
            new String[]{"hello", "world"},
            Types.BIG_DEC, Types.BYTE));

    testReadAndWrite(
        "POJO<org.apache.flink.table.utils.TypeStringUtilsTest$TestPojo>",
        TypeExtractor.createTypeInfo(TestPojo.class));

    testReadAndWrite(
        "ANY<org.apache.flink.table.utils.TypeStringUtilsTest$TestNoPojo>",
        TypeExtractor.createTypeInfo(TestNoPojo.class));

    testReadAndWrite(
        "MAP<VARCHAR, ROW<f0 DECIMAL, f1 TINYINT>>",
        Types.MAP(Types.STRING, Types.ROW(Types.BIG_DEC, Types.BYTE)));

    testReadAndWrite(
        "MULTISET<ROW<f0 DECIMAL, f1 TINYINT>>",
        new MultisetTypeInfo<>(Types.ROW(Types.BIG_DEC, Types.BYTE)));

    testReadAndWrite(
        "PRIMITIVE_ARRAY<TINYINT>",
        Types.PRIMITIVE_ARRAY(Types.BYTE));

    testReadAndWrite(
        "OBJECT_ARRAY<POJO<org.apache.flink.table.utils.TypeStringUtilsTest$TestPojo>>",
        Types.OBJECT_ARRAY(TypeExtractor.createTypeInfo(TestPojo.class)));

    // test escaping
    assertEquals(
        Types.ROW_NAMED(
            new String[] {"he \nllo", "world"},
            Types.BIG_DEC, Types.BYTE),
        TypeStringUtils.readTypeInfo("ROW<`he \nllo` DECIMAL, world TINYINT>"));

    assertEquals(
        Types.ROW_NAMED(
            new String[] {"he`llo", "world"},
            Types.BIG_DEC, Types.BYTE),
        TypeStringUtils.readTypeInfo("ROW<`he``llo` DECIMAL, world TINYINT>"));

    // test backward compatibility with brackets ()
    assertEquals(
        Types.ROW_NAMED(
            new String[] {"he \nllo", "world"},
            Types.BIG_DEC, Types.BYTE),
        TypeStringUtils.readTypeInfo("ROW(`he \nllo` DECIMAL, world TINYINT)"));

    // test nesting
    testReadAndWrite(
        "ROW<singleton ROW<f0 INT>, twoField ROW<`Field 1` ROW<f0 DECIMAL>, `Field``s 2` VARCHAR>>",
        Types.ROW_NAMED(
            new String[] {"singleton", "twoField"},
            Types.ROW(Types.INT),
            Types.ROW_NAMED(
                new String[] {"Field 1", "Field`s 2"},
                Types.ROW(Types.BIG_DEC),
                Types.STRING)));
}
Example #7
Source File: TypeInfoDataTypeConverter.java From flink with Apache License 2.0 | 4 votes |
public static TypeInformation<?> fromDataTypeToTypeInfo(DataType dataType) {
    Class<?> clazz = dataType.getConversionClass();
    if (clazz.isPrimitive()) {
        final TypeInformation<?> foundTypeInfo = primitiveDataTypeTypeInfoMap.get(clazz.getName());
        if (foundTypeInfo != null) {
            return foundTypeInfo;
        }
    }
    LogicalType logicalType = fromDataTypeToLogicalType(dataType);
    switch (logicalType.getTypeRoot()) {
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            TimestampType timestampType = (TimestampType) logicalType;
            int precision = timestampType.getPrecision();
            if (timestampType.getKind() == TimestampKind.REGULAR) {
                return clazz == TimestampData.class ?
                    new TimestampDataTypeInfo(precision) :
                    (clazz == LocalDateTime.class ?
                        ((3 == precision) ?
                            Types.LOCAL_DATE_TIME : new LegacyLocalDateTimeTypeInfo(precision)) :
                        ((3 == precision) ?
                            Types.SQL_TIMESTAMP : new LegacyTimestampTypeInfo(precision)));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            LocalZonedTimestampType lzTs = (LocalZonedTimestampType) logicalType;
            int precisionLzTs = lzTs.getPrecision();
            return clazz == TimestampData.class ?
                new TimestampDataTypeInfo(precisionLzTs) :
                (clazz == Instant.class ?
                    ((3 == precisionLzTs) ?
                        Types.INSTANT : new LegacyInstantTypeInfo(precisionLzTs)) :
                    TypeConversions.fromDataTypeToLegacyInfo(dataType));
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            return clazz == DecimalData.class ?
                new DecimalDataTypeInfo(decimalType.getPrecision(), decimalType.getScale()) :
                new BigDecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
        case CHAR:
        case VARCHAR: // ignore precision
            return clazz == StringData.class ?
                StringDataTypeInfo.INSTANCE :
                BasicTypeInfo.STRING_TYPE_INFO;
        case BINARY:
        case VARBINARY: // ignore precision
            return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
        case INTERVAL_YEAR_MONTH:
            return TimeIntervalTypeInfo.INTERVAL_MONTHS;
        case INTERVAL_DAY_TIME:
            return TimeIntervalTypeInfo.INTERVAL_MILLIS;
        case ARRAY:
            if (dataType instanceof CollectionDataType &&
                    !isPrimitive(((CollectionDataType) dataType).getElementDataType().getLogicalType())) {
                return ObjectArrayTypeInfo.getInfoFor(
                    fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case MAP:
            KeyValueDataType mapType = (KeyValueDataType) dataType;
            return new MapTypeInfo(
                fromDataTypeToTypeInfo(mapType.getKeyDataType()),
                fromDataTypeToTypeInfo(mapType.getValueDataType()));
        case MULTISET:
            return MultisetTypeInfo.getInfoFor(
                fromDataTypeToTypeInfo(((CollectionDataType) dataType).getElementDataType()));
        case ROW:
            if (RowData.class.isAssignableFrom(dataType.getConversionClass())) {
                return RowDataTypeInfo.of((RowType) fromDataTypeToLogicalType(dataType));
            } else if (Row.class == dataType.getConversionClass()) {
                RowType logicalRowType = (RowType) logicalType;
                return new RowTypeInfo(
                    dataType.getChildren().stream()
                        .map(TypeInfoDataTypeConverter::fromDataTypeToTypeInfo)
                        .toArray(TypeInformation[]::new),
                    logicalRowType.getFieldNames().toArray(new String[0]));
            } else {
                return TypeConversions.fromDataTypeToLegacyInfo(dataType);
            }
        case RAW:
            if (logicalType instanceof RawType) {
                final RawType<?> rawType = (RawType<?>) logicalType;
                return createWrapperTypeInfo(rawType);
            }
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
        default:
            return TypeConversions.fromDataTypeToLegacyInfo(dataType);
    }
}
Example #8
Source File: TypeStringUtilsTest.java From flink with Apache License 2.0 | 4 votes |
@Test
public void testWriteComplexTypes() {
    testReadAndWrite(
        "ROW<f0 DECIMAL, f1 TINYINT>",
        Types.ROW(Types.BIG_DEC, Types.BYTE));

    testReadAndWrite(
        "ROW<hello DECIMAL, world TINYINT>",
        Types.ROW_NAMED(
            new String[]{"hello", "world"},
            Types.BIG_DEC, Types.BYTE));

    testReadAndWrite(
        "POJO<org.apache.flink.table.utils.TypeStringUtilsTest$TestPojo>",
        TypeExtractor.createTypeInfo(TestPojo.class));

    testReadAndWrite(
        "ANY<org.apache.flink.table.utils.TypeStringUtilsTest$TestNoPojo>",
        TypeExtractor.createTypeInfo(TestNoPojo.class));

    testReadAndWrite(
        "MAP<VARCHAR, ROW<f0 DECIMAL, f1 TINYINT>>",
        Types.MAP(Types.STRING, Types.ROW(Types.BIG_DEC, Types.BYTE)));

    testReadAndWrite(
        "MULTISET<ROW<f0 DECIMAL, f1 TINYINT>>",
        new MultisetTypeInfo<>(Types.ROW(Types.BIG_DEC, Types.BYTE)));

    testReadAndWrite(
        "PRIMITIVE_ARRAY<TINYINT>",
        Types.PRIMITIVE_ARRAY(Types.BYTE));

    testReadAndWrite(
        "OBJECT_ARRAY<POJO<org.apache.flink.table.utils.TypeStringUtilsTest$TestPojo>>",
        Types.OBJECT_ARRAY(TypeExtractor.createTypeInfo(TestPojo.class)));

    // test escaping
    assertEquals(
        Types.ROW_NAMED(
            new String[] {"he \nllo", "world"},
            Types.BIG_DEC, Types.BYTE),
        TypeStringUtils.readTypeInfo("ROW<`he \nllo` DECIMAL, world TINYINT>"));

    assertEquals(
        Types.ROW_NAMED(
            new String[] {"he`llo", "world"},
            Types.BIG_DEC, Types.BYTE),
        TypeStringUtils.readTypeInfo("ROW<`he``llo` DECIMAL, world TINYINT>"));

    // test backward compatibility with brackets ()
    assertEquals(
        Types.ROW_NAMED(
            new String[] {"he \nllo", "world"},
            Types.BIG_DEC, Types.BYTE),
        TypeStringUtils.readTypeInfo("ROW(`he \nllo` DECIMAL, world TINYINT)"));

    // test nesting
    testReadAndWrite(
        "ROW<singleton ROW<f0 INT>, twoField ROW<`Field 1` ROW<f0 DECIMAL>, `Field``s 2` VARCHAR>>",
        Types.ROW_NAMED(
            new String[] {"singleton", "twoField"},
            Types.ROW(Types.INT),
            Types.ROW_NAMED(
                new String[] {"Field 1", "Field`s 2"},
                Types.ROW(Types.BIG_DEC),
                Types.STRING)));

    testWrite(
        "ROW<f0 DECIMAL, f1 TIMESTAMP, f2 TIME, f3 DATE>",
        Types.ROW_NAMED(
            new String[] {"f0", "f1", "f2", "f3"},
            Types.BIG_DEC,
            Types.LOCAL_DATE_TIME,
            Types.LOCAL_TIME,
            Types.LOCAL_DATE));
}
Example #9
Source File: TypeStringUtils.java From Flink-CEPplus with Apache License 2.0 | 3 votes |
private TypeInformation<?> convertMultiset() {
    nextToken(TokenType.BEGIN);
    nextToken(TokenType.LITERAL);
    final TypeInformation<?> elementTypeInfo = convertType();
    nextToken(TokenType.END);
    return new MultisetTypeInfo<>(elementTypeInfo);
}
Example #10
Source File: TypeStringUtils.java From flink with Apache License 2.0 | 3 votes |
private TypeInformation<?> convertMultiset() {
    nextToken(TokenType.BEGIN);
    nextToken(TokenType.LITERAL);
    final TypeInformation<?> elementTypeInfo = convertType();
    nextToken(TokenType.END);
    return new MultisetTypeInfo<>(elementTypeInfo);
}
Example #11
Source File: Types.java From flink with Apache License 2.0 | 2 votes |
/**
 * Generates type information for a Multiset. A Multiset is backed by a Java HashMap and maps an
 * arbitrary key to an integer value. Null values in keys are not supported.
 *
 * @param elementType type of the elements of the multiset e.g. Types.STRING()
 */
public static <E> TypeInformation<Map<E, Integer>> MULTISET(TypeInformation<E> elementType) {
    return new MultisetTypeInfo<>(elementType);
}
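A short usage sketch for the factory method above, assuming it lives in the Table API Types class (org.apache.flink.table.api.Types), where element types are exposed as methods such as Types.STRING(); the printed output is also an assumption.

import java.util.Map;
import org.apache.flink.api.common.typeinfo.TypeInformation;
// Assumed location of the MULTISET(...) factory shown above.
import org.apache.flink.table.api.Types;

public class TableApiMultisetSketch {

    public static void main(String[] args) {
        // MULTISET(...) wraps the element type in a MultisetTypeInfo, i.e. a map from element to count.
        TypeInformation<Map<String, Integer>> multiset = Types.MULTISET(Types.STRING());
        System.out.println(multiset); // expected to print something like Multiset<String>
    }
}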