Java Code Examples for org.apache.flink.api.common.typeinfo.Types#PRIMITIVE_ARRAY
The following examples show how to use
org.apache.flink.api.common.typeinfo.Types#PRIMITIVE_ARRAY.
You can vote up the examples you like or vote down the ones you don't like.
To reach the original project or source file, follow the links above each example. You can also check out the related API usage on the sidebar.
Example 1
Source File: JsonRowSchemaConverter.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
private static TypeInformation<?> convertStringEncoding(String location, JsonNode node) { if (!node.isTextual()) { throw new IllegalArgumentException("Invalid '" + CONTENT_ENCODING + "' property in node: " + location); } // "If the instance value is a string, this property defines that the string SHOULD // be interpreted as binary data and decoded using the encoding named by this property." switch (node.asText()) { case CONTENT_ENCODING_BASE64: return Types.PRIMITIVE_ARRAY(Types.BYTE); default: // we fail hard here: // this gives us the chance to support more encodings in the future without problems // of backwards compatibility throw new IllegalArgumentException("Invalid encoding '" + node.asText() + "' in node: " + location); } }
Example 2
Source File: JsonRowSchemaConverter.java From flink with Apache License 2.0 | 6 votes |
private static TypeInformation<?> convertStringEncoding(String location, JsonNode node) { if (!node.isTextual()) { throw new IllegalArgumentException("Invalid '" + CONTENT_ENCODING + "' property in node: " + location); } // "If the instance value is a string, this property defines that the string SHOULD // be interpreted as binary data and decoded using the encoding named by this property." switch (node.asText()) { case CONTENT_ENCODING_BASE64: return Types.PRIMITIVE_ARRAY(Types.BYTE); default: // we fail hard here: // this gives us the chance to support more encodings in the future without problems // of backwards compatibility throw new IllegalArgumentException("Invalid encoding '" + node.asText() + "' in node: " + location); } }
Example 3
Source File: StreamSqlUtil.java From sylph with Apache License 2.0 | 6 votes |
private static TypeInformation<?> getFlinkType(Type type) { if (type instanceof ParameterizedType && ((ParameterizedType) type).getRawType() == Map.class) { Type[] arguments = ((ParameterizedType) type).getActualTypeArguments(); Type valueType = arguments[1]; TypeInformation<?> valueInfo = getFlinkType(valueType); return new MapTypeInfo<>(TypeExtractor.createTypeInfo(arguments[0]), valueInfo); } else if (type instanceof ParameterizedType && ((ParameterizedType) type).getRawType() == List.class) { TypeInformation<?> typeInformation = getFlinkType(((ParameterizedType) type).getActualTypeArguments()[0]); if (typeInformation.isBasicType() && typeInformation != Types.STRING) { return Types.PRIMITIVE_ARRAY(typeInformation); } else { return Types.OBJECT_ARRAY(typeInformation); } } else { return TypeExtractor.createTypeInfo(type); } }
Example 4
Source File: JsonRowSchemaConverter.java From flink with Apache License 2.0 | 6 votes |
private static TypeInformation<?> convertStringEncoding(String location, JsonNode node) { if (!node.isTextual()) { throw new IllegalArgumentException("Invalid '" + CONTENT_ENCODING + "' property in node: " + location); } // "If the instance value is a string, this property defines that the string SHOULD // be interpreted as binary data and decoded using the encoding named by this property." switch (node.asText()) { case CONTENT_ENCODING_BASE64: return Types.PRIMITIVE_ARRAY(Types.BYTE); default: // we fail hard here: // this gives us the chance to support more encodings in the future without problems // of backwards compatibility throw new IllegalArgumentException("Invalid encoding '" + node.asText() + "' in node: " + location); } }
Example 5
Source File: AvroSchemaConverter.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
/**
 * Recursively converts an Avro {@link Schema} into Flink {@link TypeInformation}.
 *
 * <p>Records become named rows, enums and strings become {@code Types.STRING},
 * fixed/bytes become primitive byte arrays (or {@code BIG_DEC} for the decimal
 * logical type), and nullable unions are unwrapped to their non-null member.
 *
 * @param schema the Avro schema to convert
 * @return the equivalent Flink type information
 * @throws IllegalArgumentException if the Avro type is not supported
 */
private static TypeInformation<?> convertToTypeInfo(Schema schema) { switch (schema.getType()) { case RECORD: final List<Schema.Field> fields = schema.getFields(); final TypeInformation<?>[] types = new TypeInformation<?>[fields.size()]; final String[] names = new String[fields.size()]; for (int i = 0; i < fields.size(); i++) { final Schema.Field field = fields.get(i); types[i] = convertToTypeInfo(field.schema()); names[i] = field.name(); } return Types.ROW_NAMED(names, types); case ENUM: return Types.STRING; case ARRAY: // result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings return Types.OBJECT_ARRAY(convertToTypeInfo(schema.getElementType())); case MAP: // Avro map keys are always strings return Types.MAP(Types.STRING, convertToTypeInfo(schema.getValueType())); case UNION: final Schema actualSchema; // unwrap ["null", T] or [T, "null"] unions to T; single-member unions to their member if (schema.getTypes().size() == 2 && schema.getTypes().get(0).getType() == Schema.Type.NULL) { actualSchema = schema.getTypes().get(1); } else if (schema.getTypes().size() == 2 && schema.getTypes().get(1).getType() == Schema.Type.NULL) { actualSchema = schema.getTypes().get(0); } else if (schema.getTypes().size() == 1) { actualSchema = schema.getTypes().get(0); } else { // general unions are not mapped to a concrete type; use Kryo for serialization return Types.GENERIC(Object.class); } return convertToTypeInfo(actualSchema); case FIXED: // logical decimal type if (schema.getLogicalType() instanceof LogicalTypes.Decimal) { return Types.BIG_DEC; } // convert fixed size binary data to primitive byte arrays return Types.PRIMITIVE_ARRAY(Types.BYTE); case STRING: // convert Avro's Utf8/CharSequence to String return Types.STRING; case BYTES: // logical decimal type if (schema.getLogicalType() instanceof LogicalTypes.Decimal) { return Types.BIG_DEC; } return Types.PRIMITIVE_ARRAY(Types.BYTE); case INT: // logical date and time type final LogicalType logicalType = schema.getLogicalType(); if (logicalType == LogicalTypes.date()) { return Types.SQL_DATE; } else if (logicalType == LogicalTypes.timeMillis()) { return Types.SQL_TIME; }
return Types.INT; case LONG: // logical timestamp type if (schema.getLogicalType() == LogicalTypes.timestampMillis()) { return Types.SQL_TIMESTAMP; } return Types.LONG; case FLOAT: return Types.FLOAT; case DOUBLE: return Types.DOUBLE; case BOOLEAN: return Types.BOOLEAN; case NULL: return Types.VOID; } throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'."); }
Example 6
Source File: AvroSchemaConverter.java From flink with Apache License 2.0 | 4 votes |
/**
 * Recursively converts an Avro {@link Schema} into Flink {@link TypeInformation}.
 *
 * <p>Records map to named rows; enums and strings to {@code Types.STRING};
 * fixed/bytes to primitive byte arrays unless carrying the decimal logical type
 * ({@code BIG_DEC}); nullable unions are unwrapped, other unions fall back to Kryo.
 *
 * @param schema the Avro schema to convert
 * @return the equivalent Flink type information
 * @throws IllegalArgumentException if the Avro type is not supported
 */
private static TypeInformation<?> convertToTypeInfo(Schema schema) { switch (schema.getType()) { case RECORD: final List<Schema.Field> fields = schema.getFields(); final TypeInformation<?>[] types = new TypeInformation<?>[fields.size()]; final String[] names = new String[fields.size()]; for (int i = 0; i < fields.size(); i++) { final Schema.Field field = fields.get(i); types[i] = convertToTypeInfo(field.schema()); names[i] = field.name(); } return Types.ROW_NAMED(names, types); case ENUM: return Types.STRING; case ARRAY: // result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings return Types.OBJECT_ARRAY(convertToTypeInfo(schema.getElementType())); case MAP: // Avro map keys are always strings return Types.MAP(Types.STRING, convertToTypeInfo(schema.getValueType())); case UNION: final Schema actualSchema; // unwrap ["null", T] / [T, "null"] unions to T; single-member unions to their member if (schema.getTypes().size() == 2 && schema.getTypes().get(0).getType() == Schema.Type.NULL) { actualSchema = schema.getTypes().get(1); } else if (schema.getTypes().size() == 2 && schema.getTypes().get(1).getType() == Schema.Type.NULL) { actualSchema = schema.getTypes().get(0); } else if (schema.getTypes().size() == 1) { actualSchema = schema.getTypes().get(0); } else { // general unions have no concrete mapping; use Kryo for serialization return Types.GENERIC(Object.class); } return convertToTypeInfo(actualSchema); case FIXED: // logical decimal type if (schema.getLogicalType() instanceof LogicalTypes.Decimal) { return Types.BIG_DEC; } // convert fixed size binary data to primitive byte arrays return Types.PRIMITIVE_ARRAY(Types.BYTE); case STRING: // convert Avro's Utf8/CharSequence to String return Types.STRING; case BYTES: // logical decimal type if (schema.getLogicalType() instanceof LogicalTypes.Decimal) { return Types.BIG_DEC; } return Types.PRIMITIVE_ARRAY(Types.BYTE); case INT: // logical date and time type final LogicalType logicalType = schema.getLogicalType(); if (logicalType == LogicalTypes.date()) { return Types.SQL_DATE; } else if (logicalType == LogicalTypes.timeMillis()) { return Types.SQL_TIME; }
return Types.INT; case LONG: // logical timestamp type if (schema.getLogicalType() == LogicalTypes.timestampMillis()) { return Types.SQL_TIMESTAMP; } return Types.LONG; case FLOAT: return Types.FLOAT; case DOUBLE: return Types.DOUBLE; case BOOLEAN: return Types.BOOLEAN; case NULL: return Types.VOID; } throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'."); }
Example 7
Source File: AvroSchemaConverter.java From flink with Apache License 2.0 | 4 votes |
/**
 * Recursively converts an Avro {@link Schema} into Flink {@link TypeInformation}.
 *
 * <p>Variant that fully qualifies {@code org.apache.avro.LogicalType} in the INT
 * case (presumably to avoid an import clash with another LogicalType in this file —
 * confirm against the imports). Mapping is otherwise identical: records to named
 * rows, nullable unions unwrapped, decimal logical types to {@code BIG_DEC}.
 *
 * @param schema the Avro schema to convert
 * @return the equivalent Flink type information
 * @throws IllegalArgumentException if the Avro type is not supported
 */
private static TypeInformation<?> convertToTypeInfo(Schema schema) { switch (schema.getType()) { case RECORD: final List<Schema.Field> fields = schema.getFields(); final TypeInformation<?>[] types = new TypeInformation<?>[fields.size()]; final String[] names = new String[fields.size()]; for (int i = 0; i < fields.size(); i++) { final Schema.Field field = fields.get(i); types[i] = convertToTypeInfo(field.schema()); names[i] = field.name(); } return Types.ROW_NAMED(names, types); case ENUM: return Types.STRING; case ARRAY: // result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings return Types.OBJECT_ARRAY(convertToTypeInfo(schema.getElementType())); case MAP: // Avro map keys are always strings return Types.MAP(Types.STRING, convertToTypeInfo(schema.getValueType())); case UNION: final Schema actualSchema; // unwrap ["null", T] / [T, "null"] unions to T; single-member unions to their member if (schema.getTypes().size() == 2 && schema.getTypes().get(0).getType() == Schema.Type.NULL) { actualSchema = schema.getTypes().get(1); } else if (schema.getTypes().size() == 2 && schema.getTypes().get(1).getType() == Schema.Type.NULL) { actualSchema = schema.getTypes().get(0); } else if (schema.getTypes().size() == 1) { actualSchema = schema.getTypes().get(0); } else { // general unions have no concrete mapping; use Kryo for serialization return Types.GENERIC(Object.class); } return convertToTypeInfo(actualSchema); case FIXED: // logical decimal type if (schema.getLogicalType() instanceof LogicalTypes.Decimal) { return Types.BIG_DEC; } // convert fixed size binary data to primitive byte arrays return Types.PRIMITIVE_ARRAY(Types.BYTE); case STRING: // convert Avro's Utf8/CharSequence to String return Types.STRING; case BYTES: // logical decimal type if (schema.getLogicalType() instanceof LogicalTypes.Decimal) { return Types.BIG_DEC; } return Types.PRIMITIVE_ARRAY(Types.BYTE); case INT: // logical date and time type final org.apache.avro.LogicalType logicalType = schema.getLogicalType(); if (logicalType == LogicalTypes.date()) { return Types.SQL_DATE; } else if (logicalType == LogicalTypes.timeMillis()) { return
Types.SQL_TIME; } return Types.INT; case LONG: // logical timestamp type if (schema.getLogicalType() == LogicalTypes.timestampMillis()) { return Types.SQL_TIMESTAMP; } return Types.LONG; case FLOAT: return Types.FLOAT; case DOUBLE: return Types.DOUBLE; case BOOLEAN: return Types.BOOLEAN; case NULL: return Types.VOID; } throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'."); }
Example 8
Source File: TypeStringUtils.java From Flink-CEPplus with Apache License 2.0 | 3 votes |
private TypeInformation<?> convertPrimitiveArray() { nextToken(TokenType.BEGIN); nextToken(TokenType.LITERAL); final TypeInformation<?> elementTypeInfo = convertType(); nextToken(TokenType.END); return Types.PRIMITIVE_ARRAY(elementTypeInfo); }
Example 9
Source File: TypeStringUtils.java From flink with Apache License 2.0 | 3 votes |
private TypeInformation<?> convertPrimitiveArray() { nextToken(TokenType.BEGIN); nextToken(TokenType.LITERAL); final TypeInformation<?> elementTypeInfo = convertType(); nextToken(TokenType.END); return Types.PRIMITIVE_ARRAY(elementTypeInfo); }
Example 10
Source File: TypeStringUtils.java From flink with Apache License 2.0 | 3 votes |
private TypeInformation<?> convertPrimitiveArray() { nextToken(TokenType.BEGIN); nextToken(TokenType.LITERAL); final TypeInformation<?> elementTypeInfo = convertType(); nextToken(TokenType.END); return Types.PRIMITIVE_ARRAY(elementTypeInfo); }