Java Code Examples for org.apache.kafka.connect.data.SchemaBuilder#build()
The following examples show how to use org.apache.kafka.connect.data.SchemaBuilder#build().
Each example is taken from an open-source project; the source file and license are listed above the code.
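A typical use of the API assembles a SchemaBuilder step by step and then calls build() to freeze it into an immutable Schema that can be attached to Connect records. The following minimal sketch illustrates that pattern before the project examples; the class, struct name, and field names are hypothetical and chosen only for illustration.

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class SchemaBuilderSketch {
  public static void main(String[] args) {
    // Assemble a struct schema field by field, then freeze it with build().
    Schema valueSchema = SchemaBuilder.struct()
        .name("com.example.User")                      // hypothetical schema name
        .field("id", Schema.INT64_SCHEMA)              // hypothetical fields
        .field("email", Schema.OPTIONAL_STRING_SCHEMA)
        .optional()
        .build();

    // The built Schema can then back a Struct value in a source or sink record.
    Struct value = new Struct(valueSchema)
        .put("id", 42L)
        .put("email", "user@example.com");
    System.out.println(value);
  }
}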
Example 1
Source File: LogFieldConverterFactory.java From kafka-connect-spooldir with Apache License 2.0 | 6 votes |
static Schema schema(Class<?> logClass, String logFieldName) {
  final SchemaBuilder builder;
  if (LocalDate.class.equals(logClass)) {
    builder = Date.builder();
  } else if (LocalTime.class.equals(logClass)) {
    builder = Time.builder();
  } else if (Integer.class.equals(logClass)) {
    builder = SchemaBuilder.int32();
  } else if (Long.class.equals(logClass)) {
    builder = SchemaBuilder.int64();
  } else if (String.class.equals(logClass)) {
    builder = SchemaBuilder.string();
  } else {
    throw new UnsupportedOperationException(
        String.format("%s is not a supported type.", logClass.getName())
    );
  }
  builder.optional();
  return builder.build();
}
Example 2
Source File: SchemaKStream.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 6 votes |
Pair<Schema, SelectValueMapper> createSelectValueMapperAndSchema(
    final List<Pair<String, Expression>> expressionPairList
) {
  try {
    final CodeGenRunner codeGenRunner = new CodeGenRunner(schema, functionRegistry);
    final SchemaBuilder schemaBuilder = SchemaBuilder.struct();
    final List<ExpressionMetadata> expressionEvaluators = new ArrayList<>();
    for (Pair<String, Expression> expressionPair : expressionPairList) {
      final ExpressionMetadata expressionEvaluator =
          codeGenRunner.buildCodeGenFromParseTree(expressionPair.getRight());
      schemaBuilder.field(expressionPair.getLeft(), expressionEvaluator.getExpressionType());
      expressionEvaluators.add(expressionEvaluator);
    }
    return new Pair<>(schemaBuilder.build(), new SelectValueMapper(
        genericRowValueTypeEnforcer,
        expressionPairList,
        expressionEvaluators
    ));
  } catch (Exception e) {
    throw new KsqlException("Code generation failed for SelectValueMapper", e);
  }
}
Example 3
Source File: CellData.java From debezium-incubator with Apache License 2.0 | 6 votes |
static Schema cellSchema(ColumnMetadata cm, boolean optional) {
  AbstractType<?> convertedType = CassandraTypeConverter.convert(cm.getType());
  Schema valueSchema = CassandraTypeDeserializer.getSchemaBuilder(convertedType).optional().build();
  if (valueSchema != null) {
    SchemaBuilder schemaBuilder = SchemaBuilder.struct().name(cm.getName())
        .field(CELL_VALUE_KEY, valueSchema)
        .field(CELL_DELETION_TS_KEY, Schema.OPTIONAL_INT64_SCHEMA)
        .field(CELL_SET_KEY, Schema.BOOLEAN_SCHEMA);
    if (optional) {
      schemaBuilder.optional();
    }
    return schemaBuilder.build();
  } else {
    return null;
  }
}
Example 4
Source File: RowData.java From debezium-incubator with Apache License 2.0 | 5 votes |
/**
 * Assemble the Kafka connect {@link Schema} for the "after" field of the change event
 * based on the Cassandra table schema.
 *
 * @param tm metadata of a table that contains the Cassandra table schema
 * @return a schema for the "after" field of a change event
 */
static Schema rowSchema(TableMetadata tm) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct().name(Record.AFTER);
  for (ColumnMetadata cm : tm.getColumns()) {
    Schema optionalCellSchema = CellData.cellSchema(cm, true);
    if (optionalCellSchema != null) {
      schemaBuilder.field(cm.getName(), optionalCellSchema);
    }
  }
  return schemaBuilder.build();
}
Example 5
Source File: DataConverter.java From kafka-mysql-connector with Apache License 2.0 | 5 votes |
public static Schema convertPrimaryKeySchema(Table table) {
  String tableName = table.getName();
  String databaseName = table.getDatabase().getName();
  SchemaBuilder pkBuilder = SchemaBuilder.struct().name(databaseName + "." + tableName + ".pk");
  for (String pk : table.getPKList()) {
    int columnNumber = table.findColumnIndex(pk);
    addFieldSchema(table, columnNumber, pkBuilder);
  }
  return pkBuilder.build();
}
Example 6
Source File: AbstractSchemaGenerator.java From kafka-connect-spooldir with Apache License 2.0 | 5 votes |
public Map.Entry<Schema, Schema> generate(File inputFile, List<String> keyFields) throws IOException {
  log.trace("generate() - inputFile = '{}', keyFields = {}", inputFile, keyFields);
  final Map<String, Schema.Type> fieldTypes;
  log.info("Determining fields from {}", inputFile);
  try (InputStream inputStream = new FileInputStream(inputFile)) {
    fieldTypes = determineFieldTypes(inputStream);
  }
  log.trace("generate() - Building key schema.");
  SchemaBuilder keySchemaBuilder = SchemaBuilder.struct()
      .name("com.github.jcustenborder.kafka.connect.model.Key");
  for (String keyFieldName : keyFields) {
    log.trace("generate() - Adding keyFieldName field '{}'", keyFieldName);
    if (fieldTypes.containsKey(keyFieldName)) {
      Schema.Type schemaType = fieldTypes.get(keyFieldName);
      addField(keySchemaBuilder, keyFieldName, schemaType);
    } else {
      log.warn("Key field '{}' is not in the data.", keyFieldName);
    }
  }
  log.trace("generate() - Building value schema.");
  SchemaBuilder valueSchemaBuilder = SchemaBuilder.struct()
      .name("com.github.jcustenborder.kafka.connect.model.Value");
  for (Map.Entry<String, Schema.Type> kvp : fieldTypes.entrySet()) {
    addField(valueSchemaBuilder, kvp.getKey(), kvp.getValue());
  }
  return new AbstractMap.SimpleEntry<>(keySchemaBuilder.build(), valueSchemaBuilder.build());
}
Example 7
Source File: SchemaNameToTopicTest.java From kafka-connect-transform-common with Apache License 2.0 | 5 votes |
Schema exampleSchema(List<String> fieldNames, final int version) {
  SchemaBuilder builder = SchemaBuilder.struct()
      .name(this.getClass().getName());
  for (String fieldName : fieldNames) {
    builder.field(fieldName, Schema.STRING_SCHEMA);
  }
  builder.version(version);
  return builder.build();
}
Example 8
Source File: NormalizeSchemaTest.java From kafka-connect-transform-common with Apache License 2.0 | 5 votes |
Schema exampleSchema(List<String> fieldNames, final int version) {
  SchemaBuilder builder = SchemaBuilder.struct()
      .name(this.getClass().getName());
  for (String fieldName : fieldNames) {
    builder.field(fieldName, Schema.STRING_SCHEMA);
  }
  builder.version(version);
  return builder.build();
}
Example 9
Source File: PatternRename.java From kafka-connect-transform-common with Apache License 2.0 | 5 votes |
@Override
protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct inputStruct) {
  final SchemaBuilder outputSchemaBuilder = SchemaBuilder.struct();
  outputSchemaBuilder.name(inputSchema.name());
  outputSchemaBuilder.doc(inputSchema.doc());
  if (null != inputSchema.defaultValue()) {
    outputSchemaBuilder.defaultValue(inputSchema.defaultValue());
  }
  if (null != inputSchema.parameters() && !inputSchema.parameters().isEmpty()) {
    outputSchemaBuilder.parameters(inputSchema.parameters());
  }
  if (inputSchema.isOptional()) {
    outputSchemaBuilder.optional();
  }
  Map<String, String> fieldMappings = new HashMap<>(inputSchema.fields().size());
  for (final Field inputField : inputSchema.fields()) {
    log.trace("process() - Processing field '{}'", inputField.name());
    final Matcher fieldMatcher = this.config.pattern.matcher(inputField.name());
    final String outputFieldName;
    if (fieldMatcher.find()) {
      outputFieldName = fieldMatcher.replaceAll(this.config.replacement);
    } else {
      outputFieldName = inputField.name();
    }
    log.trace("process() - Mapping field '{}' to '{}'", inputField.name(), outputFieldName);
    fieldMappings.put(inputField.name(), outputFieldName);
    outputSchemaBuilder.field(outputFieldName, inputField.schema());
  }
  final Schema outputSchema = outputSchemaBuilder.build();
  final Struct outputStruct = new Struct(outputSchema);
  for (Map.Entry<String, String> entry : fieldMappings.entrySet()) {
    final String inputField = entry.getKey(), outputField = entry.getValue();
    log.trace("process() - Copying '{}' to '{}'", inputField, outputField);
    final Object value = inputStruct.get(inputField);
    outputStruct.put(outputField, value);
  }
  return new SchemaAndValue(outputSchema, outputStruct);
}
Example 10
Source File: CamelTypeConverterTransform.java From camel-kafka-connector with Apache License 2.0 | 5 votes |
private Schema getOrBuildRecordSchema(final Schema originalSchema, final Object value) {
  final SchemaBuilder builder =
      SchemaUtil.copySchemaBasics(originalSchema, SchemaHelper.buildSchemaBuilderForType(value));
  if (originalSchema.isOptional()) {
    builder.optional();
  }
  if (originalSchema.defaultValue() != null) {
    builder.defaultValue(convertValueWithCamelTypeConverter(originalSchema.defaultValue()));
  }
  return builder.build();
}
Example 11
Source File: DataConverter.java From kafka-mysql-connector with Apache License 2.0 | 5 votes |
public static Schema convertRowSchema(Table table) {
  String tableName = table.getName();
  String databaseName = table.getDatabase().getName();
  SchemaBuilder builder = SchemaBuilder.struct().name(databaseName + "." + tableName);
  for (int columnNumber = 0; columnNumber < table.getColumnList().size(); columnNumber++) {
    addFieldSchema(table, columnNumber, builder);
  }
  return builder.build();
}
Example 12
Source File: SchemaUtil.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
/**
 * Remove the alias when reading/writing from outside
 */
public static Schema getSchemaWithNoAlias(Schema schema) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  for (Field field : schema.fields()) {
    String name = getFieldNameWithNoAlias(field);
    schemaBuilder.field(name, field.schema());
  }
  return schemaBuilder.build();
}
Example 13
Source File: SchemaUtil.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
/**
 * Rename field names to be consistent with the internal column names.
 */
public static Schema getAvroSerdeKsqlSchema(Schema schema) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  for (Field field : schema.fields()) {
    schemaBuilder.field(field.name().replace(".", "_"), field.schema());
  }
  return schemaBuilder.build();
}
Example 14
Source File: SchemaUtil.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
public static Schema removeImplicitRowTimeRowKeyFromSchema(Schema schema) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  for (Field field : schema.fields()) {
    String fieldName = field.name();
    fieldName = fieldName.substring(fieldName.indexOf('.') + 1);
    if (!fieldName.equalsIgnoreCase(SchemaUtil.ROWTIME_NAME)
        && !fieldName.equalsIgnoreCase(SchemaUtil.ROWKEY_NAME)) {
      schemaBuilder.field(fieldName, field.schema());
    }
  }
  return schemaBuilder.build();
}
Example 15
Source File: SerDeUtil.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
public static Schema getSchemaFromAvro(String avroSchemaString) {
  org.apache.avro.Schema.Parser parser = new org.apache.avro.Schema.Parser();
  org.apache.avro.Schema avroSchema = parser.parse(avroSchemaString);
  SchemaBuilder inferredSchema = SchemaBuilder.struct().name(avroSchema.getName());
  for (org.apache.avro.Schema.Field avroField : avroSchema.getFields()) {
    inferredSchema.field(avroField.name(), getKsqlSchemaForAvroSchema(avroField.schema()));
  }
  return inferredSchema.build();
}
Example 16
Source File: AggregateNode.java From ksql-fork-with-deep-learning-function with Apache License 2.0 | 5 votes |
private Schema buildAggregateSchema(
    final Schema schema,
    final FunctionRegistry functionRegistry,
    final InternalSchema internalSchema
) {
  final SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  final List<Field> fields = schema.fields();
  for (int i = 0; i < getRequiredColumnList().size(); i++) {
    schemaBuilder.field(fields.get(i).name(), fields.get(i).schema());
  }
  for (int aggFunctionVarSuffix = 0;
       aggFunctionVarSuffix < getFunctionList().size();
       aggFunctionVarSuffix++) {
    String udafName = getFunctionList().get(aggFunctionVarSuffix).getName().getSuffix();
    KsqlAggregateFunction aggregateFunction = functionRegistry.getAggregateFunction(
        udafName,
        internalSchema.getInternalExpressionList(
            getFunctionList().get(aggFunctionVarSuffix).getArguments()),
        schema
    );
    schemaBuilder.field(
        AggregateExpressionRewriter.AGGREGATE_FUNCTION_VARIABLE_PREFIX + aggFunctionVarSuffix,
        aggregateFunction.getReturnType()
    );
  }
  return schemaBuilder.build();
}
Example 17
Source File: SchemaConverter.java From kafka-connect-elasticsearch-source with Apache License 2.0 | 5 votes |
public static Schema convertElasticMapping2AvroSchema(Map<String, Object> doc, String name) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct()
      .name(Utils.filterAvroName("", name)); // characters not valid for avro schema name
  convertDocumentSchema("", doc, schemaBuilder);
  return schemaBuilder.build();
}
Example 18
Source File: JsonFileReader.java From kafka-connect-fs with Apache License 2.0 | 4 votes |
private static Schema extractSchema(JsonNode jsonNode) {
  switch (jsonNode.getNodeType()) {
    case BOOLEAN:
      return Schema.OPTIONAL_BOOLEAN_SCHEMA;
    case NUMBER:
      if (jsonNode.isShort()) {
        return Schema.OPTIONAL_INT8_SCHEMA;
      } else if (jsonNode.isInt()) {
        return Schema.OPTIONAL_INT32_SCHEMA;
      } else if (jsonNode.isLong()) {
        return Schema.OPTIONAL_INT64_SCHEMA;
      } else if (jsonNode.isFloat()) {
        return Schema.OPTIONAL_FLOAT32_SCHEMA;
      } else if (jsonNode.isDouble()) {
        return Schema.OPTIONAL_FLOAT64_SCHEMA;
      } else if (jsonNode.isBigInteger()) {
        return Schema.OPTIONAL_INT64_SCHEMA;
      } else if (jsonNode.isBigDecimal()) {
        return Schema.OPTIONAL_FLOAT64_SCHEMA;
      } else {
        return Schema.OPTIONAL_FLOAT64_SCHEMA;
      }
    case STRING:
      return Schema.OPTIONAL_STRING_SCHEMA;
    case BINARY:
      return Schema.OPTIONAL_BYTES_SCHEMA;
    case ARRAY:
      Iterable<JsonNode> elements = jsonNode::elements;
      Schema arraySchema = StreamSupport.stream(elements.spliterator(), false)
          .findFirst().map(JsonFileReader::extractSchema)
          .orElse(SchemaBuilder.struct().build());
      return SchemaBuilder.array(arraySchema).build();
    case OBJECT:
      SchemaBuilder builder = SchemaBuilder.struct();
      jsonNode.fields()
          .forEachRemaining(field -> builder.field(field.getKey(), extractSchema(field.getValue())));
      return builder.build();
    default:
      return SchemaBuilder.struct().optional().build();
  }
}
Example 19
Source File: MsSqlTableMetadataProvider.java From kafka-connect-cdc-mssql with Apache License 2.0 | 4 votes |
Schema generateSchema(ResultSet resultSet, final ChangeKey changeKey, final String columnName)
    throws SQLException {
  boolean optional = resultSet.getBoolean(2);
  String dataType = resultSet.getString(3);
  int scale = resultSet.getInt(4);
  SchemaBuilder builder;
  log.trace("{}: columnName='{}' dataType='{}' scale={} optional={}",
      changeKey, columnName, dataType, scale, optional);
  switch (dataType) {
    case "bigint":
      builder = SchemaBuilder.int64();
      break;
    case "bit":
      builder = SchemaBuilder.bool();
      break;
    case "char":
    case "varchar":
    case "text":
    case "nchar":
    case "nvarchar":
    case "ntext":
    case "uniqueidentifier":
      builder = SchemaBuilder.string();
      break;
    case "smallmoney":
    case "money":
    case "decimal":
    case "numeric":
      builder = Decimal.builder(scale);
      break;
    case "binary":
    case "image":
    case "varbinary":
      builder = SchemaBuilder.bytes();
      break;
    case "date":
      builder = Date.builder();
      break;
    case "datetime":
    case "datetime2":
    case "smalldatetime":
      builder = Timestamp.builder();
      break;
    case "time":
      builder = Time.builder();
      break;
    case "int":
      builder = SchemaBuilder.int32();
      break;
    case "smallint":
      builder = SchemaBuilder.int16();
      break;
    case "tinyint":
      builder = SchemaBuilder.int8();
      break;
    case "real":
      builder = SchemaBuilder.float32();
      break;
    case "float":
      builder = SchemaBuilder.float64();
      break;
    default:
      throw new DataException(
          String.format("Could not process (dataType = '%s', optional = %s, scale = %d) for %s.",
              dataType, optional, scale, changeKey
          )
      );
  }
  log.trace("{}: columnName='{}' schema.type='{}' schema.name='{}'",
      changeKey, columnName, builder.type(), builder.name());
  builder.parameters(
      ImmutableMap.of(Change.ColumnValue.COLUMN_NAME, columnName)
  );
  if (optional) {
    builder.optional();
  }
  return builder.build();
}
Example 20
Source File: SchemaSerializationModule.java From connect-utils with Apache License 2.0 | 4 votes |
public Schema build() {
  log.trace(this.toString());
  SchemaBuilder builder;
  switch (this.type) {
    case MAP:
      Preconditions.checkNotNull(this.keySchema, "keySchema cannot be null.");
      Preconditions.checkNotNull(this.valueSchema, "valueSchema cannot be null.");
      builder = SchemaBuilder.map(this.keySchema, this.valueSchema);
      break;
    case ARRAY:
      Preconditions.checkNotNull(this.valueSchema, "valueSchema cannot be null.");
      builder = SchemaBuilder.array(this.valueSchema);
      break;
    default:
      builder = SchemaBuilder.type(this.type);
      break;
  }
  if (Schema.Type.STRUCT == this.type) {
    for (Map.Entry<String, Schema> kvp : this.fieldSchemas.entrySet()) {
      builder.field(kvp.getKey(), kvp.getValue());
    }
  }
  if (!Strings.isNullOrEmpty(this.name)) {
    builder.name(this.name);
  }
  if (!Strings.isNullOrEmpty(this.doc)) {
    builder.doc(this.doc);
  }
  if (null != this.defaultValue) {
    Object value;
    switch (this.type) {
      case INT8:
        value = ((Number) this.defaultValue).byteValue();
        break;
      case INT16:
        value = ((Number) this.defaultValue).shortValue();
        break;
      case INT32:
        value = ((Number) this.defaultValue).intValue();
        break;
      case INT64:
        value = ((Number) this.defaultValue).longValue();
        break;
      case FLOAT32:
        value = ((Number) this.defaultValue).floatValue();
        break;
      case FLOAT64:
        value = ((Number) this.defaultValue).doubleValue();
        break;
      default:
        value = this.defaultValue;
        break;
    }
    builder.defaultValue(value);
  }
  if (null != this.parameters) {
    builder.parameters(this.parameters);
  }
  if (this.isOptional) {
    builder.optional();
  }
  if (null != this.version) {
    builder.version(this.version);
  }
  return builder.build();
}