Java Code Examples for org.apache.kafka.connect.data.Struct#schema()
The following examples show how to use org.apache.kafka.connect.data.Struct#schema().
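Before the project examples, a minimal sketch of the call itself may help: Struct#schema() returns the Schema the Struct was constructed with. The class and field names below are illustrative only.

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class StructSchemaSketch {
  public static void main(String[] args) {
    // Build a simple schema; "user" and its fields are made-up names.
    Schema schema = SchemaBuilder.struct()
        .name("user")
        .field("id", Schema.INT64_SCHEMA)
        .field("email", Schema.OPTIONAL_STRING_SCHEMA)
        .build();

    Struct struct = new Struct(schema)
        .put("id", 42L)
        .put("email", "user@example.com");

    // schema() returns the exact Schema instance the Struct was built with.
    System.out.println(struct.schema() == schema); // true
    System.out.println(struct.schema().fields());  // the declared fields, in order
  }
}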
Example 1
Source File: StructSerializationModule.java, from the connect-utils project (Apache License 2.0)
@Override
public void serialize(Struct struct, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
  struct.validate();
  // Capture the struct's schema alongside its field values so the
  // JSON form carries enough information to rebuild an equivalent Struct.
  Storage result = new Storage();
  result.schema = struct.schema();
  result.fieldValues = new ArrayList<>();
  for (Field field : struct.schema().fields()) {
    log.trace("serialize() - Processing field '{}'", field.name());
    KeyValue keyValue = new KeyValue();
    keyValue.name = field.name();
    keyValue.schema = field.schema();
    keyValue.value(struct.get(field));
    result.fieldValues.add(keyValue);
  }
  jsonGenerator.writeObject(result);
}
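For context, serializers like this are registered with Jackson through a module; a hedged sketch of the wiring follows. StructSerializer is an assumed name for the class containing serialize() above, and the schema is illustrative; addSerializer and registerModule are standard Jackson API.

// Hypothetical wiring: StructSerializer is an assumed name for the
// serializer shown above.
SimpleModule module = new SimpleModule();
module.addSerializer(Struct.class, new StructSerializer());
ObjectMapper mapper = new ObjectMapper().registerModule(module);

Schema schema = SchemaBuilder.struct().field("name", Schema.STRING_SCHEMA).build();
Struct struct = new Struct(schema).put("name", "example");
String json = mapper.writeValueAsString(struct); // invokes serialize() above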
Example 2
Source File: FsSourceTask.java, from the kafka-connect-fs project (Apache License 2.0)
private SourceRecord convert(FileMetadata metadata, long offset, Struct struct) {
  return new SourceRecord(
      Collections.singletonMap("path", metadata.getPath()),
      Collections.singletonMap("offset", offset),
      config.getTopic(),
      struct.schema(),
      struct
  );
}
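As a usage note, the schema passed to that constructor is exposed again on the record via valueSchema(), which is standard Connect API. A hedged sketch, with an illustrative topic, path, and struct:

// Illustrative: the schema handed to the SourceRecord constructor is the
// same object later returned by valueSchema().
Schema schema = SchemaBuilder.struct().field("line", Schema.STRING_SCHEMA).build();
Struct value = new Struct(schema).put("line", "hello");
SourceRecord record = new SourceRecord(
    Collections.singletonMap("path", "/tmp/example.txt"), // illustrative partition
    Collections.singletonMap("offset", 0L),               // illustrative offset
    "example-topic",
    value.schema(),
    value
);
// record.valueSchema() == schema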
Example 3
Source File: HeaderToFieldTest.java, from the kafka-connect-transform-common project (Apache License 2.0)
@Test
public void apply() throws IOException {
  // Map the "applicationId" header to a STRING field on the record value.
  this.transformation.configure(
      ImmutableMap.of(HeaderToFieldConfig.HEADER_MAPPINGS_CONF, "applicationId:STRING")
  );
  ConnectHeaders inputHeaders = new ConnectHeaders();
  inputHeaders.addString("applicationId", "testing");

  Schema inputSchema = SchemaBuilder.struct()
      .field("firstName", Schema.OPTIONAL_STRING_SCHEMA)
      .field("lastName", Schema.OPTIONAL_STRING_SCHEMA)
      .build();
  Struct inputStruct = new Struct(inputSchema)
      .put("firstName", "example")
      .put("lastName", "user");

  // After the transform, the schema should gain an "applicationId" field
  // populated from the header value.
  Schema expectedSchema = SchemaBuilder.struct()
      .field("firstName", Schema.OPTIONAL_STRING_SCHEMA)
      .field("lastName", Schema.OPTIONAL_STRING_SCHEMA)
      .field("applicationId", Schema.OPTIONAL_STRING_SCHEMA)
      .build();
  Struct expectedStruct = new Struct(expectedSchema)
      .put("firstName", "example")
      .put("lastName", "user")
      .put("applicationId", "testing");

  SinkRecord inputRecord = new SinkRecord(
      "testing",
      1,
      null,
      null,
      inputStruct.schema(),
      inputStruct,
      12345L,
      123412351L,
      TimestampType.NO_TIMESTAMP_TYPE,
      inputHeaders
  );

  SinkRecord actualRecord = this.transformation.apply(inputRecord);
  assertNotNull(actualRecord, "record should not be null.");
  assertStruct(expectedStruct, (Struct) actualRecord.value());
}
Example 4
Source File: SetMaximumPrecisionTest.java, from the kafka-connect-transform-common project (Apache License 2.0)
SinkRecord record(Struct struct) {
  return new SinkRecord("test", 1, null, null, struct.schema(), struct, 1234L);
}
Example 5
Source File: StructHelper.java, from the connect-utils project (Apache License 2.0)
public static SchemaAndValue asSchemaAndValue(Struct struct) {
  Preconditions.checkNotNull(struct, "struct cannot be null.");
  return new SchemaAndValue(struct.schema(), struct);
}
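A brief usage note: SchemaAndValue simply pairs the schema with the struct so both halves can be read back through standard Connect accessors (variable names are illustrative).

SchemaAndValue pair = StructHelper.asSchemaAndValue(struct);
Schema schema = pair.schema(); // identical to struct.schema()
Object value = pair.value();   // the struct itself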
Example 6
Source File: AbstractConverter.java, from the connect-utils project (Apache License 2.0)
void convertStruct(final T result, Struct struct) {
  final Schema schema = struct.schema();
  for (final Field field : schema.fields()) {
    final String fieldName = field.name();
    log.trace("convertStruct() - Processing '{}'", field.name());
    final Object fieldValue = struct.get(field);
    try {
      if (null == fieldValue) {
        log.trace("convertStruct() - Setting '{}' to null.", fieldName);
        setNullField(result, fieldName);
        continue;
      }
      log.trace("convertStruct() - Field '{}'.field().schema().type() = '{}'", fieldName, field.schema().type());
      switch (field.schema().type()) {
        case STRING:
          log.trace("convertStruct() - Processing '{}' as string.", fieldName);
          setStringField(result, fieldName, (String) fieldValue);
          break;
        case INT8:
          log.trace("convertStruct() - Processing '{}' as int8.", fieldName);
          setInt8Field(result, fieldName, (Byte) fieldValue);
          break;
        case INT16:
          log.trace("convertStruct() - Processing '{}' as int16.", fieldName);
          setInt16Field(result, fieldName, (Short) fieldValue);
          break;
        case INT32:
          // INT32 may carry the Date or Time logical type.
          if (org.apache.kafka.connect.data.Date.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as date.", fieldName);
            setDateField(result, fieldName, (Date) fieldValue);
          } else if (org.apache.kafka.connect.data.Time.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as time.", fieldName);
            setTimeField(result, fieldName, (Date) fieldValue);
          } else {
            Integer int32Value = (Integer) fieldValue;
            log.trace("convertStruct() - Processing '{}' as int32.", fieldName);
            setInt32Field(result, fieldName, int32Value);
          }
          break;
        case INT64:
          // INT64 may carry the Timestamp logical type.
          if (Timestamp.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as timestamp.", fieldName);
            setTimestampField(result, fieldName, (Date) fieldValue);
          } else {
            Long int64Value = (Long) fieldValue;
            log.trace("convertStruct() - Processing '{}' as int64.", fieldName);
            setInt64Field(result, fieldName, int64Value);
          }
          break;
        case BYTES:
          // BYTES may carry the Decimal logical type.
          if (Decimal.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as decimal.", fieldName);
            setDecimalField(result, fieldName, (BigDecimal) fieldValue);
          } else {
            byte[] bytes = (byte[]) fieldValue;
            log.trace("convertStruct() - Processing '{}' as bytes.", fieldName);
            setBytesField(result, fieldName, bytes);
          }
          break;
        case FLOAT32:
          log.trace("convertStruct() - Processing '{}' as float32.", fieldName);
          setFloat32Field(result, fieldName, (Float) fieldValue);
          break;
        case FLOAT64:
          log.trace("convertStruct() - Processing '{}' as float64.", fieldName);
          setFloat64Field(result, fieldName, (Double) fieldValue);
          break;
        case BOOLEAN:
          log.trace("convertStruct() - Processing '{}' as boolean.", fieldName);
          setBooleanField(result, fieldName, (Boolean) fieldValue);
          break;
        case STRUCT:
          log.trace("convertStruct() - Processing '{}' as struct.", fieldName);
          setStructField(result, fieldName, (Struct) fieldValue);
          break;
        case ARRAY:
          log.trace("convertStruct() - Processing '{}' as array.", fieldName);
          setArray(result, fieldName, schema, (List) fieldValue);
          break;
        case MAP:
          log.trace("convertStruct() - Processing '{}' as map.", fieldName);
          setMap(result, fieldName, schema, (Map) fieldValue);
          break;
        default:
          throw new DataException("Unsupported schema.type(): " + schema.type());
      }
    } catch (Exception ex) {
      throw new DataException(
          String.format("Exception thrown while processing field '%s'", fieldName),
          ex
      );
    }
  }
}
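The core pattern in convertStruct() is schema-driven iteration: struct.schema().fields() lists every declared field, and struct.get(field) fetches its value. Below is a simplified standalone sketch of that traversal, flattening a Struct into a Map. It is not part of AbstractConverter, and structToMap is a made-up helper name.

// Simplified sketch of the traversal pattern above (hypothetical helper).
Map<String, Object> structToMap(Struct struct) {
  Map<String, Object> map = new LinkedHashMap<>();
  for (Field field : struct.schema().fields()) {
    Object value = struct.get(field);
    // Recurse into nested structs; leave all other values as-is.
    map.put(field.name(), value instanceof Struct ? structToMap((Struct) value) : value);
  }
  return map;
}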