Java Code Examples for org.apache.kafka.connect.data.Field#name()

The following examples show how to use org.apache.kafka.connect.data.Field#name(). You can go to the original project or source file by following the links above each example.
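Before the project examples, here is a minimal, self-contained sketch of what Field#name() returns: the name under which a field was registered in its parent Schema. The schema and field names below are illustrative, not taken from any of the projects that follow.

import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class FieldNameExample {
  public static void main(String[] args) {
    // The names passed to field() are what Field#name() later returns.
    Schema schema = SchemaBuilder.struct()
        .field("id", Schema.INT64_SCHEMA)
        .field("email", Schema.OPTIONAL_STRING_SCHEMA)
        .build();

    Struct struct = new Struct(schema)
        .put("id", 42L)
        .put("email", "user@example.com");

    // Iterate the schema's fields and look up each value by name.
    for (Field field : schema.fields()) {
      System.out.printf("%s = %s%n", field.name(), struct.get(field.name()));
    }
  }
}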
Example 1
Source File: StructSerializationModule.java    From connect-utils with Apache License 2.0
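Serializes a Struct by validating it, then walking its schema and recording each field as a name/schema/value triple before writing the result as JSON.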
@Override
public void serialize(Struct struct, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
  struct.validate();
  Storage result = new Storage();
  result.schema = struct.schema();
  result.fieldValues = new ArrayList<>();
  for (Field field : struct.schema().fields()) {
    log.trace("serialize() - Processing field '{}'", field.name());
    KeyValue keyValue = new KeyValue();
    keyValue.name = field.name();
    keyValue.schema = field.schema();
    keyValue.value(struct.get(field));
    result.fieldValues.add(keyValue);
  }
  jsonGenerator.writeObject(result);
}
 
Example 2
Source File: ObjectMapperFactory.java    From kafka-connect-splunk with Apache License 2.0
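Flattens a Struct event into a map keyed by field name, skipping fields whose value is null.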
void handleStruct(Event event) {
  final Struct input = (Struct) event.event;
  List<Field> fields = input.schema().fields();
  final Map<Object, Object> result = new LinkedHashMap<>(fields.size());

  for (Field field : fields) {
    Object key = field.name();
    Object value = input.get(field);

    if (null == value) {
      continue;
    }

    if (!event.setValue(key, value)) {
      result.put(key, value);
    }
  }

  event.event = result.isEmpty() ? null : result;
}
 
Example 3
Source File: AvroJsonSchemafulRecordConverter.java    From mongo-kafka with Apache License 2.0
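Converts a single Struct field to a BsonValue, handling supported logical types first and then dispatching on the field's schema type.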
private BsonValue processField(final Struct struct, final Field field) {
  LOGGER.trace("processing field '{}'", field.name());

  if (struct.get(field.name()) == null) {
    LOGGER.trace("no field in struct -> adding null");
    return BsonNull.VALUE;
  }

  if (isSupportedLogicalType(field.schema())) {
    return getConverter(field.schema()).toBson(struct.get(field), field.schema());
  }

  try {
    switch (field.schema().type()) {
      case BOOLEAN:
      case FLOAT32:
      case FLOAT64:
      case INT8:
      case INT16:
      case INT32:
      case INT64:
      case STRING:
      case BYTES:
        return handlePrimitiveField(struct, field);
      case STRUCT:
      case MAP:
        return toBsonDoc(field.schema(), struct.get(field));
      case ARRAY:
        return toBsonArray(field.schema(), struct.get(field));
      default:
        throw new DataException("unexpected / unsupported schema type " + field.schema().type());
    }
  } catch (Exception exc) {
    throw new DataException("error while processing field " + field.name(), exc);
  }
}
 
Example 4
Source File: SchemaUtil.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
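Rebuilds a schema without the implicit ROWTIME and ROWKEY columns, stripping any alias prefix from each field name first.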
public static Schema removeImplicitRowTimeRowKeyFromSchema(Schema schema) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct();
  for (Field field : schema.fields()) {
    String fieldName = field.name();
    fieldName = fieldName.substring(fieldName.indexOf('.') + 1);
    if (!fieldName.equalsIgnoreCase(SchemaUtil.ROWTIME_NAME)
        && !fieldName.equalsIgnoreCase(SchemaUtil.ROWKEY_NAME)) {
      schemaBuilder.field(fieldName, field.schema());
    }
  }
  return schemaBuilder.build();
}
 
Example 5
Source File: SchemaUtil.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
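Returns a field's name with any alias prefix (everything up to the first dot) removed.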
public static String getFieldNameWithNoAlias(Field field) {
  String name = field.name();
  if (name.contains(".")) {
    return name.substring(name.indexOf(".") + 1);
  } else {
    return name;
  }
}
 
Example 6
Source File: CellData.java    From debezium-incubator with Apache License 2.0
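Clones a Struct by copying every field of the value schema into a new Struct by name.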
private Struct cloneValue(Schema valueSchema, Struct value) {
    Struct clonedValue = new Struct(valueSchema);
    for (Field field : valueSchema.fields()) {
        String fieldName = field.name();
        clonedValue.put(fieldName, value.get(fieldName));
    }
    return clonedValue;
}
 
Example 7
Source File: PatternRename.java    From kafka-connect-transform-common with Apache License 2.0
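Renames struct fields whose names match a configured pattern, rebuilding the schema and then copying values under the mapped names.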
@Override
protected SchemaAndValue processStruct(R record, Schema inputSchema, Struct inputStruct) {
  final SchemaBuilder outputSchemaBuilder = SchemaBuilder.struct();
  outputSchemaBuilder.name(inputSchema.name());
  outputSchemaBuilder.doc(inputSchema.doc());
  if (null != inputSchema.defaultValue()) {
    outputSchemaBuilder.defaultValue(inputSchema.defaultValue());
  }
  if (null != inputSchema.parameters() && !inputSchema.parameters().isEmpty()) {
    outputSchemaBuilder.parameters(inputSchema.parameters());
  }
  if (inputSchema.isOptional()) {
    outputSchemaBuilder.optional();
  }
  Map<String, String> fieldMappings = new HashMap<>(inputSchema.fields().size());
  for (final Field inputField : inputSchema.fields()) {
    log.trace("process() - Processing field '{}'", inputField.name());
    final Matcher fieldMatcher = this.config.pattern.matcher(inputField.name());
    final String outputFieldName;
    if (fieldMatcher.find()) {
      outputFieldName = fieldMatcher.replaceAll(this.config.replacement);
    } else {
      outputFieldName = inputField.name();
    }
    log.trace("process() - Mapping field '{}' to '{}'", inputField.name(), outputFieldName);
    fieldMappings.put(inputField.name(), outputFieldName);
    outputSchemaBuilder.field(outputFieldName, inputField.schema());
  }
  final Schema outputSchema = outputSchemaBuilder.build();
  final Struct outputStruct = new Struct(outputSchema);
  for (Map.Entry<String, String> entry : fieldMappings.entrySet()) {
    final String inputField = entry.getKey(), outputField = entry.getValue();
    log.trace("process() - Copying '{}' to '{}'", inputField, outputField);
    final Object value = inputStruct.get(inputField);
    outputStruct.put(outputField, value);
  }
  return new SchemaAndValue(outputSchema, outputStruct);
}
 
Example 8
Source File: HeaderToField.java    From kafka-connect-transform-common with Apache License 2.0
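Copies an existing Struct into a new schema field by field, then applies conversion handlers that move header values into the result.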
public SchemaAndValue apply(ConnectRecord record, Struct input) {
  Struct result = new Struct(this.newSchema);
  for (Field field : input.schema().fields()) {
    String fieldName = field.name();
    Object fieldValue = input.get(field);
    result.put(fieldName, fieldValue);
  }
  for (ConversionHandler handler : this.conversionHandlers) {
    handler.convert(record, result);
  }
  return new SchemaAndValue(this.newSchema, result);
}
 
Example 9
Source File: SpoolDirCsvSourceTask.java    From kafka-connect-spooldir with Apache License 2.0
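Resolves the CSV field names either from the file's header row or from the configured value schema, then skips rows up to the last recorded offset.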
@Override
protected void configure(InputStream inputStream, final Long lastOffset) throws IOException {
  log.trace("configure() - creating csvParser");
  this.csvParser = this.config.createCSVParserBuilder();
  this.streamReader = new InputStreamReader(inputStream, this.config.charset);
  CSVReaderBuilder csvReaderBuilder = this.config.createCSVReaderBuilder(this.streamReader, csvParser);
  this.csvReader = csvReaderBuilder.build();

  String[] fieldNames;

  if (this.config.firstRowAsHeader) {
    log.trace("configure() - Reading the header row.");
    fieldNames = this.csvReader.readNext();
    log.info("configure() - field names from header row. fields = {}", Joiner.on(", ").join(fieldNames));
  } else {
    log.trace("configure() - Using fields from schema {}", this.config.valueSchema.name());
    fieldNames = new String[this.config.valueSchema.fields().size()];
    int index = 0;
    for (Field field : this.config.valueSchema.fields()) {
      fieldNames[index++] = field.name();
    }
    log.info("configure() - field names from schema order. fields = {}", Joiner.on(", ").join(fieldNames));
  }

  if (null != lastOffset) {
    log.info("Found previous offset. Skipping {} line(s).", lastOffset.intValue());
    String[] row = null;
    while (null != (row = this.csvReader.readNext()) && this.csvReader.getLinesRead() < lastOffset) {
      log.trace("skipped row");
    }
  }

  this.fieldNames = fieldNames;
}
 
Example 10
Source File: JsonEventParser.java    From kafka-connect-hbase with Apache License 2.0
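Looks up a field's value by name in a parsed JSON map and encodes it as HBase bytes according to the field's schema type.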
/**
 * Encodes the value of the given field, looked up by {@link Field#name()}
 * in the parsed key/value map, into its HBase byte representation.
 *
 * @param keyValues the parsed event, keyed by field name
 * @param field     the Connect field to encode
 * @return the encoded bytes, or {@code null} for unsupported schema types
 */
private byte[] toValue(final Map<String, Object> keyValues, final Field field) {
    Preconditions.checkNotNull(field);
    final Schema.Type type = field.schema().type();
    final String fieldName = field.name();
    final Object fieldValue = keyValues.get(fieldName);
    switch (type) {
        case STRING:
            return Bytes.toBytes((String) fieldValue);
        case BOOLEAN:
            return Bytes.toBytes((Boolean)fieldValue);
        case BYTES:
            return Bytes.toBytes((ByteBuffer) fieldValue);
        case FLOAT32:
            return Bytes.toBytes((Float)fieldValue);
        case FLOAT64:
            return Bytes.toBytes((Double)fieldValue);
        case INT8:
            return Bytes.toBytes((Byte)fieldValue);
        case INT16:
            return Bytes.toBytes((Short)fieldValue);
        case INT32:
            return Bytes.toBytes((Integer)fieldValue);
        case INT64:
            return Bytes.toBytes((Long)fieldValue);
        default:
            return null;
    }
}
 
Example 11
Source File: AvroEventParser.java    From kafka-connect-hbase with Apache License 2.0
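Looks up a field's value by name in an Avro GenericRecord and encodes it as HBase bytes according to the field's schema type.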
private byte[] toValue(final GenericRecord record, final Field field) {
    Preconditions.checkNotNull(field);
    final Schema.Type type = field.schema().type();
    final String fieldName = field.name();
    final Object fieldValue = record.get(fieldName);
    switch (type) {
        case STRING:
            return Bytes.toBytes((String) fieldValue);
        case BOOLEAN:
            return Bytes.toBytes((Boolean)fieldValue);
        case BYTES:
            return Bytes.toBytes((ByteBuffer) fieldValue);
        case FLOAT32:
            return Bytes.toBytes((Float)fieldValue);
        case FLOAT64:
            return Bytes.toBytes((Double)fieldValue);
        case INT8:
            return Bytes.toBytes((Byte)fieldValue);
        case INT16:
            return Bytes.toBytes((Short)fieldValue);
        case INT32:
            return Bytes.toBytes((Integer)fieldValue);
        case INT64:
            return Bytes.toBytes((Long)fieldValue);
        default:
            return null;
    }
}
 
Example 12
Source File: SchemaUtils.java    From kafka-connect-mongodb with Apache License 2.0
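Converts a Struct into a Map for JSON output, using the schema name to detect the Date, Time, and Timestamp logical types.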
public static Map<String, Object> toJsonMap(Struct struct) {
    Map<String, Object> jsonMap = new HashMap<>();
    List<Field> fields = struct.schema().fields();
    for (Field field : fields) {
        String fieldName = field.name();
        Schema.Type fieldType = field.schema().type();
        String schemaName = field.schema().name();
        switch (fieldType) {
            case STRING:
                jsonMap.put(fieldName, struct.getString(fieldName));
                break;
            case INT32:
                // Date and Time are logical types carried as INT32.
                if (Date.LOGICAL_NAME.equals(schemaName)
                        || Time.LOGICAL_NAME.equals(schemaName)) {
                    jsonMap.put(fieldName, (java.util.Date) struct.get(fieldName));
                } else {
                    jsonMap.put(fieldName, struct.getInt32(fieldName));
                }
                break;
            case INT16:
                jsonMap.put(fieldName, struct.getInt16(fieldName));
                break;
            case INT64:
                // Timestamp is a logical type carried as INT64.
                if (Timestamp.LOGICAL_NAME.equals(schemaName)) {
                    jsonMap.put(fieldName, (java.util.Date) struct.get(fieldName));
                } else {
                    jsonMap.put(fieldName, struct.getInt64(fieldName));
                }
                break;
            case FLOAT32:
                jsonMap.put(fieldName, struct.getFloat32(fieldName));
                break;
            case STRUCT:
                jsonMap.put(fieldName, toJsonMap(struct.getStruct(fieldName)));
                break;
            default:
                // Other schema types are silently omitted from the map.
                break;
        }
    }
    return jsonMap;
}
 
Example 13
Source File: AbstractConverter.java    From connect-utils with Apache License 2.0
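A generic Struct converter that iterates the schema's fields and routes each value to a type-specific setter, with logical-type handling for dates, times, timestamps, and decimals.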
void convertStruct(final T result, Struct struct) {
  final Schema schema = struct.schema();

  for (final Field field : schema.fields()) {
    final String fieldName = field.name();
    log.trace("convertStruct() - Processing '{}'", fieldName);
    final Object fieldValue = struct.get(field);

    try {
      if (null == fieldValue) {
        log.trace("convertStruct() - Setting '{}' to null.", fieldName);
        setNullField(result, fieldName);
        continue;
      }

      log.trace("convertStruct() - Field '{}'.field().schema().type() = '{}'", fieldName, field.schema().type());
      switch (field.schema().type()) {
        case STRING:
          log.trace("convertStruct() - Processing '{}' as string.", fieldName);
          setStringField(result, fieldName, (String) fieldValue);
          break;
        case INT8:
          log.trace("convertStruct() - Processing '{}' as int8.", fieldName);
          setInt8Field(result, fieldName, (Byte) fieldValue);
          break;
        case INT16:
          log.trace("convertStruct() - Processing '{}' as int16.", fieldName);
          setInt16Field(result, fieldName, (Short) fieldValue);
          break;
        case INT32:
          if (org.apache.kafka.connect.data.Date.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as date.", fieldName);
            setDateField(result, fieldName, (Date) fieldValue);
          } else if (org.apache.kafka.connect.data.Time.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as time.", fieldName);
            setTimeField(result, fieldName, (Date) fieldValue);
          } else {
            Integer int32Value = (Integer) fieldValue;
            log.trace("convertStruct() - Processing '{}' as int32.", fieldName);
            setInt32Field(result, fieldName, int32Value);
          }
          break;
        case INT64:
          if (Timestamp.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as timestamp.", fieldName);
            setTimestampField(result, fieldName, (Date) fieldValue);
          } else {
            Long int64Value = (Long) fieldValue;
            log.trace("convertStruct() - Processing '{}' as int64.", fieldName);
            setInt64Field(result, fieldName, int64Value);
          }
          break;
        case BYTES:
          if (Decimal.LOGICAL_NAME.equals(field.schema().name())) {
            log.trace("convertStruct() - Processing '{}' as decimal.", fieldName);
            setDecimalField(result, fieldName, (BigDecimal) fieldValue);
          } else {
            byte[] bytes = (byte[]) fieldValue;
            log.trace("convertStruct() - Processing '{}' as bytes.", fieldName);
            setBytesField(result, fieldName, bytes);
          }
          break;
        case FLOAT32:
          log.trace("convertStruct() - Processing '{}' as float32.", fieldName);
          setFloat32Field(result, fieldName, (Float) fieldValue);
          break;
        case FLOAT64:
          log.trace("convertStruct() - Processing '{}' as float64.", fieldName);
          setFloat64Field(result, fieldName, (Double) fieldValue);
          break;
        case BOOLEAN:
          log.trace("convertStruct() - Processing '{}' as boolean.", fieldName);
          setBooleanField(result, fieldName, (Boolean) fieldValue);
          break;
        case STRUCT:
          log.trace("convertStruct() - Processing '{}' as struct.", fieldName);
          setStructField(result, fieldName, (Struct) fieldValue);
          break;
        case ARRAY:
          log.trace("convertStruct() - Processing '{}' as array.", fieldName);
          setArray(result, fieldName, schema, (List) fieldValue);
          break;
        case MAP:
          log.trace("convertStruct() - Processing '{}' as map.", fieldName);
          setMap(result, fieldName, schema, (Map) fieldValue);
          break;
        default:
          throw new DataException("Unsupported schema.type(): " + schema.type());
      }
    } catch (Exception ex) {
      throw new DataException(
          String.format("Exception thrown while processing field '%s'", fieldName),
          ex
      );
    }
  }
}