Java Code Examples for org.apache.kafka.connect.data.SchemaAndValue#value()

The following examples show how to use org.apache.kafka.connect.data.SchemaAndValue#value(). Each snippet is taken from an open-source project; the source file and license appear above each example.
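Before working through the examples, here is a minimal sketch of the API itself. SchemaAndValue simply pairs a Connect Schema with its payload, and value() returns that payload as a plain Object, so callers inspect the schema (or use instanceof) before casting; the class and variable names below are illustrative.

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;

public class SchemaAndValueDemo {
  public static void main(String[] args) {
    // Pair a Connect schema with a value.
    SchemaAndValue pair = new SchemaAndValue(Schema.STRING_SCHEMA, "hello");

    // value() returns the payload as an untyped Object.
    Object raw = pair.value();

    // Check the schema type before casting, as the examples below do.
    if (pair.schema() != null && Schema.Type.STRING == pair.schema().type()) {
      System.out.println((String) raw); // prints "hello"
    }
  }
}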
Example 1
Source File: ExtractTimestamp.java    From kafka-connect-transform-common with Apache License 2.0
private long processMap(SchemaAndValue schemaAndValue) {
  Preconditions.checkState(schemaAndValue.value() instanceof Map, "value must be a map.");
  final Map<String, Object> input = (Map<String, Object>) schemaAndValue.value();
  final Object inputValue = input.get(this.config.fieldName);
  final long result;

  if (inputValue instanceof Date) {
    final Date inputDate = (Date) inputValue;
    result = inputDate.getTime();
  } else if (inputValue instanceof Long) {
    result = (long) inputValue;
  } else if (null == inputValue) {
    throw new DataException(
        String.format("Field '%s' cannot be null.", this.config.fieldName)
    );
  } else {
    throw new DataException(
        String.format("Cannot convert %s to timestamp.", inputValue.getClass().getName())
    );
  }

  return result;
}
 
Example 2
Source File: PatternFilter.java    From kafka-connect-transform-common with Apache License 2.0
R filter(R record, final boolean key) {
  final SchemaAndValue input = key ?
      new SchemaAndValue(record.keySchema(), record.key()) :
      new SchemaAndValue(record.valueSchema(), record.value());
  final R result;
  if (input.schema() != null) {
    if (Schema.Type.STRUCT == input.schema().type()) {
      result = filter(record, (Struct) input.value());
    } else if (Schema.Type.MAP == input.schema().type()) {
      result = filter(record, (Map) input.value());
    } else {
      result = record;
    }
  } else if (input.value() instanceof Map) {
    result = filter(record, (Map) input.value());
  } else {
    result = record;
  }

  return result;
}
 
Example 3
Source File: SinkRecordHelper.java    From connect-utils with Apache License 2.0
public static SinkRecord delete(String topic, SchemaAndValue key) {
  Preconditions.checkNotNull(topic, "topic cannot be null");
  if (null == key) {
    throw new DataException("key cannot be null.");
  }
  if (null == key.value()) {
    throw new DataException("key.value() cannot be null.");
  }

  return new SinkRecord(
      topic,
      PARTITION,
      key.schema(),
      key.value(),
      null,
      null,
      OFFSET,
      TIMESTAMP,
      TimestampType.CREATE_TIME
  );
}
 
Example 4
Source File: SinkRecordHelper.java    From connect-utils with Apache License 2.0
public static SinkRecord write(String topic, SchemaAndValue key, SchemaAndValue value) {
  Preconditions.checkNotNull(topic, "topic cannot be null");
  Preconditions.checkNotNull(key, "key cannot be null.");
  Preconditions.checkNotNull(key.value(), "key.value() cannot be null.");
  Preconditions.checkNotNull(value, "value cannot be null.");
  Preconditions.checkNotNull(value.value(), "value.value() cannot be null.");

  return new SinkRecord(
      topic,
      PARTITION,
      key.schema(),
      key.value(),
      value.schema(),
      value.value(),
      OFFSET,
      TIMESTAMP,
      TimestampType.CREATE_TIME
  );
}
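For context, a sketch of how a test might call this helper; the topic name, schemas, and payloads below are illustrative, and PARTITION, OFFSET, and TIMESTAMP come from constants defined elsewhere in SinkRecordHelper.

SinkRecord record = SinkRecordHelper.write(
    "test-topic",
    new SchemaAndValue(Schema.STRING_SCHEMA, "user-123"),
    new SchemaAndValue(Schema.STRING_SCHEMA, "some payload")
);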
 
Example 5
Source File: ExtJsonConverter.java    From apicurio-registry with Apache License 2.0
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    FormatStrategy.IdPayload ip = formatStrategy.toConnectData(value);

    long globalId = ip.getGlobalId();
    JsonNode schemaNode = getCache().getSchema(globalId);
    Schema schema = jsonConverter.asConnectSchema(schemaNode);

    byte[] payload = ip.getPayload();
    SchemaAndValue sav = jsonConverter.toConnectData(topic, payload);

    return new SchemaAndValue(schema, sav.value());
}
 
Example 6
Source File: ExtractTimestamp.java    From kafka-connect-transform-common with Apache License 2.0
protected long process(SchemaAndValue schemaAndValue) {
  final long result;
  if (schemaAndValue.value() instanceof Struct) {
    result = processStruct(schemaAndValue);
  } else if (schemaAndValue.value() instanceof Map) {
    result = processMap(schemaAndValue);
  } else {
    throw new UnsupportedOperationException();
  }
  return result;
}
 
Example 7
Source File: ExtractTimestamp.java    From kafka-connect-transform-common with Apache License 2.0
private long processStruct(SchemaAndValue schemaAndValue) {
  final Struct inputStruct = (Struct) schemaAndValue.value();
  final Field inputField = schemaAndValue.schema().field(this.config.fieldName);

  if (null == inputField) {
    throw new DataException(
        String.format("Schema does not have field '{}'", this.config.fieldName)
    );
  }

  final Schema fieldSchema = inputField.schema();
  final long result;
  if (Schema.Type.INT64 == fieldSchema.type()) {
    final Object fieldValue = inputStruct.get(inputField);

    if (null == fieldValue) {
      throw new DataException(
          String.format("Field '%s' cannot be null.", this.config.fieldName)
      );
    }

    if (Timestamp.LOGICAL_NAME.equals(fieldSchema.name())) {
      final Date date = (Date) fieldValue;
      result = date.getTime();
    } else {
      final long timestamp = (long) fieldValue;
      result = timestamp;
    }
  } else {
    throw new DataException(
        String.format("Schema '{}' is not supported.", inputField.schema())
    );
  }

  return result;
}
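The Timestamp.LOGICAL_NAME branch above exists because Connect's Timestamp logical type is stored as INT64 on the wire but surfaces as java.util.Date in a Struct. A minimal sketch of a schema carrying such a field, assuming an illustrative field name:

Schema schema = SchemaBuilder.struct()
    .field("created_at", Timestamp.SCHEMA) // logical name "org.apache.kafka.connect.data.Timestamp", type INT64
    .build();
Struct struct = new Struct(schema)
    .put("created_at", new java.util.Date());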
 
Example 8
Source File: NormalizeSchema.java    From kafka-connect-transform-common with Apache License 2.0
protected SchemaAndValue normalize(SchemaAndValue input) {
  if (null == input.value() || null == input.schema()) {
    log.trace("normalize() - input.value() or input.schema() is null.");
    return input;
  }
  final Schema inputSchema = input.schema();

  if (Schema.Type.STRUCT != inputSchema.type()) {
    log.trace("normalize() - inputSchema.type('{}') is not a struct.", inputSchema.type());
    return input;
  }

  SchemaState state = stateLookup.computeIfAbsent(SchemaKey.of(inputSchema), schemaKey -> new SchemaState(inputSchema));
  if (!state.hasSchema(inputSchema)) {
    state.addSchema(inputSchema);
  }
  if (!state.shouldConvert(inputSchema)) {
    log.trace(
        "normalize() - Schema is already the latest version; no conversion needed. version = {}",
        inputSchema.version()
    );
    return input;
  }
  Schema latestSchema = state.latest();
  log.trace(
      "normalize() - Converting version {} to {}. schema = '{}', state='{}'",
      inputSchema.version(),
      latestSchema.version(),
      inputSchema,
      state
  );

  Struct inputStruct = (Struct) input.value();

  Struct outputStruct = new Struct(latestSchema);
  copy(inputStruct, outputStruct);
  return new SchemaAndValue(latestSchema, outputStruct);
}
 
Example 9
Source File: AbstractSpoolDirSourceTask.java    From kafka-connect-spooldir with Apache License 2.0
protected void addRecord(List<SourceRecord> records, SchemaAndValue key, SchemaAndValue value) {
  final Long timestamp;

  switch (this.config.timestampMode) {
    case FIELD:
      Struct valueStruct = (Struct) value.value();
      log.trace("addRecord() - Reading date from timestamp field '{}'", this.config.timestampField);
      final java.util.Date date = (java.util.Date) valueStruct.get(this.config.timestampField);
      timestamp = date.getTime();
      break;
    case FILE_TIME:
      timestamp = this.inputFile.lastModified();
      break;
    case PROCESS_TIME:
      timestamp = null;
      break;
    default:
      throw new UnsupportedOperationException(
          String.format("Unsupported timestamp mode. %s", this.config.timestampMode)
      );
  }

  SourceRecord sourceRecord = record(
      key,
      value,
      timestamp
  );
  recordCount++;
  records.add(sourceRecord);
}
 
Example 10
Source File: BaseRecordBuilder.java    From kafka-connect-mq-source with Apache License 2.0
/**
 * Convert a message into a Kafka Connect SourceRecord.
 *
 * @param context          the JMS context to use for building messages
 * @param topic            the Kafka topic
 * @param messageBodyJms   whether to interpret MQ messages as JMS messages
 * @param message          the message
 *
 * @return the Kafka Connect SourceRecord
 *
 * @throws JMSException    Message could not be converted
 */
@Override
public SourceRecord toSourceRecord(JMSContext context, String topic, boolean messageBodyJms, Message message) throws JMSException {
  SchemaAndValue key = this.getKey(context, topic, message);
  SchemaAndValue value = this.getValue(context, topic, messageBodyJms, message);

  if (copyJmsPropertiesFlag && messageBodyJms) {
    return new SourceRecord(null, null, topic, (Integer) null,
        key.schema(), key.value(),
        value.schema(), value.value(),
        message.getJMSTimestamp(),
        jmsToKafkaHeaderConverter.convertJmsPropertiesToKafkaHeaders(message));
  } else {
    return new SourceRecord(null, null, topic,
        key.schema(), key.value(),
        value.schema(), value.value());
  }
}