Java Code Examples for org.apache.kafka.connect.source.SourceRecord#value()
The following examples show how to use org.apache.kafka.connect.source.SourceRecord#value().
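SourceRecord#value() returns the record payload as an untyped Object, which callers interpret against valueSchema() and cast accordingly. Before the project examples, here is a minimal standalone sketch of that pattern (the topic, key, and field names are illustrative only, not taken from any project below):

import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

public class SourceRecordValueSketch {
    public static void main(String[] args) {
        // Build a simple value schema and a matching Struct payload.
        Schema valueSchema = SchemaBuilder.struct()
                .field("id", Schema.INT32_SCHEMA)
                .build();
        Struct value = new Struct(valueSchema).put("id", 42);

        // Source partition/offset maps are left empty here for brevity.
        SourceRecord record = new SourceRecord(
                Collections.emptyMap(), Collections.emptyMap(),
                "example-topic",
                Schema.STRING_SCHEMA, "key-1",
                valueSchema, value);

        // value() returns Object; cast based on valueSchema().
        Struct payload = (Struct) record.value();
        System.out.println(payload.getInt32("id")); // prints 42
    }
}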
Example 1
Source File: CamelTypeConverterTransformTest.java From camel-kafka-connector with Apache License 2.0
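The test builds a Struct-valued SourceRecord, runs it through CamelTypeConverterTransform with Map as the target type, then reads the converted payload back via value() and casts it to a Map, asserting that the unset "extra" field comes back as null and that the value schema is now of type MAP.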
@Test
public void testIfHandlesTypeConvertersFromCamelComponents() {
    // we know we have a type converter from struct to map in dbz component, so we use this for testing
    final Schema schema = SchemaBuilder.struct()
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.STRING_SCHEMA)
            .field("valid", Schema.BOOLEAN_SCHEMA)
            .field("extra", Schema.STRING_SCHEMA)
            .build();

    final Struct value = new Struct(schema);
    value.put("id", 12);
    value.put("name", "test-name");
    value.put("valid", true);

    final SourceRecord connectRecord = new SourceRecord(Collections.emptyMap(), Collections.emptyMap(),
            "topic", Schema.STRING_SCHEMA, "1234", schema, value);

    final Map<String, Object> props = new HashMap<>();
    props.put(CamelTypeConverterTransform.FIELD_TARGET_TYPE_CONFIG, Map.class.getName());

    final Transformation<SourceRecord> transformationValue = new CamelTypeConverterTransform.Value<>();
    transformationValue.configure(props);

    final SourceRecord transformedValueSourceRecord = transformationValue.apply(connectRecord);

    // assert
    assertNotNull(transformedValueSourceRecord);

    final Map<String, Object> outputValue = (Map<String, Object>) transformedValueSourceRecord.value();

    assertEquals(12, outputValue.get("id"));
    assertEquals("test-name", outputValue.get("name"));
    assertNull(outputValue.get("extra"));
    assertTrue((boolean) outputValue.get("valid"));

    assertEquals(Schema.Type.MAP, transformedValueSourceRecord.valueSchema().type());
}
Example 2
Source File: DebeziumSourceRecordToDataflowCdcFormatTranslator.java From DataflowTemplates with Apache License 2.0
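This Debezium-to-Beam translator casts value() to Struct and returns early when it is null (Debezium tombstone records carry a null value), then unpacks the change envelope fields "after", "op", and "ts_ms" into a Beam Row.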
public Row translate(SourceRecord record) {
    LOG.debug("Source Record from Debezium: {}", record);

    String qualifiedTableName = record.topic();

    Struct recordValue = (Struct) record.value();
    if (recordValue == null) {
        return null;
    }

    // TODO: Consider including before value in the Row.
    Struct afterValue = recordValue.getStruct("after");
    Row afterValueRow = afterValue == null ? null : handleValue(afterValue.schema(), afterValue);
    LOG.debug("Beam Row is {}", afterValueRow);

    Row primaryKey = null;
    boolean hasPK = true;
    if (record.key() == null) {
        hasPK = false;
    } else {
        primaryKey = handleValue(record.keySchema(), record.key());
        LOG.debug("Key Schema: {} | Key Value: {}", primaryKey.getSchema(), primaryKey);
    }

    String sourceRecordOp = recordValue.getString("op");
    String operation = translateOperation(sourceRecordOp);
    if (operation == null) {
        return null;
    }

    Long timestampMs = recordValue.getInt64("ts_ms");

    if (!knownSchemas.containsKey(qualifiedTableName)) {
        org.apache.beam.sdk.schemas.Schema.Builder schemaBuilder = org.apache.beam.sdk.schemas.Schema
                .builder()
                .addStringField(DataflowCdcRowFormat.OPERATION)
                .addStringField(DataflowCdcRowFormat.TABLE_NAME)
                .addField(org.apache.beam.sdk.schemas.Schema.Field.nullable(
                        DataflowCdcRowFormat.FULL_RECORD, FieldType.row(afterValueRow.getSchema())))
                .addInt64Field(DataflowCdcRowFormat.TIMESTAMP_MS);
        if (hasPK) {
            schemaBuilder.addRowField(DataflowCdcRowFormat.PRIMARY_KEY, primaryKey.getSchema());
        }
        knownSchemas.put(qualifiedTableName, schemaBuilder.build());
    }
    org.apache.beam.sdk.schemas.Schema finalBeamSchema = knownSchemas.get(qualifiedTableName);

    Row.Builder beamRowBuilder = Row.withSchema(finalBeamSchema)
            .addValue(operation)
            .addValue(qualifiedTableName)
            .addValue(afterValueRow)
            .addValue(timestampMs);
    if (hasPK) {
        beamRowBuilder.addValue(primaryKey);
    }

    return beamRowBuilder.build();
}
Example 3
Source File: Db2ConnectorIT.java From debezium-incubator with Apache License 2.0
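This integration test checks Debezium delete events when tombstones are disabled: for each delete record, value() is cast to Struct, the "before" field holds the deleted row, and "after" is null.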
@Test
public void deleteWithoutTombstone() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 2;
    final int ID_START = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(Db2ConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(Db2ConnectorConfig.TOMBSTONES_ON_DELETE, false)
            .build();

    start(Db2Connector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    TestHelper.enableDbCdc(connection);
    connection.execute("UPDATE ASNCDC.IBMSNAP_REGISTER SET STATE = 'A' WHERE SOURCE_OWNER = 'DB2INST1'");
    TestHelper.refreshAndWait(connection);

    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }

    TestHelper.refreshAndWait(connection);

    consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);

    connection.execute("DELETE FROM tableB");
    TestHelper.refreshAndWait(connection);

    final SourceRecords deleteRecords = consumeRecordsByTopic(RECORDS_PER_TABLE);
    final List<SourceRecord> deleteTableA = deleteRecords.recordsForTopic("testdb.DB2INST1.TABLEA");
    final List<SourceRecord> deleteTableB = deleteRecords.recordsForTopic("testdb.DB2INST1.TABLEB");
    assertThat(deleteTableA).isNullOrEmpty();
    assertThat(deleteTableB).hasSize(RECORDS_PER_TABLE);

    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final SourceRecord deleteRecord = deleteTableB.get(i);
        final List<SchemaAndValueField> expectedDeleteRow = Arrays.asList(
                new SchemaAndValueField("ID", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("COLB", Schema.OPTIONAL_STRING_SCHEMA, "b"));

        final Struct deleteValue = (Struct) deleteRecord.value();
        assertRecord((Struct) deleteValue.get("before"), expectedDeleteRow);
        assertNull(deleteValue.get("after"));
    }

    stopConnector();
}
Example 4
Source File: Db2ConnectorIT.java From debezium-incubator with Apache License 2.0
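A primary-key update surfaces on each topic as three records: a delete, a tombstone, and an insert. The test casts value() to Struct for the delete and insert records and verifies that the tombstone's value() is null.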
@Test
public void updatePrimaryKey() throws Exception {
    final Configuration config = TestHelper.defaultConfig()
            .with(Db2ConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();

    start(Db2Connector.class, config);
    assertConnectorIsRunning();

    connection.execute("INSERT INTO tableb VALUES(1, 'b')");
    consumeRecordsByTopic(2);

    TestHelper.enableDbCdc(connection);
    connection.execute("UPDATE ASNCDC.IBMSNAP_REGISTER SET STATE = 'A' WHERE SOURCE_OWNER = 'DB2INST1'");
    TestHelper.refreshAndWait(connection);

    connection.setAutoCommit(false);
    connection.execute(
            "UPDATE tablea SET id=100 WHERE id=1",
            "UPDATE tableb SET id=100 WHERE id=1");
    TestHelper.refreshAndWait(connection);

    final SourceRecords records = consumeRecordsByTopic(6);

    final List<SourceRecord> tableA = records.recordsForTopic("testdb.DB2INST1.TABLEA");
    final List<SourceRecord> tableB = records.recordsForTopic("testdb.DB2INST1.TABLEB");
    assertThat(tableA).hasSize(3);
    assertThat(tableB).hasSize(3);

    final List<SchemaAndValueField> expectedDeleteRowA = Arrays.asList(
            new SchemaAndValueField("ID", Schema.INT32_SCHEMA, 1),
            new SchemaAndValueField("COLA", Schema.OPTIONAL_STRING_SCHEMA, "a"));
    final List<SchemaAndValueField> expectedDeleteKeyA = Arrays.asList(
            new SchemaAndValueField("ID", Schema.INT32_SCHEMA, 1));
    final List<SchemaAndValueField> expectedInsertRowA = Arrays.asList(
            new SchemaAndValueField("ID", Schema.INT32_SCHEMA, 100),
            new SchemaAndValueField("COLA", Schema.OPTIONAL_STRING_SCHEMA, "a"));
    final List<SchemaAndValueField> expectedInsertKeyA = Arrays.asList(
            new SchemaAndValueField("ID", Schema.INT32_SCHEMA, 100));

    final SourceRecord deleteRecordA = tableA.get(0);
    final SourceRecord tombstoneRecordA = tableA.get(1);
    final SourceRecord insertRecordA = tableA.get(2);

    final Struct deleteKeyA = (Struct) deleteRecordA.key();
    final Struct deleteValueA = (Struct) deleteRecordA.value();
    assertRecord(deleteValueA.getStruct("before"), expectedDeleteRowA);
    assertRecord(deleteKeyA, expectedDeleteKeyA);
    assertNull(deleteValueA.get("after"));

    final Struct tombstoneKeyA = (Struct) tombstoneRecordA.key();
    final Struct tombstoneValueA = (Struct) tombstoneRecordA.value();
    assertRecord(tombstoneKeyA, expectedDeleteKeyA);
    assertNull(tombstoneValueA);

    final Struct insertKeyA = (Struct) insertRecordA.key();
    final Struct insertValueA = (Struct) insertRecordA.value();
    assertRecord(insertValueA.getStruct("after"), expectedInsertRowA);
    assertRecord(insertKeyA, expectedInsertKeyA);
    assertNull(insertValueA.get("before"));

    final List<SchemaAndValueField> expectedDeleteRowB = Arrays.asList(
            new SchemaAndValueField("ID", Schema.INT32_SCHEMA, 1),
            new SchemaAndValueField("COLB", Schema.OPTIONAL_STRING_SCHEMA, "b"));
    final List<SchemaAndValueField> expectedDeleteKeyB = Arrays.asList(
            new SchemaAndValueField("ID", Schema.INT32_SCHEMA, 1));
    final List<SchemaAndValueField> expectedInsertRowB = Arrays.asList(
            new SchemaAndValueField("ID", Schema.INT32_SCHEMA, 100),
            new SchemaAndValueField("COLB", Schema.OPTIONAL_STRING_SCHEMA, "b"));
    final List<SchemaAndValueField> expectedInsertKeyB = Arrays.asList(
            new SchemaAndValueField("ID", Schema.INT32_SCHEMA, 100));

    final SourceRecord deleteRecordB = tableB.get(0);
    final SourceRecord tombstoneRecordB = tableB.get(1);
    final SourceRecord insertRecordB = tableB.get(2);

    final Struct deleteKeyB = (Struct) deleteRecordB.key();
    final Struct deleteValueB = (Struct) deleteRecordB.value();
    assertRecord(deleteValueB.getStruct("before"), expectedDeleteRowB);
    assertRecord(deleteKeyB, expectedDeleteKeyB);
    assertNull(deleteValueB.get("after"));
    // assertThat(deleteValueB.getStruct("source").getInt64("event_serial_no")).isEqualTo(1L);

    final Struct tombstoneKeyB = (Struct) tombstoneRecordB.key();
    final Struct tombstoneValueB = (Struct) tombstoneRecordB.value();
    assertRecord(tombstoneKeyB, expectedDeleteKeyB);
    assertNull(tombstoneValueB);

    final Struct insertKeyB = (Struct) insertRecordB.key();
    final Struct insertValueB = (Struct) insertRecordB.value();
    assertRecord(insertValueB.getStruct("after"), expectedInsertRowB);
    assertRecord(insertKeyB, expectedInsertKeyB);
    assertNull(insertValueB.get("before"));
    // assertThat(insertValueB.getStruct("source").getInt64("event_serial_no")).isEqualTo(2L);

    stopConnector();
}