Java Code Examples for org.apache.kafka.common.serialization.Deserializer#deserialize()
The following examples show how to use org.apache.kafka.common.serialization.Deserializer#deserialize(). Each example notes its source project and license.
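Before the examples, here is a minimal round trip through the API for orientation. This is a small sketch assuming only the standard kafka-clients StringSerializer and StringDeserializer; the topic name and payload are illustrative.

    import org.apache.kafka.common.serialization.Deserializer;
    import org.apache.kafka.common.serialization.Serializer;
    import org.apache.kafka.common.serialization.StringDeserializer;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class DeserializeRoundTrip {
        public static void main(String[] args) {
            Serializer<String> serializer = new StringSerializer();
            Deserializer<String> deserializer = new StringDeserializer();

            // Encode a value as a producer would, then decode it back.
            byte[] bytes = serializer.serialize("demo-topic", "hello");
            String value = deserializer.deserialize("demo-topic", bytes);

            System.out.println(value); // prints "hello"
        }
    }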
Example 1
Source File: KafkaAvroSerdesTest.java From registry with Apache License 2.0
@Test
public void testToggleStoringSchemaInHeader() {
    TestRecord record = new TestRecord();
    record.setField1("Hello");
    record.setField2("World");
    String keySchemaHeaderName = KafkaAvroSerde.DEFAULT_KEY_SCHEMA_VERSION_ID;

    for (Boolean storeSchemaIdInHeader : Arrays.asList(true, false)) {
        Map<String, Object> configs = new HashMap<>();
        configs.put(KafkaAvroSerializer.STORE_SCHEMA_VERSION_ID_IN_HEADER, storeSchemaIdInHeader.toString());
        configs.put(AbstractAvroSnapshotDeserializer.SPECIFIC_AVRO_READER, true);

        KafkaAvroSerde serde = new KafkaAvroSerde(schemaRegistryClient);

        final Serializer<Object> serializer = serde.serializer();
        serializer.configure(configs, true);
        Headers headers = new RecordHeaders();
        final byte[] bytes = serializer.serialize(topic, headers, record);
        // The schema version id header should be present exactly when header storage is enabled.
        Assert.assertEquals(storeSchemaIdInHeader, headers.lastHeader(keySchemaHeaderName) != null);

        final Deserializer<Object> deserializer = serde.deserializer();
        deserializer.configure(configs, true);
        final TestRecord actual = (TestRecord) deserializer.deserialize(topic, headers, bytes);
        Assert.assertEquals(record, actual);
    }
}
Example 2
Source File: SerializerDeserializerTest.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License
@Test
public void testSerde() {
    Serializer<String> stringSerializer = new StringSerializer();
    Deserializer<String> stringDeserializer = new StringDeserializer();
    Serializer<LargeMessageSegment> segmentSerializer = new DefaultSegmentSerializer();
    Deserializer<LargeMessageSegment> segmentDeserializer = new DefaultSegmentDeserializer();

    String s = LiKafkaClientsTestUtils.getRandomString(100);
    assertEquals(s.length(), 100);
    byte[] stringBytes = stringSerializer.serialize("topic", s);
    assertEquals(stringBytes.length, 100);

    LargeMessageSegment segment =
        new LargeMessageSegment(LiKafkaClientsUtils.randomUUID(), 0, 2, stringBytes.length, ByteBuffer.wrap(stringBytes));
    // String bytes + segment header
    byte[] serializedSegment = segmentSerializer.serialize("topic", segment);
    assertEquals(serializedSegment.length, 1 + stringBytes.length + LargeMessageSegment.SEGMENT_INFO_OVERHEAD + 4);

    LargeMessageSegment deserializedSegment = segmentDeserializer.deserialize("topic", serializedSegment);
    assertEquals(deserializedSegment.messageId, segment.messageId);
    assertEquals(deserializedSegment.messageSizeInBytes, segment.messageSizeInBytes);
    assertEquals(deserializedSegment.numberOfSegments, segment.numberOfSegments);
    assertEquals(deserializedSegment.sequenceNumber, segment.sequenceNumber);
    assertEquals(deserializedSegment.payload.limit(), 100);

    String deserializedString = stringDeserializer.deserialize("topic", deserializedSegment.payloadArray());
    assertEquals(deserializedString.length(), s.length());
}
Example 3
Source File: PluginFactoryTest.java From kafka-webview with MIT License
/**
 * Test creating a Deserializer.
 */
@Test
public void testWithDeserializer() throws LoaderException {
    final String jarFilename = "testPlugins.jar";
    final String classPath = "examples.deserializer.ExampleDeserializer";

    // Find jar on filesystem.
    final URL jar = getClass().getClassLoader().getResource("testDeserializer/" + jarFilename);
    final String jarPath = new File(jar.getFile()).getParent();

    // Create factory
    final PluginFactory<Deserializer> factory = new PluginFactory<>(jarPath, Deserializer.class);
    final Path pathForJar = factory.getPathForJar(jarFilename);

    // Validate path is correct
    assertEquals("Has expected Path", jar.getPath(), pathForJar.toString());

    // Get class instance
    final Class<? extends Deserializer> pluginFilterClass = factory.getPluginClass(jarFilename, classPath);

    // Validate
    assertNotNull(pluginFilterClass);
    assertEquals("Has expected name", classPath, pluginFilterClass.getName());
    assertTrue("Validate came from correct class loader", pluginFilterClass.getClassLoader() instanceof PluginClassLoader);

    // Create Deserializer instance
    final Deserializer deserializer = factory.getPlugin(jarFilename, classPath);
    assertNotNull(deserializer);
    assertEquals("Has correct name", classPath, deserializer.getClass().getName());

    // Call method on interface
    final String value = "MyValue";
    final String result = (String) deserializer.deserialize("MyTopic", value.getBytes(StandardCharsets.UTF_8));
}
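The test above loads examples.deserializer.ExampleDeserializer from testPlugins.jar. For readers who want to build such a plugin jar, a minimal sketch of a class that would satisfy the factory follows; the class name comes from the test, but the UTF-8-decoding body is an assumption, not the actual implementation shipped in the jar.

    package examples.deserializer;

    import java.nio.charset.StandardCharsets;
    import java.util.Map;
    import org.apache.kafka.common.serialization.Deserializer;

    // Hypothetical plugin body; the real class in testPlugins.jar may differ.
    public class ExampleDeserializer implements Deserializer<String> {
        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {
            // No configuration needed for this sketch.
        }

        @Override
        public String deserialize(String topic, byte[] data) {
            // Decode the raw bytes as UTF-8 text.
            return data == null ? null : new String(data, StandardCharsets.UTF_8);
        }

        @Override
        public void close() {
            // Nothing to release.
        }
    }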
Example 4
Source File: MessageSplitterTest.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License
@Test
public void testSplit() {
    TopicPartition tp = new TopicPartition("topic", 0);
    UUID id = LiKafkaClientsUtils.randomUUID();
    String message = LiKafkaClientsTestUtils.getRandomString(1000);
    Serializer<String> stringSerializer = new StringSerializer();
    Deserializer<String> stringDeserializer = new StringDeserializer();
    Serializer<LargeMessageSegment> segmentSerializer = new DefaultSegmentSerializer();
    Deserializer<LargeMessageSegment> segmentDeserializer = new DefaultSegmentDeserializer();
    MessageSplitter splitter = new MessageSplitterImpl(200, segmentSerializer, new UUIDFactory.DefaultUUIDFactory<>());

    byte[] serializedMessage = stringSerializer.serialize("topic", message);
    List<ProducerRecord<byte[], byte[]>> records = splitter.split("topic", id, serializedMessage);
    // A 1000-byte message split into 200-byte segments yields 5 segments.
    assertEquals(records.size(), 5, "Should have 5 segments.");

    MessageAssembler assembler = new MessageAssemblerImpl(10000, 10000, true, segmentDeserializer);
    String assembledMessage = null;
    UUID uuid = null;
    for (int i = 0; i < records.size(); i++) {
        ProducerRecord<byte[], byte[]> record = records.get(i);
        LargeMessageSegment segment = segmentDeserializer.deserialize("topic", record.value());
        if (uuid == null) {
            uuid = segment.messageId;
        } else {
            assertEquals(segment.messageId, uuid, "messageId should match.");
        }
        assertEquals(segment.numberOfSegments, 5, "segment number should be 5");
        assertEquals(segment.messageSizeInBytes, serializedMessage.length, "message size should be the same");
        assertEquals(segment.sequenceNumber, i, "SequenceNumber should match");
        assembledMessage = stringDeserializer.deserialize(null, assembler.assemble(tp, i, record.value()).messageBytes());
    }
    assertEquals(assembledMessage, message, "messages should match.");
}
Example 5
Source File: KafkaAvroSerdesTest.java From registry with Apache License 2.0
private void testSchemaHeaderNames(String customKeySchemaHeaderName, String customValueSchemaHeaderName) {
    TestRecord record = new TestRecord();
    record.setField1("Hello");
    record.setField2("World");

    Map<String, Object> configs = new HashMap<>();
    configs.put(KafkaAvroSerde.KEY_SCHEMA_VERSION_ID_HEADER_NAME, customKeySchemaHeaderName);
    configs.put(KafkaAvroSerde.VALUE_SCHEMA_VERSION_ID_HEADER_NAME, customValueSchemaHeaderName);
    configs.put(KafkaAvroSerializer.STORE_SCHEMA_VERSION_ID_IN_HEADER, "true");
    configs.put(AbstractAvroSnapshotDeserializer.SPECIFIC_AVRO_READER, true);

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    AvroSerDesHandler handler = new DefaultAvroSerDesHandler();
    handler.handlePayloadSerialization(outputStream, record);

    for (Boolean isKey : Arrays.asList(true, false)) {
        KafkaAvroSerde serde = new KafkaAvroSerde(schemaRegistryClient);
        final Serializer<Object> serializer = serde.serializer();
        serializer.configure(configs, isKey);

        Headers headers = new RecordHeaders();
        final byte[] bytes = serializer.serialize(topic, headers, record);
        Assert.assertArrayEquals(outputStream.toByteArray(), bytes);
        // Exactly one of the custom key/value headers should be set, depending on isKey.
        Assert.assertEquals(isKey, headers.lastHeader(customKeySchemaHeaderName) != null);
        Assert.assertEquals(!isKey, headers.lastHeader(customValueSchemaHeaderName) != null);

        final Deserializer<Object> deserializer = serde.deserializer();
        deserializer.configure(configs, isKey);
        final TestRecord actual = (TestRecord) deserializer.deserialize(topic, headers, bytes);
        Assert.assertEquals(record, actual);
    }
}
Example 6
Source File: KafkaProducerInterceptorWrapper.java From pulsar with Apache License 2.0
private K deserializeKey(String topic, String key) {
    if (keySchema instanceof PulsarKafkaSchema) {
        PulsarKafkaSchema<K> pulsarKeyKafkaSchema = (PulsarKafkaSchema<K>) keySchema;
        // A String key is stored as-is; any other key was serialized to byte[]
        // and base64-encoded, so decode it before deserializing.
        if (pulsarKeyKafkaSchema.getKafkaSerializer() instanceof StringSerializer) {
            return (K) key;
        }
        Deserializer keyDeserializer = getDeserializer(pulsarKeyKafkaSchema.getKafkaSerializer());
        return (K) keyDeserializer.deserialize(topic, Base64.getDecoder().decode(key));
    }
    return keySchema.decode(Base64.getDecoder().decode(key));
}
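For context, the base64 branch above expects the key to have been produced by serializing it with the matching Kafka Serializer and base64-encoding the bytes, as the comment in the method notes. A minimal sketch of that encoding side; the helper name encodeKey is illustrative, not part of the Pulsar codebase.

    import java.util.Base64;
    import org.apache.kafka.common.serialization.Serializer;

    // Hypothetical helper showing the format deserializeKey() expects
    // for non-String keys: serializer output, base64-encoded.
    static <K> String encodeKey(String topic, K key, Serializer<K> serializer) {
        byte[] raw = serializer.serialize(topic, key);
        return Base64.getEncoder().encodeToString(raw);
    }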
Example 7
Source File: RegistrySerdeTest.java From apicurio-registry with Apache License 2.0
@SuppressWarnings("unchecked") @RegistryServiceTest public void testConfiguration(Supplier<RegistryService> supplier) throws Exception { Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); String artifactId = generateArtifactId(); CompletionStage<ArtifactMetaData> csa = supplier.get().createArtifact( ArtifactType.AVRO, artifactId + "-myrecord3", null, new ByteArrayInputStream(schema.toString().getBytes(StandardCharsets.UTF_8)) ); ArtifactMetaData amd = ConcurrentUtil.result(csa); // reset any cache supplier.get().reset(); // wait for global id store to populate (in case of Kafka / Streams) ArtifactMetaData amdById = retry(() -> supplier.get().getArtifactMetaDataByGlobalId(amd.getGlobalId())); Assertions.assertNotNull(amdById); GenericData.Record record = new GenericData.Record(schema); record.put("bar", "somebar"); Map<String, Object> config = new HashMap<>(); config.put(AbstractKafkaSerDe.REGISTRY_URL_CONFIG_PARAM, "http://localhost:8081/api"); config.put(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, new TopicRecordIdStrategy()); config.put(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, new FindLatestIdStrategy<>()); config.put(AvroDatumProvider.REGISTRY_AVRO_DATUM_PROVIDER_CONFIG_PARAM, new DefaultAvroDatumProvider<>()); Serializer<GenericData.Record> serializer = (Serializer<GenericData.Record>) getClass().getClassLoader() .loadClass(AvroKafkaSerializer.class.getName()) .newInstance(); serializer.configure(config, true); byte[] bytes = serializer.serialize(artifactId, record); Deserializer<GenericData.Record> deserializer = (Deserializer<GenericData.Record>) getClass().getClassLoader() .loadClass(AvroKafkaDeserializer.class.getName()) .newInstance(); deserializer.configure(config, true); record = deserializer.deserialize(artifactId, bytes); Assertions.assertEquals("somebar", record.get("bar").toString()); config.put(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, TopicRecordIdStrategy.class); config.put(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, FindLatestIdStrategy.class); config.put(AvroDatumProvider.REGISTRY_AVRO_DATUM_PROVIDER_CONFIG_PARAM, DefaultAvroDatumProvider.class); serializer.configure(config, true); bytes = serializer.serialize(artifactId, record); deserializer.configure(config, true); record = deserializer.deserialize(artifactId, bytes); Assertions.assertEquals("somebar", record.get("bar").toString()); config.put(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM, TopicRecordIdStrategy.class.getName()); config.put(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM, FindLatestIdStrategy.class.getName()); config.put(AvroDatumProvider.REGISTRY_AVRO_DATUM_PROVIDER_CONFIG_PARAM, DefaultAvroDatumProvider.class.getName()); serializer.configure(config, true); bytes = serializer.serialize(artifactId, record); deserializer.configure(config, true); record = deserializer.deserialize(artifactId, bytes); Assertions.assertEquals("somebar", record.get("bar").toString()); serializer.close(); deserializer.close(); }
Example 8
Source File: ConsumerRecordsProcessorTest.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License
@Test
public void testDeserializationException() {
    TopicPartition tp0 = new TopicPartition("topic", 0);
    TopicPartition tp1 = new TopicPartition("topic", 1);
    TopicPartition tp2 = new TopicPartition("topic", 2);

    Deserializer<String> stringDeserializer = new StringDeserializer();
    // A value deserializer that throws a SkippableException for the marker payload "ErrorBytes".
    Deserializer<String> errorThrowingDeserializer = new Deserializer<String>() {
        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {
        }

        @Override
        public String deserialize(String topic, byte[] data) {
            String s = stringDeserializer.deserialize(topic, data);
            if (s.equals("ErrorBytes")) {
                throw new SkippableException();
            }
            return s;
        }

        @Override
        public void close() {
        }
    };

    Deserializer<LargeMessageSegment> segmentDeserializer = new DefaultSegmentDeserializer();
    MessageAssembler assembler = new MessageAssemblerImpl(5000, 100, false, segmentDeserializer);
    DeliveredMessageOffsetTracker deliveredMessageOffsetTracker = new DeliveredMessageOffsetTracker(4);
    ConsumerRecordsProcessor processor = new ConsumerRecordsProcessor<>(assembler, stringDeserializer, errorThrowingDeserializer,
        deliveredMessageOffsetTracker, null, (tp) -> null);

    StringSerializer stringSerializer = new StringSerializer();
    ConsumerRecord<byte[], byte[]> consumerRecord0 = new ConsumerRecord<>("topic", 0, 0, null, stringSerializer.serialize("topic", "value"));
    ConsumerRecord<byte[], byte[]> consumerRecord1 = new ConsumerRecord<>("topic", 0, 1, null, stringSerializer.serialize("topic", "ErrorBytes"));
    ConsumerRecord<byte[], byte[]> consumerRecord2 = new ConsumerRecord<>("topic", 0, 2, null, stringSerializer.serialize("topic", "value"));
    ConsumerRecord<byte[], byte[]> consumerRecord3 = new ConsumerRecord<>("topic", 1, 0, null, stringSerializer.serialize("topic", "ErrorBytes"));
    ConsumerRecord<byte[], byte[]> consumerRecord4 = new ConsumerRecord<>("topic", 1, 1, null, stringSerializer.serialize("topic", "value"));
    ConsumerRecord<byte[], byte[]> consumerRecord5 = new ConsumerRecord<>("topic", 2, 0, null, stringSerializer.serialize("topic", "value"));

    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> recordMap = new HashMap<>();
    recordMap.put(tp0, Arrays.asList(consumerRecord0, consumerRecord1, consumerRecord2));
    recordMap.put(tp1, Arrays.asList(consumerRecord3, consumerRecord4));
    recordMap.put(tp2, Collections.singletonList(consumerRecord5));
    ConsumerRecords<byte[], byte[]> consumerRecords = new ConsumerRecords<>(recordMap);

    ConsumerRecordsProcessResult result = processor.process(consumerRecords);
    // The two "ErrorBytes" records are skipped; the remaining four are delivered.
    assertEquals(result.consumerRecords().count(), 4);
    assertEquals(result.consumerRecords().records(tp0).size(), 2);
    assertEquals(result.consumerRecords().records(tp1).size(), 1);
    assertEquals(result.consumerRecords().records(tp2).size(), 1);
    assertTrue(result.offsets().isEmpty());
    assertFalse(result.hasException());
}