io.confluent.kafka.schemaregistry.client.SchemaRegistryClient Java Examples
The following examples show how to use
io.confluent.kafka.schemaregistry.client.SchemaRegistryClient.
The originating project and source file are noted above each example.
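Most of the examples below revolve around a handful of client operations: registering a schema under a subject, fetching a schema by its global id, and listing subjects. As a quick orientation, here is a minimal, self-contained sketch of those calls; it is not taken from any of the projects below, and the class name and subject are illustrative. It uses the Avro-based client API seen throughout these examples, with MockSchemaRegistryClient as the in-memory implementation, so no Schema Registry server is needed:

import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import org.apache.avro.Schema;

public class SchemaRegistryClientSketch {

  public static void main(String[] args) throws Exception {
    // In-memory implementation; no running Schema Registry needed.
    SchemaRegistryClient client = new MockSchemaRegistryClient();

    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"myrecord\","
            + "\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}");

    // Register the schema under a subject; the registry assigns a global id.
    int id = client.register("my-topic-value", schema);

    // Look the schema up again, by id and across subjects.
    Schema byId = client.getById(id);
    System.out.println("id=" + id + ", schema=" + byId);
    System.out.println("subjects=" + client.getAllSubjects());
  }
}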
Example #1
Source File: PhysicalPlanBuilder.java From ksql-fork-with-deep-learning-function with Apache License 2.0
public PhysicalPlanBuilder(
    final StreamsBuilder builder,
    final KsqlConfig ksqlConfig,
    final KafkaTopicClient kafkaTopicClient,
    final FunctionRegistry functionRegistry,
    final Map<String, Object> overriddenStreamsProperties,
    final boolean updateMetastore,
    final MetaStore metaStore,
    final SchemaRegistryClient schemaRegistryClient,
    final KafkaStreamsBuilder kafkaStreamsBuilder
) {
  this.builder = builder;
  this.ksqlConfig = ksqlConfig;
  this.kafkaTopicClient = kafkaTopicClient;
  this.functionRegistry = functionRegistry;
  this.overriddenStreamsProperties = overriddenStreamsProperties;
  this.metaStore = metaStore;
  this.updateMetastore = updateMetastore;
  this.schemaRegistryClient = schemaRegistryClient;
  this.kafkaStreamsBuilder = kafkaStreamsBuilder;
}
Example #2
Source File: ConfluentClientTest.java From apicurio-registry with Apache License 2.0
@Test
public void testSerdeJsonSchema() throws Exception {
  final SchemaRegistryClient client = buildClient();
  final String subject = generateArtifactId();

  final SchemaContent schemaContent = new SchemaContent(
      "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}");

  final Properties config = new Properties();
  config.put(KafkaJsonSchemaSerializerConfig.AUTO_REGISTER_SCHEMAS, true);
  config.put(KafkaJsonSchemaSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081/api/ccompat");

  try (
      KafkaJsonSchemaSerializer serializer = new KafkaJsonSchemaSerializer(client, new HashMap(config));
      KafkaJsonSchemaDeserializer deserializer = new KafkaJsonSchemaDeserializer(client, config, SchemaContent.class)
  ) {
    byte[] bytes = serializer.serialize(subject, schemaContent);
    Object deserialized = deserializer.deserialize(subject, bytes);
    Assertions.assertEquals(schemaContent, deserialized);
  }
}
Example #3
Source File: TopicStreamWriterFormatTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldNotMatchAvroFormatter() throws Exception {
  // Set up expectations
  SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  replay(schemaRegistryClient);

  // Test data
  ConsumerRecord<String, Bytes> record =
      new ConsumerRecord<String, Bytes>("topic", 1, 1, "key", new Bytes("test-data".getBytes()));

  // Assert
  assertFalse(TopicStreamWriter.Format.AVRO.isFormat("topic", record, schemaRegistryClient));
}
Example #4
Source File: TopicStreamWriterFormatTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldMatchJsonFormatter() throws Exception {
  SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  replay(schemaRegistryClient);

  // Test data
  String json = "{ \"name\": \"myrecord\"," + " \"type\": \"record\"" + "}";
  ConsumerRecord<String, Bytes> record =
      new ConsumerRecord<String, Bytes>("topic", 1, 1, "key", new Bytes(json.getBytes()));

  assertTrue(TopicStreamWriter.Format.JSON.isFormat("topic", record, schemaRegistryClient));
}
Example #5
Source File: KsqlGenericRowAvroSerializer.java From ksql-fork-with-deep-learning-function with Apache License 2.0
public KsqlGenericRowAvroSerializer(
    org.apache.kafka.connect.data.Schema schema,
    SchemaRegistryClient schemaRegistryClient,
    KsqlConfig ksqlConfig
) {
  String avroSchemaStr = SchemaUtil.buildAvroSchema(schema, "avro_schema");

  Schema.Parser parser = new Schema.Parser();
  avroSchema = parser.parse(avroSchemaStr);
  fields = avroSchema.getFields();

  Map<String, Object> map = new HashMap<>();
  // Automatically register the schema in the Schema Registry if it has not been registered.
  map.put(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true);
  map.put(
      AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
      ksqlConfig.getString(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY)
  );
  kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient, map);
}
Example #6
Source File: ConfluentClientTest.java From apicurio-registry with Apache License 2.0
@Test
public void testSerdeProtobufSchema() throws Exception {
  TestCmmn.UUID record = TestCmmn.UUID.newBuilder().setLsb(2).setMsb(1).build();

  final SchemaRegistryClient client = buildClient();
  final String subject = generateArtifactId();

  final Properties config = new Properties();
  config.put(KafkaProtobufSerializerConfig.AUTO_REGISTER_SCHEMAS, true);
  config.put(KafkaJsonSchemaSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081/api/ccompat");

  try (
      KafkaProtobufSerializer serializer = new KafkaProtobufSerializer(client, new HashMap(config));
      KafkaProtobufDeserializer deserializer = new KafkaProtobufDeserializer(client, config)
  ) {
    byte[] bytes = serializer.serialize(subject, record);
    Object deserialized = deserializer.deserialize(subject, bytes);
    Assertions.assertEquals(record, deserialized);
  }
}
Example #7
Source File: ClusterIT.java From apicurio-registry with Apache License 2.0
@Test
public void testConfluent() throws Exception {
  Properties properties = getClusterProperties();
  Assumptions.assumeTrue(properties != null);

  SchemaRegistryClient client1 = new CachedSchemaRegistryClient("http://localhost:8080/api/ccompat", 3);
  SchemaRegistryClient client2 = new CachedSchemaRegistryClient("http://localhost:8081/api/ccompat", 3);

  String subject = UUID.randomUUID().toString();
  Schema schema = new Schema.Parser().parse(
      "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}");
  int id = client1.register(subject, schema);
  try {
    TestUtils.retry(() -> {
      Collection<String> allSubjects = client2.getAllSubjects();
      Assertions.assertTrue(allSubjects.contains(subject));
    });
    TestUtils.retry(() -> {
      Schema s = client2.getById(id);
      Assertions.assertNotNull(s);
    });
  } finally {
    client1.deleteSchemaVersion(subject, "1");
  }
}
Example #8
Source File: KsqlResourceTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
private void registerSchema(SchemaRegistryClient schemaRegistryClient)
    throws IOException, RestClientException {
  String ordersAvroSchemaStr = "{"
      + "\"namespace\": \"kql\","
      + " \"name\": \"orders\","
      + " \"type\": \"record\","
      + " \"fields\": ["
      + "     {\"name\": \"ordertime\", \"type\": \"long\"},"
      + "     {\"name\": \"orderid\", \"type\": \"long\"},"
      + "     {\"name\": \"itemid\", \"type\": \"string\"},"
      + "     {\"name\": \"orderunits\", \"type\": \"double\"},"
      + "     {\"name\": \"arraycol\", \"type\": {\"type\": \"array\", \"items\": \"double\"}},"
      + "     {\"name\": \"mapcol\", \"type\": {\"type\": \"map\", \"values\": \"double\"}}"
      + " ]"
      + "}";
  org.apache.avro.Schema.Parser parser = new org.apache.avro.Schema.Parser();
  org.apache.avro.Schema avroSchema = parser.parse(ordersAvroSchemaStr);
  schemaRegistryClient.register(
      "orders-topic" + KsqlConstants.SCHEMA_REGISTRY_VALUE_SUFFIX,
      avroSchema
  );
}
Example #9
Source File: ConfluentKafkaSchemaRegistryTest.java From incubator-gobblin with Apache License 2.0
@Test
public void testRegisterAndGetByKey() throws SchemaRegistryException {
  Properties properties = new Properties();
  properties.setProperty(KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_URL, TEST_URL);

  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KafkaSchemaRegistry<Integer, Schema> kafkaSchemaRegistry =
      new ConfluentKafkaSchemaRegistry(properties, schemaRegistryClient);

  Schema schema =
      SchemaBuilder.record(TEST_RECORD_NAME).namespace(TEST_NAMESPACE).fields()
          .name(TEST_FIELD_NAME).type().stringType().noDefault()
          .endRecord();

  Integer id = kafkaSchemaRegistry.register(schema);
  Assert.assertEquals(schema, kafkaSchemaRegistry.getSchemaByKey(id));
}
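A note on this and the following Gobblin example: both inject MockSchemaRegistryClient, Confluent's in-memory SchemaRegistryClient implementation, into ConfluentKafkaSchemaRegistry. Because all state lives in memory, the registry-backed code paths can be unit-tested without a running Schema Registry server.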
Example #10
Source File: ConfluentKafkaSchemaRegistryTest.java From incubator-gobblin with Apache License 2.0
private void doTestRegisterAndGetLatest(Properties properties) throws SchemaRegistryException {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KafkaSchemaRegistry<Integer, Schema> kafkaSchemaRegistry =
      new ConfluentKafkaSchemaRegistry(properties, schemaRegistryClient);

  Schema schema1 =
      SchemaBuilder.record(TEST_RECORD_NAME + "1").namespace(TEST_NAMESPACE).fields()
          .name(TEST_FIELD_NAME).type().stringType().noDefault()
          .endRecord();
  Schema schema2 =
      SchemaBuilder.record(TEST_RECORD_NAME + "2").namespace(TEST_NAMESPACE).fields()
          .name(TEST_FIELD_NAME).type().stringType().noDefault()
          .endRecord();

  kafkaSchemaRegistry.register(schema1, TEST_TOPIC_NAME);
  kafkaSchemaRegistry.register(schema2, TEST_TOPIC_NAME);

  Assert.assertNotEquals(schema1, kafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
  Assert.assertEquals(schema2, kafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
}
Example #11
Source File: KsqlSchemaRegistryClientFactoryTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldPickUpPrefixedSslConfig() {
  // Given:
  final KsqlConfig config = config(
      "ksql.schema.registry." + SslConfigs.SSL_PROTOCOL_CONFIG, "SSLv3"
  );

  final Map<String, Object> expectedConfigs = defaultConfigs();
  expectedConfigs.put(SslConfigs.SSL_PROTOCOL_CONFIG, "SSLv3");
  setUpMocksWithExpectedConfig(expectedConfigs);

  // When:
  final SchemaRegistryClient client =
      new KsqlSchemaRegistryClientFactory(config, restServiceSupplier, sslFactory).create();

  // Then:
  assertThat(client, is(notNullValue()));
  EasyMock.verify(restService);
}
Example #12
Source File: KsqlGenericRowAvroSerializerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldFailForIncompatibleType() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));

  List columns = Arrays.asList(
      1511897796092L, 1L, "item_1", "10.0", new Double[]{100.0},
      Collections.singletonMap("key1", 100.0));

  GenericRow genericRow = new GenericRow(columns);
  try {
    byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
    Assert.fail("Did not fail for incompatible types.");
  } catch (Exception e) {
    assertThat(
        e.getMessage(),
        equalTo("org.apache.kafka.common.errors.SerializationException: "
            + "Error serializing Avro message"));
  }
}
Example #13
Source File: KsqlSchemaRegistryClientFactoryTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldSetSocketFactoryWhenNoSpecificSslConfig() {
  // Given:
  final KsqlConfig config = config();

  final Map<String, Object> expectedConfigs = defaultConfigs();
  setUpMocksWithExpectedConfig(expectedConfigs);

  // When:
  final SchemaRegistryClient client =
      new KsqlSchemaRegistryClientFactory(config, restServiceSupplier, sslFactory).create();

  // Then:
  assertThat(client, is(notNullValue()));
  EasyMock.verify(restService);
}
Example #14
Source File: ConfluentSchemaRegistryDeserializerProvider.java From beam with Apache License 2.0
@VisibleForTesting
ConfluentSchemaRegistryDeserializerProvider(
    SerializableFunction<Void, SchemaRegistryClient> schemaRegistryClientProviderFn,
    String schemaRegistryUrl,
    String subject,
    @Nullable Integer version) {
  checkArgument(
      schemaRegistryClientProviderFn != null,
      "You should provide a schemaRegistryClientProviderFn.");
  checkArgument(schemaRegistryUrl != null, "You should provide a schemaRegistryUrl.");
  checkArgument(subject != null, "You should provide a subject to fetch the schema from.");
  this.schemaRegistryClientProviderFn = schemaRegistryClientProviderFn;
  this.schemaRegistryUrl = schemaRegistryUrl;
  this.subject = subject;
  this.version = version;
}
Example #15
Source File: PhysicalPlanBuilder.java From ksql-fork-with-deep-learning-function with Apache License 2.0
public PhysicalPlanBuilder(
    final StreamsBuilder builder,
    final KsqlConfig ksqlConfig,
    final KafkaTopicClient kafkaTopicClient,
    final FunctionRegistry functionRegistry,
    final Map<String, Object> overriddenStreamsProperties,
    final boolean updateMetastore,
    final MetaStore metaStore,
    final SchemaRegistryClient schemaRegistryClient
) {
  this(
      builder,
      ksqlConfig,
      kafkaTopicClient,
      functionRegistry,
      overriddenStreamsProperties,
      updateMetastore,
      metaStore,
      schemaRegistryClient,
      new KafkaStreamsBuilderImpl()
  );
}
Example #16
Source File: SchemaKTable.java From ksql-fork-with-deep-learning-function with Apache License 2.0
public SchemaKTable(
    final Schema schema,
    final KTable ktable,
    final Field keyField,
    final List<SchemaKStream> sourceSchemaKStreams,
    boolean isWindowed,
    Type type,
    final FunctionRegistry functionRegistry,
    final SchemaRegistryClient schemaRegistryClient
) {
  super(
      schema,
      null,
      keyField,
      sourceSchemaKStreams,
      type,
      functionRegistry,
      schemaRegistryClient
  );
  this.ktable = ktable;
  this.isWindowed = isWindowed;
}
Example #17
Source File: QueryMetadata.java From ksql-fork-with-deep-learning-function with Apache License 2.0
private Set<String> getInternalSubjectNameSet(SchemaRegistryClient schemaRegistryClient) {
  try {
    final String suffix1 = KsqlConstants.STREAMS_CHANGELOG_TOPIC_SUFFIX
        + KsqlConstants.SCHEMA_REGISTRY_VALUE_SUFFIX;
    final String suffix2 = KsqlConstants.STREAMS_REPARTITION_TOPIC_SUFFIX
        + KsqlConstants.SCHEMA_REGISTRY_VALUE_SUFFIX;
    return schemaRegistryClient.getAllSubjects().stream()
        .filter(subjectName -> subjectName.startsWith(getQueryApplicationId()))
        .filter(subjectName -> subjectName.endsWith(suffix1) || subjectName.endsWith(suffix2))
        .collect(Collectors.toSet());
  } catch (Exception e) {
    // Do nothing! Schema registry clean-up is best effort!
    log.warn("Could not clean up the schema registry for query: " + queryApplicationId, e);
  }
  return new HashSet<>();
}
Example #18
Source File: SchemaKStream.java From ksql-fork-with-deep-learning-function with Apache License 2.0
public SchemaKStream(
    final Schema schema,
    final KStream<String, GenericRow> kstream,
    final Field keyField,
    final List<SchemaKStream> sourceSchemaKStreams,
    final Type type,
    final FunctionRegistry functionRegistry,
    final SchemaRegistryClient schemaRegistryClient
) {
  this.schema = schema;
  this.kstream = kstream;
  this.keyField = keyField;
  this.sourceSchemaKStreams = sourceSchemaKStreams;
  this.genericRowValueTypeEnforcer = new GenericRowValueTypeEnforcer(schema);
  this.type = type;
  this.functionRegistry = functionRegistry;
  this.schemaRegistryClient = schemaRegistryClient;
}
Example #19
Source File: AvroUtilTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldPassAvroCheck() throws Exception {
  SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  SchemaMetadata schemaMetadata = new SchemaMetadata(1, 1, ordersAveroSchemaStr);
  expect(schemaRegistryClient.getLatestSchemaMetadata(anyString())).andReturn(schemaMetadata);
  replay(schemaRegistryClient);

  AbstractStreamCreateStatement abstractStreamCreateStatement = getAbstractStreamCreateStatement(
      "CREATE STREAM S1 WITH (kafka_topic='s1_topic', value_format='avro' );");
  Pair<AbstractStreamCreateStatement, String> checkResult =
      avroUtil.checkAndSetAvroSchema(abstractStreamCreateStatement, new HashMap<>(), schemaRegistryClient);
  AbstractStreamCreateStatement newAbstractStreamCreateStatement = checkResult.getLeft();
  assertThat(newAbstractStreamCreateStatement.getElements(), equalTo(Arrays.asList(
      new TableElement("ORDERTIME", "BIGINT"),
      new TableElement("ORDERID", "BIGINT"),
      new TableElement("ITEMID", "VARCHAR"),
      new TableElement("ORDERUNITS", "DOUBLE"),
      new TableElement("ARRAYCOL", "ARRAY<DOUBLE>"),
      new TableElement("MAPCOL", "MAP<VARCHAR,DOUBLE>")
  )));
}
Example #20
Source File: AvroUtilTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldNotPassAvroCheckIfSchemaDoesNotExist() throws Exception {
  SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  SchemaMetadata schemaMetadata = new SchemaMetadata(1, 1, null);
  expect(schemaRegistryClient.getLatestSchemaMetadata(anyString())).andReturn(schemaMetadata);
  replay(schemaRegistryClient);

  AbstractStreamCreateStatement abstractStreamCreateStatement = getAbstractStreamCreateStatement(
      "CREATE STREAM S1 WITH (kafka_topic='s1_topic', value_format='avro' );");
  try {
    avroUtil.checkAndSetAvroSchema(abstractStreamCreateStatement, new HashMap<>(), schemaRegistryClient);
    fail();
  } catch (Exception e) {
    assertThat(
        "Expected a different message.",
        e.getMessage().trim(),
        equalTo("Unable to verify the AVRO schema is compatible with KSQL. null"));
  }
}
Example #21
Source File: ConfluentSchemaRegistryDeserializerProviderTest.java From beam with Apache License 2.0
@Test
public void testGetCoder() {
  String schemaRegistryUrl = "mock://my-scope-name";
  String subject = "mytopic";
  SchemaRegistryClient mockRegistryClient = mockSchemaRegistryClient(schemaRegistryUrl, subject);
  CoderRegistry coderRegistry = CoderRegistry.createDefault();

  AvroCoder coderV0 =
      (AvroCoder) mockDeserializerProvider(schemaRegistryUrl, subject, null).getCoder(coderRegistry);
  assertEquals(AVRO_SCHEMA, coderV0.getSchema());

  try {
    Integer version = mockRegistryClient.register(subject, AVRO_SCHEMA_V1);
    AvroCoder coderV1 =
        (AvroCoder) mockDeserializerProvider(schemaRegistryUrl, subject, version).getCoder(coderRegistry);
    assertEquals(AVRO_SCHEMA_V1, coderV1.getSchema());
  } catch (IOException | RestClientException e) {
    throw new RuntimeException("Unable to register schema for subject: " + subject, e);
  }
}
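A note on the URL in this test: recent Confluent client libraries treat the mock:// scheme specially, resolving it to an in-process mock registry keyed by the scope name ("my-scope-name" here) instead of a network endpoint. The mockSchemaRegistryClient and mockDeserializerProvider helpers are defined elsewhere in this Beam test class and are not part of the Confluent API.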
Example #22
Source File: JoinNode.java From ksql-fork-with-deep-learning-function with Apache License 2.0
SchemaKTable tableForJoin(
    final StreamsBuilder builder,
    final KsqlConfig ksqlConfig,
    final KafkaTopicClient kafkaTopicClient,
    final FunctionRegistry functionRegistry,
    final Map<String, Object> props,
    final SchemaRegistryClient schemaRegistryClient) {

  Map<String, Object> joinTableProps = new HashMap<>(props);
  joinTableProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

  final SchemaKStream schemaKStream = right.buildStream(
      builder, ksqlConfig, kafkaTopicClient, functionRegistry, joinTableProps, schemaRegistryClient);
  if (!(schemaKStream instanceof SchemaKTable)) {
    throw new KsqlException("Unsupported Join. Only stream-table joins are supported, but was "
        + getLeft() + "-" + getRight());
  }

  return (SchemaKTable) schemaKStream;
}
Example #23
Source File: KsqlSchemaRegistryClientFactoryTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldPickUpNonPrefixedSslConfig() {
  // Given:
  final KsqlConfig config = config(
      SslConfigs.SSL_PROTOCOL_CONFIG, "SSLv3"
  );

  final Map<String, Object> expectedConfigs = defaultConfigs();
  expectedConfigs.put(SslConfigs.SSL_PROTOCOL_CONFIG, "SSLv3");
  setUpMocksWithExpectedConfig(expectedConfigs);

  // When:
  final SchemaRegistryClient client =
      new KsqlSchemaRegistryClientFactory(config, restServiceSupplier, sslFactory).create();

  // Then:
  assertThat(client, is(notNullValue()));
  EasyMock.verify(restService);
}
Example #24
Source File: ProjectNodeTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
private void mockSourceNode() {
  EasyMock.expect(source.getKeyField()).andReturn(new Field("field1", 0, Schema.STRING_SCHEMA));
  EasyMock.expect(source.buildStream(
      anyObject(StreamsBuilder.class),
      anyObject(KsqlConfig.class),
      anyObject(KafkaTopicClient.class),
      anyObject(FunctionRegistry.class),
      eq(props),
      anyObject(SchemaRegistryClient.class))).andReturn(stream);
}
Example #25
Source File: AvroUtil.java From ksql-fork-with-deep-learning-function with Apache License 2.0
private SchemaMetadata fetchSchemaMetadata(
    AbstractStreamCreateStatement abstractStreamCreateStatement,
    SchemaRegistryClient schemaRegistryClient,
    String kafkaTopicName
) throws IOException, RestClientException {
  if (abstractStreamCreateStatement.getProperties().containsKey(KsqlConstants.AVRO_SCHEMA_ID)) {
    int schemaId;
    try {
      schemaId = Integer.parseInt(
          StringUtil.cleanQuotes(
              abstractStreamCreateStatement
                  .getProperties()
                  .get(KsqlConstants.AVRO_SCHEMA_ID)
                  .toString()
          )
      );
    } catch (NumberFormatException e) {
      throw new KsqlException(String.format(
          "Invalid schema id property: %s.",
          abstractStreamCreateStatement
              .getProperties()
              .get(KsqlConstants.AVRO_SCHEMA_ID)
              .toString()
      ));
    }
    return schemaRegistryClient.getSchemaMetadata(
        kafkaTopicName + KsqlConstants.SCHEMA_REGISTRY_VALUE_SUFFIX,
        schemaId
    );
  } else {
    return schemaRegistryClient.getLatestSchemaMetadata(
        kafkaTopicName + KsqlConstants.SCHEMA_REGISTRY_VALUE_SUFFIX
    );
  }
}
Example #26
Source File: DropSourceCommand.java From ksql-fork-with-deep-learning-function with Apache License 2.0
public DropSourceCommand(
    final AbstractStreamDropStatement statement,
    final DataSource.DataSourceType dataSourceType,
    final SchemaRegistryClient schemaRegistryClient) {
  this.sourceName = statement.getName().getSuffix();
  this.dataSourceType = dataSourceType;
  this.schemaRegistryClient = schemaRegistryClient;
}
Example #27
Source File: KsqlSchemaRegistryClientFactory.java From ksql-fork-with-deep-learning-function with Apache License 2.0
public SchemaRegistryClient create() {
  final RestService restService = serviceSupplier.get();
  final SSLContext sslContext = sslFactory.sslContext();
  if (sslContext != null) {
    restService.setSslSocketFactory(sslContext.getSocketFactory());
  }
  return new CachedSchemaRegistryClient(restService, 1000);
}
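The numeric constructor argument (1000 here, 3 in the ClusterIT example above) is the client's identity-map capacity: roughly, an upper bound on the number of schemas the client will cache per subject, not a connection or timeout setting. Exceeding it causes the client to fail rather than evict, so production code tends to pass a generously large value, as this factory does.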
Example #28
Source File: KsqlGenericRowAvroDeserializerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldDeserializeWithMissingFields() {
  String schemaStr1 = "{"
      + "\"namespace\": \"kql\","
      + " \"name\": \"orders\","
      + " \"type\": \"record\","
      + " \"fields\": ["
      + "     {\"name\": \"orderTime\", \"type\": \"long\"},"
      + "     {\"name\": \"orderId\", \"type\": \"long\"},"
      + "     {\"name\": \"itemId\", \"type\": \"string\"},"
      + "     {\"name\": \"orderUnits\", \"type\": \"double\"}"
      + " ]"
      + "}";

  Schema.Parser parser = new Schema.Parser();
  Schema avroSchema1 = parser.parse(schemaStr1);

  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();

  List columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0);
  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema1, genericRow);

  KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
      new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);

  GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", (String) row.getColumns().get(2), equalTo("item_1"));
  Assert.assertNull(row.getColumns().get(4));
  Assert.assertNull(row.getColumns().get(5));
}
Example #29
Source File: KsqlGenericRowAvroSerializerTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Test
public void shouldSerializeRowCorrectly() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));

  List columns = Arrays.asList(
      1511897796092L, 1L, "item_1", 10.0, new Double[]{100.0},
      Collections.singletonMap("key1", 100.0));

  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);

  KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);

  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));

  GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
  Map map = (Map) genericRecord.get("mapcol".toUpperCase());

  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map.size(), equalTo(1));
  assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));
}
Example #30
Source File: KafkaConfluentSchemaRegistryAvroMessageDecoder.java From incubator-pinot with Apache License 2.0
@Override
public void init(Map<String, String> props, Set<String> fieldsToRead, String topicName)
    throws Exception {
  checkState(props.containsKey(SCHEMA_REGISTRY_REST_URL),
      "Missing required property '%s'", SCHEMA_REGISTRY_REST_URL);
  String schemaRegistryUrl = props.get(SCHEMA_REGISTRY_REST_URL);

  SchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(schemaRegistryUrl, 1000);
  _deserializer = new KafkaAvroDeserializer(schemaRegistryClient);

  Preconditions.checkNotNull(topicName, "Topic must be provided");
  _topicName = topicName;
  _avroRecordExtractor = PluginManager.get().createInstance(AvroRecordExtractor.class.getName());
  _avroRecordExtractor.init(fieldsToRead, null);
}