org.apache.kafka.common.serialization.Deserializer Java Examples
The following examples show how to use
org.apache.kafka.common.serialization.Deserializer.
Each example is drawn from an open-source project; the source file, project, and license are noted above it.
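Before the examples, a minimal sketch may help orient readers who have not implemented the interface before: a Deserializer turns the raw bytes of a record key or value back into a typed object. The class below is hypothetical (Kafka ships an equivalent StringDeserializer) and only illustrates the three lifecycle methods.

import java.nio.charset.StandardCharsets;
import java.util.Map;

import org.apache.kafka.common.serialization.Deserializer;

// Hypothetical minimal implementation: decodes record bytes as a UTF-8 string.
public class Utf8Deserializer implements Deserializer<String> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // No configuration needed for this sketch.
    }

    @Override
    public String deserialize(String topic, byte[] data) {
        // Kafka passes null for tombstone records; preserve it.
        return data == null ? null : new String(data, StandardCharsets.UTF_8);
    }

    @Override
    public void close() {
        // Nothing to release.
    }
}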
Example #1
Source File: ModelManagerListener.java From oryx with Apache License 2.0
@SuppressWarnings("unchecked") void init(ServletContext context) { String serializedConfig = context.getInitParameter(ConfigUtils.class.getName() + ".serialized"); Objects.requireNonNull(serializedConfig); this.config = ConfigUtils.deserialize(serializedConfig); this.updateTopic = config.getString("oryx.update-topic.message.topic"); this.maxMessageSize = config.getInt("oryx.update-topic.message.max-size"); this.updateTopicLockMaster = config.getString("oryx.update-topic.lock.master"); this.updateTopicBroker = config.getString("oryx.update-topic.broker"); this.readOnly = config.getBoolean("oryx.serving.api.read-only"); if (!readOnly) { this.inputTopic = config.getString("oryx.input-topic.message.topic"); this.inputTopicLockMaster = config.getString("oryx.input-topic.lock.master"); this.inputTopicBroker = config.getString("oryx.input-topic.broker"); } this.modelManagerClassName = config.getString("oryx.serving.model-manager-class"); this.updateDecoderClass = (Class<? extends Deserializer<U>>) ClassUtils.loadClass( config.getString("oryx.update-topic.message.decoder-class"), Deserializer.class); Preconditions.checkArgument(maxMessageSize > 0); }
Example #2
Source File: ConsumerContainer.java From apicurio-registry with Apache License 2.0
public ConsumerContainer(
    Properties consumerProperties,
    Deserializer<K> keyDeserializer,
    Deserializer<V> valueDeserializer,
    long consumerPollTimeout,
    Oneof2<
        java.util.function.Consumer<? super ConsumerRecord<K, V>>,
        java.util.function.Consumer<? super ConsumerRecords<K, V>>
    > recordOrRecordsHandler,
    BiConsumer<? super Consumer<?, ?>, ? super RuntimeException> consumerExceptionHandler,
    long idlePingTimeout,
    java.util.function.Consumer<? super TopicPartition> idlePingHandler
) {
  this.consumerProperties = Objects.requireNonNull(consumerProperties);
  this.keyDeserializer = Objects.requireNonNull(keyDeserializer);
  this.valueDeserializer = Objects.requireNonNull(valueDeserializer);
  this.consumerPollTimeout = Duration.ofMillis(consumerPollTimeout);
  this.recordHandler = recordOrRecordsHandler.isFirst() ? recordOrRecordsHandler.getFirst() : null;
  this.recordsHandler = recordOrRecordsHandler.isSecond() ? recordOrRecordsHandler.getSecond() : null;
  this.consumerExceptionHandler = Objects.requireNonNull(consumerExceptionHandler);
  this.idlePingTimeout = idlePingTimeout;
  this.idlePingHandler = /* optional */ idlePingHandler;
  this.thread = new Thread(this::consumerLoop,
      "kafka-consumer-container-" + containerCount.incrementAndGet());
  thread.start();
}
Example #3
Source File: ConsumerContainer.java From apicurio-registry with Apache License 2.0
public ConsumerContainer(
    Properties consumerProperties,
    Deserializer<K> keyDeserializer,
    Deserializer<V> valueDeserializer,
    Oneof2<
        java.util.function.Consumer<? super ConsumerRecord<K, V>>,
        java.util.function.Consumer<? super ConsumerRecords<K, V>>
    > recordOrRecordsHandler,
    BiConsumer<? super Consumer<?, ?>, ? super RuntimeException> consumerExceptionHandler
) {
  this(
      consumerProperties,
      keyDeserializer,
      valueDeserializer,
      DEFAULT_CONSUMER_POLL_TIMEOUT,
      recordOrRecordsHandler,
      consumerExceptionHandler,
      0L,
      null
  );
}
Example #4
Source File: JsonPOJOSerde.java From hello-kafka-streams with Apache License 2.0
@Override
public Deserializer<T> deserializer() {
  return new Deserializer<T>() {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
    }

    @Override
    public T deserialize(String topic, byte[] data) {
      T result;
      try {
        result = mapper.readValue(data, cls);
      } catch (Exception e) {
        throw new SerializationException(e);
      }
      return result;
    }

    @Override
    public void close() {
    }
  };
}
Example #5
Source File: PulsarKafkaConsumer.java From pulsar with Apache License 2.0
@SuppressWarnings("unchecked") private K getKey(String topic, Message<byte[]> msg) { if (!msg.hasKey()) { return null; } if (keySchema instanceof PulsarKafkaSchema) { PulsarKafkaSchema<K> pulsarKafkaSchema = (PulsarKafkaSchema) keySchema; Deserializer<K> kafkaDeserializer = pulsarKafkaSchema.getKafkaDeserializer(); if (kafkaDeserializer instanceof StringDeserializer) { return (K) msg.getKey(); } pulsarKafkaSchema.setTopic(topic); } // Assume base64 encoding byte[] data = Base64.getDecoder().decode(msg.getKey()); return keySchema.decode(data); }
Example #6
Source File: TumblingWindowTest.java From kafka-tutorials with Apache License 2.0
private List<RatingCount> readOutputTopic(TopologyTestDriver testDriver,
                                          String outputTopic,
                                          Deserializer<String> keyDeserializer,
                                          Deserializer<String> valueDeserializer) {
  List<RatingCount> results = new ArrayList<>();
  while (true) {
    ProducerRecord<String, String> record =
        testDriver.readOutput(outputTopic, keyDeserializer, valueDeserializer);
    if (record != null) {
      results.add(new RatingCount(record.key().toString(), record.value()));
    } else {
      break;
    }
  }
  return results;
}
Example #7
Source File: MessageAssemblerTest.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License
@Test
public void testSingleMessageSegment() {
  // Create serializer/deserializers.
  Serializer<LargeMessageSegment> segmentSerializer = new DefaultSegmentSerializer();
  Deserializer<LargeMessageSegment> segmentDeserializer = new DefaultSegmentDeserializer();

  byte[] messageWrappedBytes = wrapMessageBytes(segmentSerializer, "message".getBytes());

  MessageAssembler messageAssembler = new MessageAssemblerImpl(100, 100, true, segmentDeserializer);
  MessageAssembler.AssembleResult assembleResult =
      messageAssembler.assemble(new TopicPartition("topic", 0), 0, messageWrappedBytes);

  assertNotNull(assembleResult.messageBytes());
  assertEquals(assembleResult.messageStartingOffset(), 0, "The message starting offset should be 0");
  assertEquals(assembleResult.messageEndingOffset(), 0, "The message ending offset should be 0");
}
Example #8
Source File: KafkaRyaStreamsClientFactory.java From rya with Apache License 2.0
/**
 * Create a {@link Consumer} that has a unique group ID and reads everything from a topic in Kafka
 * starting at the earliest point by default.
 *
 * @param kafkaHostname - The Kafka broker hostname. (not null)
 * @param kafkaPort - The Kafka broker port.
 * @param keyDeserializerClass - Deserializes the keys. (not null)
 * @param valueDeserializerClass - Deserializes the values. (not null)
 * @return A {@link Consumer} that can be used to read records from a topic.
 */
private static <K, V> Consumer<K, V> fromStartConsumer(
    final String kafkaHostname,
    final int kafkaPort,
    final Class<? extends Deserializer<K>> keyDeserializerClass,
    final Class<? extends Deserializer<V>> valueDeserializerClass) {
  requireNonNull(kafkaHostname);
  requireNonNull(keyDeserializerClass);
  requireNonNull(valueDeserializerClass);

  final Properties consumerProps = new Properties();
  consumerProps.setProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, kafkaHostname + ":" + kafkaPort);
  consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
  consumerProps.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, UUID.randomUUID().toString());
  consumerProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  consumerProps.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializerClass.getName());
  consumerProps.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializerClass.getName());
  return new KafkaConsumer<>(consumerProps);
}
Example #9
Source File: SerializerDeserializerTest.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License
@Test
public void testSerde() {
  Serializer<String> stringSerializer = new StringSerializer();
  Deserializer<String> stringDeserializer = new StringDeserializer();
  Serializer<LargeMessageSegment> segmentSerializer = new DefaultSegmentSerializer();
  Deserializer<LargeMessageSegment> segmentDeserializer = new DefaultSegmentDeserializer();

  String s = LiKafkaClientsTestUtils.getRandomString(100);
  assertEquals(s.length(), 100);
  byte[] stringBytes = stringSerializer.serialize("topic", s);
  assertEquals(stringBytes.length, 100);
  LargeMessageSegment segment = new LargeMessageSegment(
      LiKafkaClientsUtils.randomUUID(), 0, 2, stringBytes.length, ByteBuffer.wrap(stringBytes));
  // String bytes + segment header
  byte[] serializedSegment = segmentSerializer.serialize("topic", segment);
  assertEquals(serializedSegment.length,
      1 + stringBytes.length + LargeMessageSegment.SEGMENT_INFO_OVERHEAD + 4);

  LargeMessageSegment deserializedSegment = segmentDeserializer.deserialize("topic", serializedSegment);
  assertEquals(deserializedSegment.messageId, segment.messageId);
  assertEquals(deserializedSegment.messageSizeInBytes, segment.messageSizeInBytes);
  assertEquals(deserializedSegment.numberOfSegments, segment.numberOfSegments);
  assertEquals(deserializedSegment.sequenceNumber, segment.sequenceNumber);
  assertEquals(deserializedSegment.payload.limit(), 100);
  String deserializedString = stringDeserializer.deserialize("topic", deserializedSegment.payloadArray());
  assertEquals(deserializedString.length(), s.length());
}
Example #10
Source File: OldApiTopicConsumer.java From azeroth with Apache License 2.0
/**
 * @param context consumer context holding the configuration and message handlers
 */
@SuppressWarnings("unchecked")
public OldApiTopicConsumer(ConsumerContext context) {
  this.consumerContext = context;
  try {
    Class<?> deserializerClass =
        Class.forName(context.getProperties().getProperty("value.deserializer"));
    deserializer = (Deserializer<Object>) deserializerClass.newInstance();
  } catch (Exception e) {
    // ignored: no value deserializer could be created from the configured class name
  }
  this.connector = kafka.consumer.Consumer
      .createJavaConsumerConnector(new ConsumerConfig(context.getProperties()));

  int poolSize = consumerContext.getMessageHandlers().size();
  this.fetchExecutor = new StandardThreadExecutor(poolSize, poolSize, 0, TimeUnit.SECONDS, poolSize,
      new StandardThreadFactory("KafkaFetcher"));

  this.defaultProcessExecutor = new StandardThreadExecutor(1, context.getMaxProcessThreads(), 30,
      TimeUnit.SECONDS, context.getMaxProcessThreads(),
      new StandardThreadFactory("KafkaProcessor"), new PoolFullRunsPolicy());

  logger.info("Kafka consumer thread pools initialized, fetchPool size: {}, defaultProcessPool size: {}",
      poolSize, context.getMaxProcessThreads());
}
Example #11
Source File: WebKafkaConsumerFactoryTest.java From kafka-webview with MIT License
private WebKafkaConsumerFactory createDefaultFactory() {
  final PluginFactory<Deserializer> deserializerPluginFactory =
      new PluginFactory<>("not/used", Deserializer.class);
  final PluginFactory<RecordFilter> filterPluginFactoryPluginFactory =
      new PluginFactory<>("not/used", RecordFilter.class);
  final SecretManager secretManager = new SecretManager("Passphrase");
  final KafkaConsumerFactory kafkaConsumerFactory = new KafkaConsumerFactory(
      new KafkaClientConfigUtil("not/used", "MyPrefix")
  );

  return new WebKafkaConsumerFactory(
      deserializerPluginFactory,
      filterPluginFactoryPluginFactory,
      secretManager,
      kafkaConsumerFactory,
      null
  );
}
Example #12
Source File: IntegrationTestHarness.java From ksql-fork-with-deep-learning-function with Apache License 2.0
private Deserializer<GenericRow> getDeserializer(Schema schema,
                                                 DataSource.DataSourceSerDe dataSourceSerDe) {
  switch (dataSourceSerDe) {
    case JSON:
      return new KsqlJsonDeserializer(schema);
    case AVRO:
      return new KsqlGenericRowAvroDeserializer(schema, this.schemaRegistryClient, false);
    case DELIMITED:
      return new KsqlDelimitedDeserializer(schema);
    default:
      throw new KsqlException("Format not supported: " + dataSourceSerDe);
  }
}
Example #13
Source File: KsqlResourceTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0
private static <T> Deserializer<T> getJsonDeserializer(Class<T> clazz, boolean isKey) {
  Deserializer<T> result = new KafkaJsonDeserializer<>();

  String typeConfigProperty = isKey
      ? KafkaJsonDeserializerConfig.JSON_KEY_TYPE
      : KafkaJsonDeserializerConfig.JSON_VALUE_TYPE;

  Map<String, ?> props = Collections.singletonMap(typeConfigProperty, clazz);
  result.configure(props, isKey);
  return result;
}
Example #14
Source File: KafkaBinderTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0
private ConsumerFactory<byte[], byte[]> consumerFactory() {
  Map<String, Object> props = new HashMap<>();
  KafkaBinderConfigurationProperties configurationProperties = createConfigurationProperties();
  props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
      configurationProperties.getKafkaConnectionString());
  props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
  props.put(ConsumerConfig.GROUP_ID_CONFIG, "TEST-CONSUMER-GROUP");
  Deserializer<byte[]> valueDecoder = new ByteArrayDeserializer();
  Deserializer<byte[]> keyDecoder = new ByteArrayDeserializer();
  return new DefaultKafkaConsumerFactory<>(props, keyDecoder, valueDecoder);
}
Example #15
Source File: KsqlJsonTopicSerDe.java From ksql-fork-with-deep-learning-function with Apache License 2.0
@Override
public Serde<GenericRow> getGenericRowSerde(Schema schema, KsqlConfig ksqlConfig,
                                            boolean isInternal,
                                            SchemaRegistryClient schemaRegistryClient) {
  Map<String, Object> serdeProps = new HashMap<>();
  serdeProps.put("JsonPOJOClass", GenericRow.class);

  final Serializer<GenericRow> genericRowSerializer = new KsqlJsonSerializer(schema);
  genericRowSerializer.configure(serdeProps, false);

  final Deserializer<GenericRow> genericRowDeserializer = new KsqlJsonDeserializer(schema);
  genericRowDeserializer.configure(serdeProps, false);

  return Serdes.serdeFrom(genericRowSerializer, genericRowDeserializer);
}
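The pattern above, pairing a matched Serializer and Deserializer via Serdes.serdeFrom, is general. Here is a minimal, self-contained sketch using only the built-in String implementations:

import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

public class SerdeFromSketch {
  public static void main(String[] args) {
    // Pair any matched Serializer/Deserializer into a single Serde.
    Serde<String> serde = Serdes.serdeFrom(new StringSerializer(), new StringDeserializer());
    byte[] bytes = serde.serializer().serialize("topic", "hello");
    System.out.println(serde.deserializer().deserialize("topic", bytes)); // prints "hello"
  }
}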
Example #16
Source File: RegistrySerdeTest.java From apicurio-registry with Apache License 2.0
@RegistryServiceTest
public void testProto(Supplier<RegistryService> supplier) throws Exception {
  try (ProtobufKafkaSerializer<TestCmmn.UUID> serializer =
           new ProtobufKafkaSerializer<TestCmmn.UUID>(supplier.get());
       Deserializer<DynamicMessage> deserializer = new ProtobufKafkaDeserializer(supplier.get())) {

    serializer.setGlobalIdStrategy(new AutoRegisterIdStrategy<>());

    TestCmmn.UUID record = TestCmmn.UUID.newBuilder().setLsb(2).setMsb(1).build();

    String subject = generateArtifactId();
    byte[] bytes = serializer.serialize(subject, record);

    waitForSchema(supplier.get(), bytes);

    DynamicMessage dm = deserializer.deserialize(subject, bytes);
    Descriptors.Descriptor descriptor = dm.getDescriptorForType();

    Descriptors.FieldDescriptor lsb = descriptor.findFieldByName("lsb");
    Assertions.assertNotNull(lsb);
    Assertions.assertEquals(2L, dm.getField(lsb));

    Descriptors.FieldDescriptor msb = descriptor.findFieldByName("msb");
    Assertions.assertNotNull(msb);
    Assertions.assertEquals(1L, dm.getField(msb));
  }
}
Example #17
Source File: LocalDeserializerProvider.java From beam with Apache License 2.0
/**
 * Attempt to infer a {@link Coder} by extracting the type of the deserialized-class from the
 * deserializer argument using the {@link Coder} registry.
 */
@Override
public NullableCoder<T> getCoder(CoderRegistry coderRegistry) {
  for (Type type : deserializer.getGenericInterfaces()) {
    if (!(type instanceof ParameterizedType)) {
      continue;
    }

    // This does not recurse: we will not infer from a class that extends
    // a class that extends Deserializer<T>.
    ParameterizedType parameterizedType = (ParameterizedType) type;
    if (parameterizedType.getRawType() == Deserializer.class) {
      Type parameter = parameterizedType.getActualTypeArguments()[0];

      @SuppressWarnings("unchecked")
      Class<T> clazz = (Class<T>) parameter;

      try {
        return NullableCoder.of(coderRegistry.getCoder(clazz));
      } catch (CannotProvideCoderException e) {
        throw new RuntimeException(
            String.format(
                "Unable to automatically infer a Coder for "
                    + "the Kafka Deserializer %s: no coder registered for type %s",
                deserializer, clazz));
      }
    }
  }
  throw new RuntimeException(
      String.format("Could not extract the Kafka Deserializer type from %s", deserializer));
}
Example #18
Source File: KafkaDeserializerExtractor.java From incubator-gobblin with Apache License 2.0
@VisibleForTesting
KafkaDeserializerExtractor(WorkUnitState state, Optional<Deserializers> deserializerType,
    Deserializer<?> kafkaDeserializer, KafkaSchemaRegistry<?, ?> kafkaSchemaRegistry) {
  super(state);
  this.kafkaDeserializer = kafkaDeserializer;
  this.kafkaSchemaRegistry = kafkaSchemaRegistry;
  this.latestSchema =
      (deserializerType.equals(Optional.of(Deserializers.CONFLUENT_AVRO))) ? (Schema) getSchema() : null;
}
Example #19
Source File: KafkaDeserializerExtractor.java From incubator-gobblin with Apache License 2.0
/**
 * Constructs a {@link Deserializer}, using the value of {@link #KAFKA_DESERIALIZER_TYPE}.
 */
private static Deserializer<?> getDeserializer(Properties props, Optional<Deserializers> deserializerType)
    throws ReflectiveOperationException {

  Deserializer<?> deserializer;
  if (deserializerType.isPresent()) {
    deserializer = ConstructorUtils.invokeConstructor(deserializerType.get().getDeserializerClass());
  } else {
    deserializer = Deserializer.class
        .cast(ConstructorUtils.invokeConstructor(Class.forName(props.getProperty(KAFKA_DESERIALIZER_TYPE))));
  }
  deserializer.configure(PropertiesUtils.propsToStringKeyMap(props), false);
  return deserializer;
}
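The construct-then-configure sequence above can also be written with plain JDK reflection instead of Commons Lang's ConstructorUtils. A minimal sketch; the class name and empty config map passed in main are placeholders:

import java.util.Collections;
import java.util.Map;

import org.apache.kafka.common.serialization.Deserializer;

public class ReflectiveDeserializerSketch {
  static Deserializer<?> instantiate(String className, Map<String, ?> configs)
      throws ReflectiveOperationException {
    // Load the class, invoke its no-arg constructor, then configure it as a value deserializer.
    Deserializer<?> deserializer =
        (Deserializer<?>) Class.forName(className).getDeclaredConstructor().newInstance();
    deserializer.configure(configs, false);
    return deserializer;
  }

  public static void main(String[] args) throws ReflectiveOperationException {
    Deserializer<?> d = instantiate(
        "org.apache.kafka.common.serialization.StringDeserializer", Collections.emptyMap());
    System.out.println(d.getClass().getName());
  }
}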
Example #20
Source File: ProcessingKafkaConsumerTest.java From common-kafka with Apache License 2.0
@Test
public void constructorWithDeserializers_nullConfig() {
  Deserializer<String> keyDeserializer = new StringDeserializer();
  Deserializer<String> valueDeserializer = new StringDeserializer();

  try {
    new ProcessingKafkaConsumer(null, keyDeserializer, valueDeserializer);
    Assert.fail("Expected IllegalArgumentException to be thrown");
  } catch (IllegalArgumentException e) {
    // expected
  }
}
Example #21
Source File: JacksonReadingSerializerTest.java From kafka-serializer-example with MIT License
@Override
public Deserializer<SensorReading> getDeserializer() {
  if (smile) {
    return JacksonReadingSerializer.smileConfig();
  } else {
    return JacksonReadingSerializer.defaultConfig();
  }
}
Example #22
Source File: KafkaIO.java From DataflowTemplates with Apache License 2.0
/**
 * Attempt to infer a {@link Coder} by extracting the type of the deserialized-class from the
 * deserializer argument using the {@link Coder} registry.
 */
@VisibleForTesting
static <T> NullableCoder<T> inferCoder(
    CoderRegistry coderRegistry, Class<? extends Deserializer<T>> deserializer) {
  checkNotNull(deserializer);

  for (Type type : deserializer.getGenericInterfaces()) {
    if (!(type instanceof ParameterizedType)) {
      continue;
    }

    // This does not recurse: we will not infer from a class that extends
    // a class that extends Deserializer<T>.
    ParameterizedType parameterizedType = (ParameterizedType) type;
    if (parameterizedType.getRawType() == Deserializer.class) {
      Type parameter = parameterizedType.getActualTypeArguments()[0];

      @SuppressWarnings("unchecked")
      Class<T> clazz = (Class<T>) parameter;

      try {
        return NullableCoder.of(coderRegistry.getCoder(clazz));
      } catch (CannotProvideCoderException e) {
        throw new RuntimeException(
            String.format(
                "Unable to automatically infer a Coder for "
                    + "the Kafka Deserializer %s: no coder registered for type %s",
                deserializer, clazz));
      }
    }
  }
  throw new RuntimeException(
      String.format("Could not extract the Kafka Deserializer type from %s", deserializer));
}
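The generic-interface walk used in the two coder-inference examples above can be observed in isolation. A small sketch: StringDeserializer implements Deserializer<String> directly, so its type argument is recoverable at runtime.

import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;

import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class TypeArgumentSketch {
  public static void main(String[] args) {
    for (Type type : StringDeserializer.class.getGenericInterfaces()) {
      if (type instanceof ParameterizedType
          && ((ParameterizedType) type).getRawType() == Deserializer.class) {
        // Prints "class java.lang.String": the T in Deserializer<T>.
        System.out.println(((ParameterizedType) type).getActualTypeArguments()[0]);
      }
    }
  }
}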
Example #23
Source File: KafkaDeserializerExtractorTest.java From incubator-gobblin with Apache License 2.0
@Test
public void testConfluentAvroDeserializer() throws IOException, RestClientException {
  WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);
  mockWorkUnitState.setProp("schema.registry.url", TEST_URL);

  Schema schema = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE).fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();

  GenericRecord testGenericRecord =
      new GenericRecordBuilder(schema).set(TEST_FIELD_NAME, "testValue").build();

  SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
  when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schema);

  Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
  Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);

  ByteBuffer testGenericRecordByteBuffer =
      ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));

  KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
  KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(
      mockWorkUnitState,
      Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);

  ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);

  Assert.assertEquals(kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset), testGenericRecord);
}
Example #24
Source File: PluginFactoryTest.java From kafka-webview with MIT License
/**
 * Tests loading a deserializer not from an external jar.
 */
@Test
public void testLoadingDefaultDeserializer() throws LoaderException {
  final String classPath = StringDeserializer.class.getName();

  // Create factory
  final PluginFactory<Deserializer> factory = new PluginFactory<>("/tmp", Deserializer.class);

  // Get class instance
  final Class<? extends Deserializer> pluginFilterClass = factory.getPluginClass(classPath);

  // Validate
  assertNotNull(pluginFilterClass);
  assertEquals("Has expected name", classPath, pluginFilterClass.getName());
}
Example #25
Source File: LiKafkaConsumerImpl.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License
public LiKafkaConsumerImpl(Map<String, Object> configs,
                           Deserializer<K> keyDeserializer,
                           Deserializer<V> valueDeserializer,
                           Deserializer<LargeMessageSegment> largeMessageSegmentDeserializer,
                           Auditor<K, V> consumerAuditor) {
  this(new LiKafkaConsumerConfig(configs), keyDeserializer, valueDeserializer,
      largeMessageSegmentDeserializer, consumerAuditor);
}
Example #26
Source File: SinkBridgeEndpoint.java From strimzi-kafka-bridge with Apache License 2.0
/**
 * Constructor
 *
 * @param vertx Vert.x instance
 * @param bridgeConfig Bridge configuration
 * @param format embedded format for the key/value in the Kafka message
 * @param keyDeserializer Kafka deserializer for the message key
 * @param valueDeserializer Kafka deserializer for the message value
 */
public SinkBridgeEndpoint(Vertx vertx, BridgeConfig bridgeConfig, EmbeddedFormat format,
                          Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) {
  this.vertx = vertx;
  this.bridgeConfig = bridgeConfig;
  this.topicSubscriptions = new ArrayList<>();
  this.format = format;
  this.keyDeserializer = keyDeserializer;
  this.valueDeserializer = valueDeserializer;
}
Example #27
Source File: SpeedLayer.java From oryx with Apache License 2.0
@SuppressWarnings("unchecked") public SpeedLayer(Config config) { super(config); this.updateBroker = config.getString("oryx.update-topic.broker"); this.updateTopic = config.getString("oryx.update-topic.message.topic"); this.maxMessageSize = config.getInt("oryx.update-topic.message.max-size"); this.modelManagerClassName = config.getString("oryx.speed.model-manager-class"); this.updateDecoderClass = (Class<? extends Deserializer<U>>) ClassUtils.loadClass( config.getString("oryx.update-topic.message.decoder-class"), Deserializer.class); Preconditions.checkArgument(maxMessageSize > 0); }
Example #28
Source File: DeserializerConfig.java From kafka-webview with MIT License
/**
 * Constructor.
 * @param keyDeserializerClass Class of the deserializer for keys.
 * @param keyDeserializerOptions Options passed to the key deserializer.
 * @param valueDeserializerClass Class of the deserializer for values.
 * @param valueDeserializerOptions Options passed to the value deserializer.
 */
private DeserializerConfig(
    final Class<? extends Deserializer> keyDeserializerClass,
    final Map<String, String> keyDeserializerOptions,
    final Class<? extends Deserializer> valueDeserializerClass,
    final Map<String, String> valueDeserializerOptions
) {
  this.keyDeserializerClass = keyDeserializerClass;
  this.keyDeserializerOptions = new HashMap<>();
  this.keyDeserializerOptions.putAll(keyDeserializerOptions);

  this.valueDeserializerClass = valueDeserializerClass;
  this.valueDeserializerOptions = new HashMap<>();
  this.valueDeserializerOptions.putAll(valueDeserializerOptions);
}
Example #29
Source File: PluginFactoryTest.java From kafka-webview with MIT License
/**
 * Test creating a Deserializer.
 */
@Test
public void testWithDeserializer() throws LoaderException {
  final String jarFilename = "testPlugins.jar";
  final String classPath = "examples.deserializer.ExampleDeserializer";

  // Find jar on filesystem.
  final URL jar = getClass().getClassLoader().getResource("testDeserializer/" + jarFilename);
  final String jarPath = new File(jar.getFile()).getParent();

  // Create factory
  final PluginFactory<Deserializer> factory = new PluginFactory<>(jarPath, Deserializer.class);
  final Path pathForJar = factory.getPathForJar(jarFilename);

  // Validate path is correct
  assertEquals("Has expected Path", jar.getPath(), pathForJar.toString());

  // Get class instance
  final Class<? extends Deserializer> pluginFilterClass = factory.getPluginClass(jarFilename, classPath);

  // Validate
  assertNotNull(pluginFilterClass);
  assertEquals("Has expected name", classPath, pluginFilterClass.getName());
  assertTrue("Validate came from correct class loader",
      pluginFilterClass.getClassLoader() instanceof PluginClassLoader);

  // Create Deserializer instance
  final Deserializer deserializer = factory.getPlugin(jarFilename, classPath);
  assertNotNull(deserializer);
  assertEquals("Has correct name", classPath, deserializer.getClass().getName());

  // Call method on interface
  final String value = "MyValue";
  final String result = (String) deserializer.deserialize("MyTopic", value.getBytes(StandardCharsets.UTF_8));
}
Example #30
Source File: SimpleStormKafkaBuilder.java From metron with Apache License 2.0
private static <T> Class<Deserializer<T>> createDeserializer(
    Optional<String> deserializerClass,
    String defaultDeserializerClass
) {
  try {
    return (Class<Deserializer<T>>) Class.forName(deserializerClass.orElse(defaultDeserializerClass));
  } catch (Exception e) {
    throw new IllegalStateException("Unable to create a deserializer: "
        + deserializerClass.orElse(defaultDeserializerClass)
        + ": " + e.getMessage(), e);
  }
}