org.apache.kafka.connect.source.SourceTask Java Examples
The following examples show how to use
org.apache.kafka.connect.source.SourceTask.
You can vote up the examples you find useful or vote down the ones you don't.
To see each example in context, follow the link to the original project or source file above it; related API usage is listed on the sidebar.
Example #1
Source File: AbstractKafkaConnectSource.java From hazelcast-jet-contrib with Apache License 2.0 | 5 votes |
public AbstractKafkaConnectSource(Properties properties) {
    try {
        // Resolve the connector implementation named by the "connector.class" property
        // using the context class loader, so connectors packaged outside this
        // module's class path can still be found.
        String connectorClassName = properties.getProperty("connector.class");
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        Class<?> connectorClass = loader.loadClass(connectorClassName);

        connector = (SourceConnector) connectorClass.getConstructor().newInstance();
        connector.initialize(new JetConnectorContext());
        connector.start((Map) properties);

        // Run a single task: take the first (and only) generated task config
        // and instantiate the connector's task class reflectively.
        taskConfig = connector.taskConfigs(1).get(0);
        task = (SourceTask) connector.taskClass().getConstructor().newInstance();
    } catch (Exception e) {
        // Delegate wrapping/propagation to the project's rethrow helper.
        throw rethrow(e);
    }
}
Example #2
Source File: BackupSourceConnector.java From kafka-backup with Apache License 2.0 | 5 votes |
/**
 * Stores the task configuration and, unless explicitly allowed via
 * {@code BackupSourceConfig.ALLOW_OLD_KAFKA_CONNECT_VERSION}, verifies that the
 * running Kafka Connect exposes {@code SourceTask.commitRecord(SourceRecord,
 * RecordMetadata)} (added in Kafka Connect 2.4), which this connector needs to
 * commit offsets.
 *
 * @param props the connector configuration supplied by the Connect runtime
 * @throws RuntimeException if the required commitRecord overload is absent and
 *                          the old-version override flag is not set to "true"
 */
@Override
public void start(Map<String, String> props) {
    config = props;
    if (!config.getOrDefault(BackupSourceConfig.ALLOW_OLD_KAFKA_CONNECT_VERSION, "false").equals("true")) {
        try {
            // Probe reflectively: the two-arg commitRecord overload only exists
            // on Kafka Connect >= 2.4.
            SourceTask.class.getMethod("commitRecord", SourceRecord.class, RecordMetadata.class);
        } catch (NoSuchMethodException e) {
            // Preserve the NoSuchMethodException as the cause instead of discarding it,
            // so the stack trace shows exactly which reflective lookup failed.
            throw new RuntimeException("Kafka Backup requires at least Kafka Connect 2.4. Otherwise Offsets cannot be committed. If you are sure what you are doing, please set " + BackupSourceConfig.ALLOW_OLD_KAFKA_CONNECT_VERSION + " to true", e);
        }
    }
}
Example #3
Source File: AbstractKafkaConnectSource.java From pulsar with Apache License 2.0 | 4 votes |
/**
 * Sets up the wrapped Kafka Connect source task: instantiates the task and the
 * key/value converters from the supplied config, wires up a Pulsar-backed
 * offset store/reader/writer, then initializes and starts the task.
 *
 * @param config        adaptor configuration; only String-valued entries are
 *                      forwarded to the Kafka Connect task
 * @param sourceContext Pulsar source context (not used directly here)
 * @throws Exception if reflection, converter setup, or task startup fails
 */
@Override
public void open(Map<String, Object> config, SourceContext sourceContext) throws Exception {
    // Kafka Connect APIs expect Map<String, String>; keep only String values.
    Map<String, String> stringConfig = new HashMap<>();
    config.forEach((key, value) -> {
        if (value instanceof String) {
            stringConfig.put(key, (String) value);
        }
    });

    // get the source class name from config and create source task from reflection
    sourceTask = ((Class<? extends SourceTask>) Class.forName(stringConfig.get(TaskConfig.TASK_CLASS_CONFIG)))
            .asSubclass(SourceTask.class)
            .getDeclaredConstructor()
            .newInstance();

    topicNamespace = stringConfig.get(TOPIC_NAMESPACE_CONFIG);

    // initialize the key and value converter
    keyConverter = ((Class<? extends Converter>) Class.forName(stringConfig.get(PulsarKafkaWorkerConfig.KEY_CONVERTER_CLASS_CONFIG)))
            .asSubclass(Converter.class)
            .getDeclaredConstructor()
            .newInstance();
    valueConverter = ((Class<? extends Converter>) Class.forName(stringConfig.get(PulsarKafkaWorkerConfig.VALUE_CONVERTER_CLASS_CONFIG)))
            .asSubclass(Converter.class)
            .getDeclaredConstructor()
            .newInstance();

    // If Avro converters were configured, replace them with instances backed by a
    // mock schema registry and point the registry URL config at it, so no real
    // schema-registry service is required.
    if (keyConverter instanceof AvroConverter) {
        keyConverter = new AvroConverter(new MockSchemaRegistryClient());
        config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock");
    }
    if (valueConverter instanceof AvroConverter) {
        valueConverter = new AvroConverter(new MockSchemaRegistryClient());
        config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock");
    }
    // Kafka Connect convention: the boolean flag marks key (true) vs value (false).
    keyConverter.configure(config, true);
    valueConverter.configure(config, false);

    // Persist Kafka Connect source offsets in Pulsar and start the backing store
    // before creating the reader/writer on top of it.
    offsetStore = new PulsarOffsetBackingStore();
    PulsarKafkaWorkerConfig pulsarKafkaWorkerConfig = new PulsarKafkaWorkerConfig(stringConfig);
    offsetStore.configure(pulsarKafkaWorkerConfig);
    offsetStore.start();
    offsetReader = new OffsetStorageReaderImpl(
            offsetStore,
            "pulsar-kafka-connect-adaptor",
            keyConverter,
            valueConverter
    );
    offsetWriter = new OffsetStorageWriter(
            offsetStore,
            "pulsar-kafka-connect-adaptor",
            keyConverter,
            valueConverter
    );

    // Hand the task its context (for offset lookups), then start it with the
    // string-only configuration assembled above.
    sourceTaskContext = new PulsarIOSourceTaskContext(offsetReader, pulsarKafkaWorkerConfig);
    sourceTask.initialize(sourceTaskContext);
    sourceTask.start(stringConfig);
}