org.apache.kafka.connect.runtime.ConnectorConfig Java Examples
The following examples show how to use
org.apache.kafka.connect.runtime.ConnectorConfig.
Each example is taken from an open-source project; the source file, originating project, and license are noted above it.
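Before the examples themselves, the snippet below is a minimal sketch of the pattern most of them follow: the ConnectorConfig constants are used as keys in a plain String-to-String map that describes a connector. The connector name and class here are placeholders, not taken from any of the projects below.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.connect.runtime.ConnectorConfig;

public class ConnectorConfigSketch {
    public static Map<String, String> exampleConnectorProps() {
        Map<String, String> props = new HashMap<>();

        // "name", "connector.class" and "tasks.max" are the core keys every connector needs.
        props.put(ConnectorConfig.NAME_CONFIG, "my-example-connector");                 // hypothetical name
        props.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, "com.example.MyConnector");   // hypothetical class
        props.put(ConnectorConfig.TASKS_MAX_CONFIG, "1");

        // Optional per-connector converter overrides, as used in Examples #12 and #15 below.
        props.put(ConnectorConfig.KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.storage.StringConverter");
        props.put(ConnectorConfig.VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");

        return props;
    }
}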
Example #1
Source File: IgniteSinkConnectorTest.java From ignite with Apache License 2.0 | 6 votes |
/**
 * Creates properties for test sink connector.
 *
 * @param topics Topics.
 * @return Test sink connector properties.
 */
private Map<String, String> makeSinkProps(String topics) {
    Map<String, String> props = new HashMap<>();

    props.put(SinkConnector.TOPICS_CONFIG, topics);
    props.put(ConnectorConfig.TASKS_MAX_CONFIG, "2");
    props.put(ConnectorConfig.NAME_CONFIG, "test-sink-connector");
    props.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, IgniteSinkConnectorMock.class.getName());
    props.put(IgniteSinkConstants.CACHE_NAME, "testCache");
    props.put(IgniteSinkConstants.CACHE_ALLOW_OVERWRITE, "true");
    props.put(IgniteSinkConstants.CACHE_CFG_PATH, "example-ignite.xml");
    props.put(IgniteSinkConstants.SINGLE_TUPLE_EXTRACTOR_CLASS,
        "org.apache.ignite.stream.kafka.connect.IgniteSinkConnectorTest$TestExtractor");

    return props;
}
Example #2
Source File: KafkaConnectRunnerService.java From camel-kafka-connector with Apache License 2.0 | 6 votes |
private void checkInitializationState(KafkaConnectRunner.ConnectorInitState initState) {
    // Guard against a missing init state before reading from it.
    Objects.requireNonNull(initState);

    Throwable error = initState.getError();
    Map<String, String> configs = initState.getConfigs();
    String name = configs.get(ConnectorConfig.NAME_CONFIG);

    if (error != null) {
        LOG.error("Failed to create the connector {}: {}", name, error.getMessage(), error);

        throw new RuntimeException(String.format("Failed to create the connector %s: %s", name,
                error.getMessage()), error);
    } else {
        if (initState.isCreated()) {
            LOG.debug("Created and initialized the connector {}", name);
        } else {
            LOG.debug("Failed to create connector {}", name);

            throw new RuntimeException(String.format("Failed to create connector %s", name));
        }
    }
}
Example #3
Source File: IgniteSourceConnectorTest.java From ignite with Apache License 2.0 | 6 votes |
/**
 * Creates properties for test source connector.
 *
 * @param topics Topics.
 * @return Test source connector properties.
 */
private Map<String, String> makeSourceProps(String topics) {
    Map<String, String> props = new HashMap<>();

    props.put(ConnectorConfig.TASKS_MAX_CONFIG, "2");
    props.put(ConnectorConfig.NAME_CONFIG, "test-src-connector");
    props.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, IgniteSourceConnectorMock.class.getName());
    props.put(IgniteSourceConstants.CACHE_NAME, "testCache");
    props.put(IgniteSourceConstants.CACHE_CFG_PATH, "example-ignite.xml");
    props.put(IgniteSourceConstants.TOPIC_NAMES, topics);
    props.put(IgniteSourceConstants.CACHE_EVENTS, "put");
    props.put(IgniteSourceConstants.CACHE_FILTER_CLASS, TestCacheEventFilter.class.getName());
    props.put(IgniteSourceConstants.INTL_BUF_SIZE, "1000000");

    return props;
}
Example #4
Source File: KafkaConnectRunner.java From camel-kafka-connector with Apache License 2.0 | 5 votes |
public void initializeConnector(ConnectorPropertyFactory connectorPropertyFactory,
        Consumer<ConnectorInitState> callback) throws ExecutionException, InterruptedException {
    Properties connectorProps = connectorPropertyFactory.getProperties();

    FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>((error, info) ->
            callback.accept(new ConnectorInitState(info.result().config(), info.created(), error)));

    herder.putConnectorConfig(connectorProps.getProperty(ConnectorConfig.NAME_CONFIG),
            Utils.propsToStringMap(connectorProps), false, cb);

    cb.get();
}
Example #5
Source File: KafkaConnectRunner.java From camel-kafka-connector with Apache License 2.0 | 5 votes |
public <T> void initializeConnector(ConnectorPropertyFactory connectorPropertyFactory,
        BiConsumer<ConnectorInitState, T> callback, T payload) throws ExecutionException, InterruptedException {
    Properties connectorProps = connectorPropertyFactory.getProperties();

    FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>((error, info) ->
            callback.accept(new ConnectorInitState(info.result().config(), info.created(), error), payload));

    herder.putConnectorConfig(connectorProps.getProperty(ConnectorConfig.NAME_CONFIG),
            Utils.propsToStringMap(connectorProps), false, cb);

    cb.get();
}
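A call site for the Consumer overload in Example #4, combined with the check from Example #2, might look like the sketch below. The enclosing method, and the runner and factory instances, are assumed for illustration rather than taken from camel-kafka-connector.

// Hypothetical caller; checkInitializationState is the method shown in Example #2.
public void startConnector(KafkaConnectRunner runner, ConnectorPropertyFactory factory)
        throws ExecutionException, InterruptedException {
    // Blocks until the herder has accepted (or rejected) the connector configuration.
    runner.initializeConnector(factory, this::checkInitializationState);
}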
Example #6
Source File: KafkaConnectEmbedded.java From camel-kafka-connector with Apache License 2.0 | 5 votes |
@Override
public void initializeConnector(ConnectorPropertyFactory propertyFactory) {
    LOG.trace("Adding the new connector");

    Map<String, String> configuredProperties = new HashMap<>();
    propertyFactory.getProperties().forEach((k, v) -> convertProperty(configuredProperties, k, v));

    connectorName = configuredProperties.get(ConnectorConfig.NAME_CONFIG);
    LOG.info("Initializing connector {}", connectorName);
    cluster.configureConnector(connectorName, configuredProperties);

    LOG.trace("Added the new connector");
}
Example #7
Source File: TestHbaseSinkTask.java From kafka-connect-hbase with Apache License 2.0 | 5 votes |
@Before
public void setUp() throws Exception {
    startMiniCluster();
    createTable(hbaseTable, columnFamily);
    configuration = getUtility().getConfiguration();

    //configure defaults for Sink task.
    configProps.put("hbase.test.rowkey.columns", "id");
    configProps.put("hbase.test.rowkey.delimiter", "|");
    configProps.put("hbase.test.family", columnFamily);
    configProps.put(ConnectorConfig.TOPICS_CONFIG, hbaseTable);
    configProps.put(HBaseSinkConfig.ZOOKEEPER_QUORUM_CONFIG,
        TO_LOCAL_URI.apply(getUtility().getZkCluster().getClientPort()));
}
Example #8
Source File: HBaseSinkConfig.java From kafka-connect-hbase with Apache License 2.0 | 5 votes |
/**
 * Validates the properties to ensure the rowkey property is configured for each table.
 */
public void validate() {
    final String topicsAsStr = properties.get(ConnectorConfig.TOPICS_CONFIG);
    final String[] topics = topicsAsStr.split(",");

    for (String topic : topics) {
        String key = String.format(TABLE_ROWKEY_COLUMNS_TEMPLATE, topic);
        if (!properties.containsKey(key)) {
            throw new ConfigException(String.format(" No rowkey has been configured for table [%s]", key));
        }
    }
}
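The validation above expects one rowkey entry per topic. The sketch below shows a property map that would pass it, assuming TABLE_ROWKEY_COLUMNS_TEMPLATE expands to a "hbase.<topic>.rowkey.columns" pattern; that format is inferred from Example #7 and is not confirmed from the kafka-connect-hbase source, and the topic names are illustrative.

// Hypothetical properties; key format assumed from Example #7 ("hbase.test.rowkey.columns").
Map<String, String> props = new HashMap<>();
props.put(ConnectorConfig.TOPICS_CONFIG, "orders,customers");
props.put("hbase.orders.rowkey.columns", "order_id");
props.put("hbase.customers.rowkey.columns", "customer_id");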
Example #9
Source File: TransformsConfigBuilder.java From camel-kafka-connector with Apache License 2.0 | 5 votes |
public TransformsConfigBuilder(T handle, Properties properties, String name) {
    this.handle = handle;
    this.properties = properties;
    this.name = name;

    properties.put(ConnectorConfig.TRANSFORMS_CONFIG, name);
}
Example #10
Source File: WikipediaStreamDemo.java From hello-kafka-streams with Apache License 2.0 | 5 votes |
private static ConnectEmbedded createWikipediaFeedConnectInstance(String bootstrapServers) throws Exception {
    Properties workerProps = new Properties();
    workerProps.put(DistributedConfig.GROUP_ID_CONFIG, "wikipedia-connect");
    workerProps.put(DistributedConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    workerProps.put(DistributedConfig.OFFSET_STORAGE_TOPIC_CONFIG, "connect-offsets");
    workerProps.put(DistributedConfig.CONFIG_TOPIC_CONFIG, "connect-configs");
    workerProps.put(DistributedConfig.STATUS_STORAGE_TOPIC_CONFIG, "connect-status");
    workerProps.put(DistributedConfig.KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
    workerProps.put("key.converter.schemas.enable", "false");
    workerProps.put(DistributedConfig.VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
    workerProps.put("value.converter.schemas.enable", "false");
    workerProps.put(DistributedConfig.OFFSET_COMMIT_INTERVAL_MS_CONFIG, "30000");
    workerProps.put(DistributedConfig.INTERNAL_KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
    workerProps.put("internal.key.converter.schemas.enable", "false");
    workerProps.put(DistributedConfig.INTERNAL_VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
    workerProps.put("internal.value.converter.schemas.enable", "false");

    Properties connectorProps = new Properties();
    connectorProps.put(ConnectorConfig.NAME_CONFIG, "wikipedia-irc-source");
    connectorProps.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, "io.amient.kafka.connect.irc.IRCFeedConnector");
    connectorProps.put(ConnectorConfig.TASKS_MAX_CONFIG, "10");
    connectorProps.put(IRCFeedConnector.IRC_HOST_CONFIG, "irc.wikimedia.org");
    connectorProps.put(IRCFeedConnector.IRC_PORT_CONFIG, "6667");
    connectorProps.put(IRCFeedConnector.IRC_CHANNELS_CONFIG, "#en.wikipedia,#en.wiktionary,#en.wikinews");
    connectorProps.put(IRCFeedConnector.TOPIC_CONFIG, "wikipedia-raw");

    return new ConnectEmbedded(workerProps, connectorProps);
}
Example #11
Source File: SourceConfig.java From mirus with BSD 3-Clause "New" or "Revised" License | 5 votes |
private List<Transformation<SourceRecord>> buildTransformations() {
    List<Transformation<SourceRecord>> transformations = new ArrayList<>();
    List<String> transformNames = simpleConfig.getList(ConnectorConfig.TRANSFORMS_CONFIG);
    for (String name : transformNames) {
        String configPrefix = ConnectorConfig.TRANSFORMS_CONFIG + "." + name + ".";
        // We don't have access to Plugins to properly add loaded classes' configs to the definition,
        // so retrieve it based on the transform prefix.
        Map<String, Object> transformConfig = simpleConfig.originalsWithPrefix(configPrefix);
        String transformClassName = (String) transformConfig.get("type");
        Transformation<SourceRecord> transform;
        try {
            Class<?> transformClass =
                (Class<?>) ConfigDef.parseType(configPrefix + "type", transformClassName, ConfigDef.Type.CLASS);
            transform = transformClass.asSubclass(Transformation.class).newInstance();
            transform.configure(transformConfig);
        } catch (RuntimeException | InstantiationException | IllegalAccessException e) {
            // If we couldn't build and configure the Transformation properly we can't verify
            // that we'll be looking for the right target topics, so throw an error.
            throw new ConnectException(
                String.format("Error building transformation %s from config", name), e);
        }
        transformations.add(transform);
    }
    return transformations;
}
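Example #11 relies on the standard Connect transform naming convention: the transforms list names each transform alias, and every per-alias key is prefixed with transforms.<alias>.. The sketch below shows properties that buildTransformations() could parse; the alias and the InsertField settings are illustrative, not taken from the mirus project.

// Illustrative connector properties for a single transform aliased "insertSource".
Map<String, String> props = new HashMap<>();
props.put(ConnectorConfig.TRANSFORMS_CONFIG, "insertSource");   // the "transforms" list
props.put(ConnectorConfig.TRANSFORMS_CONFIG + ".insertSource.type",
        "org.apache.kafka.connect.transforms.InsertField$Value");
props.put(ConnectorConfig.TRANSFORMS_CONFIG + ".insertSource.static.field", "source_cluster");
props.put(ConnectorConfig.TRANSFORMS_CONFIG + ".insertSource.static.value", "primary");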
Example #12
Source File: BasicConnectorPropertyFactory.java From camel-kafka-connector with Apache License 2.0 | 4 votes |
public T withValueConverterClass(String converterClass) {
    connectorProps.put(ConnectorConfig.VALUE_CONVERTER_CLASS_CONFIG, converterClass);

    return (T) this;
}
Example #13
Source File: TaskConfigBuilderTest.java From mirus with BSD 3-Clause "New" or "Revised" License | 4 votes |
private TaskConfigBuilder newBuilder(Map<String, String> properties) {
    properties.put(ConnectorConfig.NAME_CONFIG, "source-name");
    properties.put(TaskConfigDefinition.CONSUMER_CLIENT_ID, "test-");
    return new TaskConfigBuilder(new RoundRobinTaskAssignor(), new SourceConfig(properties));
}
Example #14
Source File: SourceConfig.java From mirus with BSD 3-Clause "New" or "Revised" License | 4 votes |
public String getName() {
    return simpleConfig.getString(ConnectorConfig.NAME_CONFIG);
}
Example #15
Source File: BasicConnectorPropertyFactory.java From camel-kafka-connector with Apache License 2.0 | 4 votes |
public T withKeyConverterClass(String converterClass) {
    connectorProps.put(ConnectorConfig.KEY_CONVERTER_CLASS_CONFIG, converterClass);

    return (T) this;
}
Example #16
Source File: BasicConnectorPropertyFactory.java From camel-kafka-connector with Apache License 2.0 | 4 votes |
public T withConnectorClass(String connectorClass) {
    connectorProps.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, connectorClass);

    return (T) this;
}
Example #17
Source File: BasicConnectorPropertyFactory.java From camel-kafka-connector with Apache License 2.0 | 4 votes |
public T withName(String name) {
    connectorProps.put(ConnectorConfig.NAME_CONFIG, name);

    return (T) this;
}
Example #18
Source File: TransformsConfigBuilder.java From camel-kafka-connector with Apache License 2.0 | 4 votes |
public TransformsConfigBuilder<T> withEntry(String key, String value) {
    properties.put(ConnectorConfig.TRANSFORMS_CONFIG + "." + name + "." + key, value);

    return this;
}
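Examples #9 and #18 combine as follows: the constructor registers the alias under transforms, and each withEntry call adds a transforms.<name>.<key> property. The usage sketch below is hedged; the handle type, the factory and connectorProps variables, and the chosen alias are assumptions made only to show the key layout.

// Hypothetical usage; assumes the builder was created as in Example #9 with name = "myTransform".
TransformsConfigBuilder<BasicConnectorPropertyFactory> transforms =
        new TransformsConfigBuilder<>(factory, connectorProps, "myTransform");

transforms
    .withEntry("type", "org.apache.kafka.connect.transforms.InsertField$Value")  // -> transforms.myTransform.type
    .withEntry("static.field", "origin");                                        // -> transforms.myTransform.static.field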