org.apache.kafka.common.config.SslConfigs Java Examples
The following examples show how to use
org.apache.kafka.common.config.SslConfigs.
The source project, source file, and license for each example are noted above the snippet.
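Before the catalog, here is a minimal, self-contained sketch (not taken from any of the projects below) of how the SslConfigs constants are typically combined to build an SSL-enabled Kafka client configuration. The broker address, store paths, passwords, and class name are placeholder values chosen for illustration.

import java.util.Properties;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;

public class SslConsumerSketch {

    public static KafkaConsumer<String, String> createSslConsumer() {
        Properties props = new Properties();
        // Placeholder broker address and group id
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "broker.example.com:9093");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "example-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        // Encrypt traffic with TLS and trust the broker's certificate
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
        props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "/path/to/client.truststore.jks");
        props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "truststore-password");

        // Keystore settings are only needed when the broker requires client authentication (mutual TLS)
        props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "/path/to/client.keystore.jks");
        props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "keystore-password");
        props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "key-password");

        return new KafkaConsumer<>(props);
    }
}

The examples that follow show the same constants used in real projects, from plain producer/consumer setup to Jetty SslContextFactory wiring and NiFi/Atlas property translation.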
Example #1
Source File: KafkaClientConfigUtil.java From kafka-webview with MIT License

/**
 * If SSL is configured for this cluster, apply the settings.
 * @param clusterConfig Cluster configuration definition to source values from.
 * @param config Config map to apply settings to.
 */
private void applySslSettings(final ClusterConfig clusterConfig, final Map<String, Object> config) {
    // Optionally configure SSL
    if (!clusterConfig.isUseSsl()) {
        return;
    }
    if (clusterConfig.isUseSasl()) {
        config.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_SSL.name);
    } else {
        config.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SSL.name);

        // KeyStore and KeyStore password only needed if NOT using SASL
        config.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, keyStoreRootPath + "/" + clusterConfig.getKeyStoreFile());
        config.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, clusterConfig.getKeyStorePassword());
    }

    // Only put Trust properties if one is defined
    if (clusterConfig.getTrustStoreFile() != null) {
        config.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, keyStoreRootPath + "/" + clusterConfig.getTrustStoreFile());
        config.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, clusterConfig.getTrustStorePassword());
    }
}
Example #2
Source File: ConfigDefTest.java From kafka-connectors with Apache License 2.0

@Test
public void testSslPasswords() {
    ConfigDef def = new ConfigDef();
    SslConfigs.addClientSslSupport(def);

    Properties props = new Properties();
    props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "key_password");
    props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "keystore_password");
    props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "truststore_password");

    Map<String, Object> vals = def.parse(props);
    assertEquals(new Password("key_password"), vals.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG));
    assertEquals(Password.HIDDEN, vals.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG).toString());
    assertEquals(new Password("keystore_password"), vals.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG));
    assertEquals(Password.HIDDEN, vals.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG).toString());
    assertEquals(new Password("truststore_password"), vals.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG));
    assertEquals(Password.HIDDEN, vals.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG).toString());
}
Example #3
Source File: TestAuthorize.java From incubator-sentry with Apache License 2.0

private KafkaProducer<String, String> createKafkaProducer(String user) {
    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ProducerConfig.CLIENT_ID_CONFIG, "SentryKafkaProducer");
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
    props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
    props.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "JKS");
    props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, KafkaTestServer.class.getResource("/" + user + ".keystore.jks").getPath());
    props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, user + "-ks-passwd");
    props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, user + "-key-passwd");
    props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, KafkaTestServer.class.getResource("/" + user + ".truststore.jks").getPath());
    props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, user + "-ts-passwd");
    return new KafkaProducer<String, String>(props);
}
Example #4
Source File: SSLUtils.java From kop with Apache License 2.0

/**
 * Configures TrustStore related settings in SslContextFactory.
 */
protected static void configureSslContextFactoryTrustStore(SslContextFactory ssl,
                                                            Map<String, Object> sslConfigValues) {
    ssl.setTrustStoreType(
        (String) getOrDefault(
            sslConfigValues,
            SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG,
            SslConfigs.DEFAULT_SSL_TRUSTSTORE_TYPE));

    String sslTruststoreLocation = (String) sslConfigValues.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG);
    if (sslTruststoreLocation != null) {
        ssl.setTrustStorePath(sslTruststoreLocation);
    }

    // Only set the truststore password if one was configured
    String sslTruststorePassword = (String) sslConfigValues.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG);
    if (sslTruststorePassword != null) {
        ssl.setTrustStorePassword(sslTruststorePassword);
    }
}
Example #5
Source File: SSLUtils.java From kop with Apache License 2.0

/**
 * Configures KeyStore related settings in SslContextFactory.
 */
protected static void configureSslContextFactoryKeyStore(SslContextFactory ssl,
                                                          Map<String, Object> sslConfigValues) {
    ssl.setKeyStoreType((String) getOrDefault(sslConfigValues,
        SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, SslConfigs.DEFAULT_SSL_KEYSTORE_TYPE));

    String sslKeystoreLocation = (String) sslConfigValues.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG);
    if (sslKeystoreLocation != null) {
        ssl.setKeyStorePath(sslKeystoreLocation);
    }

    // Only set passwords that were actually configured
    String sslKeystorePassword = (String) sslConfigValues.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG);
    if (sslKeystorePassword != null) {
        ssl.setKeyStorePassword(sslKeystorePassword);
    }
    String sslKeyPassword = (String) sslConfigValues.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG);
    if (sslKeyPassword != null) {
        ssl.setKeyManagerPassword(sslKeyPassword);
    }
}
Example #6
Source File: KsqlSchemaRegistryClientFactoryTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0

@Test
public void shouldPickUpNonPrefixedSslConfig() {
    // Given:
    final KsqlConfig config = config(
        SslConfigs.SSL_PROTOCOL_CONFIG, "SSLv3"
    );
    final Map<String, Object> expectedConfigs = defaultConfigs();
    expectedConfigs.put(SslConfigs.SSL_PROTOCOL_CONFIG, "SSLv3");
    setUpMocksWithExpectedConfig(expectedConfigs);

    // When:
    final SchemaRegistryClient client =
        new KsqlSchemaRegistryClientFactory(config, restServiceSupplier, sslFactory).create();

    // Then:
    assertThat(client, is(notNullValue()));
    EasyMock.verify(restService);
}
Example #7
Source File: KsqlSchemaRegistryClientFactoryTest.java From ksql-fork-with-deep-learning-function with Apache License 2.0

@Test
public void shouldPickUpPrefixedSslConfig() {
    // Given:
    final KsqlConfig config = config(
        "ksql.schema.registry." + SslConfigs.SSL_PROTOCOL_CONFIG, "SSLv3"
    );
    final Map<String, Object> expectedConfigs = defaultConfigs();
    expectedConfigs.put(SslConfigs.SSL_PROTOCOL_CONFIG, "SSLv3");
    setUpMocksWithExpectedConfig(expectedConfigs);

    // When:
    final SchemaRegistryClient client =
        new KsqlSchemaRegistryClientFactory(config, restServiceSupplier, sslFactory).create();

    // Then:
    assertThat(client, is(notNullValue()));
    EasyMock.verify(restService);
}
Example #8
Source File: KafkaRangerAuthorizerTest.java From ranger with Apache License 2.0

@Test
public void testAuthorizedWrite() throws Exception {
    // Create the Producer
    Properties producerProps = new Properties();
    producerProps.put("bootstrap.servers", "localhost:" + port);
    producerProps.put("acks", "all");
    producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
    producerProps.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "JKS");
    producerProps.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, serviceKeystorePath);
    producerProps.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "sspass");
    producerProps.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "skpass");
    producerProps.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststorePath);
    producerProps.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "security");

    final Producer<String, String> producer = new KafkaProducer<>(producerProps);

    // Send a message
    Future<RecordMetadata> record = producer.send(new ProducerRecord<String, String>("dev", "somekey", "somevalue"));
    producer.flush();
    record.get();

    producer.close();
}
Example #9
Source File: KafkaConnection.java From components with Apache License 2.0

public static Map<String, String> createConnMaps(KafkaDatastoreProperties datastore, boolean isBeam) {
    Map<String, String> props = new HashMap<>();
    if (datastore != null) {
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, datastore.brokers.getValue());
        if (!isBeam) {
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        }
        if (datastore.useSsl.getValue()) {
            props.put("security.protocol", "SSL");
            // Truststore settings
            props.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, datastore.trustStoreType.getValue().toString());
            props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, datastore.trustStorePath.getValue());
            props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, datastore.trustStorePassword.getValue());
            if (datastore.needClientAuth.getValue()) {
                // Keystore settings for client authentication
                props.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, datastore.keyStoreType.getValue().toString());
                props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, datastore.keyStorePath.getValue());
                props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, datastore.keyStorePassword.getValue());
            }
        }
    }
    return props;
}
Example #10
Source File: CacheUtils.java From kcache with Apache License 2.0

/**
 * Get a new instance of an SSL KafkaCache and initialize it.
 */
public static Cache<String, String> createAndInitSSLKafkaCacheInstance(
        String bootstrapServers, Map<String, Object> sslConfigs, boolean requireSSLClientAuth)
        throws CacheInitializationException {
    Properties props = new Properties();
    props.put(KafkaCacheConfig.KAFKACACHE_SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SSL.toString());
    props.put(KafkaCacheConfig.KAFKACACHE_SSL_TRUSTSTORE_LOCATION_CONFIG, sslConfigs.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG));
    props.put(KafkaCacheConfig.KAFKACACHE_SSL_TRUSTSTORE_PASSWORD_CONFIG, ((Password) sslConfigs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)).value());
    if (requireSSLClientAuth) {
        props.put(KafkaCacheConfig.KAFKACACHE_SSL_KEYSTORE_LOCATION_CONFIG, sslConfigs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG));
        props.put(KafkaCacheConfig.KAFKACACHE_SSL_KEYSTORE_PASSWORD_CONFIG, ((Password) sslConfigs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)).value());
        props.put(KafkaCacheConfig.KAFKACACHE_SSL_KEY_PASSWORD_CONFIG, ((Password) sslConfigs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)).value());
    }

    Cache<String, String> inMemoryCache = new InMemoryCache<>();
    return createAndInitKafkaCacheInstance(bootstrapServers, inMemoryCache, props);
}
Example #11
Source File: TestAuthorize.java From incubator-sentry with Apache License 2.0

private KafkaConsumer<String, String> createKafkaConsumer(String user) {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "sentrykafkaconsumer");
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
    props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
    props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
    props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
    props.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "JKS");
    props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, KafkaTestServer.class.getResource("/" + user + ".keystore.jks").getPath());
    props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, user + "-ks-passwd");
    props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, user + "-key-passwd");
    props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, KafkaTestServer.class.getResource("/" + user + ".truststore.jks").getPath());
    props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, user + "-ts-passwd");
    return new KafkaConsumer<String, String>(props);
}
Example #12
Source File: KafkaClientConfigUtil.java From kafka-webview with MIT License

/**
 * If SASL is configured for this cluster, apply the settings.
 * @param clusterConfig Cluster configuration definition to source values from.
 * @param config Config map to apply settings to.
 */
private void applySaslSettings(final ClusterConfig clusterConfig, final Map<String, Object> config) {
    // Nothing to do unless SASL is enabled for this cluster
    if (!clusterConfig.isUseSasl()) {
        return;
    }
    // Pick the security protocol depending on whether SSL is also enabled
    if (clusterConfig.isUseSsl()) {
        // SASL+SSL
        config.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_SSL.name);

        // Keystore and keystore password not required if using SASL+SSL
        config.remove(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG);
        config.remove(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG);
    } else {
        // Just SASL PLAINTEXT
        config.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_PLAINTEXT.name);
    }

    config.put(SaslConfigs.SASL_MECHANISM, clusterConfig.getSaslMechanism());
    config.put(SaslConfigs.SASL_JAAS_CONFIG, clusterConfig.getSaslJaas());
}
Example #13
Source File: KafkaClientConfigUtilTest.java From kafka-webview with MIT License

private void validateSsl(
    final Map<String, Object> config,
    final String expectedSecurityProtocol,
    final boolean shouldHaveKeyStoreConfiguration,
    final boolean shouldHaveTrustStoreConfiguration
) {
    assertNotNull(config);
    validateKey(config, CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, expectedSecurityProtocol);

    if (shouldHaveTrustStoreConfiguration) {
        validateKey(config, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "/tmp/" + expectedTrustStoreFile);
        validateKey(config, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, expectedTrustStorePassword);
    } else {
        validateNoKey(config, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG);
        validateNoKey(config, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG);
    }

    if (shouldHaveKeyStoreConfiguration) {
        validateKey(config, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "/tmp/" + expectedKeyStoreFile);
        validateKey(config, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, expectedKeyStorePassword);
    } else {
        validateNoKey(config, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG);
        validateNoKey(config, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG);
    }
}
Example #14
Source File: KafkaProperties.java From eventapis with Apache License 2.0

static void buildSslOptions(Ssl ssl, Map<String, Object> properties) {
    if (ssl.getKeyPassword() != null) {
        properties.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, ssl.getKeyPassword());
    }
    if (ssl.getKeystoreLocation() != null) {
        properties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, ssl.getKeystoreLocation());
    }
    if (ssl.getKeystorePassword() != null) {
        properties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, ssl.getKeystorePassword());
    }
    if (ssl.getTruststoreLocation() != null) {
        properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, ssl.getTruststoreLocation());
    }
    if (ssl.getTruststorePassword() != null) {
        properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, ssl.getTruststorePassword());
    }
}
Example #15
Source File: TestSslUtils.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License

private static Map<String, Object> createSslConfig(Mode mode, File keyStoreFile, Password password,
                                                   Password keyPassword, File trustStoreFile, Password trustStorePassword) {
    Map<String, Object> sslConfigs = new HashMap<>();
    sslConfigs.put(SslConfigs.SSL_PROTOCOL_CONFIG, "TLSv1.2"); // protocol to create SSLContext

    if (mode == Mode.SERVER || (mode == Mode.CLIENT && keyStoreFile != null)) {
        sslConfigs.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, keyStoreFile.getPath());
        sslConfigs.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "JKS");
        sslConfigs.put(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG, TrustManagerFactory.getDefaultAlgorithm());
        sslConfigs.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, password);
        sslConfigs.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, keyPassword);
    }

    sslConfigs.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, trustStoreFile.getPath());
    sslConfigs.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, trustStorePassword);
    sslConfigs.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "JKS");
    sslConfigs.put(SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG, TrustManagerFactory.getDefaultAlgorithm());

    List<String> enabledProtocols = new ArrayList<>();
    enabledProtocols.add("TLSv1.2");
    sslConfigs.put(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG, enabledProtocols);

    return sslConfigs;
}
Example #16
Source File: KafkaModule.java From emodb with Apache License 2.0

@Provides
@Singleton
AdminClient provideAdminClient(@BootstrapServers String bootstrapServers, @Nullable SslConfiguration sslConfiguration) {
    Properties properties = new Properties();
    properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    if (null != sslConfiguration) {
        properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SslConfiguration.PROTOCOL);
        properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, sslConfiguration.getTrustStoreLocation());
        properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, sslConfiguration.getTrustStorePassword());
        properties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, sslConfiguration.getKeyStoreLocation());
        properties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, sslConfiguration.getKeyStorePassword());
        properties.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, sslConfiguration.getKeyPassword());
    }
    return AdminClient.create(properties);
}
Example #17
Source File: KafkaClientProperties.java From strimzi-kafka-operator with Apache License 2.0

public KafkaClientPropertiesBuilder withSaslJassConfigAndTls(String clientId, String clientSecretName, String oauthTokenEndpointUri) {
    try {
        importKeycloakCertificateToTruststore(properties);
        fixBadlyImportedAuthzSettings();
    } catch (Exception e) {
        e.printStackTrace();
    }

    if (clientId.isEmpty() || clientSecretName.isEmpty() || oauthTokenEndpointUri.isEmpty()) {
        throw new InvalidParameterException("You do not specify client-id, client-secret name or oauth-token-endpoint-uri inside kafka client!");
    }

    properties.setProperty(SaslConfigs.SASL_JAAS_CONFIG,
        "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required "
            + "oauth.client.id=\"" + clientId + "\" "
            + "oauth.client.secret=\"" + clientSecretName + "\" "
            + "oauth.token.endpoint.uri=\"" + oauthTokenEndpointUri + "\" "
            + "oauth.ssl.endpoint.identification.algorithm=\"\" "
            + "oauth.ssl.truststore.location=\"" + properties.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG) + "\" "
            + "oauth.ssl.truststore.password=\"" + properties.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG) + "\" "
            + "oauth.ssl.truststore.type=\"" + properties.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG) + "\" ;");

    return this;
}
Example #18
Source File: CDM.java From SPADE with GNU General Public License v3.0

@Override
protected Properties getDefaultKafkaProducerProperties(String kafkaServer, String kafkaTopic, String kafkaProducerID, String schemaFilename) {
    Properties properties = new Properties();
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer);
    properties.put(ProducerConfig.CLIENT_ID_CONFIG, kafkaProducerID);
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, com.bbn.tc.schema.serialization.kafka.KafkaAvroGenericSerializer.class);
    properties.put(AvroConfig.SCHEMA_WRITER_FILE, schemaFilename);
    properties.put(AvroConfig.SCHEMA_SERDE_IS_SPECIFIC, true);
    if (useSsl) {
        properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
        properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, trustStoreLocation);
        properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, trustStorePassword);
        properties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, keyStoreLocation);
        properties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, keyStorePassword);
        properties.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, keyPassword);
    }
    return properties;
}
Example #19
Source File: PepperBoxKafkaSampler.java From pepper-box with Apache License 2.0

/**
 * Set default parameters and their values
 *
 * @return
 */
@Override
public Arguments getDefaultParameters() {
    Arguments defaultParameters = new Arguments();
    defaultParameters.addArgument(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, ProducerKeys.BOOTSTRAP_SERVERS_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerKeys.ZOOKEEPER_SERVERS, ProducerKeys.ZOOKEEPER_SERVERS_DEFAULT);
    defaultParameters.addArgument(ProducerKeys.KAFKA_TOPIC_CONFIG, ProducerKeys.KAFKA_TOPIC_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ProducerKeys.KEY_SERIALIZER_CLASS_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ProducerKeys.VALUE_SERIALIZER_CLASS_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerConfig.COMPRESSION_TYPE_CONFIG, ProducerKeys.COMPRESSION_TYPE_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerConfig.BATCH_SIZE_CONFIG, ProducerKeys.BATCH_SIZE_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerConfig.LINGER_MS_CONFIG, ProducerKeys.LINGER_MS_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerConfig.BUFFER_MEMORY_CONFIG, ProducerKeys.BUFFER_MEMORY_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerConfig.ACKS_CONFIG, ProducerKeys.ACKS_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerConfig.SEND_BUFFER_CONFIG, ProducerKeys.SEND_BUFFER_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerConfig.RECEIVE_BUFFER_CONFIG, ProducerKeys.RECEIVE_BUFFER_CONFIG_DEFAULT);
    defaultParameters.addArgument(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.PLAINTEXT.name);
    defaultParameters.addArgument(PropsKeys.KEYED_MESSAGE_KEY, PropsKeys.KEYED_MESSAGE_DEFAULT);
    defaultParameters.addArgument(PropsKeys.MESSAGE_KEY_PLACEHOLDER_KEY, PropsKeys.MSG_KEY_PLACEHOLDER);
    defaultParameters.addArgument(PropsKeys.MESSAGE_VAL_PLACEHOLDER_KEY, PropsKeys.MSG_PLACEHOLDER);
    defaultParameters.addArgument(ProducerKeys.KERBEROS_ENABLED, ProducerKeys.FLAG_NO);
    defaultParameters.addArgument(ProducerKeys.JAVA_SEC_AUTH_LOGIN_CONFIG, ProducerKeys.JAVA_SEC_AUTH_LOGIN_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerKeys.JAVA_SEC_KRB5_CONFIG, ProducerKeys.JAVA_SEC_KRB5_CONFIG_DEFAULT);
    defaultParameters.addArgument(ProducerKeys.SASL_KERBEROS_SERVICE_NAME, ProducerKeys.SASL_KERBEROS_SERVICE_NAME_DEFAULT);
    defaultParameters.addArgument(ProducerKeys.SASL_MECHANISM, ProducerKeys.SASL_MECHANISM_DEFAULT);
    defaultParameters.addArgument(ProducerKeys.SSL_ENABLED, ProducerKeys.FLAG_NO);
    defaultParameters.addArgument(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "<Key Password>");
    defaultParameters.addArgument(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "<Keystore Location>");
    defaultParameters.addArgument(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "<Keystore Password>");
    defaultParameters.addArgument(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, SslConfigs.DEFAULT_SSL_KEYSTORE_TYPE);
    defaultParameters.addArgument(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "<Truststore Location>");
    defaultParameters.addArgument(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "<Truststore Password>");
    defaultParameters.addArgument(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, SslConfigs.DEFAULT_SSL_TRUSTSTORE_TYPE);
    return defaultParameters;
}
Example #20
Source File: KafkaClientConfigUtilTest.java From kafka-webview with MIT License

private void validateNoSsl(final Map<String, Object> config) {
    assertNotNull(config);
    validateNoKey(config, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG);
    validateNoKey(config, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG);
    validateNoKey(config, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG);
    validateNoKey(config, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG);
}
Example #21
Source File: KafkaServiceImpl.java From kafka-eagle with Apache License 2.0

/**
 * Set topic ssl.
 */
public void ssl(Properties props, String clusterAlias) {
    // configure the following three settings for SSL Encryption
    props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SystemConfigUtils.getProperty(clusterAlias + ".kafka.eagle.ssl.protocol"));
    props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, SystemConfigUtils.getProperty(clusterAlias + ".kafka.eagle.ssl.truststore.location"));
    props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, SystemConfigUtils.getProperty(clusterAlias + ".kafka.eagle.ssl.truststore.password"));

    // configure the following three settings for SSL Authentication
    props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, SystemConfigUtils.getProperty(clusterAlias + ".kafka.eagle.ssl.keystore.location"));
    props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, SystemConfigUtils.getProperty(clusterAlias + ".kafka.eagle.ssl.keystore.password"));
    props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, SystemConfigUtils.getProperty(clusterAlias + ".kafka.eagle.ssl.key.password"));
}
Example #22
Source File: KafkaProcessorUtils.java From nifi with Apache License 2.0

static void buildCommonKafkaProperties(final ProcessContext context, final Class<?> kafkaConfigClass, final Map<String, Object> mapToPopulate) {
    for (PropertyDescriptor propertyDescriptor : context.getProperties().keySet()) {
        if (propertyDescriptor.equals(SSL_CONTEXT_SERVICE)) {
            // Translate SSLContext Service configuration into Kafka properties
            final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
            if (sslContextService != null && sslContextService.isKeyStoreConfigured()) {
                mapToPopulate.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, sslContextService.getKeyStoreFile());
                mapToPopulate.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, sslContextService.getKeyStorePassword());

                final String keyPass = sslContextService.getKeyPassword() == null
                        ? sslContextService.getKeyStorePassword()
                        : sslContextService.getKeyPassword();
                mapToPopulate.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, keyPass);
                mapToPopulate.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, sslContextService.getKeyStoreType());
            }

            if (sslContextService != null && sslContextService.isTrustStoreConfigured()) {
                mapToPopulate.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, sslContextService.getTrustStoreFile());
                mapToPopulate.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, sslContextService.getTrustStorePassword());
                mapToPopulate.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, sslContextService.getTrustStoreType());
            }
        }

        String propertyName = propertyDescriptor.getName();
        String propertyValue = propertyDescriptor.isExpressionLanguageSupported()
                ? context.getProperty(propertyDescriptor).evaluateAttributeExpressions().getValue()
                : context.getProperty(propertyDescriptor).getValue();

        if (propertyValue != null) {
            // If the property name ends in ".ms" then it is a time period. We want to accept either an integer as number of milliseconds
            // or the standard NiFi time period such as "5 secs"
            if (propertyName.endsWith(".ms") && !StringUtils.isNumeric(propertyValue.trim())) { // kafka standard time notation
                propertyValue = String.valueOf(FormatUtils.getTimeDuration(propertyValue.trim(), TimeUnit.MILLISECONDS));
            }

            if (isStaticStringFieldNamePresent(propertyName, kafkaConfigClass, CommonClientConfigs.class, SslConfigs.class, SaslConfigs.class)) {
                mapToPopulate.put(propertyName, propertyValue);
            }
        }
    }
}
Example #23
Source File: KafkaTopicProvisionerTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0

@SuppressWarnings("rawtypes")
@Test
public void bootPropertiesOverriddenExceptServers() throws Exception {
    KafkaProperties bootConfig = new KafkaProperties();
    bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT");
    bootConfig.setBootstrapServers(Collections.singletonList("localhost:1234"));
    KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(bootConfig);
    binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL");
    ClassPathResource ts = new ClassPathResource("test.truststore.ks");
    binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, ts.getFile().getAbsolutePath());
    binderConfig.setBrokers("localhost:9092");

    KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig, bootConfig);
    AdminClient adminClient = provisioner.createAdminClient();

    assertThat(KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
    Map configs = KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder.configs", Map.class);
    assertThat(((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0)).isEqualTo("localhost:1234");
    adminClient.close();
}
Example #24
Source File: ReportLineageToAtlas.java From nifi with Apache License 2.0

private void setKafkaConfig(Map<Object, Object> mapToPopulate, PropertyContext context) {
    final String kafkaBootStrapServers = context.getProperty(KAFKA_BOOTSTRAP_SERVERS).evaluateAttributeExpressions().getValue();
    mapToPopulate.put(ATLAS_PROPERTY_KAFKA_BOOTSTRAP_SERVERS, kafkaBootStrapServers);
    mapToPopulate.put(ATLAS_PROPERTY_KAFKA_CLIENT_ID, String.format("%s.%s", getName(), getIdentifier()));

    final String kafkaSecurityProtocol = context.getProperty(KAFKA_SECURITY_PROTOCOL).getValue();
    mapToPopulate.put(ATLAS_KAFKA_PREFIX + "security.protocol", kafkaSecurityProtocol);

    // Translate SSLContext Service configuration into Kafka properties
    final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT_SERVICE).asControllerService(SSLContextService.class);
    if (sslContextService != null && sslContextService.isKeyStoreConfigured()) {
        mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, sslContextService.getKeyStoreFile());
        mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, sslContextService.getKeyStorePassword());
        final String keyPass = sslContextService.getKeyPassword() == null
                ? sslContextService.getKeyStorePassword()
                : sslContextService.getKeyPassword();
        mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_KEY_PASSWORD_CONFIG, keyPass);
        mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, sslContextService.getKeyStoreType());
    }

    if (sslContextService != null && sslContextService.isTrustStoreConfigured()) {
        mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, sslContextService.getTrustStoreFile());
        mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, sslContextService.getTrustStorePassword());
        mapToPopulate.put(ATLAS_KAFKA_PREFIX + SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, sslContextService.getTrustStoreType());
    }

    if (SEC_SASL_PLAINTEXT.equals(kafkaSecurityProtocol) || SEC_SASL_SSL.equals(kafkaSecurityProtocol)) {
        setKafkaJaasConfig(mapToPopulate, context);
    }
}
Example #25
Source File: KafkaRangerAuthorizerSASLSSLTest.java From ranger with Apache License 2.0

@Test
public void testAuthorizedWrite() throws Exception {
    // Create the Producer
    Properties producerProps = new Properties();
    producerProps.put("bootstrap.servers", "localhost:" + port);
    producerProps.put("acks", "all");
    producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
    producerProps.put("sasl.mechanism", "PLAIN");
    producerProps.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "JKS");
    producerProps.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, serviceKeystorePath);
    producerProps.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "sspass");
    producerProps.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "skpass");
    producerProps.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststorePath);
    producerProps.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "security");

    final Producer<String, String> producer = new KafkaProducer<>(producerProps);

    // Send a message
    Future<RecordMetadata> record = producer.send(new ProducerRecord<String, String>("dev", "somekey", "somevalue"));
    producer.flush();
    record.get();

    producer.close();
}
Example #26
Source File: EventStreamsConsoleSample.java From event-streams-samples with Apache License 2.0

static final Map<String, Object> getCommonConfigs(String bootstrapServers, String apikey) {
    Map<String, Object> configs = new HashMap<>();
    configs.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    configs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
    configs.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
    configs.put(SaslConfigs.SASL_JAAS_CONFIG,
        "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"token\" password=\"" + apikey + "\";");
    configs.put(SslConfigs.SSL_PROTOCOL_CONFIG, "TLSv1.2");
    configs.put(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG, "TLSv1.2");
    configs.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "HTTPS");
    return configs;
}
Example #27
Source File: KafkaSecUtils.java From bdt with Apache License 2.0

public void createConnection(String brokersUrl, String keystore, String keypass, String truststore, String trustpass) throws InterruptedException {
    if (adminClient != null) {
        closeConnection();
    }

    kafkaConnectionProperties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, brokersUrl);
    kafkaProducerProperties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, brokersUrl);
    kafkaConsumerProperties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, brokersUrl);

    logger.debug("Creating secured Kafka connection: " + brokersUrl);
    kafkaConnectionProperties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
    kafkaConnectionProperties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststore);
    kafkaConnectionProperties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, trustpass);
    kafkaConnectionProperties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, keystore);
    kafkaConnectionProperties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, keypass);

    kafkaProducerProperties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
    kafkaProducerProperties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststore);
    kafkaProducerProperties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, trustpass);
    kafkaProducerProperties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, keystore);
    kafkaProducerProperties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, keypass);

    kafkaConsumerProperties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
    kafkaConsumerProperties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststore);
    kafkaConsumerProperties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, trustpass);
    kafkaConsumerProperties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, keystore);
    kafkaConsumerProperties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, keypass);

    adminClient = KafkaAdminClient.create(kafkaConnectionProperties);
    logger.debug("Kafka connection created.");
}
Example #28
Source File: DefaultKafkaCluster.java From emodb with Apache License 2.0

private Producer<String, JsonNode> createProducer() {
    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, _bootstrapServers);
    props.put(ProducerConfig.ACKS_CONFIG, Constants.ACKS_CONFIG);
    props.put(ProducerConfig.RETRIES_CONFIG, Constants.RETRIES_CONFIG);
    props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, Constants.MAX_REQUEST_SIZE);
    props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, Constants.PRODUCER_COMPRESSION_TYPE);
    props.put(ProducerConfig.CLIENT_ID_CONFIG, _instanceIdentifier);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);

    if (_kafkaProducerConfiguration.getLingerMs().isPresent()) {
        props.put(ProducerConfig.LINGER_MS_CONFIG, _kafkaProducerConfiguration.getLingerMs().get());
    }
    if (_kafkaProducerConfiguration.getBatchsize().isPresent()) {
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, _kafkaProducerConfiguration.getBatchsize().get());
    }
    if (_kafkaProducerConfiguration.getBufferMemory().isPresent()) {
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, _kafkaProducerConfiguration.getBufferMemory().get());
    }

    if (null != _sslConfiguration) {
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SslConfiguration.PROTOCOL);
        props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, _sslConfiguration.getTrustStoreLocation());
        props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, _sslConfiguration.getTrustStorePassword());
        props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, _sslConfiguration.getKeyStoreLocation());
        props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, _sslConfiguration.getKeyStorePassword());
        props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, _sslConfiguration.getKeyPassword());
    }

    return new KafkaProducer<>(props);
}
Example #29
Source File: KafkaRangerAuthorizerTest.java From ranger with Apache License 2.0

@Test
public void testUnauthorizedWrite() throws Exception {
    // Create the Producer
    Properties producerProps = new Properties();
    producerProps.put("bootstrap.servers", "localhost:" + port);
    producerProps.put("acks", "all");
    producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    producerProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
    producerProps.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "JKS");
    producerProps.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, clientKeystorePath);
    producerProps.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "cspass");
    producerProps.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "ckpass");
    producerProps.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststorePath);
    producerProps.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "security");

    final Producer<String, String> producer = new KafkaProducer<>(producerProps);

    // Send a message
    Future<RecordMetadata> record = producer.send(new ProducerRecord<String, String>("test", "somekey", "somevalue"));
    producer.flush();
    record.get();

    try {
        record = producer.send(new ProducerRecord<String, String>("dev", "somekey", "somevalue"));
        producer.flush();
        record.get();
    } catch (Exception ex) {
        Assert.assertTrue(ex.getMessage().contains("Not authorized to access topics"));
    }

    producer.close();
}
Example #30
Source File: KafkaTopicProvisionerTests.java From spring-cloud-stream-binder-kafka with Apache License 2.0

@SuppressWarnings("rawtypes")
@Test
public void bootPropertiesOverriddenIncludingServers() throws Exception {
    KafkaProperties bootConfig = new KafkaProperties();
    bootConfig.getProperties().put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT");
    bootConfig.setBootstrapServers(Collections.singletonList("localhost:9092"));
    KafkaBinderConfigurationProperties binderConfig = new KafkaBinderConfigurationProperties(bootConfig);
    binderConfig.getConfiguration().put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL");
    ClassPathResource ts = new ClassPathResource("test.truststore.ks");
    binderConfig.getConfiguration().put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, ts.getFile().getAbsolutePath());
    binderConfig.setBrokers("localhost:1234");

    KafkaTopicProvisioner provisioner = new KafkaTopicProvisioner(binderConfig, bootConfig);
    AdminClient adminClient = provisioner.createAdminClient();

    assertThat(KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder")).isInstanceOf(SslChannelBuilder.class);
    Map configs = KafkaTestUtils.getPropertyValue(adminClient, "client.selector.channelBuilder.configs", Map.class);
    assertThat(((List) configs.get(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG)).get(0)).isEqualTo("localhost:1234");
    adminClient.close();
}