org.apache.kafka.common.acl.AclOperation Java Examples

The following examples show how to use org.apache.kafka.common.acl.AclOperation. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License 6 votes vote down vote up
/**
 * Removes the ACLs matching the given binding by building a single
 * {@link AclBindingFilter} from the binding's resource and entry fields.
 *
 * @param aclBinding the topology ACL binding whose matching ACLs should be cleared
 * @throws IOException if the delegated clear call fails
 */
public void clearAcls(TopologyAclBinding aclBinding) throws IOException {
  LOGGER.debug("clearAcl = " + aclBinding);

  ResourcePatternFilter resourceFilter =
      new ResourcePatternFilter(
          aclBinding.getResourceType(),
          aclBinding.getResourceName(),
          PatternType.valueOf(aclBinding.getPattern()));

  AccessControlEntryFilter entryFilter =
      new AccessControlEntryFilter(
          aclBinding.getPrincipal(),
          aclBinding.getHost(),
          AclOperation.valueOf(aclBinding.getOperation()),
          AclPermissionType.ANY);

  // The downstream overload expects a collection, even for a single filter.
  Collection<AclBindingFilter> filters = new ArrayList<>();
  filters.add(new AclBindingFilter(resourceFilter, entryFilter));
  clearAcls(filters);
}
 
Example #2
Source File: SecureIntegrationTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0 6 votes vote down vote up
@Test
public void shouldRunQueryWithChangeLogsAgainstKafkaClusterWithWildcardAcls() throws Exception {
  // Given: cluster-level create/describe-configs plus wildcard topic and group rights.
  final Set<AclOperation> clusterOps =
      ImmutableSet.of(AclOperation.DESCRIBE_CONFIGS, AclOperation.CREATE);
  final Set<AclOperation> topicOps =
      ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ,
                      AclOperation.WRITE, AclOperation.DELETE);
  final Set<AclOperation> groupOps =
      ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ);

  givenAllowAcl(NORMAL_USER, ResourceType.CLUSTER, "kafka-cluster", clusterOps);
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, "*", topicOps);
  givenAllowAcl(NORMAL_USER, ResourceType.GROUP, "*", groupOps);

  givenTestSetupWithConfig(getKsqlConfig(NORMAL_USER));

  // Then:
  assertCanRunRepartitioningKsqlQuery();
}
 
Example #3
Source File: AccessControlManagerIT.java    From kafka-topology-builder with MIT License 6 votes vote down vote up
/**
 * Asserts that every Control Center principal ends up with exactly 16 topic ACLs.
 */
private void verifyControlCenterAcls(Platform platform)
    throws ExecutionException, InterruptedException {

  for (ControlCenter controlCenter : platform.getControlCenter()) {
    // Match any ALLOW topic ACL owned by this principal, regardless of pattern.
    AclBindingFilter filter =
        new AclBindingFilter(
            new ResourcePatternFilter(ResourceType.TOPIC, null, PatternType.ANY),
            new AccessControlEntryFilter(
                controlCenter.getPrincipal(), null, AclOperation.ANY, AclPermissionType.ALLOW));

    Collection<AclBinding> acls = kafkaAdminClient.describeAcls(filter).values().get();
    Assert.assertEquals(16, acls.size());
  }
}
 
Example #4
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License 6 votes vote down vote up
/**
 * Grants a Kafka Streams application the ACLs it needs: READ on its input topics,
 * WRITE on its output topics, and ALL on every topic under its internal prefix.
 *
 * @param principal   principal the ACLs are granted to
 * @param topicPrefix prefix under which the app creates its internal topics
 * @param readTopics  topics the application consumes from
 * @param writeTopics topics the application produces to
 * @return the bindings that were created
 * @throws IOException if ACL creation fails
 */
public List<AclBinding> setAclsForStreamsApp(
    String principal, String topicPrefix, List<String> readTopics, List<String> writeTopics)
    throws IOException {

  List<AclBinding> acls = new ArrayList<>();

  for (String topic : readTopics) {
    acls.add(buildTopicLevelAcl(principal, topic, PatternType.LITERAL, AclOperation.READ));
  }
  for (String topic : writeTopics) {
    acls.add(buildTopicLevelAcl(principal, topic, PatternType.LITERAL, AclOperation.WRITE));
  }

  // Internal topics (changelogs, repartition topics) live under the prefix.
  acls.add(buildTopicLevelAcl(principal, topicPrefix, PatternType.PREFIXED, AclOperation.ALL));

  createAcls(acls);
  return acls;
}
 
Example #5
Source File: AccessControlManagerIT.java    From kafka-topology-builder with MIT License 5 votes vote down vote up
/**
 * Asserts that each consumer principal holds exactly three ALLOW ACLs covering
 * both TOPIC and GROUP resources with DESCRIBE and READ operations.
 */
private void verifyConsumerAcls(List<Consumer> consumers, String topic)
    throws InterruptedException, ExecutionException {

  for (Consumer consumer : consumers) {
    AclBindingFilter filter =
        new AclBindingFilter(
            ResourcePatternFilter.ANY,
            new AccessControlEntryFilter(
                consumer.getPrincipal(), null, AclOperation.ANY, AclPermissionType.ALLOW));

    Collection<AclBinding> acls = kafkaAdminClient.describeAcls(filter).values().get();

    // Expected: topic DESCRIBE, topic READ, group READ.
    Assert.assertEquals(3, acls.size());

    List<ResourceType> resourceTypes = new ArrayList<>();
    List<AclOperation> operations = new ArrayList<>();
    for (AclBinding binding : acls) {
      resourceTypes.add(binding.pattern().resourceType());
      operations.add(binding.entry().operation());
    }

    Assert.assertTrue(resourceTypes.contains(ResourceType.GROUP));
    Assert.assertTrue(resourceTypes.contains(ResourceType.TOPIC));
    Assert.assertTrue(operations.contains(AclOperation.DESCRIBE));
    Assert.assertTrue(operations.contains(AclOperation.READ));
  }
}
 
Example #6
Source File: SimpleAclOperator.java    From strimzi-kafka-operator with Apache License 2.0 5 votes vote down vote up
/**
 * Returns Set of ACLs applying to single user.
 *
 * <p>If describing ACLs fails for a reason other than the two recognised
 * misconfigurations, the failure is logged and an empty set is returned
 * (best-effort contract preserved from the original implementation).
 *
 * @param username  Name of the user.
 * @return The Set of ACLs applying to single user.
 */
public Set<SimpleAclRule> getAcls(String username)   {
    log.debug("Searching for ACL rules of user {}", username);
    Set<SimpleAclRule> result = new HashSet<>();
    KafkaPrincipal principal = new KafkaPrincipal("User", username);

    // Match every resource, operation and permission type for this principal.
    AclBindingFilter aclBindingFilter = new AclBindingFilter(ResourcePatternFilter.ANY,
        new AccessControlEntryFilter(principal.toString(), null, AclOperation.ANY, AclPermissionType.ANY));

    Collection<AclBinding> aclBindings = null;
    try {
        aclBindings = adminClient.describeAcls(aclBindingFilter).values().get();
    } catch (InterruptedException e) {
        // FIX: restore the interrupt flag so callers further up the stack can react;
        // the original swallowed the interruption entirely.
        Thread.currentThread().interrupt();
        log.warn("Interrupted while describing ACL rules for user {}", username, e);
    } catch (ExecutionException e) {
        // Admin Client API needs authorizer enabled on the Kafka brokers
        if (e.getCause() instanceof SecurityDisabledException) {
            throw new InvalidResourceException("Authorization needs to be enabled in the Kafka custom resource", e.getCause());
        } else if (e.getCause() instanceof UnknownServerException && e.getMessage().contains("Simple ACL delegation not enabled")) {
            throw new InvalidResourceException("Simple ACL delegation needs to be enabled in the Kafka custom resource", e.getCause());
        } else {
            // FIX: previously any other broker-side failure was silently dropped;
            // log it before falling through to the empty result.
            log.warn("Failed to describe ACL rules for user {}", username, e);
        }
    }

    if (aclBindings != null) {
        log.debug("ACL rules for user {}", username);
        for (AclBinding aclBinding : aclBindings) {
            log.debug("{}", aclBinding);
            result.add(SimpleAclRule.fromAclBinding(aclBinding));
        }
    }

    return result;
}
 
Example #7
Source File: EmbeddedSingleNodeKafkaCluster.java    From ksql-fork-with-deep-learning-function with Apache License 2.0 5 votes vote down vote up
/**
 * Writes the supplied ACL information to ZK, where it will be picked up by the brokes authorizer.
 *
 * @param username    the who.
 * @param permission  the allow|deny.
 * @param resource    the thing
 * @param ops         the what.
 */
/**
 * Writes the supplied ACL information to ZK, where it will be picked up by the
 * brokers' authorizer.
 *
 * @param username    the who.
 * @param permission  the allow|deny.
 * @param resource    the thing
 * @param ops         the what.
 */
public void addUserAcl(final String username,
                       final AclPermissionType permission,
                       final Resource resource,
                       final Set<AclOperation> ops) {

  final KafkaPrincipal principal = new KafkaPrincipal("User", username);
  final PermissionType scalaPermission = PermissionType$.MODULE$.fromJava(permission);

  // Translate each Java AclOperation straight into a scala-side Acl entry.
  final Set<Acl> javaAcls = ops.stream()
      .map(op -> new Acl(principal, scalaPermission, "*", Operation$.MODULE$.fromJava(op)))
      .collect(Collectors.toSet());

  final scala.collection.immutable.Set<Acl> scalaAcls =
      JavaConversions.asScalaSet(javaAcls).toSet();

  final kafka.security.auth.Resource scalaResource =
      new kafka.security.auth.Resource(
          ResourceType$.MODULE$.fromJava(resource.resourceType()), resource.name());

  authorizer.addAcls(scalaAcls, scalaResource);

  // Remember the resource so the cluster can clean the ACL up on teardown.
  addedAcls.add(scalaResource);
}
 
Example #8
Source File: SecureIntegrationTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0 5 votes vote down vote up
/** Grants {@code ops} on the given resource to the supplied credentials via an ALLOW ACL. */
private void givenAllowAcl(final Credentials credentials,
                           final ResourceType resourceType,
                           final String resourceName,
                           final Set<AclOperation> ops) {
  final Resource resource = new Resource(resourceType, resourceName);
  SECURE_CLUSTER.addUserAcl(credentials.username, AclPermissionType.ALLOW, resource, ops);
}
 
Example #9
Source File: SecureIntegrationTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0 5 votes vote down vote up
@Test
public void shouldRunQueryWithChangeLogsAgainstKafkaClusterWithAclsAndCustomPrefixed()
    throws Exception {
  // Given:
  outputTopic = "ACLS_TEST_4";

  // Internal topic names derive from the custom "t4_" service id set below.
  final String repartitionTopic =
      "_confluent-ksql-t4_query_CTAS_ACLS_TEST_4-KSTREAM-AGGREGATE-STATE-STORE-0000000006-repartition";
  final String changeLogTopic =
      "_confluent-ksql-t4_query_CTAS_ACLS_TEST_4-KSTREAM-AGGREGATE-STATE-STORE-0000000006-changelog";

  givenAllowAcl(NORMAL_USER, ResourceType.CLUSTER, "kafka-cluster",
                ImmutableSet.of(AclOperation.DESCRIBE_CONFIGS, AclOperation.CREATE));
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, INPUT_TOPIC,
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ));
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, "__consumer_offsets",
                ImmutableSet.of(AclOperation.DESCRIBE));
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, outputTopic,
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.WRITE));
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, repartitionTopic,
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ,
                                AclOperation.WRITE, AclOperation.DELETE));
  // READ on the changelog would only be needed for state-store recovery.
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, changeLogTopic,
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.WRITE,
                                AclOperation.DELETE));
  givenAllowAcl(NORMAL_USER, ResourceType.GROUP, "_confluent-ksql-t4_query_CTAS_ACLS_TEST_4",
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ));

  final Map<String, Object> ksqlConfig = getKsqlConfig(NORMAL_USER);
  ksqlConfig.put(KsqlConfig.KSQL_SERVICE_ID_CONFIG, "t4_");
  givenTestSetupWithConfig(ksqlConfig);

  // Then:
  assertCanRunRepartitioningKsqlQuery();
}
 
Example #10
Source File: SecureIntegrationTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0 5 votes vote down vote up
@Test
public void shouldRunQueryWithChangeLogsAgainstKafkaClusterWithAcls() throws Exception {
  // Given:
  outputTopic = "ACLS_TEST_2";

  // Internal topic names use the default service id prefix.
  final String repartitionTopic =
      "_confluent-ksql-default_query_CTAS_ACLS_TEST_2-KSTREAM-AGGREGATE-STATE-STORE-0000000006-repartition";
  final String changeLogTopic =
      "_confluent-ksql-default_query_CTAS_ACLS_TEST_2-KSTREAM-AGGREGATE-STATE-STORE-0000000006-changelog";

  givenAllowAcl(NORMAL_USER, ResourceType.CLUSTER, "kafka-cluster",
                ImmutableSet.of(AclOperation.DESCRIBE_CONFIGS, AclOperation.CREATE));
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, INPUT_TOPIC,
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ));
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, "__consumer_offsets",
                ImmutableSet.of(AclOperation.DESCRIBE));
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, outputTopic,
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.WRITE));
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, repartitionTopic,
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ,
                                AclOperation.WRITE, AclOperation.DELETE));
  // READ on the changelog would only be needed for state-store recovery.
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, changeLogTopic,
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.WRITE,
                                AclOperation.DELETE));
  givenAllowAcl(NORMAL_USER, ResourceType.GROUP, "_confluent-ksql-default_query_CTAS_ACLS_TEST_2",
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ));

  givenTestSetupWithConfig(getKsqlConfig(NORMAL_USER));

  // Then:
  assertCanRunRepartitioningKsqlQuery();
}
 
Example #11
Source File: SecureIntegrationTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0 5 votes vote down vote up
@Test
public void shouldRunQueryAgainstKafkaClusterOverSsl() throws Exception {
  // Given: all users may create/describe the cluster and use any topic or group.
  givenAllowAcl(ALL_USERS, ResourceType.CLUSTER, "kafka-cluster",
                ImmutableSet.of(AclOperation.DESCRIBE_CONFIGS, AclOperation.CREATE));
  givenAllowAcl(ALL_USERS, ResourceType.TOPIC, "*",
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ,
                                AclOperation.WRITE, AclOperation.DELETE));
  givenAllowAcl(ALL_USERS, ResourceType.GROUP, "*",
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ));

  final Map<String, Object> ksqlConfig = getBaseKsqlConfig();

  // Point KSQL at the SSL listener and supply a truststore so it can verify brokers.
  ksqlConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                 SECURE_CLUSTER.bootstrapServers(SecurityProtocol.SSL));
  ksqlConfig.put("security.protocol", "SSL");
  ksqlConfig.put("ssl.truststore.location", ClientTrustStore.trustStorePath());
  ksqlConfig.put("ssl.truststore.password", ClientTrustStore.trustStorePassword());

  givenTestSetupWithConfig(ksqlConfig);

  // Then:
  assertCanRunSimpleKsqlQuery();
}
 
Example #12
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License 5 votes vote down vote up
/**
 * Grants the Schema Registry principal DESCRIBE_CONFIGS, WRITE and READ on its
 * "_schemas" backing topic.
 *
 * @param principal the schema-registry principal
 * @return the bindings that were created
 * @throws IOException if ACL creation fails
 */
public List<AclBinding> setAclForSchemaRegistry(String principal) throws IOException {
  // Stream.of is the idiomatic way to stream a fixed set of values; the previous
  // Arrays.asList(...).stream() allocated an intermediate list for no benefit.
  List<AclBinding> bindings =
      Stream.of(AclOperation.DESCRIBE_CONFIGS, AclOperation.WRITE, AclOperation.READ)
          .map(
              aclOperation ->
                  buildTopicLevelAcl(principal, "_schemas", PatternType.LITERAL, aclOperation))
          .collect(Collectors.toList());
  createAcls(bindings);
  return bindings;
}
 
Example #13
Source File: AccessControlManagerIT.java    From kafka-topology-builder with MIT License 5 votes vote down vote up
/**
 * Asserts that each producer principal holds exactly two ALLOW ACLs on a TOPIC
 * resource: DESCRIBE and WRITE.
 */
private void verifyProducerAcls(List<Producer> producers, String topic)
    throws InterruptedException, ExecutionException {

  for (Producer producer : producers) {
    AclBindingFilter filter =
        new AclBindingFilter(
            ResourcePatternFilter.ANY,
            new AccessControlEntryFilter(
                producer.getPrincipal(), null, AclOperation.ANY, AclPermissionType.ALLOW));

    Collection<AclBinding> acls = kafkaAdminClient.describeAcls(filter).values().get();

    // Expected: topic DESCRIBE and topic WRITE.
    Assert.assertEquals(2, acls.size());

    List<ResourceType> resourceTypes = new ArrayList<>();
    List<AclOperation> operations = new ArrayList<>();
    for (AclBinding binding : acls) {
      resourceTypes.add(binding.pattern().resourceType());
      operations.add(binding.entry().operation());
    }

    Assert.assertTrue(resourceTypes.contains(ResourceType.TOPIC));
    Assert.assertTrue(operations.contains(AclOperation.DESCRIBE));
    Assert.assertTrue(operations.contains(AclOperation.WRITE));
  }
}
 
Example #14
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License 5 votes vote down vote up
/**
 * Grants a producer principal DESCRIBE and WRITE on its target topic.
 *
 * @param principal producer principal
 * @param topic     topic the producer writes to
 * @return the bindings that were created
 * @throws IOException if ACL creation fails
 */
public List<AclBinding> setAclsForProducer(String principal, String topic) throws IOException {
  List<AclBinding> acls = new ArrayList<>();
  for (AclOperation op : Arrays.asList(AclOperation.DESCRIBE, AclOperation.WRITE)) {
    acls.add(buildTopicLevelAcl(principal, topic, PatternType.LITERAL, op));
  }
  createAcls(acls);
  return acls;
}
 
Example #15
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License 5 votes vote down vote up
/** Builds an ALLOW ACL for {@code op} on the given consumer group, open to any host. */
private AclBinding buildGroupLevelAcl(
    String principal, String group, PatternType patternType, AclOperation op) {
  AclBuilder builder = new AclBuilder(principal);
  builder.addResource(ResourceType.GROUP, group, patternType);
  builder.addControlEntry("*", op, AclPermissionType.ALLOW);
  return builder.build();
}
 
Example #16
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License 5 votes vote down vote up
/** Builds an ALLOW ACL for {@code op} on the given topic, open to any host. */
private AclBinding buildTopicLevelAcl(
    String principal, String topic, PatternType patternType, AclOperation op) {
  AclBuilder builder = new AclBuilder(principal);
  builder.addResource(ResourceType.TOPIC, topic, patternType);
  builder.addControlEntry("*", op, AclPermissionType.ALLOW);
  return builder.build();
}
 
Example #17
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License 5 votes vote down vote up
/**
 * Grants a Kafka Connect cluster its standard ACLs: READ/WRITE on the three
 * internal bookkeeping topics, CREATE on the cluster, READ on all groups, and
 * per-topic READ/WRITE for the connectors' source and sink topics.
 *
 * @param principal   the Connect worker principal
 * @param topicPrefix prefix for the cluster's topics (currently unused here)
 * @param readTopics  topics source connectors read, may be null
 * @param writeTopics topics sink connectors write, may be null
 * @return the bindings that were created
 * @throws IOException if ACL creation fails
 */
public List<AclBinding> setAclsForConnect(
    String principal, String topicPrefix, List<String> readTopics, List<String> writeTopics)
    throws IOException {

  List<AclBinding> acls = new ArrayList<>();

  // Connect's internal bookkeeping topics need both READ and WRITE.
  for (String topic : Arrays.asList("connect-status", "connect-offsets", "connect-configs")) {
    acls.add(buildTopicLevelAcl(principal, topic, PatternType.LITERAL, AclOperation.READ));
    acls.add(buildTopicLevelAcl(principal, topic, PatternType.LITERAL, AclOperation.WRITE));
  }

  // Cluster-level CREATE so workers can make their internal topics.
  ResourcePattern clusterPattern =
      new ResourcePattern(ResourceType.CLUSTER, "kafka-cluster", PatternType.LITERAL);
  acls.add(
      new AclBinding(
          clusterPattern,
          new AccessControlEntry(principal, "*", AclOperation.CREATE, AclPermissionType.ALLOW)));

  // Workers coordinate through consumer groups.
  ResourcePattern groupPattern =
      new ResourcePattern(ResourceType.GROUP, "*", PatternType.LITERAL);
  acls.add(
      new AclBinding(
          groupPattern,
          new AccessControlEntry(principal, "*", AclOperation.READ, AclPermissionType.ALLOW)));

  if (readTopics != null) {
    for (String topic : readTopics) {
      acls.add(buildTopicLevelAcl(principal, topic, PatternType.LITERAL, AclOperation.READ));
    }
  }

  if (writeTopics != null) {
    for (String topic : writeTopics) {
      acls.add(buildTopicLevelAcl(principal, topic, PatternType.LITERAL, AclOperation.WRITE));
    }
  }

  createAcls(acls);
  return acls;
}
 
Example #18
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License 5 votes vote down vote up
/**
 * Grants a consumer principal DESCRIBE and READ on its topic, plus READ on any
 * consumer group so it can join one.
 *
 * @param principal consumer principal
 * @param topic     topic the consumer reads from
 * @return the bindings that were created
 * @throws IOException if ACL creation fails
 */
public List<AclBinding> setAclsForConsumer(String principal, String topic) throws IOException {
  List<AclBinding> acls = new ArrayList<>();
  for (AclOperation op : Arrays.asList(AclOperation.DESCRIBE, AclOperation.READ)) {
    acls.add(buildTopicLevelAcl(principal, topic, PatternType.LITERAL, op));
  }
  acls.add(buildGroupLevelAcl(principal, "*", PatternType.LITERAL, AclOperation.READ));
  createAcls(acls);
  return acls;
}
 
Example #19
Source File: TopologyBuilderAdminClient.java    From kafka-topology-builder with MIT License 5 votes vote down vote up
/**
 * Grants Confluent Control Center (C3) its required ACLs: READ on its group
 * prefixes, WRITE/READ/CREATE/DESCRIBE on the Confluent internal topics and on
 * everything under its appId, plus cluster-level DESCRIBE and DESCRIBE_CONFIGS.
 *
 * @param principal the C3 principal
 * @param appId     the C3 application id, used as topic/group prefix
 * @return the bindings that were created
 * @throws IOException if ACL creation fails
 */
public List<AclBinding> setAclsForControlCenter(String principal, String appId)
    throws IOException {
  List<AclBinding> bindings = new ArrayList<>();

  // C3 consumes under its appId and "<appId>-command" group prefixes.
  bindings.add(buildGroupLevelAcl(principal, appId, PatternType.PREFIXED, AclOperation.READ));
  bindings.add(
      buildGroupLevelAcl(principal, appId + "-command", PatternType.PREFIXED, AclOperation.READ));

  // FIX: "_confluent-metrics" previously had a stray leading space
  // (" _confluent-metrics"), so the ACL was created for a wrong topic name.
  Arrays.asList("_confluent-monitoring", "_confluent-command", "_confluent-metrics")
      .forEach(
          topic ->
              Stream.of(
                      AclOperation.WRITE,
                      AclOperation.READ,
                      AclOperation.CREATE,
                      AclOperation.DESCRIBE)
                  .map(
                      aclOperation ->
                          buildTopicLevelAcl(principal, topic, PatternType.LITERAL, aclOperation))
                  .forEach(bindings::add));

  // Same operations on everything prefixed with the appId.
  Stream.of(AclOperation.WRITE, AclOperation.READ, AclOperation.CREATE, AclOperation.DESCRIBE)
      .map(
          aclOperation ->
              buildTopicLevelAcl(principal, appId, PatternType.PREFIXED, aclOperation))
      .forEach(bindings::add);

  // Cluster-level DESCRIBE and DESCRIBE_CONFIGS.
  ResourcePattern resourcePattern =
      new ResourcePattern(ResourceType.CLUSTER, "kafka-cluster", PatternType.LITERAL);
  AccessControlEntry entry =
      new AccessControlEntry(principal, "*", AclOperation.DESCRIBE, AclPermissionType.ALLOW);
  bindings.add(new AclBinding(resourcePattern, entry));

  entry =
      new AccessControlEntry(
          principal, "*", AclOperation.DESCRIBE_CONFIGS, AclPermissionType.ALLOW);
  bindings.add(new AclBinding(resourcePattern, entry));
  createAcls(bindings);
  return bindings;
}
 
Example #20
Source File: AccessControlManagerIT.java    From kafka-topology-builder with MIT License 4 votes vote down vote up
/**
 * Asserts that every Schema Registry principal ends up with exactly 3 topic ACLs.
 */
private void verifySchemaRegistryAcls(Platform platform)
    throws ExecutionException, InterruptedException {

  for (SchemaRegistry schemaRegistry : platform.getSchemaRegistry()) {
    // Match any ALLOW topic ACL owned by this principal, regardless of pattern.
    AclBindingFilter filter =
        new AclBindingFilter(
            new ResourcePatternFilter(ResourceType.TOPIC, null, PatternType.ANY),
            new AccessControlEntryFilter(
                schemaRegistry.getPrincipal(), null, AclOperation.ANY, AclPermissionType.ALLOW));

    Collection<AclBinding> acls = kafkaAdminClient.describeAcls(filter).values().get();
    Assert.assertEquals(3, acls.size());
  }
}
 
Example #21
Source File: AclBuilder.java    From kafka-topology-builder with MIT License 4 votes vote down vote up
/**
 * Sets the access-control entry (host, operation, permission) for the ACL being built.
 *
 * @return this builder, for chaining
 */
public AclBuilder addControlEntry(
    String host, AclOperation op, AclPermissionType permissionType) {
  this.entry = new AccessControlEntry(this.principal, host, op, permissionType);
  return this;
}
 
Example #22
Source File: SecureIntegrationTest.java    From ksql-fork-with-deep-learning-function with Apache License 2.0 4 votes vote down vote up
@Test
public void shouldRunQueryWithChangeLogsAgainstKafkaClusterWithAclsWhereTopicsPreexist()
    throws Exception {
  // Given:
  outputTopic = "ACLS_TEST_3";

  final String repartitionTopic =
      "_confluent-ksql-default_query_CTAS_ACLS_TEST_3-KSTREAM-AGGREGATE-STATE-STORE-0000000006-repartition";
  final String changeLogTopic =
      "_confluent-ksql-default_query_CTAS_ACLS_TEST_3-KSTREAM-AGGREGATE-STATE-STORE-0000000006-changelog";

  // All topics pre-exist, so the user needs no CREATE (or DELETE) rights.
  SECURE_CLUSTER.createTopic(outputTopic, 4, 1);
  SECURE_CLUSTER.createTopic(repartitionTopic, 1, 1);
  SECURE_CLUSTER.createTopic(changeLogTopic, 1, 1);

  final Set<AclOperation> describeOnly = ImmutableSet.of(AclOperation.DESCRIBE);
  final Set<AclOperation> describeRead =
      ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ);
  final Set<AclOperation> describeWrite =
      ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.WRITE);

  givenAllowAcl(NORMAL_USER, ResourceType.CLUSTER, "kafka-cluster",
                ImmutableSet.of(AclOperation.DESCRIBE_CONFIGS));
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, INPUT_TOPIC, describeRead);
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, "__consumer_offsets", describeOnly);
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, outputTopic, describeWrite);
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, repartitionTopic,
                ImmutableSet.of(AclOperation.DESCRIBE, AclOperation.READ, AclOperation.WRITE));
  // READ on the changelog would only be needed for state-store recovery.
  givenAllowAcl(NORMAL_USER, ResourceType.TOPIC, changeLogTopic, describeWrite);
  givenAllowAcl(NORMAL_USER, ResourceType.GROUP, "_confluent-ksql-default_query_CTAS_ACLS_TEST_3",
                describeRead);

  givenTestSetupWithConfig(getKsqlConfig(NORMAL_USER));

  // Then:
  assertCanRunRepartitioningKsqlQuery();
}