diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index f3c625414..4577fc231 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -22,6 +22,11 @@ updates:
schedule:
interval: weekly
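+# Placeholder Dockerfiles in api/src/test/resources pin the Kafka and
+# Keycloak test-container images; dependabot updates their FROM lines here.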
+- package-ecosystem: "docker"
+ directory: "/api/src/test/resources"
+ schedule:
+ interval: weekly
+
- package-ecosystem: "npm"
directory: "/ui"
schedule:
diff --git a/api/pom.xml b/api/pom.xml
index 7cd793517..2fcafe3d0 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -18,10 +18,6 @@
streamshub
${project.version}
false
-
-
- <keycloak.image>quay.io/keycloak/keycloak:21.1</keycloak.image>
- <strimzi-kafka.tag>quay.io/strimzi/kafka:0.43.0-kafka-3.8.0</strimzi-kafka.tag>
@@ -382,9 +378,6 @@
true
- <keycloak.image>${keycloak.image}</keycloak.image>
- <strimzi-kafka.tag>${strimzi-kafka.tag}</strimzi-kafka.tag>
- <apicurio-registry.version>${apicurio-registry.version}</apicurio-registry.version>
<java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
<maven.home>${maven.home}</maven.home>
true
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestOAuthProfile.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestOAuthProfile.java
deleted file mode 100644
index ca6ff00e3..000000000
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestOAuthProfile.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package com.github.streamshub.console.kafka.systemtest;
-
-import io.quarkus.test.junit.QuarkusTestProfile;
-
-import java.util.List;
-import java.util.Map;
-
-import com.github.streamshub.console.kafka.systemtest.deployment.KafkaOAuthSecuredResourceManager;
-
-public class TestOAuthProfile implements QuarkusTestProfile {
-
- static final String PROFILE = "testoauth";
- public static final int MAX_PARTITIONS = 100;
- public static final int EXCESSIVE_PARTITIONS = 101;
-
- @Override
- public String getConfigProfile() {
- return PROFILE;
- }
-
- @Override
- public List<TestResourceEntry> testResources() {
- return List.of(new TestResourceEntry(KafkaOAuthSecuredResourceManager.class, Map.of("profile", PROFILE)));
- }
-}
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestPlainNoK8sProfile.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestPlainNoK8sProfile.java
index 8d479bb04..d3b25fe9f 100644
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestPlainNoK8sProfile.java
+++ b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestPlainNoK8sProfile.java
@@ -1,11 +1,11 @@
package com.github.streamshub.console.kafka.systemtest;
-import io.quarkus.test.junit.QuarkusTestProfile;
-
import java.util.List;
import java.util.Map;
-import com.github.streamshub.console.kafka.systemtest.deployment.KafkaUnsecuredResourceManager;
+import com.github.streamshub.console.kafka.systemtest.deployment.KafkaResourceManager;
+
+import io.quarkus.test.junit.QuarkusTestProfile;
/**
* Same as profile {@linkplain TestPlainProfile}, but disables Kubernetes use by setting
@@ -17,7 +17,7 @@ public class TestPlainNoK8sProfile extends TestPlainProfile implements QuarkusTestProfile {
@Override
public List<TestResourceEntry> testResources() {
- return List.of(new TestResourceEntry(KafkaUnsecuredResourceManager.class, Map.of("profile", PROFILE)));
+ return List.of(new TestResourceEntry(KafkaResourceManager.class, Map.of("profile", PROFILE)));
}
@Override
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestPlainProfile.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestPlainProfile.java
index a8422d0e2..42a2dd500 100644
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestPlainProfile.java
+++ b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/TestPlainProfile.java
@@ -9,7 +9,7 @@
import java.util.List;
import java.util.Map;
-import com.github.streamshub.console.kafka.systemtest.deployment.KafkaUnsecuredResourceManager;
+import com.github.streamshub.console.kafka.systemtest.deployment.KafkaResourceManager;
import com.github.streamshub.console.kafka.systemtest.deployment.KeycloakResourceManager;
import com.github.streamshub.console.kafka.systemtest.deployment.StrimziCrdResourceManager;
@@ -39,7 +39,7 @@ public List<TestResourceEntry> testResources() {
return List.of(
new TestResourceEntry(StrimziCrdResourceManager.class, Collections.emptyMap(), true),
new TestResourceEntry(KeycloakResourceManager.class, Collections.emptyMap(), true),
- new TestResourceEntry(KafkaUnsecuredResourceManager.class, Map.of("profile", PROFILE), true));
+ new TestResourceEntry(KafkaResourceManager.class, Map.of("profile", PROFILE), true));
}
@Override
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/DeploymentManager.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/DeploymentManager.java
index dcf876f9e..82fac38ee 100644
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/DeploymentManager.java
+++ b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/DeploymentManager.java
@@ -1,27 +1,24 @@
package com.github.streamshub.console.kafka.systemtest.deployment;
+import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
+import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
-import java.time.Duration;
import java.util.Collections;
-import java.util.HashMap;
import java.util.Map;
-import java.util.Properties;
import java.util.UUID;
import org.jboss.logging.Logger;
import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.BindMode;
-import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.Network;
-import org.testcontainers.containers.SelinuxContext;
import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.lifecycle.Startable;
+import io.strimzi.test.container.StrimziKafkaContainer;
+
@SuppressWarnings("resource")
public class DeploymentManager {
@@ -29,39 +26,19 @@ public class DeploymentManager {
static final Map<String, String> TEST_CONTAINER_LABELS =
Collections.singletonMap("test-ident", "systemtest");
- public enum UserType {
- OWNER("alice"),
- USER("susan"),
- OTHER("bob"),
- INVALID(null);
-
- String username;
-
- private UserType(String username) {
- this.username = username;
- }
-
- public String getUsername() {
- return username;
- }
- }
-
@Retention(RetentionPolicy.RUNTIME)
public @interface InjectDeploymentManager {
}
- private final boolean oauthEnabled;
private final Network testNetwork;
- private GenericContainer<?> keycloakContainer;
- private KafkaContainer kafkaContainer;
+ private StrimziKafkaContainer kafkaContainer;
- public static DeploymentManager newInstance(boolean oauthEnabled) {
- return new DeploymentManager(oauthEnabled);
+ public static DeploymentManager newInstance() {
+ return new DeploymentManager();
}
- private DeploymentManager(boolean oauthEnabled) {
- this.oauthEnabled = oauthEnabled;
+ private DeploymentManager() {
this.testNetwork = Network.newNetwork();
}
@@ -69,12 +46,8 @@ private static String name(String prefix) {
return prefix + '-' + UUID.randomUUID().toString();
}
- public boolean isOauthEnabled() {
- return oauthEnabled;
- }
-
public void shutdown() {
- stopAll(kafkaContainer, keycloakContainer);
+ stopAll(kafkaContainer);
}
private void stopAll(Startable... containers) {
@@ -85,28 +58,9 @@ private void stopAll(Startable... containers) {
}
}
- public GenericContainer<?> getKeycloakContainer() {
- if (keycloakContainer == null) {
- keycloakContainer = deployKeycloak();
- }
-
- return keycloakContainer;
- }
-
- public void stopKeycloakContainer() {
- if (keycloakContainer != null) {
- keycloakContainer.stop();
- keycloakContainer = null;
- }
- }
-
- public KafkaContainer getKafkaContainer() {
+ public StrimziKafkaContainer getKafkaContainer() {
if (kafkaContainer == null) {
- if (oauthEnabled) {
- kafkaContainer = deployKafka();
- } else {
- kafkaContainer = deployStrimziKafka();
- }
+ kafkaContainer = deployStrimziKafka();
}
return kafkaContainer;
@@ -127,68 +81,22 @@ public String getExternalBootstrapServers() {
return null;
}
- public GenericContainer<?> deployKeycloak() {
- LOGGER.info("Deploying keycloak container");
- String imageName = System.getProperty("keycloak.image");
-
- GenericContainer<?> container = new GenericContainer<>(imageName)
- .withLabels(TEST_CONTAINER_LABELS)
- .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.keycloak"), true))
- .withCreateContainerCmdModifier(cmd -> cmd.withName(name("keycloak")))
- .withNetwork(testNetwork)
- .withNetworkAliases("keycloak")
- .withExposedPorts(8080)
- .withEnv(Map.of("KEYCLOAK_ADMIN", "admin",
- "KEYCLOAK_ADMIN_PASSWORD", "admin",
- "PROXY_ADDRESS_FORWARDING", "true"))
- .withClasspathResourceMapping("/keycloak/authz-realm.json", "/opt/keycloak/data/import/authz-realm.json", BindMode.READ_WRITE, SelinuxContext.SHARED)
- .withCommand("start", "--hostname=keycloak", "--hostname-strict-https=false", "--http-enabled=true", "--import-realm")
- .waitingFor(Wait.forHttp("/realms/kafka-authz").withStartupTimeout(Duration.ofMinutes(5)));
-
- LOGGER.info("Waiting for keycloak container");
- container.start();
- return container;
- }
-
- private KafkaContainer deployKafka() {
- LOGGER.info("Deploying Kafka container");
+ private StrimziKafkaContainer deployStrimziKafka() {
+ String kafkaImage;
- Map<String, String> env = new HashMap<>();
-
- try (InputStream stream = getClass().getResourceAsStream("/kafka-oauth/env.properties")) {
- Properties envProps = new Properties();
- envProps.load(stream);
- envProps.keySet()
- .stream()
- .map(Object::toString)
- .forEach(key -> env.put(key, envProps.getProperty(key)));
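+ // Resolve the Kafka image from the placeholder Dockerfile: its first
+ // line is always "FROM <image>", kept there so dependabot can manage
+ // the image version.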
+ try (InputStream in = getClass().getResourceAsStream("/Dockerfile.kafka");
+ BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
+ kafkaImage = reader.readLine().substring("FROM ".length());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
- var container = (KafkaContainer) new KeycloakSecuredKafkaContainer()
- .withLabels(TEST_CONTAINER_LABELS)
- .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.oauth-kafka"), true))
- .withCreateContainerCmdModifier(cmd -> cmd.withName(name("oauth-kafka")))
- .withEnv(env)
- .withNetwork(testNetwork)
- .withClasspathResourceMapping("/kafka-oauth/config/", "/opt/kafka/config/strimzi/", BindMode.READ_WRITE, SelinuxContext.SHARED)
- .withClasspathResourceMapping("/kafka-oauth/scripts/functions.sh", "/opt/kafka/functions.sh", BindMode.READ_WRITE, SelinuxContext.SHARED)
- .withClasspathResourceMapping("/kafka-oauth/scripts/simple_kafka_config.sh", "/opt/kafka/simple_kafka_config.sh", BindMode.READ_WRITE, SelinuxContext.SHARED)
- .withClasspathResourceMapping("/kafka-oauth/scripts/start.sh", "/opt/kafka/start.sh", BindMode.READ_WRITE, SelinuxContext.SHARED)
- .withCommand("sh", "/opt/kafka/start.sh");
-
- container.start();
- return container;
- }
-
- private KafkaContainer deployStrimziKafka() {
- LOGGER.info("Deploying Strimzi Kafka container");
+ LOGGER.infof("Deploying Strimzi Kafka container: %s", kafkaImage);
- var container = (KafkaContainer) new KafkaContainer()
+ var container = new StrimziKafkaContainer(kafkaImage)
.withLabels(TEST_CONTAINER_LABELS)
- .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.plain-kafka"), true))
- .withCreateContainerCmdModifier(cmd -> cmd.withName(name("plain-kafka")))
+ .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.kafka"), true))
+ .withCreateContainerCmdModifier(cmd -> cmd.withName(name("kafka")))
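+ // Broker overrides keep tests deterministic: disable topic
+ // auto-creation and remove the initial consumer-group rebalance delay.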
.withKafkaConfigurationMap(Map.of(
"auto.create.topics.enable", "false",
"group.initial.rebalance.delay.ms", "0"
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaContainer.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaContainer.java
deleted file mode 100644
index 79587b3af..000000000
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaContainer.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.github.streamshub.console.kafka.systemtest.deployment;
-
-import io.strimzi.test.container.StrimziKafkaContainer;
-
-class KafkaContainer extends StrimziKafkaContainer {
-
- KafkaContainer() {
- super();
- }
-
- String getCACertificate() {
- return null;
- }
-
-}
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaOAuthSecuredResourceManager.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaOAuthSecuredResourceManager.java
deleted file mode 100644
index f32d3ae58..000000000
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaOAuthSecuredResourceManager.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package com.github.streamshub.console.kafka.systemtest.deployment;
-
-import java.nio.charset.StandardCharsets;
-import java.util.Base64;
-import java.util.Map;
-import java.util.concurrent.ArrayBlockingQueue;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.kafka.clients.CommonClientConfigs;
-import org.apache.kafka.common.config.SaslConfigs;
-import org.apache.kafka.common.config.SslConfigs;
-import org.testcontainers.containers.GenericContainer;
-
-import io.quarkus.test.common.QuarkusTestResourceLifecycleManager;
-
-public class KafkaOAuthSecuredResourceManager extends KafkaResourceManager implements QuarkusTestResourceLifecycleManager {
-
- GenericContainer<?> keycloakContainer;
-
- @Override
- public Map<String, String> start() {
- ThreadFactory threadFactory = Executors.defaultThreadFactory();
- ThreadPoolExecutor exec = new ThreadPoolExecutor(2, 2, 5, TimeUnit.SECONDS, new ArrayBlockingQueue<>(2), threadFactory);
- deployments = DeploymentManager.newInstance(true);
-
- CompletableFuture.allOf(
- CompletableFuture.supplyAsync(() -> deployments.getKafkaContainer(), exec)
- .thenAccept(container -> kafkaContainer = container),
- CompletableFuture.supplyAsync(() -> deployments.getKeycloakContainer(), exec)
- .thenAccept(container -> keycloakContainer = container))
- .join();
-
- String externalBootstrap = deployments.getExternalBootstrapServers();
-
- int kcPort = keycloakContainer.getMappedPort(8080);
- String profile = "%" + initArgs.get("profile") + ".";
-
- return Map.of(profile + CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, externalBootstrap,
- profile + "quarkus.oidc.auth-server-url", String.format("http://localhost:%d/realms/kafka-authz", kcPort),
- profile + SaslConfigs.SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL, String.format("http://localhost:%d/realms/kafka-authz/protocol/openid-connect/token", kcPort),
- profile + SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG, Base64.getEncoder().encodeToString(kafkaContainer.getCACertificate().getBytes(StandardCharsets.UTF_8)));
- }
-
-}
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaResourceManager.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaResourceManager.java
index 37d98aa14..b320eb80f 100644
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaResourceManager.java
+++ b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaResourceManager.java
@@ -1,14 +1,20 @@
package com.github.streamshub.console.kafka.systemtest.deployment;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.net.ServerSocket;
+import java.net.URI;
import java.util.Map;
+import org.apache.kafka.clients.CommonClientConfigs;
+
import io.quarkus.test.common.QuarkusTestResourceLifecycleManager;
-public abstract class KafkaResourceManager implements QuarkusTestResourceLifecycleManager {
+public class KafkaResourceManager implements QuarkusTestResourceLifecycleManager {
- protected Map<String, String> initArgs;
- protected DeploymentManager deployments;
- protected KafkaContainer kafkaContainer;
+ private Map<String, String> initArgs;
+ private DeploymentManager deployments;
+ private ServerSocket randomSocket;
@Override
public void init(Map<String, String> initArgs) {
@@ -19,9 +25,38 @@ public void init(Map<String, String> initArgs) {
public void inject(TestInjector testInjector) {
testInjector.injectIntoFields(deployments, new TestInjector.MatchesType(DeploymentManager.class));
}
+
+ @Override
+ public Map<String, String> start() {
+ deployments = DeploymentManager.newInstance();
+ deployments.getKafkaContainer();
+ String externalBootstrap = deployments.getExternalBootstrapServers();
+ String profile = "%" + initArgs.get("profile") + ".";
+
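+ // Reserve an ephemeral port with a plain ServerSocket; the dummy URI
+ // built below gives tests a stable, reachable address with no Kafka
+ // broker behind it.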
+ try {
+ randomSocket = new ServerSocket(0);
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+
+ URI randomBootstrapServers = URI.create("dummy://localhost:" + randomSocket.getLocalPort());
+
+ return Map.ofEntries(
+ Map.entry(profile + CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, externalBootstrap),
+ Map.entry(profile + "console.test.external-bootstrap", externalBootstrap),
+ Map.entry(profile + "console.test.random-bootstrap", randomBootstrapServers.toString()));
+ }
+
@Override
public void stop() {
deployments.shutdown();
- }
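+ // Close the ServerSocket so the reserved bootstrap port is released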
+ if (randomSocket != null) {
+ try {
+ randomSocket.close();
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+ }
}
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaUnsecuredResourceManager.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaUnsecuredResourceManager.java
deleted file mode 100644
index bf0c0f1a6..000000000
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KafkaUnsecuredResourceManager.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package com.github.streamshub.console.kafka.systemtest.deployment;
-
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.net.ServerSocket;
-import java.net.URI;
-import java.util.Map;
-
-import org.apache.kafka.clients.CommonClientConfigs;
-
-import io.quarkus.test.common.QuarkusTestResourceLifecycleManager;
-
-public class KafkaUnsecuredResourceManager extends KafkaResourceManager implements QuarkusTestResourceLifecycleManager {
-
- ServerSocket randomSocket;
-
- @Override
- public Map<String, String> start() {
- deployments = DeploymentManager.newInstance(false);
- kafkaContainer = deployments.getKafkaContainer();
- String externalBootstrap = deployments.getExternalBootstrapServers();
- String profile = "%" + initArgs.get("profile") + ".";
-
- try {
- randomSocket = new ServerSocket(0);
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
-
- URI randomBootstrapServers = URI.create("dummy://localhost:" + randomSocket.getLocalPort());
-
- return Map.ofEntries(
- Map.entry(profile + CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, externalBootstrap),
- Map.entry(profile + "console.test.external-bootstrap", externalBootstrap),
- Map.entry(profile + "console.test.random-bootstrap", randomBootstrapServers.toString()));
- }
-
- @Override
- public void stop() {
- super.stop();
-
- if (randomSocket != null) {
- try {
- randomSocket.close();
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
- }
-}
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KeycloakResourceManager.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KeycloakResourceManager.java
index 950c1155f..e161c01b3 100644
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KeycloakResourceManager.java
+++ b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KeycloakResourceManager.java
@@ -1,8 +1,10 @@
package com.github.streamshub.console.kafka.systemtest.deployment;
+import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.time.Duration;
@@ -36,8 +38,16 @@ public Map<String, String> start() {
int port = 8443;
TlsHelper tls = TlsHelper.newInstance();
String keystorePath = "/opt/keycloak/keystore.p12";
+ String keycloakImage;
- keycloak = new GenericContainer<>("quay.io/keycloak/keycloak:26.0")
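+ // Same placeholder-Dockerfile pattern as for Kafka: read the Keycloak
+ // image from the "FROM" line so dependabot can manage the version.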
+ try (InputStream in = getClass().getResourceAsStream("/Dockerfile.keycloak");
+ BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
+ keycloakImage = reader.readLine().substring("FROM ".length());
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+
+ keycloak = new GenericContainer<>(keycloakImage)
.withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.keycloak"), true))
.withExposedPorts(port)
.withEnv(Map.of(
diff --git a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KeycloakSecuredKafkaContainer.java b/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KeycloakSecuredKafkaContainer.java
deleted file mode 100644
index 22d184e28..000000000
--- a/api/src/test/java/com/github/streamshub/console/kafka/systemtest/deployment/KeycloakSecuredKafkaContainer.java
+++ /dev/null
@@ -1,266 +0,0 @@
-package com.github.streamshub.console.kafka.systemtest.deployment;
-
-import com.github.dockerjava.api.command.InspectContainerResponse;
-import com.github.dockerjava.api.model.ContainerNetwork;
-import io.strimzi.test.container.StrimziZookeeperContainer;
-import org.bouncycastle.asn1.x500.X500Name;
-import org.bouncycastle.asn1.x509.BasicConstraints;
-import org.bouncycastle.asn1.x509.Extension;
-import org.bouncycastle.asn1.x509.GeneralName;
-import org.bouncycastle.asn1.x509.GeneralNames;
-import org.bouncycastle.asn1.x509.KeyUsage;
-import org.bouncycastle.cert.X509CertificateHolder;
-import org.bouncycastle.cert.X509v3CertificateBuilder;
-import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
-import org.bouncycastle.cert.jcajce.JcaX509ExtensionUtils;
-import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder;
-import org.bouncycastle.jce.provider.BouncyCastleProvider;
-import org.bouncycastle.operator.ContentSigner;
-import org.bouncycastle.operator.OperatorCreationException;
-import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
-import org.bouncycastle.pkcs.PKCS10CertificationRequest;
-import org.bouncycastle.pkcs.PKCS10CertificationRequestBuilder;
-import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder;
-import org.jboss.logging.Logger;
-import org.testcontainers.images.builder.Transferable;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.math.BigInteger;
-import java.nio.charset.StandardCharsets;
-import java.security.GeneralSecurityException;
-import java.security.KeyPair;
-import java.security.KeyPairGenerator;
-import java.security.KeyStore;
-import java.security.SecureRandom;
-import java.security.Security;
-import java.security.cert.Certificate;
-import java.security.cert.CertificateEncodingException;
-import java.security.cert.X509Certificate;
-import java.time.LocalDateTime;
-import java.time.ZoneOffset;
-import java.util.Base64;
-import java.util.Collection;
-import java.util.Date;
-import java.util.UUID;
-
-class KeycloakSecuredKafkaContainer extends KafkaContainer {
-
- protected static final Logger LOGGER = Logger.getLogger(KeycloakSecuredKafkaContainer.class);
-
- private int kafkaExposedPort;
- String caCertificate;
- byte[] keystore;
- byte[] truststore;
-
- KeycloakSecuredKafkaContainer() {
- super();
- withExposedPorts(KAFKA_PORT);
- }
-
- @Override
- public boolean equals(Object o) {
- return super.equals(o);
- }
-
- @Override
- public int hashCode() {
- return super.hashCode();
- }
-
- @Override
- public void addFixedExposedPort(int hostPort, int containerPort) {
- super.addFixedExposedPort(hostPort, containerPort);
- }
-
- @Override
- protected void doStart() {
- final String passString = new String(PASSPHRASE);
- getEnvMap().entrySet().forEach(env -> {
- if ("${GENERATED_PASSPHRASE}".equals(env.getValue())) {
- env.setValue(passString);
- }
- });
- super.doStart();
- }
-
- @Override
- protected void containerIsStarting(InspectContainerResponse containerInfo, boolean reused) {
- kafkaExposedPort = getMappedPort(KAFKA_PORT);
-
- LOGGER.infof("This is mapped port %s", kafkaExposedPort);
-
- StringBuilder advertisedListeners = new StringBuilder(getBootstrapServers());
-
- Collection<ContainerNetwork> cns = containerInfo.getNetworkSettings().getNetworks().values();
-
- for (ContainerNetwork cn : cns) {
- advertisedListeners.append("," + "REPLICATION://").append(cn.getIpAddress()).append(":9091");
- }
-
- LOGGER.infof("This is all advertised listeners for Kafka %s", advertisedListeners);
-
- String command = "#!/bin/bash \n";
- command += "bin/zookeeper-server-start.sh ./config/zookeeper.properties &\n";
- command += "/bin/bash /opt/kafka/start.sh"
- + " --override listeners=REPLICATION://0.0.0.0:9091,SECURE://0.0.0.0:" + KAFKA_PORT
- + " --override advertised.listeners=" + advertisedListeners.toString()
- + " --override zookeeper.connect=localhost:" + StrimziZookeeperContainer.ZOOKEEPER_PORT
- + " --override listener.security.protocol.map=REPLICATION:SSL,SECURE:SASL_SSL"
- + " --override inter.broker.listener.name=REPLICATION\n";
-
- LOGGER.info("Copying command to 'STARTER_SCRIPT' script.");
-
- try {
- generateCertificates();
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
-
- copyFileToContainer(Transferable.of(keystore, 700), "/opt/kafka/certs/cluster.keystore.p12");
- copyFileToContainer(Transferable.of(truststore, 700), "/opt/kafka/certs/cluster.truststore.jks");
- copyFileToContainer(Transferable.of(command.getBytes(StandardCharsets.UTF_8), 700), STARTER_SCRIPT);
- }
-
- @Override
- public String getBootstrapServers() {
- return String.format("SECURE://%s:%s", getHost(), kafkaExposedPort);
- }
-
- @Override
- String getCACertificate() {
- return this.caCertificate;
- }
-
- private static final String BC_PROVIDER = "BC";
- private static final String KEY_ALGORITHM = "RSA";
- private static final String SIGNATURE_ALGORITHM = "SHA256withRSA";
- private static final String PKCS12 = "PKCS12";
- private static final char[] PASSPHRASE = UUID.randomUUID().toString().toCharArray();
-
- void generateCertificates() throws Exception {
- // Add the BouncyCastle Provider
- Security.addProvider(new BouncyCastleProvider());
-
- // Initialize a new KeyPair generator
- KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance(KEY_ALGORITHM, BC_PROVIDER);
- keyPairGenerator.initialize(2048);
-
- LocalDateTime now = LocalDateTime.now();
- Date notBefore = Date.from(now.minusDays(1).toInstant(ZoneOffset.UTC));
- Date notAfter = Date.from(now.plusYears(1).toInstant(ZoneOffset.UTC));
-
- KeyPair rootKeyPair = keyPairGenerator.generateKeyPair();
- X509Certificate rootCert = buildCACertificate(rootKeyPair, notBefore, notAfter);
-
- KeyPair issuedCertKeyPair = keyPairGenerator.generateKeyPair();
- Certificate issuedCert = buildServerCertificate(issuedCertKeyPair, rootKeyPair, rootCert, notBefore, notAfter);
-
- KeyStore sslTrustStore = KeyStore.getInstance("JKS");
- sslTrustStore.load(null, PASSPHRASE);
- sslTrustStore.setCertificateEntry("CACert", rootCert);
- this.truststore = toByteArray(sslTrustStore, PASSPHRASE);
-
- KeyStore sslKeyStore = KeyStore.getInstance(PKCS12, BC_PROVIDER);
- sslKeyStore.load(null, PASSPHRASE);
- sslKeyStore.setKeyEntry("localhost", issuedCertKeyPair.getPrivate(), null, new Certificate[] {
- issuedCert,
- rootCert
- });
- sslKeyStore.setCertificateEntry("CACert", rootCert);
- this.keystore = toByteArray(sslKeyStore, PASSPHRASE);
-
- this.caCertificate = pemEncodeCertificate(rootCert);
- }
-
- private X509Certificate buildCACertificate(KeyPair keyPair, Date notBefore, Date notAfter)
- throws OperatorCreationException, IOException, GeneralSecurityException {
-
- BigInteger rootSerialNum = new BigInteger(Long.toString(new SecureRandom().nextLong()));
-
- // Issued By and Issued To same for root certificate
- X500Name rootCertIssuer = new X500Name("CN=root-cert");
- X500Name rootCertSubject = rootCertIssuer;
- ContentSigner rootCertContentSigner = new JcaContentSignerBuilder(SIGNATURE_ALGORITHM).setProvider(BC_PROVIDER)
- .build(keyPair.getPrivate());
- X509v3CertificateBuilder rootCertBuilder = new JcaX509v3CertificateBuilder(rootCertIssuer, rootSerialNum,
- notBefore, notAfter, rootCertSubject, keyPair.getPublic());
-
- // Add Extensions
- // A BasicConstraint to mark root certificate as CA certificate
- JcaX509ExtensionUtils rootCertExtUtils = new JcaX509ExtensionUtils();
- rootCertBuilder.addExtension(Extension.basicConstraints, true, new BasicConstraints(true));
- rootCertBuilder.addExtension(Extension.subjectKeyIdentifier, false,
- rootCertExtUtils.createSubjectKeyIdentifier(keyPair.getPublic()));
-
- // Create a cert holder and export to X509Certificate
- X509CertificateHolder rootCertHolder = rootCertBuilder.build(rootCertContentSigner);
- return new JcaX509CertificateConverter().setProvider(BC_PROVIDER)
- .getCertificate(rootCertHolder);
- }
-
- private Certificate buildServerCertificate(KeyPair keyPair, KeyPair signerKeyPair, X509Certificate signerCert, Date notBefore, Date notAfter)
- throws GeneralSecurityException, IOException, OperatorCreationException {
-
- // Generate a new KeyPair and sign it using the Root Cert Private Key
- // by generating a CSR (Certificate Signing Request)
- X500Name issuedCertSubject = new X500Name("CN=kafka-broker,O=com.github.streamshub");
- BigInteger issuedCertSerialNum = new BigInteger(Long.toString(new SecureRandom().nextLong()));
-
- PKCS10CertificationRequestBuilder p10Builder = new JcaPKCS10CertificationRequestBuilder(issuedCertSubject,
- keyPair.getPublic());
- JcaContentSignerBuilder csrBuilder = new JcaContentSignerBuilder(SIGNATURE_ALGORITHM).setProvider(BC_PROVIDER);
-
- // Sign the new KeyPair with the root cert Private Key
- ContentSigner csrContentSigner = csrBuilder.build(signerKeyPair.getPrivate());
- PKCS10CertificationRequest csr = p10Builder.build(csrContentSigner);
-
- // Use the Signed KeyPair and CSR to generate an issued Certificate
- // Here serial number is randomly generated. In general, CAs use
- // a sequence to generate Serial number and avoid collisions
- X509v3CertificateBuilder issuedCertBuilder = new X509v3CertificateBuilder(new X500Name(signerCert.getSubjectX500Principal().getName()), issuedCertSerialNum,
- notBefore, notAfter, csr.getSubject(), csr.getSubjectPublicKeyInfo());
-
- JcaX509ExtensionUtils issuedCertExtUtils = new JcaX509ExtensionUtils();
-
- // Add Extensions
- // Use BasicConstraints to say that this Cert is not a CA
- issuedCertBuilder.addExtension(Extension.basicConstraints, true, new BasicConstraints(false));
-
- // Add Issuer cert identifier as Extension
- issuedCertBuilder.addExtension(Extension.authorityKeyIdentifier, false, issuedCertExtUtils.createAuthorityKeyIdentifier(signerCert));
-
- // Add intended key usage extension if needed
- issuedCertBuilder.addExtension(Extension.keyUsage, false, new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyEncipherment));
-
- // Add DNS name is cert is to used for SSL
- GeneralNames subjectAltName = new GeneralNames(new GeneralName[] {
- new GeneralName(GeneralName.dNSName, "kafka"),
- new GeneralName(GeneralName.dNSName, "localhost")
- });
- issuedCertBuilder.addExtension(Extension.subjectAlternativeName, false, subjectAltName);
-
- X509CertificateHolder issuedCertHolder = issuedCertBuilder.build(csrContentSigner);
- X509Certificate issuedCert = new JcaX509CertificateConverter().setProvider(BC_PROVIDER)
- .getCertificate(issuedCertHolder);
-
- // Verify the issued cert signature against the root (issuer) cert
- issuedCert.verify(signerCert.getPublicKey(), BC_PROVIDER);
- return issuedCert;
- }
-
- private byte[] toByteArray(KeyStore store, char[] passphrase) throws GeneralSecurityException, IOException {
- ByteArrayOutputStream buffer = new ByteArrayOutputStream();
- store.store(buffer, passphrase);
- return buffer.toByteArray();
- }
-
- private String pemEncodeCertificate(Certificate certificate) throws IOException, CertificateEncodingException {
- ByteArrayOutputStream certificateOut = new ByteArrayOutputStream();
- certificateOut.write("-----BEGIN CERTIFICATE-----\n".getBytes(StandardCharsets.UTF_8));
- certificateOut.write(Base64.getMimeEncoder(80, new byte[] {'\n'}).encode(certificate.getEncoded()));
- certificateOut.write("-----END CERTIFICATE-----\n".getBytes(StandardCharsets.UTF_8));
- certificateOut.close();
- return new String(certificateOut.toByteArray(), StandardCharsets.UTF_8);
- }
-}
diff --git a/api/src/test/resources/Dockerfile.kafka b/api/src/test/resources/Dockerfile.kafka
new file mode 100644
index 000000000..ceb7f608b
--- /dev/null
+++ b/api/src/test/resources/Dockerfile.kafka
@@ -0,0 +1,4 @@
+FROM quay.io/strimzi-test-container/test-container:0.109.1-kafka-3.9.0
+# No operations; this file is only a placeholder used to manage the image
+# version via dependabot. The FROM statement is always expected to be
+# present on the first line of this file.
diff --git a/api/src/test/resources/Dockerfile.keycloak b/api/src/test/resources/Dockerfile.keycloak
new file mode 100644
index 000000000..e8e66f084
--- /dev/null
+++ b/api/src/test/resources/Dockerfile.keycloak
@@ -0,0 +1,4 @@
+FROM quay.io/keycloak/keycloak:26.0
+# No operations; this file is only a placeholder used to manage the image
+# version via dependabot. The FROM statement is always expected to be
+# present on the first line of this file.
diff --git a/api/src/test/resources/kafka-oauth/config/log4j.properties b/api/src/test/resources/kafka-oauth/config/log4j.properties
deleted file mode 100644
index bc6df47d9..000000000
--- a/api/src/test/resources/kafka-oauth/config/log4j.properties
+++ /dev/null
@@ -1,58 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Unspecified loggers and loggers with additivity=true output to server.log and stdout
-# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-# Change the two lines below to adjust ZK client logging
-log4j.logger.org.I0Itec.zkclient.ZkClient=INFO
-log4j.logger.org.apache.zookeeper=INFO
-
-# Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
-log4j.logger.kafka=INFO
-log4j.logger.org.apache.kafka=INFO
-
-# Control Strimzi OAuth logging
-log4j.logger.io.strimzi=INFO
-
-# Change to DEBUG or TRACE to enable request logging
-log4j.logger.kafka.request.logger=WARN
-log4j.additivity.kafka.request.logger=false
-
-# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
-# related to the handling of requests
-#log4j.logger.kafka.network.Processor=TRACE
-#log4j.logger.kafka.server.KafkaApis=TRACE
-#log4j.additivity.kafka.server.KafkaApis=false
-log4j.logger.kafka.network.RequestChannel$=WARN
-log4j.additivity.kafka.network.RequestChannel$=false
-
-log4j.logger.kafka.controller=TRACE
-log4j.additivity.kafka.controller=false
-
-log4j.logger.kafka.log.LogCleaner=INFO
-log4j.additivity.kafka.log.LogCleaner=false
-
-log4j.logger.state.change.logger=TRACE
-log4j.additivity.state.change.logger=false
-
-# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
-log4j.logger.kafka.authorizer.logger=INFO
-log4j.additivity.kafka.authorizer.logger=false
diff --git a/api/src/test/resources/kafka-oauth/env.properties b/api/src/test/resources/kafka-oauth/env.properties
deleted file mode 100644
index b5ba5ede6..000000000
--- a/api/src/test/resources/kafka-oauth/env.properties
+++ /dev/null
@@ -1,46 +0,0 @@
-LOG_DIR=/home/kafka/logs
-KAFKA_BROKER_ID=1
-KAFKA_ZOOKEEPER_CONNECT=localhost:2181
-KAFKA_SASL_ENABLED_MECHANISMS=OAUTHBEARER
-
-KAFKA_SSL_SECURE_RANDOM_IMPLEMENTATION=SHA1PRNG
-KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM=
-
-KAFKA_LISTENER_NAME_REPLICATION_SSL_KEYSTORE_LOCATION=/opt/kafka/certs/cluster.keystore.p12
-KAFKA_LISTENER_NAME_REPLICATION_SSL_KEYSTORE_PASSWORD=${GENERATED_PASSPHRASE}
-KAFKA_LISTENER_NAME_REPLICATION_SSL_KEYSTORE_TYPE=PKCS12
-KAFKA_LISTENER_NAME_REPLICATION_SSL_TRUSTSTORE_LOCATION=/opt/kafka/certs/cluster.truststore.jks
-KAFKA_LISTENER_NAME_REPLICATION_SSL_TRUSTSTORE_PASSWORD=${GENERATED_PASSPHRASE}
-KAFKA_LISTENER_NAME_REPLICATION_SSL_TRUSTSTORE_TYPE=JKS
-KAFKA_LISTENER_NAME_REPLICATION_SSL_CLIENT_AUTH=required
-
-KAFKA_LISTENER_NAME_SECURE_SSL_KEYSTORE_LOCATION=/opt/kafka/certs/cluster.keystore.p12
-KAFKA_LISTENER_NAME_SECURE_SSL_KEYSTORE_PASSWORD=${GENERATED_PASSPHRASE}
-KAFKA_LISTENER_NAME_SECURE_SSL_KEYSTORE_TYPE=PKCS12
-KAFKA_LISTENER_NAME_SECURE_OAUTHBEARER_SASL_JAAS_CONFIG=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;
-KAFKA_LISTENER_NAME_SECURE_OAUTHBEARER_SASL_LOGIN_CALLBACK_HANDLER_CLASS=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler
-KAFKA_LISTENER_NAME_SECURE_OAUTHBEARER_SASL_SERVER_CALLBACK_HANDLER_CLASS=io.strimzi.kafka.oauth.server.JaasServerOauthValidatorCallbackHandler
-KAFKA_LISTENER_NAME_SECURE_OAUTHBEARER_CONNECTIONS_MAX_REAUTH_MS=3600000
-
-KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
-KAFKA_AUTHORIZER_CLASS_NAME=io.strimzi.kafka.oauth.server.OAuthSessionAuthorizer
-KAFKA_PRINCIPAL_BUILDER_CLASS=io.strimzi.kafka.oauth.server.OAuthKafkaPrincipalBuilder
-
-KAFKA_STRIMZI_AUTHORIZER_DELEGATE_CLASS_NAME=io.strimzi.kafka.oauth.server.authorizer.KeycloakRBACAuthorizer
-KAFKA_STRIMZI_AUTHORIZATION_KAFKA_CLUSTER_NAME=cluster2
-KAFKA_STRIMZI_AUTHORIZATION_DELEGATE_TO_KAFKA_ACL=true
-KAFKA_STRIMZI_AUTHORIZATION_GRANTS_REFRESH_POOL_SIZE=10
-KAFKA_STRIMZI_AUTHORIZATION_GRANTS_REFRESH_PERIOD_SECONDS=30
-
-KAFKA_SUPER_USERS=User:O=com.github.streamshub,CN=kafka-broker;User:service-account-kafka
-OAUTH_CLIENT_ID=kafka
-OAUTH_CLIENT_SECRET=kafka-secret
-OAUTH_TOKEN_ENDPOINT_URI=http://keycloak:8080/realms/kafka-authz/protocol/openid-connect/token
-OAUTH_VALID_ISSUER_URI=http://keycloak:8080/realms/kafka-authz
-OAUTH_JWKS_ENDPOINT_URI=http://keycloak:8080/realms/kafka-authz/protocol/openid-connect/certs
-OAUTH_USERNAME_CLAIM=preferred_username
-OAUTH_JWKS_REFRESH_MIN_PAUSE_SECONDS=5
-
-# For start.sh script to know where the keycloak is listening
-KEYCLOAK_HOST=keycloak
-REALM=kafka-authz
diff --git a/api/src/test/resources/kafka-oauth/scripts/functions.sh b/api/src/test/resources/kafka-oauth/scripts/functions.sh
deleted file mode 100644
index 854ea8010..000000000
--- a/api/src/test/resources/kafka-oauth/scripts/functions.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-
-wait_for_url() {
- URL=$1
- MSG=$2
-
- if [[ $URL == https* ]]; then
- CMD="curl -k -sL -o /dev/null -w %{http_code} $URL"
- else
- CMD="curl -sL -o /dev/null -w %{http_code} $URL"
- fi
-
- until [ "200" == "`$CMD`" ]
- do
- echo "$MSG ($URL)"
- sleep 2
- done
-}
diff --git a/api/src/test/resources/kafka-oauth/scripts/simple_kafka_config.sh b/api/src/test/resources/kafka-oauth/scripts/simple_kafka_config.sh
deleted file mode 100755
index 2ae04f265..000000000
--- a/api/src/test/resources/kafka-oauth/scripts/simple_kafka_config.sh
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-
-declare -A props
-
-to_property_name() {
- key=$1
- echo ${key:6} | tr _ . | tr [:upper:] [:lower:]
-}
-
-pop_value() {
- key=$1
- fallback=$2
-
- if [ -z ${props[$key]+x} ] ; then
- echo $fallback
- else
- echo ${props[$key]}
- fi
- unset props[$key]
-}
-
-#
-# This function allows you to encode as KAFKA_* env vars property names that contain characters invalid for env var names
-# You can use:
-# KAFKA_LISTENER_NAME_CLIENT_SCRAM__2DSHA__2D256_SASL_JAAS_CONFIG=something
-#
-# Which will first be converted to:
-# KAFKA_LISTENER_NAME_CLIENT_SCRAM%2DSHA%2D256_SASL_JAAS_CONFIG=something
-#
-# And then to:
-# KAFKA_LISTENER_NAME_CLIENT_SCRAM-SHA-256_SASL_JAAS_CONFIG=something
-#
-unescape() {
- if [[ "$1" != "" ]]; then
- echo "$1" | sed -e "s@__@\%@g" -e "s@+@ @g;s@%@\\\\x@g" | xargs -0 printf "%b"
- fi
-}
-
-unset IFS
-for var in $(compgen -e); do
- if [[ $var == KAFKA_* ]]; then
-
- case $var in
- KAFKA_DEBUG|KAFKA_OPTS|KAFKA_VERSION|KAFKA_HOME|KAFKA_CHECKSUM|KAFKA_LOG4J_OPTS|KAFKA_HEAP_OPTS|KAFKA_JVM_PERFORMANCE_OPTS|KAFKA_GC_LOG_OPTS|KAFKA_JMX_OPTS) ;;
- *)
- props[$(to_property_name $(unescape $var))]=${!var}
- ;;
- esac
- fi
-done
-
-#
-# Generate output
-#
-echo "#"
-echo "# strimzi.properties"
-echo "#"
-
-echo broker.id=`pop_value broker.id 0`
-echo num.network.threads=`pop_value num.network.threads 3`
-echo num.io.threads=`pop_value num.io.threads 8`
-echo socket.send.buffer.bytes=`pop_value socket.send.buffer.bytes 102400`
-echo socket.receive.buffer.bytes=`pop_value socket.receive.buffer.bytes 102400`
-echo socket.request.max.bytes=`pop_value socket.request.max.bytes 104857600`
-echo log.dirs=`pop_value log.dirs /tmp/kafka-logs`
-echo num.partitions=`pop_value num.partitions 1`
-echo num.recovery.threads.per.data.dir=`pop_value num.recovery.threads.per.data.dir 1`
-echo offsets.topic.replication.factor=`pop_value offsets.topic.replication.factor 1`
-echo transaction.state.log.replication.factor=`pop_value transaction.state.log.replication.factor 1`
-echo transaction.state.log.min.isr=`pop_value transaction.state.log.min.isr 1`
-echo log.retention.hours=`pop_value log.retention.hours 168`
-echo log.segment.bytes=`pop_value log.segment.bytes 1073741824`
-echo log.retention.check.interval.ms=`pop_value log.retention.check.interval.ms 300000`
-echo zookeeper.connect=`pop_value zookeeper.connect localhost:2181`
-echo zookeeper.connection.timeout.ms=`pop_value zookeeper.connection.timeout.ms 6000`
-echo group.initial.rebalance.delay.ms=`pop_value group.initial.rebalance.delay.ms 0`
-
-#
-# Add what remains of KAFKA_* env vars
-#
-for K in "${!props[@]}"
-do
- echo $K=`pop_value $K`
-done
-
-echo
\ No newline at end of file
diff --git a/api/src/test/resources/kafka-oauth/scripts/start.sh b/api/src/test/resources/kafka-oauth/scripts/start.sh
deleted file mode 100755
index 26438b4cb..000000000
--- a/api/src/test/resources/kafka-oauth/scripts/start.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-set -e
-
-source functions.sh
-
-URI=${KEYCLOAK_URI}
-if [ "" == "${URI}" ]; then
- URI="http://${KEYCLOAK_HOST:-keycloak}:8080"
-fi
-
-wait_for_url $URI "Waiting for Keycloak to start"
-
-wait_for_url "$URI/realms/${REALM:-demo}" "Waiting for realm '${REALM}' to be available"
-
-/bin/sh ./simple_kafka_config.sh | tee /tmp/strimzi.properties
-
-
-# Add 'admin' user
-/opt/kafka/bin/kafka-configs.sh --zookeeper ${KAFKA_ZOOKEEPER_CONNECT} --alter --add-config 'SCRAM-SHA-512=[password=admin-secret]' --entity-type users --entity-name admin
-
-# Add 'alice' user
-/opt/kafka/bin/kafka-configs.sh --zookeeper ${KAFKA_ZOOKEEPER_CONNECT} --alter --add-config 'SCRAM-SHA-512=[password=alice-secret]' --entity-type users --entity-name alice
-
-
-# set log dir to writable directory
-if [ "$LOG_DIR" == "" ]; then
- export LOG_DIR=/tmp/logs
-fi
-
-# set log4j properties file to custom one
-export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:/opt/kafka/config/strimzi/log4j.properties"
-
-# add extra jars to classpath
-export CLASSPATH="/opt/kafka/libs/strimzi/*:$CLASSPATH"
-
-exec /opt/kafka/bin/kafka-server-start.sh /tmp/strimzi.properties "$@"
diff --git a/api/src/test/resources/keycloak/authz-realm.json b/api/src/test/resources/keycloak/authz-realm.json
deleted file mode 100644
index 6908851c1..000000000
--- a/api/src/test/resources/keycloak/authz-realm.json
+++ /dev/null
@@ -1,624 +0,0 @@
-{
- "realm": "kafka-authz",
- "accessTokenLifespan": 60,
- "ssoSessionIdleTimeout": 864000,
- "ssoSessionMaxLifespan": 864000,
- "enabled": true,
- "sslRequired": "external",
- "roles": {
- "realm": [
- {
- "name": "Dev Team A",
- "description": "Developer on Dev Team A"
- },
- {
- "name": "Dev Team B",
- "description": "Developer on Dev Team B"
- },
- {
- "name": "Ops Team",
- "description": "Operations team member"
- }
- ],
- "client": {
- "team-a-client": [],
- "team-b-client": [],
- "kafka-cli": [],
- "kafka": [
- {
- "name": "uma_protection",
- "clientRole": true
- }
- ]
- }
- },
- "groups" : [
- {
- "name" : "ClusterManager Group",
- "path" : "/ClusterManager Group"
- }, {
- "name" : "ClusterUser Group",
- "path" : "/ClusterUser Group"
- }, {
- "name" : "ClusterManager-cluster2 Group",
- "path" : "/ClusterManager-cluster2 Group"
- }, {
- "name" : "Ops Team Group",
- "path" : "/Ops Team Group"
- }
- ],
- "users": [
- {
- "username" : "alice",
- "enabled" : true,
- "totp" : false,
- "emailVerified" : true,
- "firstName" : "Alice",
- "email" : "alice@strimzi.io",
- "credentials" : [ {
- "type" : "password",
- "secretData" : "{\"value\":\"KqABIiReBuRWbP4pBct3W067pNvYzeN7ILBV+8vT8nuF5cgYs2fdl2QikJT/7bGTW/PBXg6CYLwJQFYrBK9MWg==\",\"salt\":\"EPgscX9CQz7UnuZDNZxtMw==\"}",
- "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\"}"
- } ],
- "disableableCredentialTypes" : [ ],
- "requiredActions" : [ ],
- "realmRoles" : [ "offline_access", "uma_authorization" ],
- "clientRoles" : {
- "account" : [ "view-profile", "manage-account" ]
- },
- "groups" : [ "/ClusterManager Group" ]
- }, {
- "username" : "susan",
- "enabled" : true,
- "totp" : false,
- "emailVerified" : true,
- "firstName" : "Susan",
- "email" : "susan@strimzi.io",
- "credentials" : [ {
- "type" : "password",
- "value" : "susan-password"
- } ],
- "disableableCredentialTypes" : [ ],
- "requiredActions" : [ ],
- "realmRoles" : [ "offline_access", "uma_authorization" ],
- "clientRoles" : {
- "account" : [ "view-profile", "manage-account" ]
- },
- "groups" : [ "/ClusterUser Group" ]
- }, {
- "username" : "bob",
- "enabled" : true,
- "totp" : false,
- "emailVerified" : true,
- "firstName" : "Bob",
- "email" : "bob@strimzi.io",
- "credentials" : [ {
- "type" : "password",
- "secretData" : "{\"value\":\"QhK0uLsKuBDrMm9Z9XHvq4EungecFRnktPgutfjKtgVv2OTPd8D390RXFvJ8KGvqIF8pdoNxHYQyvDNNwMORpg==\",\"salt\":\"yxkgwEyTnCGLn42Yr9GxBQ==\"}",
- "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\"}"
- } ],
- "disableableCredentialTypes" : [ ],
- "requiredActions" : [ ],
- "realmRoles" : [ "offline_access", "uma_authorization" ],
- "clientRoles" : {
- "account" : [ "view-profile", "manage-account" ]
- },
- "groups" : [ "/ClusterManager-cluster2 Group" ]
- },
- {
- "username" : "service-account-team-a-client",
- "enabled" : true,
- "serviceAccountClientId" : "team-a-client",
- "realmRoles" : [ "offline_access", "Dev Team A" ],
- "clientRoles" : {
- "account" : [ "manage-account", "view-profile" ]
- },
- "groups" : [ ]
- },
- {
- "username" : "service-account-team-b-client",
- "enabled" : true,
- "serviceAccountClientId" : "team-b-client",
- "realmRoles" : [ "offline_access", "Dev Team B" ],
- "clientRoles" : {
- "account" : [ "manage-account", "view-profile" ]
- },
- "groups" : [ ]
- }
- ],
- "clients": [
- {
- "clientId": "team-a-client",
- "enabled": true,
- "clientAuthenticatorType": "client-secret",
- "secret": "team-a-client-secret",
- "bearerOnly": false,
- "consentRequired": false,
- "standardFlowEnabled": false,
- "implicitFlowEnabled": false,
- "directAccessGrantsEnabled": true,
- "serviceAccountsEnabled": true,
- "publicClient": false,
- "fullScopeAllowed": true
- },
- {
- "clientId": "team-b-client",
- "enabled": true,
- "clientAuthenticatorType": "client-secret",
- "secret": "team-b-client-secret",
- "bearerOnly": false,
- "consentRequired": false,
- "standardFlowEnabled": false,
- "implicitFlowEnabled": false,
- "directAccessGrantsEnabled": true,
- "serviceAccountsEnabled": true,
- "publicClient": false,
- "fullScopeAllowed": true
- },
- {
- "clientId": "kafka",
- "enabled": true,
- "clientAuthenticatorType": "client-secret",
- "secret": "kafka-secret",
- "bearerOnly": false,
- "consentRequired": false,
- "standardFlowEnabled": false,
- "implicitFlowEnabled": false,
- "directAccessGrantsEnabled": true,
- "serviceAccountsEnabled": true,
- "authorizationServicesEnabled": true,
- "publicClient": false,
- "fullScopeAllowed": true,
- "authorizationSettings": {
- "allowRemoteResourceManagement": true,
- "policyEnforcementMode": "ENFORCING",
- "resources": [
- {
- "name": "Topic:a_*",
- "type": "Topic",
- "ownerManagedAccess": false,
- "displayName": "Topics that start with a_",
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "Create"
- },
- {
- "name": "Delete"
- },
- {
- "name": "Describe"
- },
- {
- "name": "Write"
- },
- {
- "name": "Read"
- },
- {
- "name": "Alter"
- },
- {
- "name": "DescribeConfigs"
- },
- {
- "name": "AlterConfigs"
- }
- ]
- },
- {
- "name": "Group:x_*",
- "type": "Group",
- "ownerManagedAccess": false,
- "displayName": "Consumer groups that start with x_",
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "Describe"
- },
- {
- "name": "Delete"
- },
- {
- "name": "Read"
- }
- ]
- },
- {
- "name": "Topic:x_*",
- "type": "Topic",
- "ownerManagedAccess": false,
- "displayName": "Topics that start with x_",
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "Create"
- },
- {
- "name": "Describe"
- },
- {
- "name": "Delete"
- },
- {
- "name": "Write"
- },
- {
- "name": "Read"
- },
- {
- "name": "Alter"
- },
- {
- "name": "DescribeConfigs"
- },
- {
- "name": "AlterConfigs"
- }
- ]
- },
- {
- "name": "Group:a_*",
- "type": "Group",
- "ownerManagedAccess": false,
- "displayName": "Groups that start with a_",
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "Describe"
- },
- {
- "name": "Read"
- }
- ]
- },
- {
- "name": "Group:*",
- "type": "Group",
- "ownerManagedAccess": false,
- "displayName": "Any group",
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "Delete"
- },
- {
- "name": "Describe"
- },
- {
- "name": "Read"
- },
- {
- "name": "DescribeConfigs"
- },
- {
- "name": "AlterConfigs"
- }
- ]
- },
- {
- "name": "Topic:*",
- "type": "Topic",
- "ownerManagedAccess": false,
- "displayName": "Any topic",
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "Create"
- },
- {
- "name": "Delete"
- },
- {
- "name": "Describe"
- },
- {
- "name": "Write"
- },
- {
- "name": "Read"
- },
- {
- "name": "Alter"
- },
- {
- "name": "DescribeConfigs"
- },
- {
- "name": "AlterConfigs"
- }
- ]
- },
- {
- "name": "kafka-cluster:cluster2,Topic:b_*",
- "type": "Topic",
- "ownerManagedAccess": false,
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "Create"
- },
- {
- "name": "Delete"
- },
- {
- "name": "Describe"
- },
- {
- "name": "Write"
- },
- {
- "name": "Read"
- },
- {
- "name": "Alter"
- },
- {
- "name": "DescribeConfigs"
- },
- {
- "name": "AlterConfigs"
- }
- ]
- },
- {
- "name": "kafka-cluster:cluster2,Cluster:*",
- "type": "Cluster",
- "ownerManagedAccess": false,
- "displayName": "Cluster scope on cluster2",
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "DescribeConfigs"
- },
- {
- "name": "AlterConfigs"
- },
- {
- "name": "ClusterAction"
- }
- ]
- },
- {
- "name": "kafka-cluster:cluster2,Group:*",
- "type": "Group",
- "ownerManagedAccess": false,
- "displayName": "Any group on cluster2",
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "Delete"
- },
- {
- "name": "Describe"
- },
- {
- "name": "Read"
- },
- {
- "name": "DescribeConfigs"
- },
- {
- "name": "AlterConfigs"
- }
- ]
- },
- {
- "name": "kafka-cluster:cluster2,Topic:*",
- "type": "Topic",
- "ownerManagedAccess": false,
- "displayName": "Any topic on cluster2",
- "attributes": {},
- "uris": [],
- "scopes": [
- {
- "name": "Create"
- },
- {
- "name": "Delete"
- },
- {
- "name": "Describe"
- },
- {
- "name": "Write"
- },
- {
- "name": "IdempotentWrite"
- },
- {
- "name": "Read"
- },
- {
- "name": "Alter"
- },
- {
- "name": "DescribeConfigs"
- },
- {
- "name": "AlterConfigs"
- }
- ]
- },
- {
- "name" : "Cluster:*",
- "type" : "Cluster",
- "ownerManagedAccess" : false,
- "attributes" : { },
- "uris" : [ ],
- "scopes": [
- {
- "name": "Alter"
- },
- {
- "name": "Describe"
- }
- ]
- }
- ],
- "policies": [
- {
- "name": "Dev Team B",
- "type": "role",
- "logic": "POSITIVE",
- "decisionStrategy": "UNANIMOUS",
- "config": {
- "roles": "[{\"id\":\"Dev Team B\",\"required\":true}]"
- }
- },
- {
- "name": "Ops Team",
- "type": "role",
- "logic": "POSITIVE",
- "decisionStrategy": "UNANIMOUS",
- "config": {
- "roles": "[{\"id\":\"Ops Team\",\"required\":true}]"
- }
- },
- {
- "name" : "ClusterManager Group",
- "type" : "group",
- "logic" : "POSITIVE",
- "decisionStrategy" : "UNANIMOUS",
- "config" : {
- "groups" : "[{\"path\":\"/ClusterManager Group\",\"extendChildren\":false}]"
- }
- },
- {
- "name" : "ClusterUser Group",
- "type" : "group",
- "logic" : "POSITIVE",
- "decisionStrategy" : "UNANIMOUS",
- "config" : {
- "groups" : "[{\"path\":\"/ClusterUser Group\",\"extendChildren\":false}]"
- }
- }, {
- "name" : "ClusterManager of cluster2 Group",
- "type" : "group",
- "logic" : "POSITIVE",
- "decisionStrategy" : "UNANIMOUS",
- "config" : {
- "groups" : "[{\"path\":\"/ClusterManager-cluster2 Group\",\"extendChildren\":false}]"
- }
- },
- {
- "name": "Dev Team B owns topics that start with b_ on cluster cluster2",
- "type": "resource",
- "logic": "POSITIVE",
- "decisionStrategy": "UNANIMOUS",
- "config": {
- "resources": "[\"kafka-cluster:cluster2,Topic:b_*\"]",
- "applyPolicies": "[\"Dev Team B\"]"
- }
- },
- {
- "name": "Dev Team B can read from topics that start with x_ on any cluster",
- "type": "scope",
- "logic": "POSITIVE",
- "decisionStrategy": "UNANIMOUS",
- "config": {
- "resources": "[\"Topic:x_*\"]",
- "scopes": "[\"Describe\",\"Read\"]",
- "applyPolicies": "[\"Dev Team B\"]"
- }
- },
- {
- "name": "Dev Team B can update consumer group offsets that start with x_ on any cluster",
- "type": "scope",
- "logic": "POSITIVE",
- "decisionStrategy": "UNANIMOUS",
- "config": {
- "resources": "[\"Group:x_*\"]",
- "scopes": "[\"Describe\",\"Read\"]",
- "applyPolicies": "[\"Dev Team B\"]"
- }
- },
- {
- "name" : "ClusterManager Group has full access to manage and affect groups",
- "type" : "resource",
- "logic" : "POSITIVE",
- "decisionStrategy" : "UNANIMOUS",
- "config" : {
- "resources" : "[\"Group:*\"]",
- "applyPolicies" : "[\"ClusterManager Group\"]"
- }
- }, {
- "name" : "ClusterManager Group has full access to manage and affect topics",
- "type" : "resource",
- "logic" : "POSITIVE",
- "decisionStrategy" : "UNANIMOUS",
- "config" : {
- "resources" : "[\"Topic:*\"]",
- "applyPolicies" : "[\"ClusterManager Group\"]"
- }
- }, {
- "name" : "ClusterManager Group has full access to cluster config",
- "type" : "resource",
- "logic" : "POSITIVE",
- "decisionStrategy" : "UNANIMOUS",
- "config" : {
- "resources" : "[\"Cluster:*\"]",
- "applyPolicies" : "[\"ClusterManager Group\"]"
- }
- }
- ],
- "scopes": [
- {
- "name": "Create"
- },
- {
- "name": "Read"
- },
- {
- "name": "Write"
- },
- {
- "name": "Delete"
- },
- {
- "name": "Alter"
- },
- {
- "name": "Describe"
- },
- {
- "name": "ClusterAction"
- },
- {
- "name": "DescribeConfigs"
- },
- {
- "name": "AlterConfigs"
- },
- {
- "name": "IdempotentWrite"
- }
- ],
- "decisionStrategy": "AFFIRMATIVE"
- }
- },
- {
- "clientId": "kafka-cli",
- "enabled": true,
- "clientAuthenticatorType": "client-secret",
- "secret": "kafka-cli-secret",
- "bearerOnly": false,
- "consentRequired": false,
- "standardFlowEnabled": false,
- "implicitFlowEnabled": false,
- "directAccessGrantsEnabled": true,
- "serviceAccountsEnabled": false,
- "publicClient": true,
- "fullScopeAllowed": true
- }
- ]
-}
\ No newline at end of file