
Use dependabot for test image updates, rm obsolete/cleanup test code (#1350)

Signed-off-by: Michael Edgar <[email protected]>
MikeEdgar authored Jan 14, 2025
1 parent 143c0d6 commit 806e27c
Showing 20 changed files with 90 additions and 1,402 deletions.
5 changes: 5 additions & 0 deletions .github/dependabot.yml
@@ -22,6 +22,11 @@ updates:
     schedule:
       interval: weekly
 
+  - package-ecosystem: "docker"
+    directory: "/api/src/test/resources"
+    schedule:
+      interval: weekly
+
   - package-ecosystem: "npm"
     directory: "/ui"
     schedule:
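
Dependabot's docker ecosystem watches Dockerfiles in the configured directory and opens PRs that bump the image referenced in their FROM lines. The directory added here holds the Dockerfile.kafka that the reworked DeploymentManager (further down in this diff) reads its Kafka image from, so a Dependabot bump flows straight into the test run. A minimal sketch of that lookup, assuming a classpath resource /Dockerfile.kafka whose first line is a FROM instruction; the helper class name is illustrative, and the tag shown is only the image the project pinned before this change, not the file's actual contents:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UncheckedIOException;

public class TestImageResolver {

    /**
     * Resolves the test image from the first line of a classpath Dockerfile,
     * e.g. a /Dockerfile.kafka containing only (assumed example):
     *
     *   FROM quay.io/strimzi/kafka:0.43.0-kafka-3.8.0
     *
     * Dependabot rewrites that FROM line; the tests pick up the new tag automatically.
     */
    public static String resolveImage(String dockerfileResource) {
        try (var in = TestImageResolver.class.getResourceAsStream(dockerfileResource);
                var reader = new BufferedReader(new InputStreamReader(in))) {
            return reader.readLine().substring("FROM ".length());
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}
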
7 changes: 0 additions & 7 deletions api/pom.xml
@@ -18,10 +18,6 @@
     <docker.group>streamshub</docker.group>
     <docker.tag>${project.version}</docker.tag>
     <docker.push>false</docker.push>
-
-    <!-- System test image dependencies -->
-    <keycloak.image>quay.io/keycloak/keycloak:21.1</keycloak.image>
-    <strimzi-kafka.tag>quay.io/strimzi/kafka:0.43.0-kafka-3.8.0</strimzi-kafka.tag>
   </properties>
 
   <dependencies>
@@ -382,9 +378,6 @@
         <configuration>
           <redirectTestOutputToFile>true</redirectTestOutputToFile>
           <systemProperties>
-            <keycloak.image>${keycloak.image}</keycloak.image>
-            <strimzi.test-container.kafka.custom.image>${strimzi-kafka.tag}</strimzi.test-container.kafka.custom.image>
-            <apicurio-registry.version>${apicurio-registry.version}</apicurio-registry.version>
             <java.util.logging.manager>org.jboss.logmanager.LogManager</java.util.logging.manager>
             <maven.home>${maven.home}</maven.home>
             <quarkus.jacoco.reuse-data-file>true</quarkus.jacoco.reuse-data-file>

This file was deleted.

@@ -1,11 +1,11 @@
 package com.github.streamshub.console.kafka.systemtest;
 
-import io.quarkus.test.junit.QuarkusTestProfile;
-
 import java.util.List;
 import java.util.Map;
 
-import com.github.streamshub.console.kafka.systemtest.deployment.KafkaUnsecuredResourceManager;
+import com.github.streamshub.console.kafka.systemtest.deployment.KafkaResourceManager;
 
+import io.quarkus.test.junit.QuarkusTestProfile;
+
 /**
  * Same as profile {@linkplain TestPlainProfile}, but disables Kubernetes use by setting
@@ -17,7 +17,7 @@ public class TestPlainNoK8sProfile extends TestPlainProfile implements QuarkusTe
 
     @Override
     public List<TestResourceEntry> testResources() {
-        return List.of(new TestResourceEntry(KafkaUnsecuredResourceManager.class, Map.of("profile", PROFILE)));
+        return List.of(new TestResourceEntry(KafkaResourceManager.class, Map.of("profile", PROFILE)));
     }
 
     @Override
@@ -9,7 +9,7 @@
 import java.util.List;
 import java.util.Map;
 
-import com.github.streamshub.console.kafka.systemtest.deployment.KafkaUnsecuredResourceManager;
+import com.github.streamshub.console.kafka.systemtest.deployment.KafkaResourceManager;
 import com.github.streamshub.console.kafka.systemtest.deployment.KeycloakResourceManager;
 import com.github.streamshub.console.kafka.systemtest.deployment.StrimziCrdResourceManager;
 
@@ -39,7 +39,7 @@ public List<TestResourceEntry> testResources() {
         return List.of(
                 new TestResourceEntry(StrimziCrdResourceManager.class, Collections.emptyMap(), true),
                 new TestResourceEntry(KeycloakResourceManager.class, Collections.emptyMap(), true),
-                new TestResourceEntry(KafkaUnsecuredResourceManager.class, Map.of("profile", PROFILE), true));
+                new TestResourceEntry(KafkaResourceManager.class, Map.of("profile", PROFILE), true));
     }
 
     @Override
@@ -1,80 +1,53 @@
 package com.github.streamshub.console.kafka.systemtest.deployment;
 
+import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.io.UncheckedIOException;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
-import java.time.Duration;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.Map;
-import java.util.Properties;
 import java.util.UUID;
 
 import org.jboss.logging.Logger;
 import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.BindMode;
-import org.testcontainers.containers.GenericContainer;
 import org.testcontainers.containers.Network;
-import org.testcontainers.containers.SelinuxContext;
 import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.containers.wait.strategy.Wait;
 import org.testcontainers.lifecycle.Startable;
 
+import io.strimzi.test.container.StrimziKafkaContainer;
+
 @SuppressWarnings("resource")
 public class DeploymentManager {
 
     protected static final Logger LOGGER = Logger.getLogger(DeploymentManager.class);
     static final Map<String, String> TEST_CONTAINER_LABELS =
             Collections.singletonMap("test-ident", "systemtest");
 
-    public enum UserType {
-        OWNER("alice"),
-        USER("susan"),
-        OTHER("bob"),
-        INVALID(null);
-
-        String username;
-
-        private UserType(String username) {
-            this.username = username;
-        }
-
-        public String getUsername() {
-            return username;
-        }
-    }
-
     @Retention(RetentionPolicy.RUNTIME)
     public @interface InjectDeploymentManager {
     }
 
-    private final boolean oauthEnabled;
     private final Network testNetwork;
 
-    private GenericContainer<?> keycloakContainer;
-    private KafkaContainer kafkaContainer;
+    private StrimziKafkaContainer kafkaContainer;
 
-    public static DeploymentManager newInstance(boolean oauthEnabled) {
-        return new DeploymentManager(oauthEnabled);
+    public static DeploymentManager newInstance() {
+        return new DeploymentManager();
     }
 
-    private DeploymentManager(boolean oauthEnabled) {
-        this.oauthEnabled = oauthEnabled;
+    private DeploymentManager() {
         this.testNetwork = Network.newNetwork();
     }
 
     private static String name(String prefix) {
         return prefix + '-' + UUID.randomUUID().toString();
     }
 
-    public boolean isOauthEnabled() {
-        return oauthEnabled;
-    }
-
     public void shutdown() {
-        stopAll(kafkaContainer, keycloakContainer);
+        stopAll(kafkaContainer);
     }
 
     private void stopAll(Startable... containers) {
@@ -85,28 +58,9 @@ private void stopAll(Startable... containers) {
         }
     }
 
-    public GenericContainer<?> getKeycloakContainer() {
-        if (keycloakContainer == null) {
-            keycloakContainer = deployKeycloak();
-        }
-
-        return keycloakContainer;
-    }
-
-    public void stopKeycloakContainer() {
-        if (keycloakContainer != null) {
-            keycloakContainer.stop();
-            keycloakContainer = null;
-        }
-    }
-
-    public KafkaContainer getKafkaContainer() {
+    public StrimziKafkaContainer getKafkaContainer() {
         if (kafkaContainer == null) {
-            if (oauthEnabled) {
-                kafkaContainer = deployKafka();
-            } else {
-                kafkaContainer = deployStrimziKafka();
-            }
+            kafkaContainer = deployStrimziKafka();
         }
 
         return kafkaContainer;
@@ -127,68 +81,22 @@ public String getExternalBootstrapServers() {
         return null;
     }
 
-    public GenericContainer<?> deployKeycloak() {
-        LOGGER.info("Deploying keycloak container");
-        String imageName = System.getProperty("keycloak.image");
-
-        GenericContainer<?> container = new GenericContainer<>(imageName)
-            .withLabels(TEST_CONTAINER_LABELS)
-            .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.keycloak"), true))
-            .withCreateContainerCmdModifier(cmd -> cmd.withName(name("keycloak")))
-            .withNetwork(testNetwork)
-            .withNetworkAliases("keycloak")
-            .withExposedPorts(8080)
-            .withEnv(Map.of("KEYCLOAK_ADMIN", "admin",
-                    "KEYCLOAK_ADMIN_PASSWORD", "admin",
-                    "PROXY_ADDRESS_FORWARDING", "true"))
-            .withClasspathResourceMapping("/keycloak/authz-realm.json", "/opt/keycloak/data/import/authz-realm.json", BindMode.READ_WRITE, SelinuxContext.SHARED)
-            .withCommand("start", "--hostname=keycloak", "--hostname-strict-https=false", "--http-enabled=true", "--import-realm")
-            .waitingFor(Wait.forHttp("/realms/kafka-authz").withStartupTimeout(Duration.ofMinutes(5)));
-
-        LOGGER.info("Waiting for keycloak container");
-        container.start();
-        return container;
-    }
-
-    private KafkaContainer deployKafka() {
-        LOGGER.info("Deploying Kafka container");
+    private StrimziKafkaContainer deployStrimziKafka() {
+        String kafkaImage;
 
-        Map<String, String> env = new HashMap<>();
-
-        try (InputStream stream = getClass().getResourceAsStream("/kafka-oauth/env.properties")) {
-            Properties envProps = new Properties();
-            envProps.load(stream);
-            envProps.keySet()
-                .stream()
-                .map(Object::toString)
-                .forEach(key -> env.put(key, envProps.getProperty(key)));
+        try (InputStream in = getClass().getResourceAsStream("/Dockerfile.kafka");
+                BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
+            kafkaImage = reader.readLine().substring("FROM ".length());
         } catch (IOException e) {
             throw new UncheckedIOException(e);
         }
 
-        var container = (KafkaContainer) new KeycloakSecuredKafkaContainer()
-            .withLabels(TEST_CONTAINER_LABELS)
-            .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.oauth-kafka"), true))
-            .withCreateContainerCmdModifier(cmd -> cmd.withName(name("oauth-kafka")))
-            .withEnv(env)
-            .withNetwork(testNetwork)
-            .withClasspathResourceMapping("/kafka-oauth/config/", "/opt/kafka/config/strimzi/", BindMode.READ_WRITE, SelinuxContext.SHARED)
-            .withClasspathResourceMapping("/kafka-oauth/scripts/functions.sh", "/opt/kafka/functions.sh", BindMode.READ_WRITE, SelinuxContext.SHARED)
-            .withClasspathResourceMapping("/kafka-oauth/scripts/simple_kafka_config.sh", "/opt/kafka/simple_kafka_config.sh", BindMode.READ_WRITE, SelinuxContext.SHARED)
-            .withClasspathResourceMapping("/kafka-oauth/scripts/start.sh", "/opt/kafka/start.sh", BindMode.READ_WRITE, SelinuxContext.SHARED)
-            .withCommand("sh", "/opt/kafka/start.sh");
-
-        container.start();
-        return container;
-    }
-
-    private KafkaContainer deployStrimziKafka() {
-        LOGGER.info("Deploying Strimzi Kafka container");
+        LOGGER.infof("Deploying Strimzi Kafka container: %s", kafkaImage);
 
-        var container = (KafkaContainer) new KafkaContainer()
+        var container = new StrimziKafkaContainer(kafkaImage)
             .withLabels(TEST_CONTAINER_LABELS)
-            .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.plain-kafka"), true))
-            .withCreateContainerCmdModifier(cmd -> cmd.withName(name("plain-kafka")))
+            .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("systemtests.kafka"), true))
+            .withCreateContainerCmdModifier(cmd -> cmd.withName(name("kafka")))
            .withKafkaConfigurationMap(Map.of(
                 "auto.create.topics.enable", "false",
                 "group.initial.rebalance.delay.ms", "0"
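
With the image now pinned in Dockerfile.kafka, deployStrimziKafka builds the broker directly from strimzi-test-container's StrimziKafkaContainer rather than a project-local KafkaContainer wrapper. A minimal usage sketch of the same pattern, assuming the image string has already been resolved from the Dockerfile's FROM line as shown earlier; the class name and printed output are illustrative, the hard-coded tag is only the previously pinned image, and getBootstrapServers() is the standard strimzi-test-container accessor for the client address, while the configuration overrides mirror this diff:

import java.util.Map;

import io.strimzi.test.container.StrimziKafkaContainer;

public class KafkaContainerSmokeTest {

    public static void main(String[] args) {
        // Normally resolved from the FROM line of /Dockerfile.kafka; hard-coded here for brevity.
        String kafkaImage = "quay.io/strimzi/kafka:0.43.0-kafka-3.8.0";

        try (StrimziKafkaContainer kafka = new StrimziKafkaContainer(kafkaImage)
                .withKafkaConfigurationMap(Map.of(
                        "auto.create.topics.enable", "false",
                        "group.initial.rebalance.delay.ms", "0"))) {
            kafka.start();
            // Test clients would be pointed at this address.
            System.out.println("bootstrap.servers = " + kafka.getBootstrapServers());
        }
    }
}
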

This file was deleted.

This file was deleted.

