diff --git a/charms/argo-controller/tests/integration/test_charm.py b/charms/argo-controller/tests/integration/test_charm.py
index bc147b9..d1c4106 100644
--- a/charms/argo-controller/tests/integration/test_charm.py
+++ b/charms/argo-controller/tests/integration/test_charm.py
@@ -11,17 +11,28 @@
 import yaml
 from pytest_operator.plugin import OpsTest
 
-log = logging.getLogger(__name__)
-
 METADATA = yaml.safe_load(Path("./metadata.yaml").read_text())
 CHARM_ROOT = "."
-APP_NAME = "argo-controller"
-
+ARGO_CONTROLLER = "argo-controller"
+ARGO_CONTROLLER_TRUST = True
+MINIO = "minio"
+MINIO_CHANNEL = "ckf-1.8/stable"
 MINIO_CONFIG = {
     "access-key": "minio",
     "secret-key": "minio-secret-key",
 }
+PROMETHEUS_K8S = "prometheus-k8s"
+PROMETHEUS_K8S_CHANNEL = "1.0/stable"
+PROMETHEUS_K8S_TRUST = True
+GRAFANA_K8S = "grafana-k8s"
+GRAFANA_K8S_CHANNEL = "1.0/stable"
+GRAFANA_K8S_TRUST = True
+PROMETHEUS_SCRAPE_K8S = "prometheus-scrape-config-k8s"
+PROMETHEUS_SCRAPE_K8S_CHANNEL = "1.0/stable"
+PROMETHEUS_SCRAPE_CONFIG = {"scrape_interval": "30s"}
+log = logging.getLogger(__name__)
+
 
 
 @pytest.mark.abort_on_fail
 async def test_build_and_deploy_with_relations(ops_test: OpsTest):
@@ -33,21 +44,23 @@ async def test_build_and_deploy_with_relations(ops_test: OpsTest):
 
     await ops_test.model.deploy(
         entity_url=built_charm_path,
-        application_name=APP_NAME,
+        application_name=ARGO_CONTROLLER,
         resources=resources,
-        trust=True,
+        trust=ARGO_CONTROLLER_TRUST,
     )
 
     # Deploy required relations
-    await ops_test.model.deploy(entity_url="minio", config=MINIO_CONFIG)
-    await ops_test.model.add_relation(f"{APP_NAME}:object-storage", "minio:object-storage")
+    await ops_test.model.deploy(entity_url=MINIO, config=MINIO_CONFIG)
+    await ops_test.model.add_relation(
+        f"{ARGO_CONTROLLER}:object-storage", f"{MINIO}:object-storage"
+    )
 
     await ops_test.model.wait_for_idle(timeout=60 * 10)
 
     # TODO: This does not handle blocked status right.  Sometimes it passes when argo-controller
     #  is still setting up
     # The unit should be active before creating/testing resources
-    await ops_test.model.wait_for_idle(apps=[APP_NAME], status="active", timeout=1000)
+    await ops_test.model.wait_for_idle(apps=[ARGO_CONTROLLER], status="active", timeout=1000)
 
 
 async def create_artifact_bucket(ops_test: OpsTest):
@@ -60,7 +73,7 @@ async def create_artifact_bucket(ops_test: OpsTest):
     bucket = "mlpipeline"
 
     minio_cmd = (
-        f"mc alias set {alias} {url} {MINIO_CONFIG['access-key']} {MINIO_CONFIG['secret-key']}"
+        f"mc alias set {alias} {url} {MINIO_CONFIG['access-key']} {MINIO_CONFIG['secret-key']}"  # noqa
         f"&& mc mb {alias}/{bucket} -p"
     )
     kubectl_cmd = (
@@ -132,31 +145,43 @@ async def test_workflow_using_artifacts(ops_test: OpsTest):
 
 async def test_prometheus_grafana_integration(ops_test: OpsTest):
     """Deploy prometheus, grafana and required relations, then test the metrics."""
-    prometheus = "prometheus-k8s"
-    grafana = "grafana-k8s"
-    prometheus_scrape = "prometheus-scrape-config-k8s"
-    scrape_config = {"scrape_interval": "30s"}
-
     # Deploy and relate prometheus
-    await ops_test.model.deploy(prometheus, channel="latest/stable", trust=True)
-    await ops_test.model.deploy(grafana, channel="latest/stable", trust=True)
-    await ops_test.model.deploy(prometheus_scrape, channel="latest/stable", config=scrape_config)
+    await ops_test.model.deploy(
+        PROMETHEUS_K8S,
+        channel=PROMETHEUS_K8S_CHANNEL,
+        trust=PROMETHEUS_K8S_TRUST,
+    )
+    await ops_test.model.deploy(
+        GRAFANA_K8S,
+        channel=GRAFANA_K8S_CHANNEL,
+        trust=GRAFANA_K8S_TRUST,
+    )
+    await ops_test.model.deploy(
+        PROMETHEUS_SCRAPE_K8S,
+        channel=PROMETHEUS_SCRAPE_K8S_CHANNEL,
+        config=PROMETHEUS_SCRAPE_CONFIG,
+    )
 
-    await ops_test.model.add_relation(APP_NAME, prometheus_scrape)
+    await ops_test.model.add_relation(ARGO_CONTROLLER, PROMETHEUS_SCRAPE_K8S)
     await ops_test.model.add_relation(
-        f"{prometheus}:grafana-dashboard", f"{grafana}:grafana-dashboard"
+        f"{PROMETHEUS_K8S}:grafana-dashboard",
+        f"{GRAFANA_K8S}:grafana-dashboard",
     )
     await ops_test.model.add_relation(
-        f"{APP_NAME}:grafana-dashboard", f"{grafana}:grafana-dashboard"
+        f"{ARGO_CONTROLLER}:grafana-dashboard",
+        f"{GRAFANA_K8S}:grafana-dashboard",
    )
     await ops_test.model.add_relation(
-        f"{prometheus}:metrics-endpoint", f"{prometheus_scrape}:metrics-endpoint"
+        f"{PROMETHEUS_K8S}:metrics-endpoint",
+        f"{PROMETHEUS_SCRAPE_K8S}:metrics-endpoint",
     )
 
     await ops_test.model.wait_for_idle(status="active", timeout=60 * 20)
 
     status = await ops_test.model.get_status()
-    prometheus_unit_ip = status["applications"][prometheus]["units"][f"{prometheus}/0"]["address"]
+    prometheus_unit_ip = status["applications"][PROMETHEUS_K8S]["units"][f"{PROMETHEUS_K8S}/0"][
+        "address"
+    ]
     log.info(f"Prometheus available at http://{prometheus_unit_ip}:9090")
 
     for attempt in retry_for_5_attempts:
@@ -166,7 +191,7 @@ async def test_prometheus_grafana_integration(ops_test: OpsTest):
         with attempt:
             r = requests.get(
                 f"http://{prometheus_unit_ip}:9090/api/v1/query?"
-                f'query=up{{juju_application="{APP_NAME}"}}'
+                f'query=up{{juju_application="{ARGO_CONTROLLER}"}}'
             )
             response = json.loads(r.content.decode("utf-8"))
             response_status = response["status"]
@@ -174,7 +199,7 @@ async def test_prometheus_grafana_integration(ops_test: OpsTest):
             assert response_status == "success"
 
             response_metric = response["data"]["result"][0]["metric"]
-            assert response_metric["juju_application"] == APP_NAME
+            assert response_metric["juju_application"] == ARGO_CONTROLLER
             assert response_metric["juju_model"] == ops_test.model_name
 
     # Verify that Prometheus receives the same set of targets as specified.