diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 21452017f..6e2e9c6f1 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -44,11 +44,5 @@ jobs:
         uses: actions/upload-artifact@v2
         if: ${{ always() }}
         with:
-          name: logs
-          # This directory is created by the fixture session_tmppath. The path is composed of a
-          # prefix defined by --basepath, followed by a folder name, in this case it is "karapace".
-          #
-          # See:
-          # - https://docs.pytest.org/en/6.2.x/tmpdir.html#base-temporary-directory
-          # - fixture session_tmppath in tests/integration/conftest.py
-          path: /tmp/pytest*
+          name: logs ${{ matrix.python-version }}
+          path: /tmp/ci-logs
diff --git a/tests/conftest.py b/tests/conftest.py
index 007babfb5..d6dd4b0d5 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -54,6 +54,7 @@ def split_by_comma(arg: str) -> List[str]:
 def pytest_addoption(parser, pluginmanager) -> None:  # pylint: disable=unused-argument
     parser.addoption("--kafka-bootstrap-servers", type=split_by_comma)
     parser.addoption("--kafka-version", default=KAFKA_VERSION)
+    parser.addoption("--log-dir")
     parser.addoption("--registry-url")
     parser.addoption("--rest-url")
     parser.addoption("--server-ca")
@@ -83,13 +84,36 @@ def fixture_validate_options(request) -> None:
         raise ValueError(msg)
 
 
-@pytest.fixture(scope="session", name="session_tmppath")
-def fixture_session_tmppath(tmp_path_factory) -> Path:
-    return tmp_path_factory.mktemp("karapace")
+@pytest.fixture(scope="session", name="session_datadir")
+def fixture_session_datadir(tmp_path_factory) -> Path:
+    """Data files generated throughout the tests should be stored here.
+
+    These files are NOT persisted.
+    """
+    return tmp_path_factory.mktemp("data")
+
+
+@pytest.fixture(scope="session", name="session_logdir")
+def fixture_session_logdir(request, tmp_path_factory, worker_id) -> Path:
+    """All useful log data for debugging should be stored here.
+
+    These files are persisted by the CI for debugging purposes.
+    """
+    log_dir = request.config.getoption("log_dir")
+
+    if log_dir is None and worker_id == "master":
+        path = tmp_path_factory.mktemp("log")
+    elif log_dir is None:
+        path = tmp_path_factory.getbasetemp().parent / "log"
+        path.mkdir(parents=True, exist_ok=True)
+    else:
+        path = Path(log_dir)
+        path.mkdir(parents=True, exist_ok=True)
+    return path
 
 
 @pytest.fixture(scope="session", name="default_config_path")
-def fixture_default_config(session_tmppath: Path) -> str:
-    path = session_tmppath / "karapace_config.json"
+def fixture_default_config(session_logdir: Path) -> str:
+    path = session_logdir / "karapace_config.json"
     path.write_text(ujson.dumps({"registry_host": "localhost", "registry_port": 8081}))
     return str(path)
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 235f7c341..35793fe99 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -176,7 +176,12 @@ def fixture_kafka_description(request) -> KafkaDescription:
 
 
 @pytest.fixture(scope="session", name="kafka_servers")
-def fixture_kafka_server(request, session_tmppath: Path, kafka_description: KafkaDescription) -> Iterator[KafkaServers]:
+def fixture_kafka_server(
+    request,
+    session_datadir: Path,
+    session_logdir: Path,
+    kafka_description: KafkaDescription,
+) -> Iterator[KafkaServers]:
     bootstrap_servers = request.config.getoption("kafka_bootstrap_servers")
 
     if bootstrap_servers:
@@ -185,9 +190,8 @@ def fixture_kafka_server(request, session_tmppath: Path, kafka_description: Kafk
         yield kafka_servers
         return
 
-    kafka_dir = session_tmppath / "kafka"
-    zk_dir = session_tmppath / "zk"
-    transfer_file = session_tmppath / "zk_kafka_config"
+    zk_dir = session_logdir / "zk"
+    transfer_file = session_logdir / "zk_kafka_config"
 
     with ExitStack() as stack:
         # Synchronize xdist workers, data generated by the winner is shared through
@@ -212,7 +216,8 @@ def fixture_kafka_server(request, session_tmppath: Path, kafka_description: Kafk
             wait_for_port(zk_config.client_port, zk_proc, wait_time=20)
 
             kafka_config, kafka_proc = configure_and_start_kafka(
-                kafka_dir,
+                session_datadir,
+                session_logdir,
                 zk_config,
                 kafka_description,
             )
@@ -581,16 +586,17 @@ def get_java_process_configuration(java_args: List[str]) -> List[str]:
 
 
 def configure_and_start_kafka(
-    kafka_dir: Path,
+    datadir: Path,
+    logdir: Path,
     zk: ZKConfig,
     kafka_description: KafkaDescription,
 ) -> Tuple[KafkaConfig, Popen]:
     # setup filesystem
-    data_dir = kafka_dir / "data"
-    config_dir = kafka_dir / "config"
-    config_path = config_dir / "server.properties"
+    data_dir = datadir / "kafka"
+    log_dir = logdir / "kafka"
+    config_path = log_dir / "server.properties"
     data_dir.mkdir(parents=True)
-    config_dir.mkdir(parents=True)
+    log_dir.mkdir(parents=True)
 
     plaintext_port = get_random_port(port_range=KAFKA_PORT_RANGE, blacklist=[])
 
@@ -653,7 +659,7 @@ def configure_and_start_kafka(
     kafka_cmd = get_java_process_configuration(
         java_args=kafka_java_args(
             heap_mb=256,
-            logs_dir=str(kafka_dir),
+            logs_dir=str(log_dir),
             log4j_properties_path=log4j_properties_path,
             kafka_config_path=str(config_path),
             kafka_description=kafka_description,