diff --git a/pyproject.toml b/pyproject.toml
index c8806c1..8b4d4db 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -125,7 +125,7 @@ addopts = [
     "--cov-report=xml:build/coverage/coverage.xml",
     "--cov-report=term-missing",
     "--numprocesses=auto",
-    "--timeout=30"
+    "--timeout=60"
 ]
 
 
@@ -141,4 +141,4 @@ source = [
 
 [tool.coverage.report]
 show_missing = true
-fail_under = 92
\ No newline at end of file
+fail_under = 92
diff --git a/src/openjd/cli/_common/__init__.py b/src/openjd/cli/_common/__init__.py
index ee26ea1..d970dee 100644
--- a/src/openjd/cli/_common/__init__.py
+++ b/src/openjd/cli/_common/__init__.py
@@ -7,6 +7,7 @@
 from typing import Callable, Literal
 import json
 import yaml
+import os
 
 from ._job_from_template import (
     job_from_template,
@@ -100,14 +101,15 @@ def add(self, name: str, description: str, **kwargs) -> ArgumentParser:
 def generate_job(args: Namespace) -> Job:
     try:
         # Raises: RuntimeError, DecodeValidationError
-        _, template = read_template(args)
+        template_file, template = read_template(args)
         # Raises: RuntimeError
-        sample_job = job_from_template(template, args.job_params if args.job_params else None)
+        return job_from_template(
+            template, args.job_params if args.job_params else None, Path(os.path.abspath(template_file.parent)), Path(os.getcwd())
+        )
     except RuntimeError as rte:
         raise RuntimeError(f"ERROR generating Job: {str(rte)}")
     except DecodeValidationError as dve:
         raise RuntimeError(f"ERROR validating template: {str(dve)}")
-    return sample_job
 
 
 @dataclass
diff --git a/src/openjd/cli/_common/_job_from_template.py b/src/openjd/cli/_common/_job_from_template.py
index 86f7470..c0f3317 100644
--- a/src/openjd/cli/_common/_job_from_template.py
+++ b/src/openjd/cli/_common/_job_from_template.py
@@ -159,7 +159,12 @@ def get_task_params(arguments: list[list[str]]) -> list[dict[str, str]]:
     return all_parameter_sets
 
 
-def job_from_template(template: JobTemplate, parameter_args: list[str] | None = None) -> Job:
+def job_from_template(
+    template: JobTemplate,
+    parameter_args: list[str] | None,
+    job_template_dir: Path,
+    current_working_dir: Path,
+) -> Job:
     """
     Given a decoded Job Template and a user-inputted parameter dictionary,
     generates a Job object.
@@ -170,7 +175,10 @@ def job_from_template(template: JobTemplate, parameter_args: list[str] | None =
 
     try:
         parameter_values = preprocess_job_parameters(
-            job_template=template, job_parameter_values=parameter_dict
+            job_template=template,
+            job_parameter_values=parameter_dict,
+            job_template_dir=job_template_dir,
+            current_working_dir=current_working_dir,
         )
     except ValueError as ve:
         raise RuntimeError(f"Parameters can't be used with Template: {str(ve)}")
diff --git a/test/openjd/cli/conftest.py b/test/openjd/cli/conftest.py
index 6227260..0bfcd9f 100644
--- a/test/openjd/cli/conftest.py
+++ b/test/openjd/cli/conftest.py
@@ -1,6 +1,9 @@
 # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 
+import os
 import pytest
+import tempfile
+from pathlib import Path
 from unittest.mock import patch
 
 from . import MOCK_TEMPLATE, SampleSteps
@@ -10,18 +13,36 @@
 
 
 @pytest.fixture(scope="function", params=[[], ["Message='A new message!'"]])
-def sample_job(request):
+def sample_job_and_dirs(request):
     """
     Uses the MOCK_TEMPLATE object to create a Job,
     once with default parameters and once with user-specified parameters.
+
+    This fixture also manages the life time of a temporary directory that's
+    used for the job template dir and the current working directory.
     """
-    template = decode_template(template=MOCK_TEMPLATE)
-    return job_from_template(template=template, parameter_args=request.param)
+    with tempfile.TemporaryDirectory() as tmpdir:
+        template_dir = Path(tmpdir) / "template_dir"
+        current_working_dir = Path(tmpdir) / "current_working_dir"
+        os.makedirs(template_dir)
+        os.makedirs(current_working_dir)
+
+        template = decode_template(template=MOCK_TEMPLATE)
+        yield (
+            job_from_template(
+                template=template,
+                parameter_args=request.param,
+                job_template_dir=template_dir,
+                current_working_dir=current_working_dir,
+            ),
+            template_dir,
+            current_working_dir,
+        )
 
 
 @pytest.fixture(scope="function")
-def sample_step_map(sample_job):
-    return {step.name: step for step in sample_job.steps}
+def sample_step_map(sample_job_and_dirs):
+    return {step.name: step for step in sample_job_and_dirs[0].steps}
 
 
 @pytest.fixture(
diff --git a/test/openjd/cli/test_common.py b/test/openjd/cli/test_common.py
index f8dc9a3..48dca3d 100644
--- a/test/openjd/cli/test_common.py
+++ b/test/openjd/cli/test_common.py
@@ -28,6 +28,21 @@
 )
 
 
+@pytest.fixture(scope="function")
+def template_dir_and_cwd():
+    """
+    This fixture manages the life time of a temporary directory that's
+    used for the job template dir and the current working directory.
+    """
+    with tempfile.TemporaryDirectory() as tmpdir:
+        template_dir = Path(tmpdir) / "template_dir"
+        current_working_dir = Path(tmpdir) / "current_working_dir"
+        os.makedirs(template_dir)
+        os.makedirs(current_working_dir)
+
+        yield (template_dir, current_working_dir)
+
+
 @patch("openjd.cli._common._validation_utils.decode_template")
 def test_decode_template_called(mock_decode_template: Mock):
     """
@@ -335,13 +350,16 @@ def test_get_job_params_error(
         ),
     ],
 )
-def test_job_from_template_success(mock_params: list, expected_job_name: str, template_dict: dict):
+def test_job_from_template_success(
+    mock_params: list, expected_job_name: str, template_dict: dict, template_dir_and_cwd: tuple
+):
     """
     Test that `job_from_template` creates a Job with the provided parameters.
 
     """
+    template_dir, current_working_dir = template_dir_and_cwd
     template = decode_template(template=template_dict)
-    result = job_from_template(template, mock_params)
+    result = job_from_template(template, mock_params, template_dir, current_working_dir)
     assert result.name == expected_job_name
     assert result.steps == template.steps
     if result.parameters:
@@ -377,15 +395,18 @@ def test_job_from_template_success(mock_params: list, expected_job_name: str, te
         ),
     ],
 )
-def test_job_from_template_error(mock_params: list, template_dict: dict, expected_error: str):
+def test_job_from_template_error(
+    mock_params: list, template_dict: dict, expected_error: str, template_dir_and_cwd: tuple
+):
     """
     Test that errors thrown by `job_from_template` have expected information
 
     """
+    template_dir, current_working_dir = template_dir_and_cwd
     template = decode_template(template=template_dict)
 
     with pytest.raises(RuntimeError) as rte:
-        job_from_template(template, mock_params)
+        job_from_template(template, mock_params, template_dir, current_working_dir)
 
     assert expected_error in str(rte.value)
 
@@ -563,6 +584,8 @@ def test_generate_job_success(
         new=Mock(side_effect=job_from_template),
     ) as patched_job_from_template:
         generate_job(mock_args)
-        patched_job_from_template.assert_called_once_with(ANY, expected_param_list)
+        patched_job_from_template.assert_called_once_with(
+            ANY, expected_param_list, Path(temp_template.name).parent, Path(os.getcwd())
+        )
 
     Path(temp_template.name).unlink()
diff --git a/test/openjd/cli/test_local_session.py b/test/openjd/cli/test_local_session.py
index 4d010c8..e322b00 100644
--- a/test/openjd/cli/test_local_session.py
+++ b/test/openjd/cli/test_local_session.py
@@ -36,7 +36,7 @@ def patched_actions():
     yield patched_enter, patched_run, patched_exit, patched_callback
 
 
-@pytest.mark.usefixtures("sample_job")
+@pytest.mark.usefixtures("sample_job_and_dirs")
 @pytest.mark.parametrize(
     "given_parameters,expected_parameters",
     [
@@ -63,11 +63,12 @@
     ],
 )
 def test_generate_task_parameter_set(
-    sample_job: Job, given_parameters: dict, expected_parameters: dict
+    sample_job_and_dirs: tuple, given_parameters: dict, expected_parameters: dict
 ):
     """
     Test that a LocalSession can generate Task parameters given valid user input.
     """
+    sample_job, template_dir, current_working_dir = sample_job_and_dirs
     with LocalSession(job=sample_job, session_id="my-session") as session:
         # Convince the type checker that `parameterSpace` exists
         param_space = sample_job.steps[SampleSteps.TaskParamStep].parameterSpace
@@ -82,10 +83,10 @@
     )
 
 
-@pytest.mark.usefixtures("sample_job")
+@pytest.mark.usefixtures("sample_job_and_dirs")
 @pytest.mark.parametrize(*SESSION_PARAMETERS)
 def test_localsession_initialize(
-    sample_job: Job,
+    sample_job_and_dirs: tuple,
     dependency_indexes: list[int],
     step_index: int,
     maximum_tasks: int,
@@ -97,6 +98,7 @@
     Test that initializing the local Session clears the `ended` flag, only generates
     Task parameters when necessary, and adds to the Action queue appropriately.
     """
+    sample_job, template_dir, current_working_dir = sample_job_and_dirs
     with LocalSession(job=sample_job, session_id="my-session") as session:
         with patch.object(
             LocalSession,
@@ -121,9 +123,10 @@
     assert session._action_queue.qsize() == 2 * num_expected_environments + num_expected_tasks
 
 
-@pytest.mark.usefixtures("sample_job")
-def test_localsession_traps_sigint(sample_job: Job):
+@pytest.mark.usefixtures("sample_job_and_dirs")
+def test_localsession_traps_sigint(sample_job_and_dirs: tuple):
     # Make sure that we hook up, and remove the signal handler when using the local session
+    sample_job, template_dir, current_working_dir = sample_job_and_dirs
 
     # GIVEN
     with patch.object(local_session_mod, "signal") as signal_mod:
@@ -143,10 +146,10 @@
     )
 
 
-@pytest.mark.usefixtures("sample_job", "capsys")
+@pytest.mark.usefixtures("sample_job_and_dirs", "capsys")
 @pytest.mark.parametrize(*SESSION_PARAMETERS)
 def test_localsession_run_success(
-    sample_job: Job,
+    sample_job_and_dirs: tuple,
     capsys: pytest.CaptureFixture,
     dependency_indexes: list[int],
     step_index: int,
@@ -159,6 +162,7 @@
     Test that calling `run` causes the local Session to iterate through
     the actions defined in the Job.
     """
+    sample_job, template_dir, current_working_dir = sample_job_and_dirs
     with LocalSession(job=sample_job, session_id="my-session") as session:
         session.initialize(
             dependencies=[sample_job.steps[i] for i in dependency_indexes],
@@ -182,11 +186,12 @@
     )
 
 
-@pytest.mark.usefixtures("sample_job")
-def test_localsession_run_not_ready(sample_job: Job):
+@pytest.mark.usefixtures("sample_job_and_dirs")
+def test_localsession_run_not_ready(sample_job_and_dirs: tuple):
     """
     Test that a LocalSession throws an error when it is not in the "READY" state.
     """
+    sample_job, template_dir, current_working_dir = sample_job_and_dirs
     with LocalSession(job=sample_job, session_id="my-session") as session:
         with patch.object(Session, "state", new=SessionState.ENDED), pytest.raises(
             RuntimeError
@@ -196,11 +201,12 @@
     assert "not in a READY state" in str(rte.value)
 
 
-@pytest.mark.usefixtures("sample_job", "capsys")
-def test_localsession_run_failed(sample_job: Job, capsys: pytest.CaptureFixture):
+@pytest.mark.usefixtures("sample_job_and_dirs", "capsys")
+def test_localsession_run_failed(sample_job_and_dirs: tuple, capsys: pytest.CaptureFixture):
     """
     Test that a LocalSession can gracefully handle an error in its inner Session.
     """
+    sample_job, template_dir, current_working_dir = sample_job_and_dirs
     with LocalSession(job=sample_job, session_id="bad-session") as session:
         session.initialize(dependencies=[], step=sample_job.steps[SampleSteps.BadCommand])
         session.run()
diff --git a/test/openjd/cli/test_run_command.py b/test/openjd/cli/test_run_command.py
index 37af056..1da39b6 100644
--- a/test/openjd/cli/test_run_command.py
+++ b/test/openjd/cli/test_run_command.py
@@ -2,7 +2,7 @@
 
 from argparse import Namespace
 import json
-from pathlib import Path, PureWindowsPath, PurePosixPath
+from pathlib import Path, PurePath, PureWindowsPath, PurePosixPath
 import tempfile
 
 import pytest
@@ -107,7 +107,7 @@ def test_do_run_path_mapping_rules():
         PathMappingRule(
             source_path_format=PathFormat.WINDOWS,
             source_path=PureWindowsPath(r"C:\test"),
-            destination_path=PurePosixPath("/mnt/test"),
+            destination_path=PurePath("/mnt/test"),
         )
     ]
 
@@ -187,7 +187,7 @@ def test_do_run_nonexistent_step(capsys: pytest.CaptureFixture):
     Path(temp_template.name).unlink()
 
 
-@pytest.mark.usefixtures("sample_job", "sample_step_map", "patched_session_cleanup", "capsys")
+@pytest.mark.usefixtures("sample_job_and_dirs", "sample_step_map", "patched_session_cleanup", "capsys")
 @pytest.mark.parametrize(
     "step_index,dependency_indexes,should_run_dependencies",
     [
@@ -226,7 +226,7 @@
     ],
 )
 def test_run_local_session_success(
-    sample_job: Job,
+    sample_job_and_dirs: tuple,
     sample_step_map: dict,
     patched_session_cleanup: Mock,
     capsys: pytest.CaptureFixture,
@@ -240,6 +240,7 @@
     Note that we don't need to test with custom Task parameters, as those are tested
     within the `LocalSession` object.
     """
+    sample_job, template_dir, current_working_dir = sample_job_and_dirs
     path_mapping_rules = [
         PathMappingRule(
             source_path_format=PathFormat.WINDOWS,
@@ -273,7 +274,7 @@
     patched_session_cleanup.assert_called()
 
 
-@pytest.mark.usefixtures("sample_job", "sample_step_map")
+@pytest.mark.usefixtures("sample_job_and_dirs", "sample_step_map")
 @pytest.mark.parametrize(
     "step_index,should_run_dependencies,expected_error",
     [
@@ -289,7 +290,7 @@
     ],
 )
 def test_run_local_session_failed(
-    sample_job: Job,
+    sample_job_and_dirs: tuple,
     sample_step_map: dict,
     step_index: int,
     should_run_dependencies: bool,
@@ -298,6 +299,7 @@
     """
     Test the output of a Session that finishes after encountering errors.
     """
+    sample_job, template_dir, current_working_dir = sample_job_and_dirs
     response = _run_local_session(
         job=sample_job,
         step_map=sample_step_map,