From afef801cf3cf2392b53ab36115dc7a4756116a5f Mon Sep 17 00:00:00 2001
From: Mark Wiebe
Date: Sun, 27 Aug 2023 21:07:14 -0700
Subject: [PATCH] feat!: Add job & fleet CLI usability improvements

BREAKING-CHANGE: This change modifies some of the dataclass property names
that the integrated GUI submitters use.

NOTE: To get the tests passing in GitHub Actions, I had to comment out the
PIP_INDEX_URL configuration in hatch.toml.

- Add a defaults.job_id config setting that is set by the job bundle
  submission functions, so it usually holds the most recently submitted
  job. This means a user can run `deadline job get` right after submitting
  a job to get information about what they just created.
- Modify `deadline fleet get` to accept a queue ID and, when one is
  provided, display all the fleets associated with that queue.

Additional fixes:

- Fix the handle-web-url logic around the farm ID and queue ID. It should
  not have been using the default parameter logic for those IDs.
- Remove the integ testing dependency from requirements-testing.txt so that
  it's easier to run the unit tests in a bare-bones setup, and add
  requirements-integ-testing.txt to make up the difference in the integ
  environment. This takes it one step closer to working on Windows.
- Rearrange the unit vs integ tests, because the previous pattern did not
  work on Windows. There are now separate test/unit and test/integ trees
  that can each be selected via a single file path.
- Remove the deprecated uiHint support. It is removed here because fixing
  the Windows tests led into the path-related code, and removing the
  deprecated uiHint was better than spending the time to fix it for tests.
- Remove the error case for empty PATH parameter values. For an empty value
  to be an error, the parameter definition should set a minLength of 1.
- Slightly adjust the treatment of PATH parameters with a NONE dataFlow.
- Add type annotations to the fus3 class so that it passes mypy.
- Correct some typos of "a" to "an".
- Rename installation_requirements to rez_packages in the settings
  dataclasses.
- Rename the dataclass properties and GUI field names to
  max_failed_tasks_count and max_retries_per_task to match the service API
  choices.
- Fix asset_sync.py to work on Windows.
- Rename the setting defaults.storage_profile_id to
  settings.storage_profile_id, because it is not a default that you select
  from multiple options; it is part of the machine's configuration.
- Make settings.storage_profile_id depend on defaults.farm_id instead of
  defaults.queue_id. The storage profile is a child of the farm in the
  resource model, and a workstation would use the same one across multiple
  queues.
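
The intended CLI workflow after this change looks roughly like the
following sketch (the bundle directory and the queue ID are placeholders,
and the exact subcommand spellings are assumed from the command groups
touched by this patch):

    # Submitting a bundle also records defaults.job_id in the config
    deadline bundle submit ./my_job_bundle

    # Inspect the job that was just submitted, without passing --job-id
    deadline job get

    # See which job ID was recorded as the default
    deadline config get defaults.job_id

    # List every fleet associated with a queue instead of naming one fleet
    deadline fleet get --queue-id queue-0123456789abcdef0
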
--- .github/workflows/reuse_python_build.yml | 10 +- hatch.toml | 15 +- pyproject.toml | 4 +- requirements-integ-testing.txt | 1 + requirements-testing.txt | 1 - src/deadline/client/api/_submit_job_bundle.py | 11 +- src/deadline/client/cli/_common.py | 15 +- .../client/cli/groups/bundle_group.py | 19 +- .../client/cli/groups/config_group.py | 10 +- src/deadline/client/cli/groups/farm_group.py | 2 +- src/deadline/client/cli/groups/fleet_group.py | 49 ++- .../cli/groups/handle_web_url_command.py | 25 +- src/deadline/client/cli/groups/job_group.py | 24 +- src/deadline/client/cli/groups/queue_group.py | 2 +- .../client/cli/groups/worker_group.py | 4 +- src/deadline/client/config/config_file.py | 22 +- src/deadline/client/job_bundle/parameters.py | 80 +---- src/deadline/client/ui/cli_job_submitter.py | 4 +- .../client/ui/dataclasses/__init__.py | 16 +- .../ui/dialogs/deadline_config_dialog.py | 8 +- .../ui/dialogs/submit_job_progress_dialog.py | 10 +- .../dialogs/submit_job_to_deadline_dialog.py | 8 +- .../client/ui/job_bundle_submitter.py | 6 +- .../deadline_credentials_status_widget.py | 2 +- .../widgets/job_template_parameters_widget.py | 33 +- .../ui/widgets/shared_job_settings_tab.py | 66 ++-- src/deadline/job_attachments/asset_sync.py | 15 +- src/deadline/job_attachments/fus3.py | 31 +- src/deadline/job_attachments/models.py | 2 +- ...submission_asset_refs_deprecated_uihint.py | 325 ------------------ ...est_job_bundle_loader_deprecated_uihint.py | 232 ------------- .../asset_manifests/v2023_03_03/__init__.py | 1 - .../unit/aws/__init__.py | 1 - .../deadline_job_attachments}/__init__.py | 0 .../deadline_job_attachments}/conftest.py | 0 .../test_data/inputs/scene.ma | 0 .../test_data/inputs/textures/brick.png | 0 .../test_data/inputs/textures/cloth.png | 0 .../test_job_attachments.py | 0 test/{deadline_client => unit}/__init__.py | 0 .../unit => unit/deadline_client}/__init__.py | 0 .../deadline_client}/api/__init__.py | 0 .../deadline_client}/api/test_api_farm.py | 0 .../deadline_client}/api/test_api_job.py | 0 .../deadline_client}/api/test_api_queue.py | 0 .../deadline_client}/api/test_api_session.py | 0 .../api/test_api_storage_profile.py | 2 +- .../api/test_job_bundle_submission.py | 12 +- .../test_job_bundle_submission_asset_refs.py | 22 +- .../deadline_client}/cli/__init__.py | 0 .../deadline_client}/cli/test_cli.py | 0 .../deadline_client}/cli/test_cli_bundle.py | 0 .../deadline_client}/cli/test_cli_config.py | 9 +- .../deadline_client}/cli/test_cli_farm.py | 0 .../deadline_client}/cli/test_cli_fleet.py | 104 +++++- .../cli/test_cli_handle_web_url.py | 0 .../deadline_client}/cli/test_cli_job.py | 0 .../deadline_client}/cli/test_cli_loginout.py | 0 .../deadline_client}/cli/test_cli_queue.py | 15 +- .../deadline_client}/config/__init__.py | 0 .../config/test_config_file.py | 12 +- .../unit => unit/deadline_client}/conftest.py | 0 .../deadline_client}/job_bundle/__init__.py | 0 .../job_bundle/test_adaptors.py | 0 .../job_bundle/test_job_bundle_loader.py | 0 .../job_bundle/test_job_history_folders.py | 0 .../job_bundle/test_job_parameters.py | 0 .../job_bundle/test_job_submission.py | 0 .../job_bundle/test_job_template.py | 0 .../deadline_client}/shared_constants.py | 12 + .../deadline_client}/testing_utilities.py | 0 .../deadline_client}/ui/__init__.py | 0 .../deadline_job_attachments/__init__.py | 0 .../asset_manifests}/__init__.py | 0 .../asset_manifests/test_decode.py | 0 .../asset_manifests/test_manifest_model.py | 0 .../asset_manifests/v2022_06_06}/__init__.py | 0 
.../v2022_06_06/test_asset_manifest.py | 0 .../asset_manifests/v2023_03_03}/__init__.py | 0 .../v2023_03_03/test_asset_manifest.py | 0 .../deadline_job_attachments/aws}/__init__.py | 0 .../aws/test_aws_clients.py | 2 - .../aws/test_deadline.py | 0 .../deadline_job_attachments}/conftest.py | 0 .../deadline/2020-08-21/service-2.json | 0 .../data/manifest_bados.json | 0 .../data/manifest_v2022_06_06.json | 0 .../data/manifest_v2023_03_03.json | 0 .../test_asset_sync.py | 36 +- .../test_download.py | 0 .../deadline_job_attachments}/test_fus3.py | 0 .../test_hash_cache.py | 0 .../test_progress_tracker.py | 0 .../deadline_job_attachments}/test_upload.py | 1 + .../deadline_job_attachments}/test_utils.py | 0 test/{ => unit}/test_copyright_headers.py | 2 +- 96 files changed, 402 insertions(+), 881 deletions(-) create mode 100644 requirements-integ-testing.txt delete mode 100644 test/deadline_client/unit/api/test_job_bundle_submission_asset_refs_deprecated_uihint.py delete mode 100644 test/deadline_client/unit/job_bundle/test_job_bundle_loader_deprecated_uihint.py delete mode 100644 test/deadline_job_attachments/unit/asset_manifests/v2023_03_03/__init__.py delete mode 100644 test/deadline_job_attachments/unit/aws/__init__.py rename test/{ => integ/deadline_job_attachments}/__init__.py (100%) rename test/{deadline_job_attachments/integ => integ/deadline_job_attachments}/conftest.py (100%) rename test/{deadline_job_attachments/integ => integ/deadline_job_attachments}/test_data/inputs/scene.ma (100%) rename test/{deadline_job_attachments/integ => integ/deadline_job_attachments}/test_data/inputs/textures/brick.png (100%) rename test/{deadline_job_attachments/integ => integ/deadline_job_attachments}/test_data/inputs/textures/cloth.png (100%) rename test/{deadline_job_attachments/integ => integ/deadline_job_attachments}/test_job_attachments.py (100%) rename test/{deadline_client => unit}/__init__.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/__init__.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/api/__init__.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/api/test_api_farm.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/api/test_api_job.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/api/test_api_queue.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/api/test_api_session.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/api/test_api_storage_profile.py (97%) rename test/{deadline_client/unit => unit/deadline_client}/api/test_job_bundle_submission.py (98%) rename test/{deadline_client/unit => unit/deadline_client}/api/test_job_bundle_submission_asset_refs.py (93%) rename test/{deadline_client/unit => unit/deadline_client}/cli/__init__.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/cli/test_cli.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/cli/test_cli_bundle.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/cli/test_cli_config.py (94%) rename test/{deadline_client/unit => unit/deadline_client}/cli/test_cli_farm.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/cli/test_cli_fleet.py (67%) rename test/{deadline_client/unit => unit/deadline_client}/cli/test_cli_handle_web_url.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/cli/test_cli_job.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/cli/test_cli_loginout.py (100%) rename 
test/{deadline_client/unit => unit/deadline_client}/cli/test_cli_queue.py (93%) rename test/{deadline_client/unit => unit/deadline_client}/config/__init__.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/config/test_config_file.py (94%) rename test/{deadline_client/unit => unit/deadline_client}/conftest.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/job_bundle/__init__.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/job_bundle/test_adaptors.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/job_bundle/test_job_bundle_loader.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/job_bundle/test_job_history_folders.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/job_bundle/test_job_parameters.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/job_bundle/test_job_submission.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/job_bundle/test_job_template.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/shared_constants.py (62%) rename test/{deadline_client/unit => unit/deadline_client}/testing_utilities.py (100%) rename test/{deadline_client/unit => unit/deadline_client}/ui/__init__.py (100%) rename test/{ => unit}/deadline_job_attachments/__init__.py (100%) rename test/{deadline_job_attachments/integ => unit/deadline_job_attachments/asset_manifests}/__init__.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/asset_manifests/test_decode.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/asset_manifests/test_manifest_model.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments/asset_manifests/v2022_06_06}/__init__.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/asset_manifests/v2022_06_06/test_asset_manifest.py (100%) rename test/{deadline_job_attachments/unit/asset_manifests => unit/deadline_job_attachments/asset_manifests/v2023_03_03}/__init__.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/asset_manifests/v2023_03_03/test_asset_manifest.py (100%) rename test/{deadline_job_attachments/unit/asset_manifests/v2022_06_06 => unit/deadline_job_attachments/aws}/__init__.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/aws/test_aws_clients.py (94%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/aws/test_deadline.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/conftest.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/data/boto_module/deadline/2020-08-21/service-2.json (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/data/manifest_bados.json (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/data/manifest_v2022_06_06.json (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/data/manifest_v2023_03_03.json (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/test_asset_sync.py (94%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/test_download.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/test_fus3.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/test_hash_cache.py (100%) rename 
test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/test_progress_tracker.py (100%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/test_upload.py (99%) rename test/{deadline_job_attachments/unit => unit/deadline_job_attachments}/test_utils.py (100%) rename test/{ => unit}/test_copyright_headers.py (98%) diff --git a/.github/workflows/reuse_python_build.yml b/.github/workflows/reuse_python_build.yml index 90274709..2f734d4e 100644 --- a/.github/workflows/reuse_python_build.yml +++ b/.github/workflows/reuse_python_build.yml @@ -9,13 +9,14 @@ on: jobs: Python: - runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} permissions: id-token: write contents: read strategy: matrix: python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + os: [ubuntu-latest, windows-latest, macOS-latest] env: PYTHON: ${{ matrix.python-version }} CODEARTIFACT_REGION: "us-west-2" @@ -25,7 +26,7 @@ jobs: steps: - uses: actions/checkout@v3 if: ${{ !inputs.branch }} - + - uses: actions/checkout@v3 if: ${{ inputs.branch }} with: @@ -36,7 +37,7 @@ jobs: uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - + - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@v2 with: @@ -44,6 +45,7 @@ jobs: aws-region: us-west-2 - name: Install Hatch + shell: bash run: | CODEARTIFACT_AUTH_TOKEN=$(aws codeartifact get-authorization-token --domain ${{ secrets.CODEARTIFACT_DOMAIN }} --domain-owner ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} --query authorizationToken --output text --region us-west-2) echo "::add-mask::$CODEARTIFACT_AUTH_TOKEN" @@ -57,4 +59,4 @@ jobs: run: hatch build - name: Run Tests - run: hatch run test + run: hatch run test -vv diff --git a/hatch.toml b/hatch.toml index 513dfee4..be4e14f0 100644 --- a/hatch.toml +++ b/hatch.toml @@ -5,7 +5,7 @@ pre-install-commands = [ [envs.default.scripts] sync = "pip install -r requirements-testing.txt" -test = "pytest --cov-config pyproject.toml {args:test/*/unit}" +test = "pytest --cov-config pyproject.toml {args:test/unit}" typing = "mypy {args:src test}" style = [ "ruff {args:.}", @@ -24,7 +24,7 @@ lint = [ python = ["3.7", "3.8", "3.9", "3.10", "3.11"] [envs.default.env-vars] -PIP_INDEX_URL="https://aws:{env:CODEARTIFACT_AUTH_TOKEN}@{env:CODEARTIFACT_DOMAIN}-{env:CODEARTIFACT_ACCOUNT_ID}.d.codeartifact.{env:CODEARTIFACT_REGION}.amazonaws.com/pypi/{env:CODEARTIFACT_REPOSITORY}/simple/" +# PIP_INDEX_URL="https://aws:{env:CODEARTIFACT_AUTH_TOKEN}@{env:CODEARTIFACT_DOMAIN}-{env:CODEARTIFACT_ACCOUNT_ID}.d.codeartifact.{env:CODEARTIFACT_REGION}.amazonaws.com/pypi/{env:CODEARTIFACT_REPOSITORY}/simple/" SKIP_BOOTSTRAP_TEST_RESOURCES="True" [envs.codebuild.scripts] @@ -47,11 +47,13 @@ SKIP_BOOTSTRAP_TEST_RESOURCES="True" build = "hatch build" make_exe = "python scripts/pyinstaller/make_exe.py --output {env:OUT_FILE}" -[envs.integ.scripts] -test = "pytest --no-cov {args:test/*/integ} -vvv --numprocesses=1" +[envs.integ] +pre-install-commands = [ + "pip install -r requirements-integ-testing.txt" +] -[envs.e2e.scripts] -test = "pytest --no-cov {args:test/*/e2e}" +[envs.integ.scripts] +test = "pytest --no-cov {args:test/integ} -vvv --numprocesses=1" [envs.installer] pre-install-commands = [ @@ -61,4 +63,3 @@ pre-install-commands = [ [envs.installer.scripts] build = "hatch build" make_exe = "python scripts/pyinstaller/make_exe.py --output {env:OUT_FILE}" - diff --git a/pyproject.toml b/pyproject.toml index 312f09a4..c3e1a2e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -118,10 +118,10 @@ 
addopts = [ "--cov-report=term-missing", "--numprocesses=auto" ] -testpaths = [ "test" ] +testpaths = [ "test/unit" ] looponfailroots = [ "src", - "test", + "test/unit", ] markers = [ "no_setup: mark that test shouldn't use default setups", diff --git a/requirements-integ-testing.txt b/requirements-integ-testing.txt new file mode 100644 index 00000000..ad4265d4 --- /dev/null +++ b/requirements-integ-testing.txt @@ -0,0 +1 @@ +deadline-cloud-test-fixtures ~= 0.2.0 diff --git a/requirements-testing.txt b/requirements-testing.txt index f6f828dc..ceb9af92 100644 --- a/requirements-testing.txt +++ b/requirements-testing.txt @@ -12,4 +12,3 @@ mypy ~= 1.4 ruff ~= 0.0.282 moto ~= 4.1 jsondiff ~= 2.0 -deadline-cloud-test-fixtures ~= 0.2.0 \ No newline at end of file diff --git a/src/deadline/client/api/_submit_job_bundle.py b/src/deadline/client/api/_submit_job_bundle.py index 455e63f2..0757a441 100644 --- a/src/deadline/client/api/_submit_job_bundle.py +++ b/src/deadline/client/api/_submit_job_bundle.py @@ -13,7 +13,7 @@ from deadline.client import api from deadline.client.exceptions import DeadlineOperationError, CreateJobWaiterCanceled -from deadline.client.config import get_setting +from deadline.client.config import get_setting, set_setting from deadline.client.job_bundle.loader import read_yaml_or_json, read_yaml_or_json_object from deadline.client.job_bundle.parameters import apply_job_parameters, read_job_bundle_parameters from deadline.client.job_bundle.submission import ( @@ -99,7 +99,7 @@ def create_job_from_job_bundle( "templateType": file_type, } - storage_profile_id = get_setting("defaults.storage_profile_id", config=config) + storage_profile_id = get_setting("settings.storage_profile_id", config=config) if storage_profile_id: create_job_args["storageProfileId"] = storage_profile_id @@ -171,6 +171,12 @@ def create_job_from_job_bundle( logger.debug(f"CreateJob Response {create_job_response}") if create_job_response and "jobId" in create_job_response: + job_id = create_job_response["jobId"] + + # If using the default config, set the default job id so it holds the + # most-recently submitted job. + if config is None: + set_setting("defaults.job_id", job_id) def _default_create_job_result_callback() -> bool: return True @@ -178,7 +184,6 @@ def _default_create_job_result_callback() -> bool: if not create_job_result_callback: create_job_result_callback = _default_create_job_result_callback - job_id = create_job_response["jobId"] success, status_message = wait_for_create_job_to_complete( create_job_args["farmId"], create_job_args["queueId"], diff --git a/src/deadline/client/cli/_common.py b/src/deadline/client/cli/_common.py index 282706e6..5efa9c70 100644 --- a/src/deadline/client/cli/_common.py +++ b/src/deadline/client/cli/_common.py @@ -72,11 +72,11 @@ def apply_cli_options_to_config( *, config: Optional[ConfigParser] = None, required_options: Set[str] = set(), **args ) -> Optional[ConfigParser]: """ - Modifies a Amazon Deadline Cloud config object to apply standard option names to it, such as + Modifies an Amazon Deadline Cloud config object to apply standard option names to it, such as the AWS profile, Amazon Deadline Cloud Farm, or Amazon Deadline Cloud Queue to use. Args: - config (ConfigParser, optional): A Amazon Deadline Cloud config, read by config_file.read_config(). + config (ConfigParser, optional): an Amazon Deadline Cloud config, read by config_file.read_config(). If not provided, loads the config from disk. 
""" # Only work with a custom config if there are standard options provided @@ -96,12 +96,16 @@ def apply_cli_options_to_config( if queue_id: config_file.set_setting("defaults.queue_id", queue_id, config=config) + job_id = args.pop("job_id", None) + if job_id: + config_file.set_setting("defaults.job_id", job_id, config=config) + auto_accept = args.pop("yes", None) if auto_accept: config_file.set_setting("settings.auto_accept", "true", config=config) else: # Remove the standard option names from the args list - for name in ["profile", "farm_id", "queue_id"]: + for name in ["profile", "farm_id", "queue_id", "job_id"]: args.pop(name, None) # Check that the required options have values @@ -115,6 +119,11 @@ def apply_cli_options_to_config( if not config_file.get_setting("defaults.queue_id", config=config): raise click.UsageError("Missing '--queue-id' or default Queue ID configuration") + if "job_id" in required_options: + required_options.remove("job_id") + if not config_file.get_setting("defaults.job_id", config=config): + raise click.UsageError("Missing '--job-id' or default Job ID configuration") + if required_options: raise RuntimeError( f"Unexpected required Amazon Deadline Cloud CLI options: {required_options}" diff --git a/src/deadline/client/cli/groups/bundle_group.py b/src/deadline/client/cli/groups/bundle_group.py index 2f845464..d725759e 100644 --- a/src/deadline/client/cli/groups/bundle_group.py +++ b/src/deadline/client/cli/groups/bundle_group.py @@ -17,7 +17,7 @@ from deadline.client import api from deadline.client.api import get_boto3_client, get_queue_boto3_session from deadline.client.api._session import _modified_logging_level -from deadline.client.config import config_file, get_setting +from deadline.client.config import config_file, get_setting, set_setting from deadline.client.job_bundle.loader import read_yaml_or_json, read_yaml_or_json_object from deadline.client.job_bundle.parameters import apply_job_parameters, read_job_bundle_parameters from deadline.client.job_bundle.submission import ( @@ -89,7 +89,7 @@ def validate_parameters(ctx, param, value): "--asset-loading-method", help="The method to use for loading assets on the server. Options are PRELOAD (load assets onto server first then run the job) or ON_DEMAND (load assets as requested).", type=click.Choice([e.value for e in AssetLoadingMethod]), - default=AssetLoadingMethod.PRELOAD, + default=AssetLoadingMethod.PRELOAD.value, ) @click.option( "--yes", @@ -102,6 +102,12 @@ def bundle_submit(job_bundle_dir, asset_loading_method, parameter, **args): """ Submits an OpenJobIO job bundle to Amazon Deadline Cloud. """ + # Check Whether the CLI options are modifying any of the default settings that affect + # the job id. If not, we'll save the job id submitted as the default job id. 
+ if args.get("profile") is None and args.get("farm_id") is None and args.get("queue_id") is None: + should_save_job_id = True + else: + should_save_job_id = False # Get a temporary config object with the standard options handled config = apply_cli_options_to_config(required_options={"farm_id", "queue_id"}, **args) @@ -123,7 +129,7 @@ def bundle_submit(job_bundle_dir, asset_loading_method, parameter, **args): "templateType": file_type, } - storage_profile_id = get_setting("defaults.storage_profile_id", config=config) + storage_profile_id = get_setting("settings.storage_profile_id", config=config) if storage_profile_id: create_job_args["storageProfileId"] = storage_profile_id @@ -202,12 +208,17 @@ def bundle_submit(job_bundle_dir, asset_loading_method, parameter, **args): logger.debug(f"CreateJob Response {create_job_response}") if create_job_response and "jobId" in create_job_response: + job_id = create_job_response["jobId"] click.echo("Waiting for Job to be created...") + # If using the default config, set the default job id so it holds the + # most-recently submitted job. + if should_save_job_id: + set_setting("defaults.job_id", job_id) + def _check_create_job_wait_canceled() -> bool: return continue_submission - job_id = create_job_response["jobId"] success, status_message = api.wait_for_create_job_to_complete( create_job_args["farmId"], create_job_args["queueId"], diff --git a/src/deadline/client/cli/groups/config_group.py b/src/deadline/client/cli/groups/config_group.py index 73036902..38f9982c 100644 --- a/src/deadline/client/cli/groups/config_group.py +++ b/src/deadline/client/cli/groups/config_group.py @@ -27,8 +27,10 @@ def cli_config(): defaults.queue_id: The default queue ID to use for job submissions or CLI operations. - defaults.storage_profile_id: - The default storage profile ID to use for job submission or CLI operations. + settings.storage_profile_id: + The storage profile that this workstation conforms to. It specifies + where shared file systems are mounted, and where named job attachments + should go. settings.job_history_dir: The directory in which to create new job bundles for @@ -86,7 +88,7 @@ def config_gui(): @handle_error def config_set(setting_name, value): """ - Sets a Amazon Deadline Cloud workstation configuration setting. + Sets an Amazon Deadline Cloud workstation configuration setting. For example `deadline config set defaults.farm_id `. Run `deadline config --help` to show available settings. @@ -99,7 +101,7 @@ def config_set(setting_name, value): @handle_error def config_get(setting_name): """ - Gets a Amazon Deadline Cloud workstation configuration setting. + Gets an Amazon Deadline Cloud workstation configuration setting. For example `deadline config get defaults.farm_id`. Run `deadline config --help` to show available settings. diff --git a/src/deadline/client/cli/groups/farm_group.py b/src/deadline/client/cli/groups/farm_group.py index 5a64cf31..e3284e5a 100644 --- a/src/deadline/client/cli/groups/farm_group.py +++ b/src/deadline/client/cli/groups/farm_group.py @@ -50,7 +50,7 @@ def farm_list(**args): @handle_error def farm_get(**args): """ - Get the details of a Amazon Deadline Cloud farm. + Get the details of an Amazon Deadline Cloud farm. If farm ID is not provided, returns the configured default farm. 
""" diff --git a/src/deadline/client/cli/groups/fleet_group.py b/src/deadline/client/cli/groups/fleet_group.py index 9098824b..0e475e33 100644 --- a/src/deadline/client/cli/groups/fleet_group.py +++ b/src/deadline/client/cli/groups/fleet_group.py @@ -51,19 +51,56 @@ def fleet_list(**args): @cli_fleet.command(name="get") @click.option("--profile", help="The AWS profile to use.") @click.option("--farm-id", help="The Amazon Deadline Cloud Farm to use.") -@click.option("--fleet-id", help="The Amazon Deadline Cloud Fleet to use.", required=True) +@click.option("--fleet-id", help="The Amazon Deadline Cloud Fleet to use.") +@click.option("--queue-id", help="If provided, gets all Fleets associated with the Queue.") @handle_error -def fleet_get(fleet_id, **args): +def fleet_get(fleet_id, queue_id, **args): """ - Get the details of a Amazon Deadline Cloud Fleet. + Get the details of an Amazon Deadline Cloud Fleet. """ + if fleet_id and queue_id: + raise DeadlineOperationError( + "Only one of the --fleet-id and --queue-id options may be provided." + ) + # Get a temporary config object with the standard options handled config = apply_cli_options_to_config(required_options={"farm_id"}, **args) farm_id = config_file.get_setting("defaults.farm_id", config=config) + if not fleet_id: + queue_id = config_file.get_setting("defaults.queue_id", config=config) + if not queue_id: + raise click.UsageError( + "Missing '--fleet-id', '--queue-id', or default Queue ID configuration" + ) deadline = api.get_boto3_client("deadline", config=config) - response = deadline.get_fleet(farmId=farm_id, fleetId=fleet_id) - response.pop("ResponseMetadata", None) - click.echo(cli_object_repr(response)) + if fleet_id: + response = deadline.get_fleet(farmId=farm_id, fleetId=fleet_id) + response.pop("ResponseMetadata", None) + + click.echo(cli_object_repr(response)) + else: + response = deadline.get_queue(farmId=farm_id, queueId=queue_id) + queue_name = response["displayName"] + + response = api._call_paginated_deadline_list_api( + deadline.list_queue_fleet_associations, + "queueFleetAssociations", + farmId=farm_id, + queueId=queue_id, + ) + response.pop("ResponseMetadata", None) + qfa_list = response["queueFleetAssociations"] + + click.echo( + f"Showing all fleets ({len(qfa_list)} total) associated with queue: {queue_name}" + ) + for qfa in qfa_list: + response = deadline.get_fleet(farmId=farm_id, fleetId=qfa["fleetId"]) + response.pop("ResponseMetadata", None) + response["queueFleetAssociationStatus"] = qfa["status"] + + click.echo("") + click.echo(cli_object_repr(response)) diff --git a/src/deadline/client/cli/groups/handle_web_url_command.py b/src/deadline/client/cli/groups/handle_web_url_command.py index c16d3bd1..ef60837c 100644 --- a/src/deadline/client/cli/groups/handle_web_url_command.py +++ b/src/deadline/client/cli/groups/handle_web_url_command.py @@ -13,7 +13,6 @@ from ...exceptions import DeadlineOperationError from .._common import ( PROMPT_WHEN_COMPLETE, - apply_cli_options_to_config, handle_error, prompt_at_completion, ) @@ -24,7 +23,7 @@ uninstall_deadline_web_url_handler, validate_resource_ids, ) -from .job_group import download_job_output +from .job_group import _download_job_output @click.command(name="handle-web-url") @@ -108,28 +107,24 @@ def cli_handle_web_url( # We copy the dict without the 'profile' key as that isn't a resource ID validate_resource_ids({k: url_queries[k] for k in url_queries.keys() - {"profile"}}) + farm_id = url_queries.pop("farm_id") + queue_id = url_queries.pop("queue_id") job_id = 
url_queries.pop("job_id") step_id = url_queries.pop("step_id", None) task_id = url_queries.pop("task_id", None) - # Add the standard option "profile", using the one provided by the url(set by Cloud Companion) + # Add the standard option "profile", using the one provided by the url (set by Cloud Companion) # or choosing a best guess based on farm and queue IDs - url_queries["profile"] = url_queries.pop( + aws_profile_name = url_queries.pop( "profile", - config_file.get_best_profile_for_farm( - url_queries["farm_id"], url_queries["queue_id"] - ), + config_file.get_best_profile_for_farm(farm_id, queue_id), ) - # Get a temporary config object with the remaining standard options handled - config = apply_cli_options_to_config( - required_options={"farm_id", "queue_id"}, config=None, **url_queries - ) - - farm_id = str(config_file.get_setting("defaults.farm_id", config=config)) - queue_id = str(config_file.get_setting("defaults.queue_id", config=config)) + # Read the config, and switch the in-memory version to use the chosen AWS profile + config = config_file.read_config() + config_file.set_setting("defaults.aws_profile_name", aws_profile_name, config=config) - download_job_output(config, farm_id, queue_id, job_id, step_id, task_id) + _download_job_output(config, farm_id, queue_id, job_id, step_id, task_id) else: raise DeadlineOperationError( f"Command {split_url.netloc} is not supported through handle-web-url.", diff --git a/src/deadline/client/cli/groups/job_group.py b/src/deadline/client/cli/groups/job_group.py index 3898805a..58851416 100644 --- a/src/deadline/client/cli/groups/job_group.py +++ b/src/deadline/client/cli/groups/job_group.py @@ -56,7 +56,7 @@ def cli_job(): @handle_error def job_list(page_size, item_offset, **args): """ - Lists the Jobs in a Amazon Deadline Cloud Queue. + Lists the Jobs in an Amazon Deadline Cloud Queue. """ # Get a temporary config object with the standard options handled config = apply_cli_options_to_config(required_options={"farm_id", "queue_id"}, **args) @@ -110,17 +110,18 @@ def job_list(page_size, item_offset, **args): @click.option("--profile", help="The AWS profile to use.") @click.option("--farm-id", help="The Amazon Deadline Cloud Farm to use.") @click.option("--queue-id", help="The Amazon Deadline Cloud Queue to use.") -@click.option("--job-id", help="The Amazon Deadline Cloud Job to get.", required=True) +@click.option("--job-id", help="The Amazon Deadline Cloud Job to get.") @handle_error -def job_get(job_id, **args): +def job_get(**args): """ - Get the details of a Amazon Deadline Cloud Job. + Get the details of an Amazon Deadline Cloud Job. 
""" # Get a temporary config object with the standard options handled - config = apply_cli_options_to_config(required_options={"farm_id", "queue_id"}, **args) + config = apply_cli_options_to_config(required_options={"farm_id", "queue_id", "job_id"}, **args) farm_id = config_file.get_setting("defaults.farm_id", config=config) queue_id = config_file.get_setting("defaults.queue_id", config=config) + job_id = config_file.get_setting("defaults.job_id", config=config) deadline = api.get_boto3_client("deadline", config=config) response = deadline.get_job(farmId=farm_id, queueId=queue_id, jobId=job_id) @@ -129,7 +130,7 @@ def job_get(job_id, **args): click.echo(cli_object_repr(response)) -def download_job_output( +def _download_job_output( config: Optional[ConfigParser], farm_id: str, queue_id: str, @@ -444,7 +445,7 @@ def _is_path_in_windows_format(path_str: str) -> bool: @click.option("--profile", help="The AWS profile to use.") @click.option("--farm-id", help="The Amazon Deadline Cloud Farm to use.") @click.option("--queue-id", help="The Amazon Deadline Cloud Queue to use.") -@click.option("--job-id", help="The Amazon Deadline Cloud Job to use.", required=True) +@click.option("--job-id", help="The Amazon Deadline Cloud Job to use.") @click.option("--step-id", help="The Amazon Deadline Cloud Step to use.") @click.option("--task-id", help="The Amazon Deadline Cloud Task to use.") @click.option( @@ -480,21 +481,22 @@ def _is_path_in_windows_format(path_str: str) -> bool: "parsed/consumed by custom scripts.", ) @handle_error -def job_download_output(job_id, step_id, task_id, conflict_resolution, output, **args): +def job_download_output(step_id, task_id, conflict_resolution, output, **args): """ - Download the output attached to a Amazon Deadline Cloud Job. + Download the output attached to an Amazon Deadline Cloud Job. """ if task_id and not step_id: raise click.UsageError("Missing option '--step-id' required with '--task-id'") # Get a temporary config object with the standard options handled - config = apply_cli_options_to_config(required_options={"farm_id", "queue_id"}, **args) + config = apply_cli_options_to_config(required_options={"farm_id", "queue_id", "job_id"}, **args) farm_id = config_file.get_setting("defaults.farm_id", config=config) queue_id = config_file.get_setting("defaults.queue_id", config=config) + job_id = config_file.get_setting("defaults.job_id", config=config) is_json_format = True if output == "json" else False try: - download_job_output( + _download_job_output( config, farm_id, queue_id, job_id, step_id, task_id, conflict_resolution, is_json_format ) except Exception as e: diff --git a/src/deadline/client/cli/groups/queue_group.py b/src/deadline/client/cli/groups/queue_group.py index 2767768c..6855c460 100644 --- a/src/deadline/client/cli/groups/queue_group.py +++ b/src/deadline/client/cli/groups/queue_group.py @@ -55,7 +55,7 @@ def queue_list(**args): @handle_error def queue_get(**args): """ - Get the details of a Amazon Deadline Cloud Queue. + Get the details of an Amazon Deadline Cloud Queue. If Queue ID is not provided, returns the configured default Queue. """ diff --git a/src/deadline/client/cli/groups/worker_group.py b/src/deadline/client/cli/groups/worker_group.py index 36f13d65..a1597cdf 100644 --- a/src/deadline/client/cli/groups/worker_group.py +++ b/src/deadline/client/cli/groups/worker_group.py @@ -30,7 +30,7 @@ def cli_worker(): @handle_error def worker_list(page_size, item_offset, fleet_id, **args): """ - Lists the Workers in a Amazon Deadline Cloud Fleet. 
+ Lists the Workers in an Amazon Deadline Cloud Fleet. """ # Get a temporary config object with the standard options handled config = apply_cli_options_to_config(required_options={"farm_id"}, **args) @@ -83,7 +83,7 @@ def worker_list(page_size, item_offset, fleet_id, **args): @handle_error def worker_get(fleet_id, worker_id, **args): """ - Get the details of a Amazon Deadline Cloud worker. + Get the details of an Amazon Deadline Cloud worker. """ # Get a temporary config object with the standard options handled config = apply_cli_options_to_config(required_options={"farm_id"}, **args) diff --git a/src/deadline/client/config/config_file.py b/src/deadline/client/config/config_file.py index 178884a6..d440e671 100644 --- a/src/deadline/client/config/config_file.py +++ b/src/deadline/client/config/config_file.py @@ -26,7 +26,7 @@ # Environment variable that, if set, overrides the value of CONFIG_FILE_PATH CONFIG_FILE_PATH_ENV_VAR = "DEADLINE_CONFIG_FILE_PATH" # The default Amazon Deadline Cloud endpoint URL -# TODO: This is currently set to our closed-beta endpoint. We need to update this for GA. +# TODO: This is currently set to our closed-beta endpoint. We need to remove this for GA. DEFAULT_DEADLINE_ENDPOINT_URL = "https://btpdb6qczg.execute-api.us-west-2.amazonaws.com" # The default directory within which to save the history of created jobs. DEFAULT_JOB_HISTORY_DIR = os.path.join("~", ".deadline", "job_history", "{aws_profile_name}") @@ -47,36 +47,48 @@ # section [profile-{profileName} default] # "section_format" - How its value gets formatted into config file sections. SETTINGS: Dict[str, Dict[str, Any]] = { - # This is written by Cloud Companion and read by deadline "cloud-companion.path": { "default": "", + "description": "The filesystem path to Cloud Companion, set during login process.", }, "defaults.aws_profile_name": { "default": "", "section_format": "profile-{}", + "description": "The AWS profile name to use by default. Set to '' to use the default credentials." + + " Other settings are saved with the profile.", }, "settings.job_history_dir": { "default": DEFAULT_JOB_HISTORY_DIR, "depend": "defaults.aws_profile_name", + "description": "The directory in which to place the job submission history for this AWS profile name.", }, "settings.deadline_endpoint_url": { "default": DEFAULT_DEADLINE_ENDPOINT_URL, "depend": "defaults.aws_profile_name", + "description": "The endpoint URL for Amazon Deadline Cloud.", }, "defaults.farm_id": { "default": "", "depend": "defaults.aws_profile_name", "section_format": "{}", + "description": "The Farm ID to use by default.", + }, + "settings.storage_profile_id": { + "default": "", + "depend": "defaults.farm_id", + "section_format": "{}", + "description": "The storage profile that this workstation conforms to. It specifies where shared file systems are mounted, and where named job attachments should go.", }, "defaults.queue_id": { "default": "", "depend": "defaults.farm_id", "section_format": "{}", + "description": "The Queue ID to use by default.", }, - "defaults.storage_profile_id": { + "defaults.job_id": { "default": "", "depend": "defaults.queue_id", - "section_format": "{}", + "description": "The Job ID to use by default. 
This gets updated by job submission, so is normally the most recently submitted job.", }, "settings.auto_accept": { "default": "false", @@ -255,7 +267,7 @@ def set_setting(setting_name: str, value: str, config: Optional[ConfigParser] = raise DeadlineOperationError(f"The setting name {setting_name!r} is not valid.") section, name = setting_name.split(".", 1) - # Get the type of the default to validate it is a Amazon Deadline Cloud setting, and retrieve its type + # Get the type of the default to validate it is an Amazon Deadline Cloud setting, and retrieve its type setting_config = _get_setting_config(setting_name) # If no config was provided, then read from disk and signal to write it later diff --git a/src/deadline/client/job_bundle/parameters.py b/src/deadline/client/job_bundle/parameters.py index 8863541e..f8b58f38 100644 --- a/src/deadline/client/job_bundle/parameters.py +++ b/src/deadline/client/job_bundle/parameters.py @@ -38,7 +38,6 @@ def apply_job_parameters( # Convert the job_parameters to a dict for efficient lookup param_dict = {parameter["name"]: parameter["value"] for parameter in job_parameters} modified_job_parameters = param_dict.copy() - empty_path_parameters: list[str] = [] for parameter in job_bundle_parameters: parameter_name = parameter["name"] @@ -47,23 +46,13 @@ def apply_job_parameters( continue parameter_type = parameter["type"] - # TODO: uiHint is deprecated, remove this once all job bundles use "userInterface" - ui_hint = parameter.get("uiHint", {}) - if parameter_type == "STRING" and ui_hint.get("ojioFutureType") == "PATH": - parameter_type = "PATH" - # Apply the job_parameters value if available parameter_value = param_dict.pop(parameter_name, None) if parameter_value is not None: - # Make PATH parameter values that have data flow, and are not constrained - # by allowedValues, absolute by joining with the current working directory - if ( - parameter_type == "PATH" - and parameter.get("dataFlow") != "NONE" - and "allowedValues" not in parameter - ): + # Make PATH parameter values that are not constrained by allowedValues + # absolute by joining with the current working directory + if parameter_type == "PATH" and "allowedValues" not in parameter: if parameter_value == "": - empty_path_parameters.append(parameter_name) continue parameter_value = os.path.abspath(parameter_value) modified_job_parameters[parameter_name] = parameter_value @@ -77,7 +66,7 @@ def apply_job_parameters( # If it's a PATH parameter with dataFlow, add it to asset_references if parameter_type == "PATH": - data_flow = parameter.get("dataFlow", ui_hint.get("assetReference", "NONE")) + data_flow = parameter.get("dataFlow", "NONE") if data_flow not in ("NONE", "IN", "OUT", "INOUT"): raise DeadlineOperationError( f"Job Template parameter {parameter_name} had an incorrect " @@ -86,25 +75,6 @@ def apply_job_parameters( ) if data_flow != "NONE": object_type = parameter.get("objectType") - if ui_hint and not object_type: - # uiHint determined the object type based on the control type, - # not 'objectType' is it is now. - control_type = ui_hint.get("controlType") - if not control_type: - raise DeadlineOperationError( - f"Job Template parameter {parameter_name} has a PATH type " - + "but is missing a controlType value required to specify whether it " - + "is a DIRECTORY or FILE path." 
- ) - if control_type == "CHOOSE_DIRECTORY": - object_type = "DIRECTORY" - elif control_type in ("CHOOSE_INPUT_FILE", "CHOOSE_OUTPUT_FILE"): - object_type = "FILE" - else: - raise RuntimeError( - f"Job Template parameter {parameter_name} had an incorrect " - + f"control type {control_type} for the 'assetReference' in 'uiHint'" - ) if "IN" in data_flow: if object_type == "FILE": @@ -123,11 +93,6 @@ def apply_job_parameters( for param_name, param_value in modified_job_parameters.items(): job_parameters.append({"name": param_name, "value": param_value}) - if empty_path_parameters: - raise DeadlineOperationError( - f"The following parameter values are missing: {empty_path_parameters}" - ) - def read_job_bundle_parameters(bundle_dir: str) -> list[dict[str, Any]]: """ @@ -140,7 +105,6 @@ def read_job_bundle_parameters(bundle_dir: str) -> list[dict[str, Any]]: { "name": , - "uiHint": "value": }, ... @@ -151,10 +115,6 @@ def read_job_bundle_parameters(bundle_dir: str) -> list[dict[str, Any]]: parameter_values = read_yaml_or_json_object( bundle_dir=bundle_dir, filename="parameter_values", required=False ) - # This sidecar file is deprecated and will be removed. Use parameter "userInterface" properties instead. - template_uihint = read_yaml_or_json_object( - bundle_dir=bundle_dir, filename="template_uihint", required=False - ) # Get the spec version of the template schema_version: str = "" @@ -193,44 +153,14 @@ def read_job_bundle_parameters(bundle_dir: str) -> list[dict[str, Any]]: # Names with a ':' are for the system using the job bundle, like Amazon Deadline Cloud template_parameters[name] = parameter_value - # Add the deprecated ui hints where provided - if template_uihint: - for parameter_uihint in template_uihint.get("parameters", []): - name = parameter_uihint["name"] - if name in template_parameters: - template_parameters[name]["uiHint"] = parameter_uihint["uiHint"] - # If the default value is a relative path, fix it to the bundle dir - # This only performs the transform where the parameter's type is STRING - # but the uiHint override type is PATH. - if ( - "value" not in template_parameters[name] - and template_parameters[name]["type"] == "STRING" - and template_parameters[name]["uiHint"]["ojioFutureType"] == "PATH" - and "allowedValues" not in template_parameters[name] - ): - default = template_parameters[name].get("default") - if default: - default_absolute = os.path.normpath( - os.path.abspath(os.path.join(bundle_dir, default)) - ) - - if default_absolute != default: - template_parameters[name]["value"] = default_absolute - else: - raise RuntimeError( - f"Job bundle's template_uihint contains a parameter named '{name}' not in the template." 
- ) - - # Make valueless PATH parameters with default, that have data flow, and are not constrained + # Make valueless PATH parameters with default and are not constrained # by allowedValues, absolute by joining with the job bundle directory for name in template_parameters: parameter = template_parameters[name] - data_flow = parameter.get("dataFlow", "NONE") if ( "value" not in parameter and parameter["type"] == "PATH" - and data_flow != "NONE" and "allowedValues" not in parameter ): default = parameter.get("default") diff --git a/src/deadline/client/ui/cli_job_submitter.py b/src/deadline/client/ui/cli_job_submitter.py index c1681d25..aa6c7b72 100644 --- a/src/deadline/client/ui/cli_job_submitter.py +++ b/src/deadline/client/ui/cli_job_submitter.py @@ -125,8 +125,8 @@ def on_create_job_bundle_callback( parameters_values = [ {"name": "deadline:priority", "value": settings.priority}, {"name": "deadline:targetTaskRunStatus", "value": settings.initial_status}, - {"name": "deadline:maxFailedTasksCount", "value": settings.failed_tasks_limit}, - {"name": "deadline:maxRetriesPerTask", "value": settings.task_retry_limit}, + {"name": "deadline:maxFailedTasksCount", "value": settings.max_failed_tasks_count}, + {"name": "deadline:maxRetriesPerTask", "value": settings.max_retries_per_task}, ] with open( diff --git a/src/deadline/client/ui/dataclasses/__init__.py b/src/deadline/client/ui/dataclasses/__init__.py index fa961528..5c58972f 100644 --- a/src/deadline/client/ui/dataclasses/__init__.py +++ b/src/deadline/client/ui/dataclasses/__init__.py @@ -22,11 +22,11 @@ class JobBundleSettings: # pylint: disable=too-many-instance-attributes name: str = field(default="Job Bundle") description: str = field(default="") initial_status: str = field(default="READY") - failed_tasks_limit: int = field(default=100) - task_retry_limit: int = field(default=5) + max_failed_tasks_count: int = field(default=100) + max_retries_per_task: int = field(default=5) priority: int = field(default=50) - override_installation_requirements: bool = field(default=False) - installation_requirements: str = field(default="") + override_rez_packages: bool = field(default=False) + rez_packages: str = field(default="") # Job Bundle settings input_job_bundle_dir: str = field(default="") @@ -46,11 +46,11 @@ class CliJobSettings: # pylint: disable=too-many-instance-attributes name: str = field(default="CLI Job") description: str = field(default="") initial_status: str = field(default="READY") - failed_tasks_limit: int = field(default=100) - task_retry_limit: int = field(default=5) + max_failed_tasks_count: int = field(default=20) + max_retries_per_task: int = field(default=5) priority: int = field(default=50) - override_installation_requirements: bool = field(default=False) - installation_requirements: str = field(default="") + override_rez_packages: bool = field(default=False) + rez_packages: str = field(default="") # CLI job settings bash_script_contents: str = field( diff --git a/src/deadline/client/ui/dialogs/deadline_config_dialog.py b/src/deadline/client/ui/dialogs/deadline_config_dialog.py index f98bbd67..a7712bd9 100644 --- a/src/deadline/client/ui/dialogs/deadline_config_dialog.py +++ b/src/deadline/client/ui/dialogs/deadline_config_dialog.py @@ -243,7 +243,7 @@ def _build_farm_settings_ui(self, group, layout): layout.addRow(default_queue_box_label, self.default_queue_box) self.default_storage_profile_box = DeadlineStorageProfileNameListComboBox(parent=group) - default_storage_profile_box_label = 
self.labels["defaults.storage_profile_id"] = QLabel( + default_storage_profile_box_label = self.labels["settings.storage_profile_id"] = QLabel( "Default Storage Profile" ) self.default_storage_profile_box.box.currentIndexChanged.connect( @@ -499,7 +499,7 @@ def default_queue_changed(self, index): self.default_storage_profile_box.refresh_list() def default_storage_profile_name_changed(self, index): - self.changes["defaults.storage_profile_id"] = self.default_storage_profile_box.box.itemData( + self.changes["settings.storage_profile_id"] = self.default_storage_profile_box.box.itemData( index ) self.refresh() @@ -507,7 +507,7 @@ def default_storage_profile_name_changed(self, index): class _DeadlineResourceListComboBox(QWidget): """ - A ComboBox for selecting a Amazon Deadline Cloud Id, with a refresh button. + A ComboBox for selecting an Amazon Deadline Cloud Id, with a refresh button. The caller should connect the `background_exception` signal, e.g. to show a message box, and should call `set_config` whenever there is @@ -659,7 +659,7 @@ class DeadlineStorageProfileNameListComboBox(_DeadlineResourceListComboBox): def __init__(self, parent=None): super().__init__( resource_name="Storage Profile", - setting_name="defaults.storage_profile_id", + setting_name="settings.storage_profile_id", parent=parent, ) diff --git a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py index fb1ac784..ea37a713 100644 --- a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py @@ -10,7 +10,6 @@ import logging import os import threading -from configparser import ConfigParser from typing import Any, Dict, List, Optional, Set from botocore.client import BaseClient # type: ignore[import] @@ -30,6 +29,7 @@ from deadline.client import api from deadline.client.exceptions import CreateJobWaiterCanceled +from deadline.client.config import set_setting from deadline.client.job_bundle.loader import read_yaml_or_json, read_yaml_or_json_object from deadline.client.job_bundle.parameters import apply_job_parameters, read_job_bundle_parameters from deadline.client.job_bundle.submission import ( @@ -77,7 +77,6 @@ def start_submission( deadline_client: BaseClient, parent: QWidget = None, auto_accept: bool = False, - config: Optional[ConfigParser] = None, ) -> Optional[Dict[str, Any]]: """ Static method that runs the SubmitJobProgressDialog. Returns the response @@ -109,7 +108,6 @@ def start_submission( deadline_client, parent=parent, auto_accept=auto_accept, - config=config, ) return job_progress_dialog.exec() @@ -123,7 +121,6 @@ def __init__( deadline_client: BaseClient, parent: QWidget = None, auto_accept: bool = False, - config: Optional[ConfigParser] = None, ) -> None: super().__init__(parent=parent) @@ -134,7 +131,6 @@ def __init__( self._asset_manager = asset_manager self._deadline_client = deadline_client self._auto_accept = auto_accept - self._config = config self._continue_submission = True self._submission_complete = False @@ -337,6 +333,10 @@ def _continue_create_job_wait() -> bool: if self._create_job_response and "jobId" in self._create_job_response: job_id = self._create_job_response["jobId"] + + # Set the default job id so it holds the most-recently submitted job. 
+ set_setting("defaults.job_id", job_id) + success, message = api.wait_for_create_job_to_complete( self._farm_id, self._queue_id, diff --git a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py index 26a43aa7..81610ad4 100644 --- a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py @@ -41,7 +41,7 @@ class SubmitJobToDeadlineDialog(QDialog): """ - A widget containing all the standard tabs for submitting a Amazon Deadline Cloud job. + A widget containing all the standard tabs for submitting an Amazon Deadline Cloud job. If you're using this dialog within an application and want it to stay in front, pass f=Qt.Tool, a flag that tells it to do that. @@ -213,7 +213,7 @@ def on_save_bundle(self): self.shared_job_settings.deadline_settings_box.update_settings(settings) self.shared_job_settings.desc_box.update_settings(settings) self.job_settings.update_settings(settings) - self.shared_job_settings.installation_requirements_box.update_settings(settings) + self.shared_job_settings.rez_packages_box.update_settings(settings) asset_references = self.job_attachments.get_asset_references() @@ -248,7 +248,7 @@ def on_submit(self): self.shared_job_settings.deadline_settings_box.update_settings(settings) self.shared_job_settings.desc_box.update_settings(settings) self.job_settings.update_settings(settings) - self.shared_job_settings.installation_requirements_box.update_settings(settings) + self.shared_job_settings.rez_packages_box.update_settings(settings) asset_references = self.job_attachments.get_asset_references() @@ -261,7 +261,7 @@ def on_submit(self): farm_id = get_setting("defaults.farm_id") queue_id = get_setting("defaults.queue_id") - storage_profile_id = get_setting("defaults.storage_profile_id") + storage_profile_id = get_setting("settings.storage_profile_id") queue = deadline.get_queue(farmId=farm_id, queueId=queue_id) diff --git a/src/deadline/client/ui/job_bundle_submitter.py b/src/deadline/client/ui/job_bundle_submitter.py index a6d72e10..1d11af3d 100644 --- a/src/deadline/client/ui/job_bundle_submitter.py +++ b/src/deadline/client/ui/job_bundle_submitter.py @@ -31,7 +31,7 @@ def show_job_bundle_submitter( input_job_bundle_dir: str = "", parent=None, f=Qt.WindowFlags() ) -> Optional[SubmitJobToDeadlineDialog]: """ - Opens a Amazon Deadline Cloud job submission dialog for the provided job bundle. + Opens an Amazon Deadline Cloud job submission dialog for the provided job bundle. Pass f=Qt.Tool if running it within an application context and want it to stay on top. 
@@ -90,8 +90,8 @@ def on_create_job_bundle_callback( parameters_values: List[Dict[str, Any]] = [ {"name": "deadline:priority", "value": settings.priority}, {"name": "deadline:targetTaskRunStatus", "value": settings.initial_status}, - {"name": "deadline:maxFailedTasksCount", "value": settings.failed_tasks_limit}, - {"name": "deadline:maxRetriesPerTask", "value": settings.task_retry_limit}, + {"name": "deadline:maxFailedTasksCount", "value": settings.max_failed_tasks_count}, + {"name": "deadline:maxRetriesPerTask", "value": settings.max_retries_per_task}, ] if asset_references: diff --git a/src/deadline/client/ui/widgets/deadline_credentials_status_widget.py b/src/deadline/client/ui/widgets/deadline_credentials_status_widget.py index bbcb57af..36b34a76 100644 --- a/src/deadline/client/ui/widgets/deadline_credentials_status_widget.py +++ b/src/deadline/client/ui/widgets/deadline_credentials_status_widget.py @@ -52,7 +52,7 @@ class DeadlineCredentialsStatusWidget(QWidget): widget.deadline_api_available: result of api.check_deadline_api_available() To display the status of a non-default Amazon Deadline Cloud configuration, pass in - a Amazon Deadline Cloud configuration object to config, call set_config to change it. + an Amazon Deadline Cloud configuration object to config, call set_config to change it. """ # This signal is sent when an AWS credential changes (e.g. config file) diff --git a/src/deadline/client/ui/widgets/job_template_parameters_widget.py b/src/deadline/client/ui/widgets/job_template_parameters_widget.py index e5af6c63..f64179f9 100644 --- a/src/deadline/client/ui/widgets/job_template_parameters_widget.py +++ b/src/deadline/client/ui/widgets/job_template_parameters_widget.py @@ -39,14 +39,9 @@ class JobTemplateParametersWidget(QWidget): a UI form to edit them with. OpenJobIO has optional UI metadata for each parameter specified under "userInterface". - Before this was added,this widget used an extension to the OJIO syntax, consisting of a - "uiHint" property, that is now deprecated. The code prefers to use the "userInterface" - properties, then the "uiHint" properties, then the defaults defined by the OpenJobIO - specification of "userInterface". Args: - initial_job_parameters (Dict[str, Any]): OpenJobIO parameters block, with additional - uiHint fields. + initial_job_parameters (Dict[str, Any]): OpenJobIO parameters block. parent: The parent Qt Widget. """ @@ -84,14 +79,6 @@ def _build_ui(self, job_parameters: List[Dict[str, Any]]): control_type_name = "" if "userInterface" in parameter: control_type_name = parameter["userInterface"].get("control", "") - elif "uiHint" in parameter: - control_type_name = parameter["uiHint"].get("controlType", "") - # Special case the uiHint pre-PATH parameter logic - if ( - parameter["type"] == "STRING" - and parameter["uiHint"].get("ojioFutureType") == "PATH" - ): - parameter["type"] = "PATH" # If not explicitly provided, determine the default control type name based on the OJIO specification if not control_type_name: @@ -122,8 +109,6 @@ def _build_ui(self, job_parameters: List[Dict[str, Any]]): if "userInterface" in parameter: group_label = parameter["userInterface"].get("groupLabel", "") - elif "uiHint" in parameter: - group_label = parameter["uiHint"].get("displayGroup", "") else: group_label = "" @@ -154,14 +139,12 @@ def get_parameter_values(self): def _get_parameter_label(parameter): """ - Returns the label to use for this parameter. Default to the label from "userInterface", then - the label from "uiHint", then the parameter name. 
+ Returns the label to use for this parameter. Default to the label from "userInterface", + then the parameter name. """ name = parameter["name"] if "userInterface" in parameter: return parameter["userInterface"].get("label", name) - elif "uiHint" in parameter: - return parameter["uiHint"].get("label", name) else: return name @@ -375,9 +358,6 @@ def _build_ui(self, parameter): if "userInterface" in parameter: single_step_delta = parameter["userInterface"].get("singleStepDelta", -1) drag_multiplier = -1.0 # TODO: Make a good default based on single_step_delta - elif "uiHint" in parameter: - single_step_delta = parameter["uiHint"].get("singleStep", -1) - drag_multiplier = parameter["uiHint"].get("dragMultiplier", -1.0) else: single_step_delta = -1 drag_multiplier = -1.0 @@ -451,10 +431,6 @@ def _build_ui(self, parameter): decimals = parameter["userInterface"].get("decimals", -1) single_step_delta = parameter["userInterface"].get("singleStepDelta", -1) drag_multiplier = -1.0 # TODO: Make a good default based on single_step_delta - elif "uiHint" in parameter: - decimals = parameter["uiHint"].get("decimals", -1) - single_step_delta = parameter["uiHint"].get("singleStep", -1.0) - drag_multiplier = parameter["uiHint"].get("dragMultiplier", -1.0) else: decimals = -1 single_step_delta = -1 @@ -554,9 +530,6 @@ def _build_ui(self, parameter): selected_filter = ( f"{file_filter_default['label']} ({' '.join(file_filter_default['patterns'])})" ) - elif "uiHint" in parameter: - filetype_filter = parameter["uiHint"].get("filter", filetype_filter) - selected_filter = parameter["uiHint"].get("selectedFilter", selected_filter) if not selected_filter: selected_filter = filetype_filter.split(";", 1)[0] diff --git a/src/deadline/client/ui/widgets/shared_job_settings_tab.py b/src/deadline/client/ui/widgets/shared_job_settings_tab.py index 080810c0..22cb26b2 100644 --- a/src/deadline/client/ui/widgets/shared_job_settings_tab.py +++ b/src/deadline/client/ui/widgets/shared_job_settings_tab.py @@ -43,8 +43,8 @@ def __init__(self, initial_settings, parent=None): self.deadline_settings_box = DeadlineSettingsWidget(initial_settings, self) layout.addWidget(self.deadline_settings_box) - self.installation_requirements_box = InstallationRequirementsWidget(initial_settings, self) - layout.addWidget(self.installation_requirements_box) + self.rez_packages_box = InstallationRequirementsWidget(initial_settings, self) + layout.addWidget(self.rez_packages_box) layout.addItem(QSpacerItem(0, 0, QSizePolicy.Minimum, QSizePolicy.Expanding)) @@ -74,10 +74,6 @@ def _build_ui(self): self.desc_edit = QLineEdit() self.layout.addRow(self.desc_label, self.desc_edit) - # TODO: Re-enable when this option is available in the back end. - self.desc_label.setEnabled(False) - self.desc_edit.setEnabled(False) - def _load_initial_settings(self, settings): self.sub_name_edit.setText(settings.name) self.desc_edit.setText(settings.description) @@ -126,21 +122,21 @@ def _build_ui(self): self.initial_status_box.addItems(["READY", "SUSPENDED"]) self.lyt.addRow(self.initial_status_box_label, self.initial_status_box) - self.failed_tasks_limit_box_label = QLabel("Failed Tasks Limit") - self.failed_tasks_limit_box_label.setToolTip( + self.max_failed_tasks_count_box_label = QLabel("Maximum Failed Tasks Count") + self.max_failed_tasks_count_box_label.setToolTip( "Maximum number of Tasks that can fail before the Job will be marked as failed." 
) - self.failed_tasks_limit_box = QSpinBox(parent=self) - self.failed_tasks_limit_box.setRange(0, 2147483647) - self.lyt.addRow(self.failed_tasks_limit_box_label, self.failed_tasks_limit_box) + self.max_failed_tasks_count_box = QSpinBox(parent=self) + self.max_failed_tasks_count_box.setRange(0, 2147483647) + self.lyt.addRow(self.max_failed_tasks_count_box_label, self.max_failed_tasks_count_box) - self.task_retry_limit_box_label = QLabel("Task Retry Limit") - self.task_retry_limit_box_label.setToolTip( + self.max_retries_per_task_box_label = QLabel("Maximum Retries Per Task") + self.max_retries_per_task_box_label.setToolTip( "Maximum number of times that a Task will retry before it's marked as failed." ) - self.task_retry_limit_box = QSpinBox(parent=self) - self.task_retry_limit_box.setRange(0, 2147483647) - self.lyt.addRow(self.task_retry_limit_box_label, self.task_retry_limit_box) + self.max_retries_per_task_box = QSpinBox(parent=self) + self.max_retries_per_task_box.setRange(0, 2147483647) + self.lyt.addRow(self.max_retries_per_task_box_label, self.max_retries_per_task_box) self.priority_box_label = QLabel("Priority") self.priority_box = QSpinBox(parent=self) @@ -154,30 +150,30 @@ def refresh_setting_controls(self, deadline_authorized): Args: deadline_authorized (bool): Should be the result of a call to api.check_deadline_available, for example from - a Amazon Deadline Cloud Status Widget. + an Amazon Deadline Cloud Status Widget. """ self.farm_box.refresh(deadline_authorized) self.queue_box.refresh(deadline_authorized) def _load_initial_settings(self, settings): self.initial_status_box.setCurrentText(settings.initial_status) - self.failed_tasks_limit_box.setValue(settings.failed_tasks_limit) - self.task_retry_limit_box.setValue(settings.task_retry_limit) + self.max_failed_tasks_count_box.setValue(settings.max_failed_tasks_count) + self.max_retries_per_task_box.setValue(settings.max_retries_per_task) self.priority_box.setValue(settings.priority) def update_settings(self, settings) -> None: """ - Updates a Amazon Deadline Cloud settings object with the latest values. + Updates an Amazon Deadline Cloud settings object with the latest values. 
The settings object should be a dataclass with: initial_status: str (or enum of base str) - failed_tasks_limit: int - task_retry_limit: int + max_failed_tasks_count: int + max_retries_per_task: int priority: int """ settings.initial_status = self.initial_status_box.currentText() - settings.failed_tasks_limit = self.failed_tasks_limit_box.value() - settings.task_retry_limit = self.task_retry_limit_box.value() + settings.max_failed_tasks_count = self.max_failed_tasks_count_box.value() + settings.max_retries_per_task = self.max_retries_per_task_box.value() settings.priority = self.priority_box.value() @@ -186,8 +182,8 @@ class InstallationRequirementsWidget(QGroupBox): # pylint: disable=too-few-publ UI element to hold list of Installation Requirements The settings object should be a dataclass with: - - `override_installation_requirements: bool` - - `installation_requirements: str` + - `override_rez_packages: bool` + - `rez_packages: str` """ def __init__(self, initial_settings, parent=None): @@ -199,23 +195,23 @@ def __init__(self, initial_settings, parent=None): def _build_ui(self): self.layout = QGridLayout(self) - self.requirements_chck = QCheckBox("Override Installation Requirements", self) + self.requirements_chck = QCheckBox("Override Rez Packages", self) self.requirements_edit = QLineEdit(self) self.layout.addWidget(self.requirements_chck, 4, 0) self.layout.addWidget(self.requirements_edit, 4, 1) self.requirements_chck.stateChanged.connect(self.enable_requirements_override_changed) def _load_initial_settings(self, settings): - self.requirements_chck.setChecked(settings.override_installation_requirements) - self.requirements_edit.setEnabled(settings.override_installation_requirements) - self.requirements_edit.setText(settings.installation_requirements) + self.requirements_chck.setChecked(settings.override_rez_packages) + self.requirements_edit.setEnabled(settings.override_rez_packages) + self.requirements_edit.setText(settings.rez_packages) def update_settings(self, settings): """ Update a given instance of scene settings with updated values. """ - settings.installation_requirements = self.requirements_edit.text() - settings.override_installation_requirements = self.requirements_chck.isChecked() + settings.rez_packages = self.requirements_edit.text() + settings.override_rez_packages = self.requirements_chck.isChecked() def enable_requirements_override_changed(self, state): """ @@ -226,7 +222,7 @@ def enable_requirements_override_changed(self, state): class _DeadlineNamedResourceDisplay(QWidget): """ - A Label for displaying a Amazon Deadline Cloud resource, that starts displaying + A Label for displaying an Amazon Deadline Cloud resource, that starts displaying it as the Id, but does an async call to Amazon Deadline Cloud to convert it to the name. @@ -285,7 +281,7 @@ def refresh(self, deadline_authorized): Args: deadline_authorized (bool): Should be the result of a call to api.check_deadline_available, for example from - a Amazon Deadline Cloud Status Widget. + an Amazon Deadline Cloud Status Widget. 
""" resource_id = get_setting(self.setting_name) if resource_id != self.item_id or not self.item_name: @@ -369,7 +365,7 @@ class DeadlineStorageProfileNameDisplay(_DeadlineNamedResourceDisplay): def __init__(self, parent=None): super().__init__( resource_name="Storage Profile Name", - setting_name="defaults.storage_profile_id", + setting_name="settings.storage_profile_id", parent=parent, ) diff --git a/src/deadline/job_attachments/asset_sync.py b/src/deadline/job_attachments/asset_sync.py index 56dc565d..7ceee0cd 100644 --- a/src/deadline/job_attachments/asset_sync.py +++ b/src/deadline/job_attachments/asset_sync.py @@ -7,7 +7,7 @@ from io import BytesIO from logging import Logger, LoggerAdapter, getLogger from math import trunc -from pathlib import Path +from pathlib import Path, PurePosixPath from typing import Any, Callable, DefaultDict, Dict, List, Optional, Tuple, Type, Union import boto3 @@ -87,10 +87,6 @@ def __init__( version=manifest_version ) - # TODO: Once Windows pathmapping is implemented we can remove this - if sys.platform == "win32": - raise NotImplementedError("Windows is not currently supported for Job Attachments") - def _upload_output_files_to_s3( self, s3_settings: JobAttachmentS3Settings, @@ -217,7 +213,7 @@ def _get_output_files( OutputFile( file_size=file_size, file_hash=file_hash, - rel_path=str(file_path.relative_to(local_root)), + rel_path=str(PurePosixPath(*file_path.relative_to(local_root).parts)), full_path=str(file_path.resolve()), s3_key=s3_key, in_s3=in_s3, @@ -349,7 +345,10 @@ def sync_inputs( merged_manifests_by_root[root] = merged_manifest # Download - if attachments.assetLoadingMethod == AssetLoadingMethod.ON_DEMAND: + if ( + attachments.assetLoadingMethod == AssetLoadingMethod.ON_DEMAND.value + and sys.platform != "win32" + ): mount_vfs_from_manifests( s3_bucket=s3_settings.s3BucketName, manifests_by_root=merged_manifests_by_root, @@ -435,7 +434,7 @@ def sync_outputs( else: summary_stats = SummaryStatistics() finally: - if attachments.assetLoadingMethod == AssetLoadingMethod.ON_DEMAND: + if attachments.assetLoadingMethod == AssetLoadingMethod.ON_DEMAND.value: # Shutdown all running Fus3 processes since task is completed Fus3ProcessManager.kill_all_processes(session_dir=session_dir) diff --git a/src/deadline/job_attachments/fus3.py b/src/deadline/job_attachments/fus3.py index f35419f4..0e591fb6 100644 --- a/src/deadline/job_attachments/fus3.py +++ b/src/deadline/job_attachments/fus3.py @@ -9,7 +9,7 @@ from pathlib import Path import threading from signal import SIGTERM -from typing import List, Union +from typing import List, Union, Optional from .errors import Fus3ExecutableMissingError, Fus3FailedToMountError @@ -23,11 +23,28 @@ class Fus3ProcessManager(object): - fus3_path = None - library_path = None - cwd_path = None - - def __init__(self, asset_bucket, region, manifest_path, mount_point, cas_prefix=None): + fus3_path: Optional[str] = None + library_path: Optional[str] = None + cwd_path: Optional[str] = None + + _mount_point: str + _fus3_proc: Optional[subprocess.Popen] + _fus3_thread: Optional[threading.Thread] + _mount_temp_directory: Optional[str] + _run_path: Optional[str] + _asset_bucket: str + _region: str + _manifest_path: str + _cas_prefix: Optional[str] + + def __init__( + self, + asset_bucket: str, + region: str, + manifest_path: str, + mount_point: str, + cas_prefix: Optional[str] = None, + ): # TODO: Once Windows pathmapping is implemented we can remove this if sys.platform == "win32": raise NotImplementedError("Windows is not currently 
supported for Job Attachments") @@ -238,7 +255,7 @@ def start(self, session_dir: Path) -> None: Start our fus3 process :return: fus3 process id """ - self._run_path = self.get_cwd() + self._run_path = str(self.get_cwd()) log.info(f"Using run_path {self._run_path}") log.info(f"Using mount_point {self._mount_point}") self.create_mount_point(self._mount_point) diff --git a/src/deadline/job_attachments/models.py b/src/deadline/job_attachments/models.py index 49573b7f..bb757af1 100644 --- a/src/deadline/job_attachments/models.py +++ b/src/deadline/job_attachments/models.py @@ -92,7 +92,7 @@ class Attachments: # The list of required assests per asset root manifests: List[ManifestProperties] = field(default_factory=list) # Method to use when loading assets required for a job - assetLoadingMethod: AssetLoadingMethod = AssetLoadingMethod.PRELOAD + assetLoadingMethod: str = AssetLoadingMethod.PRELOAD.value @dataclass diff --git a/test/deadline_client/unit/api/test_job_bundle_submission_asset_refs_deprecated_uihint.py b/test/deadline_client/unit/api/test_job_bundle_submission_asset_refs_deprecated_uihint.py deleted file mode 100644 index ce472264..00000000 --- a/test/deadline_client/unit/api/test_job_bundle_submission_asset_refs_deprecated_uihint.py +++ /dev/null @@ -1,325 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. - -""" -Tests the deadline.client.api functions for submitting OpenJobIO job bundles, -where there are PATH parameters that carry assetReference IN/OUT metadata. -""" - -import os -from unittest.mock import ANY, patch - -from deadline.client import api, config -from deadline.client.api import _submit_job_bundle -from deadline.client.job_bundle import submission -from deadline.job_attachments.models import ManifestProperties, AssetRootManifest, Attachments -from deadline.job_attachments.progress_tracker import SummaryStatistics -from deadline.job_attachments.utils import AssetLoadingMethod, OperatingSystemFamily - -from ..shared_constants import MOCK_FARM_ID, MOCK_QUEUE_ID -from .test_job_bundle_submission import ( - MOCK_CREATE_JOB_RESPONSE, - MOCK_GET_JOB_RESPONSE, - MOCK_GET_QUEUE_RESPONSE, - _write_asset_files, -) - -# A YAML job template that contains every type of (file, directory) * (none, in, out, inout) asset references -JOB_BUNDLE_RELATIVE_FILE_PATH = "./file/inside/job_bundle.txt" -JOB_BUNDLE_RELATIVE_DIR_PATH = "./dir/inside/job_bundle" -JOB_TEMPLATE_ALL_ASSET_REF_VARIANTS = f""" -specificationVersion: '2022-09-01' -name: Job Template to test all assetReference variants. 
-parameters: -- name: FileNone - type: STRING - description: FILE * NONE -- name: FileIn - type: STRING - description: FILE * IN - default: {JOB_BUNDLE_RELATIVE_FILE_PATH} -- name: FileInAbsolute - type: STRING - description: FILE * IN - default: JOB_BUNDLE_ABSOLUTE_FILE_PATH -- name: FileOut - type: STRING - description: FILE * OUT -- name: FileInout - type: STRING - description: FILE * INOUT -- name: DirNone - type: STRING - description: DIR * NONE -- name: DirIn - type: STRING - description: DIR * IN - default: {JOB_BUNDLE_RELATIVE_DIR_PATH} -- name: DirInAbsolute - type: STRING - description: DIR * IN - default: JOB_BUNDLE_ABSOLUTE_DIR_PATH -- name: DirOut - type: STRING - description: DIR * OUT -- name: DirInout - type: STRING - description: DIR * INOUT -steps: -- name: CliScript - script: - embeddedFiles: - - name: runScript - type: TEXT - runnable: true - data: | - #!/usr/bin/env bash - echo ' - {{Job.Parameter.FileNone}} - {{Job.Parameter.FileIn}} - {{Job.Parameter.FileInAbsolute}} - {{Job.Parameter.FileOut}} - {{Job.Parameter.FileInout}} - {{Job.Parameter.DirNone}} - {{Job.Parameter.DirIn}} - {{Job.Parameter.DirInAbsolute}} - {{Job.Parameter.DirOut}} - {{Job.Parameter.DirInout}} - ' - actions: - onRun: - command: '{{Task.Attachment.runScript.Path}}' -""" -JOB_TEMPLATE_UIHINT_ALL_ASSET_REF_VARIANTS = """ -parameters: -- name: FileNone - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_INPUT_FILE - assetReference: NONE -- name: FileIn - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_INPUT_FILE - assetReference: IN -- name: FileInAbsolute - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_INPUT_FILE - assetReference: IN -- name: FileOut - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_OUTPUT_FILE - assetReference: OUT -- name: FileInout - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_OUTPUT_FILE - assetReference: INOUT -- name: DirNone - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: NONE -- name: DirIn - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: IN -- name: DirInAbsolute - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: IN -- name: DirOut - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: OUT -- name: DirInout - uiHint: - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: INOUT -""" - - -def test_create_job_from_job_bundle_with_all_asset_ref_variants_deprecated_uihint( - fresh_deadline_config, temp_job_bundle_dir, temp_assets_dir -): - """ - Test a job bundle with template from JOB_TEMPLATE_ALL_ASSET_REF_VARIANTS. 
- """ - # Use a temporary directory for the job bundle - with patch.object(_submit_job_bundle.api, "get_boto3_session"), patch.object( - _submit_job_bundle.api, "get_boto3_client" - ) as client_mock, patch.object(_submit_job_bundle.api, "get_queue_boto3_session"), patch.object( - submission.S3AssetManager, "hash_assets_and_create_manifest" - ) as mock_hash_assets, patch.object( - submission.S3AssetManager, "upload_assets" - ) as mock_upload_assets: - client_mock().create_job.side_effect = [MOCK_CREATE_JOB_RESPONSE] - client_mock().get_queue.side_effect = [MOCK_GET_QUEUE_RESPONSE] - client_mock().get_job.side_effect = [MOCK_GET_JOB_RESPONSE] - mock_hash_assets.return_value = [SummaryStatistics(), AssetRootManifest()] - mock_upload_assets.return_value = [ - SummaryStatistics(), - Attachments( - [ - ManifestProperties( - rootPath="/mnt/root/path1", - osType=OperatingSystemFamily.LINUX, - inputManifestPath="mock-manifest", - inputManifestHash="mock-manifest-hash", - outputRelativeDirectories=["."], - ), - ], - ), - ] - - config.set_setting("defaults.farm_id", MOCK_FARM_ID) - config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) - - # Define absolute paths for testing within temp_assets_dir - job_bundle_absolute_file_path = os.path.normpath(temp_assets_dir + "/absolute/absolute.txt") - job_bundle_absolute_dir_path = os.path.normpath(temp_assets_dir + "/absolutedir") - - # Insert absolute paths with temp dir into job template - job_template_replaced = JOB_TEMPLATE_ALL_ASSET_REF_VARIANTS.replace( - "JOB_BUNDLE_ABSOLUTE_FILE_PATH", job_bundle_absolute_file_path - ) - job_template_replaced = job_template_replaced.replace( - "JOB_BUNDLE_ABSOLUTE_DIR_PATH", job_bundle_absolute_dir_path - ) - - # Write the YAML template - with open(os.path.join(temp_job_bundle_dir, "template.yaml"), "w", encoding="utf8") as f: - f.write(job_template_replaced) - with open( - os.path.join(temp_job_bundle_dir, "template_uihint.yaml"), "w", encoding="utf8" - ) as f: - f.write(JOB_TEMPLATE_UIHINT_ALL_ASSET_REF_VARIANTS) - - job_parameters = [ - { - "name": "FileNone", - "value": os.path.join(temp_assets_dir, "file/inside/asset-dir.txt"), - }, - # Leaving out "FileIn" so it gets the default value - {"name": "FileOut", "value": "./file/inside/cwd.txt"}, - { - "name": "FileInout", - "value": os.path.join(temp_assets_dir, "file/inside/asset-dir-2.txt"), - }, - {"name": "DirNone", "value": os.path.join(temp_assets_dir, "./dir/inside/asset-dir")}, - # Leaving out "DirIn" so it gets the default value - {"name": "DirOut", "value": "./dir/inside"}, - { - "name": "DirInout", - "value": os.path.join(temp_assets_dir, "./dir/inside/asset-dir-2"), - }, - ] - - # Write file contents to the job bundle dir - _write_asset_files( - temp_job_bundle_dir, - { - JOB_BUNDLE_RELATIVE_FILE_PATH: "file in", - JOB_BUNDLE_RELATIVE_DIR_PATH + "/file1.txt": "dir in file1", - JOB_BUNDLE_RELATIVE_DIR_PATH + "/subdir/file1.txt": "dir in file2", - }, - ) - # Write file contents to the temporary assets dir - _write_asset_files( - temp_assets_dir, - { - "file/inside/asset-dir-2.txt": "file inout", - "./dir/inside/asset-dir-2/file_x.txt": "dir inout", - "./dir/inside/asset-dir-2/subdir/file_y.txt": "dir inout", - }, - ) - - # Write file contents to absolute asset directories - _write_asset_files( - "/", - { - job_bundle_absolute_file_path: "absolute file in", - job_bundle_absolute_dir_path + "/absolute.txt": "absolute dir in", - }, - ) - - # This is the function we're testing - api.create_job_from_job_bundle(temp_job_bundle_dir, job_parameters=job_parameters) 
- - # The values of input_paths and output_paths are the first - # thing this test needs to verify, confirming that the - # bundle dir is used for default parameter values, and the - # current working directory is used for job parameters. - input_paths = sorted( - os.path.normpath(p) - for p in [ - temp_assets_dir + "/dir/inside/asset-dir-2/file_x.txt", - temp_assets_dir + "/dir/inside/asset-dir-2/subdir/file_y.txt", - temp_assets_dir + "/file/inside/asset-dir-2.txt", - temp_job_bundle_dir + "/dir/inside/job_bundle/file1.txt", - temp_job_bundle_dir + "/dir/inside/job_bundle/subdir/file1.txt", - temp_job_bundle_dir + "/file/inside/job_bundle.txt", - job_bundle_absolute_file_path, - job_bundle_absolute_dir_path + "/absolute.txt", - ] - ) - output_paths = sorted( - os.path.normpath(os.path.abspath(p)) - for p in [ - temp_assets_dir + "/dir/inside/asset-dir-2", - temp_assets_dir + "/file/inside", - "./dir/inside", - "./file/inside", - ] - ) - mock_hash_assets.assert_called_once_with( - input_paths=input_paths, - output_paths=output_paths, - hash_cache_dir=os.path.expanduser(os.path.join("~", ".deadline", "cache")), - on_preparing_to_submit=ANY, - ) - client_mock().create_job.assert_called_once_with( - farmId=MOCK_FARM_ID, - queueId=MOCK_QUEUE_ID, - template=ANY, - templateType="YAML", - attachments={ - "manifests": [ - { - "rootPath": "/mnt/root/path1", - "osType": OperatingSystemFamily.LINUX, - "inputManifestPath": "mock-manifest", - "inputManifestHash": "mock-manifest-hash", - "outputRelativeDirectories": ["."], - }, - ], - "assetLoadingMethod": AssetLoadingMethod.PRELOAD, - }, - # The job parameter values are the second thing this test needs to verify, - # confirming that the parameters were processed according to their types. - parameters={ - "FileNone": { - "string": os.path.normpath(temp_assets_dir + "/file/inside/asset-dir.txt") - }, - "FileOut": {"string": os.path.normpath(os.path.abspath("file/inside/cwd.txt"))}, - "FileIn": { - "string": os.path.normpath(temp_job_bundle_dir + "/file/inside/job_bundle.txt") - }, - "FileInout": { - "string": os.path.normpath(temp_assets_dir + "/file/inside/asset-dir-2.txt") - }, - "DirNone": {"string": os.path.normpath(temp_assets_dir + "/dir/inside/asset-dir")}, - "DirIn": { - "string": os.path.normpath(temp_job_bundle_dir + "/dir/inside/job_bundle") - }, - "DirOut": {"string": os.path.normpath(os.path.abspath("dir/inside"))}, - "DirInout": { - "string": os.path.normpath(temp_assets_dir + "/dir/inside/asset-dir-2") - }, - }, - ) diff --git a/test/deadline_client/unit/job_bundle/test_job_bundle_loader_deprecated_uihint.py b/test/deadline_client/unit/job_bundle/test_job_bundle_loader_deprecated_uihint.py deleted file mode 100644 index 48776032..00000000 --- a/test/deadline_client/unit/job_bundle/test_job_bundle_loader_deprecated_uihint.py +++ /dev/null @@ -1,232 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. - -""" -Tests the functionality for loading data from a job bundle. For example, -to load all the job parameter metadata, merging the parameter values -and temporary uihint files. - -This is testing deprecated uihint functionality. -""" - -import json -import os - -import pytest -import yaml - -from deadline.client.job_bundle.parameters import read_job_bundle_parameters - -JOB_TEMPLATE_WITH_PARAMETERS_2022_09_01 = """ -specificationVersion: '2022-09-01' -name: CLI Job -parameters: -- name: LineEditControl - type: STRING - description: "Unrestricted line of text!" - default: Default line edit value. 
-- name: IntSpinner - type: NUMBER - description: A default integer spinner. - default: 42 -- name: StringDropdown - type: STRING - description: A dropdown with string values. - default: WEDNESDAY - allowedValues: [MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY] -- name: DirectoryPicker - type: STRING - description: Choose a directory. -- name: DirectoryPickDef1 - type: STRING - description: Choose a directory. - default: ./internal/directory -- name: DirectoryPickDef2 - type: STRING - description: Choose a directory. - default: ./internal/directory -steps: -- name: CliScript - script: - attachments: - runScript: - type: TEXT - runnable: true - data: | - #!/usr/bin/env bash - - echo "Running the task" - sleep 35 - actions: - onRun: - command: "{{Task.Attachment.runScript.Path}}" -""" - -JOB_TEMPLATE_UIHINT = """ -parameters: -- name: LineEditControl - uiHint: - label: Line Edit Control - ojioFutureType: STRING - controlType: LINE_EDIT -- name: IntSpinner - uiHint: - label: Int Spinner - ojioFutureType: INT - controlType: INT_SPIN_BOX -- name: StringDropdown - uiHint: - label: String Dropdown - ojioFutureType: STRING - controlType: DROPDOWN_LIST -- name: DirectoryPicker - uiHint: - label: Directory Picker - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: INOUT -- name: DirectoryPickDef1 - uiHint: - label: Directory Picker 1 - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: INOUT -- name: DirectoryPickDef2 - uiHint: - label: Directory Picker 2 - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: INOUT -""" - -PARAMETER_VALUES = """ -parameterValues: -- name: deadline:targetTaskRunStatus - value: READY -- name: LineEditControl - value: Testing one two three. -- name: DirectoryPicker - value: "C:\\\\Users\\\\username\\\\mydir" -- name: DirectoryPickDef1 - value: "C:\\\\Users\\\\username\\\\value" -""" - -READ_JOB_BUNDLE_PARAMETERS_RESULT = """ -- name: LineEditControl - type: STRING - description: "Unrestricted line of text!" - default: Default line edit value. - value: Testing one two three. - uiHint: - label: Line Edit Control - ojioFutureType: STRING - controlType: LINE_EDIT -- name: IntSpinner - type: NUMBER - description: A default integer spinner. - default: 42 - uiHint: - label: Int Spinner - ojioFutureType: INT - controlType: INT_SPIN_BOX -- name: StringDropdown - type: STRING - description: A dropdown with string values. - default: WEDNESDAY - allowedValues: [MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY] - uiHint: - label: String Dropdown - ojioFutureType: STRING - controlType: DROPDOWN_LIST -- name: DirectoryPicker - type: STRING - description: Choose a directory. - value: "C:\\\\Users\\\\username\\\\mydir" - uiHint: - label: Directory Picker - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: INOUT -- name: DirectoryPickDef1 - type: STRING - description: Choose a directory. - default: ./internal/directory - value: "C:\\\\Users\\\\username\\\\value" - uiHint: - label: Directory Picker 1 - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: INOUT -- name: DirectoryPickDef2 - type: STRING - description: Choose a directory. 
- default: ./internal/directory - value: {DIRECTORY_PICKER_2_VALUE} - uiHint: - label: Directory Picker 2 - ojioFutureType: PATH - controlType: CHOOSE_DIRECTORY - assetReference: INOUT -- name: deadline:targetTaskRunStatus - value: READY -""" - - -@pytest.mark.parametrize( - "template_data,uihint_data,parameter_values,expected_result", - [ - pytest.param( - JOB_TEMPLATE_WITH_PARAMETERS_2022_09_01, - JOB_TEMPLATE_UIHINT, - PARAMETER_VALUES, - READ_JOB_BUNDLE_PARAMETERS_RESULT, - id="2022-09-01", - ), - ], -) -def test_read_job_bundle_parameters_deprecated_uihint( - template_data, - uihint_data, - parameter_values, - expected_result, - fresh_deadline_config, - temp_job_bundle_dir, -): - """ - Tests that the read_job_bundle_parameters function loads the - """ - # Write the template to the job bundle - with open( - os.path.join(temp_job_bundle_dir, "template.yaml"), - "w", - encoding="utf8", - ) as f: - f.write(template_data) - - # Write the uihint to the job bundle - with open( - os.path.join(temp_job_bundle_dir, "template_uihint.yaml"), - "w", - encoding="utf8", - ) as f: - f.write(uihint_data) - - # Write the parameter values to the job bundle - with open( - os.path.join(temp_job_bundle_dir, "parameter_values.yaml"), - "w", - encoding="utf8", - ) as f: - f.write(parameter_values) - - # Now load the parameters from this job bundle - result = read_job_bundle_parameters(temp_job_bundle_dir) - - # In the test data, we set the directory picker 1 parameter value, but let - # the directory picker 2 parameter value fall back to the default, which causes - # it to expand into a path internal to the job bundle. - directory_picker_2_value = json.dumps( - os.path.normpath(os.path.join(temp_job_bundle_dir, "./internal/directory")) - ) - assert result == yaml.safe_load( - expected_result.format(DIRECTORY_PICKER_2_VALUE=directory_picker_2_value) - ) diff --git a/test/deadline_job_attachments/unit/asset_manifests/v2023_03_03/__init__.py b/test/deadline_job_attachments/unit/asset_manifests/v2023_03_03/__init__.py deleted file mode 100644 index 8d929cc8..00000000 --- a/test/deadline_job_attachments/unit/asset_manifests/v2023_03_03/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. diff --git a/test/deadline_job_attachments/unit/aws/__init__.py b/test/deadline_job_attachments/unit/aws/__init__.py deleted file mode 100644 index 8d929cc8..00000000 --- a/test/deadline_job_attachments/unit/aws/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
diff --git a/test/__init__.py b/test/integ/deadline_job_attachments/__init__.py similarity index 100% rename from test/__init__.py rename to test/integ/deadline_job_attachments/__init__.py diff --git a/test/deadline_job_attachments/integ/conftest.py b/test/integ/deadline_job_attachments/conftest.py similarity index 100% rename from test/deadline_job_attachments/integ/conftest.py rename to test/integ/deadline_job_attachments/conftest.py diff --git a/test/deadline_job_attachments/integ/test_data/inputs/scene.ma b/test/integ/deadline_job_attachments/test_data/inputs/scene.ma similarity index 100% rename from test/deadline_job_attachments/integ/test_data/inputs/scene.ma rename to test/integ/deadline_job_attachments/test_data/inputs/scene.ma diff --git a/test/deadline_job_attachments/integ/test_data/inputs/textures/brick.png b/test/integ/deadline_job_attachments/test_data/inputs/textures/brick.png similarity index 100% rename from test/deadline_job_attachments/integ/test_data/inputs/textures/brick.png rename to test/integ/deadline_job_attachments/test_data/inputs/textures/brick.png diff --git a/test/deadline_job_attachments/integ/test_data/inputs/textures/cloth.png b/test/integ/deadline_job_attachments/test_data/inputs/textures/cloth.png similarity index 100% rename from test/deadline_job_attachments/integ/test_data/inputs/textures/cloth.png rename to test/integ/deadline_job_attachments/test_data/inputs/textures/cloth.png diff --git a/test/deadline_job_attachments/integ/test_job_attachments.py b/test/integ/deadline_job_attachments/test_job_attachments.py similarity index 100% rename from test/deadline_job_attachments/integ/test_job_attachments.py rename to test/integ/deadline_job_attachments/test_job_attachments.py diff --git a/test/deadline_client/__init__.py b/test/unit/__init__.py similarity index 100% rename from test/deadline_client/__init__.py rename to test/unit/__init__.py diff --git a/test/deadline_client/unit/__init__.py b/test/unit/deadline_client/__init__.py similarity index 100% rename from test/deadline_client/unit/__init__.py rename to test/unit/deadline_client/__init__.py diff --git a/test/deadline_client/unit/api/__init__.py b/test/unit/deadline_client/api/__init__.py similarity index 100% rename from test/deadline_client/unit/api/__init__.py rename to test/unit/deadline_client/api/__init__.py diff --git a/test/deadline_client/unit/api/test_api_farm.py b/test/unit/deadline_client/api/test_api_farm.py similarity index 100% rename from test/deadline_client/unit/api/test_api_farm.py rename to test/unit/deadline_client/api/test_api_farm.py diff --git a/test/deadline_client/unit/api/test_api_job.py b/test/unit/deadline_client/api/test_api_job.py similarity index 100% rename from test/deadline_client/unit/api/test_api_job.py rename to test/unit/deadline_client/api/test_api_job.py diff --git a/test/deadline_client/unit/api/test_api_queue.py b/test/unit/deadline_client/api/test_api_queue.py similarity index 100% rename from test/deadline_client/unit/api/test_api_queue.py rename to test/unit/deadline_client/api/test_api_queue.py diff --git a/test/deadline_client/unit/api/test_api_session.py b/test/unit/deadline_client/api/test_api_session.py similarity index 100% rename from test/deadline_client/unit/api/test_api_session.py rename to test/unit/deadline_client/api/test_api_session.py diff --git a/test/deadline_client/unit/api/test_api_storage_profile.py b/test/unit/deadline_client/api/test_api_storage_profile.py similarity index 97% rename from 
test/deadline_client/unit/api/test_api_storage_profile.py rename to test/unit/deadline_client/api/test_api_storage_profile.py index a2ad8dd5..76364c6a 100644 --- a/test/deadline_client/unit/api/test_api_storage_profile.py +++ b/test/unit/deadline_client/api/test_api_storage_profile.py @@ -1,4 +1,4 @@ -# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. """ tests the deadline.client.api functions relating to storage profiles diff --git a/test/deadline_client/unit/api/test_job_bundle_submission.py b/test/unit/deadline_client/api/test_job_bundle_submission.py similarity index 98% rename from test/deadline_client/unit/api/test_job_bundle_submission.py rename to test/unit/deadline_client/api/test_job_bundle_submission.py index 688517af..30d9bb72 100644 --- a/test/deadline_client/unit/api/test_job_bundle_submission.py +++ b/test/unit/deadline_client/api/test_job_bundle_submission.py @@ -165,7 +165,7 @@ }, { "name": "deadline:maxFailedTasksCount", - "value": 100 + "value": 20 }, { "name": "deadline:maxRetriesPerTask", @@ -177,7 +177,7 @@ { "priority": 45, "targetTaskRunStatus": "SUSPENDED", - "maxFailedTasksCount": 100, + "maxFailedTasksCount": 20, "maxRetriesPerTask": 5, }, ), @@ -277,7 +277,7 @@ def test_create_job_from_job_bundle( config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) - config.set_setting("defaults.storage_profile_id", MOCK_STORAGE_PROFILE_ID) + config.set_setting("settings.storage_profile_id", MOCK_STORAGE_PROFILE_ID) # Write the template to the job bundle with open( @@ -442,7 +442,7 @@ def test_create_job_from_job_bundle_job_attachments( config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) - config.set_setting("defaults.storage_profile_id", MOCK_STORAGE_PROFILE_ID) + config.set_setting("settings.storage_profile_id", MOCK_STORAGE_PROFILE_ID) # Write a JSON template with open(os.path.join(temp_job_bundle_dir, "template.json"), "w", encoding="utf8") as f: @@ -524,7 +524,7 @@ def test_create_job_from_job_bundle_with_empty_asset_references( config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) - config.set_setting("defaults.storage_profile_id", MOCK_STORAGE_PROFILE_ID) + config.set_setting("settings.storage_profile_id", MOCK_STORAGE_PROFILE_ID) # Write the template to the job bundle with open( @@ -594,7 +594,7 @@ def test_create_job_from_job_bundle_with_single_asset_file( config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) - config.set_setting("defaults.storage_profile_id", MOCK_STORAGE_PROFILE_ID) + config.set_setting("settings.storage_profile_id", MOCK_STORAGE_PROFILE_ID) # Write a JSON template with open(os.path.join(temp_job_bundle_dir, "template.json"), "w", encoding="utf8") as f: diff --git a/test/deadline_client/unit/api/test_job_bundle_submission_asset_refs.py b/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py similarity index 93% rename from test/deadline_client/unit/api/test_job_bundle_submission_asset_refs.py rename to test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py index 3aff5a18..f7032291 100644 --- a/test/deadline_client/unit/api/test_job_bundle_submission_asset_refs.py +++ b/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py @@ -282,19 +282,19 @@ def 
test_create_job_from_job_bundle_with_all_asset_ref_variants( "outputRelativeDirectories": ["."], }, ], - "assetLoadingMethod": AssetLoadingMethod.PRELOAD, + "assetLoadingMethod": AssetLoadingMethod.PRELOAD.value, }, # The job parameter values are the second thing this test needs to verify, # confirming that the parameters were processed according to their types. parameters={ "FileNoneDefault": { "path": os.path.join( - temp_assets_dir, "file/inside/asset-dir-filenonedefault.txt" + temp_assets_dir, "file", "inside", "asset-dir-filenonedefault.txt" ), }, "FileNone": { "path": os.path.normpath( - temp_assets_dir + "/file/inside/asset-dir-filenone.txt" + os.path.join(temp_assets_dir, "file", "inside", "asset-dir-filenone.txt") ) }, "FileOut": {"path": os.path.normpath(os.path.abspath("file/inside/cwd.txt"))}, @@ -307,15 +307,23 @@ def test_create_job_from_job_bundle_with_all_asset_ref_variants( ) }, "DirNoneDefault": { - "path": os.path.join(temp_assets_dir, "dir/inside/asset-dir-dirnonedefault"), + "path": os.path.join( + temp_assets_dir, "dir", "inside", "asset-dir-dirnonedefault" + ), }, "DirNone": { - "path": os.path.join(temp_assets_dir, "./dir/inside/asset-dir-dirnone") + "path": os.path.join(temp_assets_dir, "dir", "inside", "asset-dir-dirnone") + }, + "DirIn": { + "path": os.path.normpath( + os.path.join(temp_job_bundle_dir, "dir", "inside", "job_bundle") + ) }, - "DirIn": {"path": os.path.normpath(temp_job_bundle_dir + "/dir/inside/job_bundle")}, "DirOut": {"path": os.path.normpath(os.path.abspath("dir/inside/cwd-dirout"))}, "DirInout": { - "path": os.path.normpath(temp_assets_dir + "/./dir/inside/asset-dir-dirinout") + "path": os.path.normpath( + os.path.join(temp_assets_dir, "dir", "inside", "asset-dir-dirinout") + ) }, }, ) diff --git a/test/deadline_client/unit/cli/__init__.py b/test/unit/deadline_client/cli/__init__.py similarity index 100% rename from test/deadline_client/unit/cli/__init__.py rename to test/unit/deadline_client/cli/__init__.py diff --git a/test/deadline_client/unit/cli/test_cli.py b/test/unit/deadline_client/cli/test_cli.py similarity index 100% rename from test/deadline_client/unit/cli/test_cli.py rename to test/unit/deadline_client/cli/test_cli.py diff --git a/test/deadline_client/unit/cli/test_cli_bundle.py b/test/unit/deadline_client/cli/test_cli_bundle.py similarity index 100% rename from test/deadline_client/unit/cli/test_cli_bundle.py rename to test/unit/deadline_client/cli/test_cli_bundle.py diff --git a/test/deadline_client/unit/cli/test_cli_config.py b/test/unit/deadline_client/cli/test_cli_config.py similarity index 94% rename from test/deadline_client/unit/cli/test_cli_config.py rename to test/unit/deadline_client/cli/test_cli_config.py index 739a54c6..1eda7cbc 100644 --- a/test/deadline_client/unit/cli/test_cli_config.py +++ b/test/unit/deadline_client/cli/test_cli_config.py @@ -34,7 +34,7 @@ def test_cli_config_show_defaults(fresh_deadline_config): assert fresh_deadline_config in result.output # Assert the expected number of settings - assert len(settings.keys()) == 9 + assert len(settings.keys()) == 10 for setting_name in settings.keys(): assert setting_name in result.output @@ -93,14 +93,17 @@ def test_cli_config_show_modified_config(fresh_deadline_config): config.set_setting("settings.job_history_dir", "~/alternate/job_history") config.set_setting("settings.deadline_endpoint_url", "https://some-url-value") config.set_setting("defaults.farm_id", "farm-82934h23k4j23kjh") + config.set_setting("settings.storage_profile_id", "sp-12345abcde12345") 
config.set_setting("defaults.queue_id", "queue-389348u234jhk34") - config.set_setting("defaults.storage_profile_id", "sp-12345abcde12345") + config.set_setting("defaults.job_id", "job-239u40234jkl234nkl23") config.set_setting("settings.auto_accept", "False") config.set_setting("settings.log_level", "DEBUG") runner = CliRunner() result = runner.invoke(deadline_cli.cli, ["config", "show"]) + print(result.output) + assert result.exit_code == 0 # We should see all the overridden values in the output @@ -109,6 +112,8 @@ def test_cli_config_show_modified_config(fresh_deadline_config): assert result.output.count("False") == 1 assert "https://some-url-value" in result.output assert "farm-82934h23k4j23kjh" in result.output + assert "queue-389348u234jhk34" in result.output + assert "job-239u40234jkl234nkl23" in result.output # It shouldn't say anywhere that there is a default setting assert "(default)" not in result.output assert "settings.log_level:\n DEBUG" in result.output diff --git a/test/deadline_client/unit/cli/test_cli_farm.py b/test/unit/deadline_client/cli/test_cli_farm.py similarity index 100% rename from test/deadline_client/unit/cli/test_cli_farm.py rename to test/unit/deadline_client/cli/test_cli_farm.py diff --git a/test/deadline_client/unit/cli/test_cli_fleet.py b/test/unit/deadline_client/cli/test_cli_fleet.py similarity index 67% rename from test/deadline_client/unit/cli/test_cli_fleet.py rename to test/unit/deadline_client/cli/test_cli_fleet.py index 918619ae..eae92286 100644 --- a/test/deadline_client/unit/cli/test_cli_fleet.py +++ b/test/unit/deadline_client/cli/test_cli_fleet.py @@ -4,6 +4,7 @@ Tests for the CLI fleet commands. """ from unittest.mock import patch +from copy import deepcopy import boto3 # type: ignore[import] from botocore.exceptions import ClientError # type: ignore[import] @@ -12,7 +13,7 @@ from deadline.client import api, config from deadline.client.cli import deadline_cli -from ..shared_constants import MOCK_FARM_ID +from ..shared_constants import MOCK_FARM_ID, MOCK_QUEUE_ID, MOCK_QUEUES_LIST MOCK_FLEET_ID = "fleet-0123456789abcdef0123456789abcdef" @@ -126,6 +127,85 @@ def test_cli_fleet_get(fresh_deadline_config): assert result.exit_code == 0 +def test_cli_fleet_get_with_queue_id(fresh_deadline_config): + """ + Confirm that the CLI interface prints out the expected fleets, given mock data + and a queue id parameter. + """ + config.set_setting("defaults.farm_id", MOCK_FARM_ID) + config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) + + with patch.object(api._session, "get_boto3_session") as session_mock: + session_mock().client("deadline").get_queue.return_value = MOCK_QUEUES_LIST[0] + session_mock().client("deadline").get_fleet.side_effect = deepcopy(MOCK_FLEETS_LIST) + session_mock().client("deadline").list_queue_fleet_associations.return_value = { + "queueFleetAssociations": [ + { + "queueId": MOCK_QUEUES_LIST[0]["queueId"], + "fleetId": MOCK_FLEETS_LIST[0]["fleetId"], + "status": "ACTIVE", + }, + { + "queueId": MOCK_QUEUES_LIST[0]["queueId"], + "fleetId": MOCK_FLEETS_LIST[1]["fleetId"], + "status": "ACTIVE", + }, + ] + } + + runner = CliRunner() + result = runner.invoke(deadline_cli.cli, ["fleet", "get"]) + + assert ( + result.output + == """Showing all fleets (2 total) associated with queue: Testing Queue + +fleetId: fleet-0123456789abcdef0123456789abcdef +farmId: farm-0123456789abcdefabcdefabcdefabcd +description: The best fleet. 
+displayName: MadFleet +status: ACTIVE +platform: EC2_SPOT +workerRequirements: + vCpus: + min: 2 + max: 4 + memInGiB: + min: 8 + max: 16 +autoScalerCapacities: + min: 0 + max: 10 +createdAt: '2022-11-22T06:37:36+00:00' +createdBy: arn:aws:sts::123456789012:assumed-role/Admin +queueFleetAssociationStatus: ACTIVE + + +fleetId: fleet-0223456789abcdef0223456789abcdef +farmId: farm-0123456789abcdefabcdefabcdefabcd +description: The maddest fleet. +displayName: MadderFleet +status: ACTIVE +platform: EC2_SPOT +workerRequirements: + vCpus: + min: 2 + max: 4 + memInGiB: + min: 8 + max: 16 +autoScalerCapacities: + min: 0 + max: 50 +createdAt: '2022-11-22T06:37:36+00:00' +createdBy: arn:aws:sts::123456789012:assumed-role/Admin +queueFleetAssociationStatus: ACTIVE + +""" + ) + assert result.exit_code == 0 + + def test_cli_fleet_get_override_profile(fresh_deadline_config): """ Confirms that the --profile option overrides the option to boto3.Session. @@ -152,6 +232,24 @@ def test_cli_fleet_get_override_profile(fresh_deadline_config): assert result.exit_code == 0 +def test_cli_fleet_get_both_fleet_id_and_queue_id_provided(fresh_deadline_config): + """ + Confirm that the CLI interface fails when both fleet and queue id are provided + """ + config.set_setting("defaults.farm_id", "farm-overriddenid") + + with patch.object(api._session, "get_boto3_session") as session_mock: + session_mock().client("deadline").get_fleet.return_value = MOCK_FLEETS_LIST[0] + + runner = CliRunner() + result = runner.invoke( + deadline_cli.cli, ["fleet", "get", "--fleet-id", "fleetid", "--queue-id", "queueid"] + ) + + assert "Only one of the --fleet-id and --queue-id options may be provided." in result.output + assert result.exit_code != 0 + + def test_cli_fleet_get_no_fleet_id_provided(fresh_deadline_config): """ Confirm that the CLI interface fails when no fleet id is provided @@ -164,7 +262,9 @@ def test_cli_fleet_get_no_fleet_id_provided(fresh_deadline_config): runner = CliRunner() result = runner.invoke(deadline_cli.cli, ["fleet", "get"]) - assert "Missing option '--fleet-id'" in result.output + assert ( + "Missing '--fleet-id', '--queue-id', or default Queue ID configuration" in result.output + ) assert result.exit_code != 0 diff --git a/test/deadline_client/unit/cli/test_cli_handle_web_url.py b/test/unit/deadline_client/cli/test_cli_handle_web_url.py similarity index 100% rename from test/deadline_client/unit/cli/test_cli_handle_web_url.py rename to test/unit/deadline_client/cli/test_cli_handle_web_url.py diff --git a/test/deadline_client/unit/cli/test_cli_job.py b/test/unit/deadline_client/cli/test_cli_job.py similarity index 100% rename from test/deadline_client/unit/cli/test_cli_job.py rename to test/unit/deadline_client/cli/test_cli_job.py diff --git a/test/deadline_client/unit/cli/test_cli_loginout.py b/test/unit/deadline_client/cli/test_cli_loginout.py similarity index 100% rename from test/deadline_client/unit/cli/test_cli_loginout.py rename to test/unit/deadline_client/cli/test_cli_loginout.py diff --git a/test/deadline_client/unit/cli/test_cli_queue.py b/test/unit/deadline_client/cli/test_cli_queue.py similarity index 93% rename from test/deadline_client/unit/cli/test_cli_queue.py rename to test/unit/deadline_client/cli/test_cli_queue.py index 55fd76aa..7cb34898 100644 --- a/test/deadline_client/unit/cli/test_cli_queue.py +++ b/test/unit/deadline_client/cli/test_cli_queue.py @@ -12,20 +12,7 @@ from deadline.client import api, config from deadline.client.cli import deadline_cli -from ..shared_constants import 
MOCK_FARM_ID - -MOCK_QUEUES_LIST = [ - { - "queueId": "queue-0123456789abcdef0123456789abcdef", - "name": "Testing Queue", - "description": "", - }, - { - "queueId": "queue-0123456789abcdef0123456789abcdeg", - "name": "Another Queue", - "description": "With a description!", - }, -] +from ..shared_constants import MOCK_FARM_ID, MOCK_QUEUES_LIST def test_cli_queue_list(fresh_deadline_config): diff --git a/test/deadline_client/unit/config/__init__.py b/test/unit/deadline_client/config/__init__.py similarity index 100% rename from test/deadline_client/unit/config/__init__.py rename to test/unit/deadline_client/config/__init__.py diff --git a/test/deadline_client/unit/config/test_config_file.py b/test/unit/deadline_client/config/test_config_file.py similarity index 94% rename from test/deadline_client/unit/config/test_config_file.py rename to test/unit/deadline_client/config/test_config_file.py index 4c2fcbcf..7f1efa05 100644 --- a/test/deadline_client/unit/config/test_config_file.py +++ b/test/unit/deadline_client/config/test_config_file.py @@ -47,7 +47,7 @@ def test_config_settings_hierarchy(fresh_deadline_config): # First set some settings that apply to the defaults, changing the # hierarchy from queue inwards. config.set_setting("settings.deadline_endpoint_url", "nondefault-endpoint-url") - config.set_setting("defaults.storage_profile_id", "storage-profile-for-queue-default") + config.set_setting("settings.storage_profile_id", "storage-profile-for-farm-default") config.set_setting("defaults.queue_id", "queue-for-farm-default") config.set_setting("defaults.farm_id", "farm-for-profile-default") config.set_setting("defaults.aws_profile_name", "NonDefaultProfile") @@ -57,7 +57,7 @@ def test_config_settings_hierarchy(fresh_deadline_config): assert config.get_setting("settings.deadline_endpoint_url") == DEFAULT_DEADLINE_ENDPOINT_URL assert config.get_setting("defaults.farm_id") == "" assert config.get_setting("defaults.queue_id") == "" - assert config.get_setting("defaults.storage_profile_id") == "" + assert config.get_setting("settings.storage_profile_id") == "" # Switch back to the default profile, and check the next layer of the onion config.set_setting("defaults.aws_profile_name", "") @@ -66,17 +66,17 @@ def test_config_settings_hierarchy(fresh_deadline_config): # The queue id is still default assert config.get_setting("defaults.queue_id") == "" # The storage profile id is still default - assert config.get_setting("defaults.storage_profile_id") == "" + assert config.get_setting("settings.storage_profile_id") == "" # Switch back to the default farm config.set_setting("defaults.farm_id", "") assert config.get_setting("defaults.queue_id") == "queue-for-farm-default" - # Storage profile needs "profile - farm_id - queue_id" so it should still be empty - assert config.get_setting("defaults.storage_profile_id") == "" + # Storage profile needs "profile - farm_id" so it should be back to the original + assert config.get_setting("settings.storage_profile_id") == "storage-profile-for-farm-default" # Switch to default farm and default queue config.set_setting("defaults.queue_id", "") - assert config.get_setting("defaults.storage_profile_id") == "storage-profile-for-queue-default" + assert config.get_setting("settings.storage_profile_id") == "storage-profile-for-farm-default" def test_config_get_setting_nonexistant(fresh_deadline_config): diff --git a/test/deadline_client/unit/conftest.py b/test/unit/deadline_client/conftest.py similarity index 100% rename from test/deadline_client/unit/conftest.py rename 
to test/unit/deadline_client/conftest.py diff --git a/test/deadline_client/unit/job_bundle/__init__.py b/test/unit/deadline_client/job_bundle/__init__.py similarity index 100% rename from test/deadline_client/unit/job_bundle/__init__.py rename to test/unit/deadline_client/job_bundle/__init__.py diff --git a/test/deadline_client/unit/job_bundle/test_adaptors.py b/test/unit/deadline_client/job_bundle/test_adaptors.py similarity index 100% rename from test/deadline_client/unit/job_bundle/test_adaptors.py rename to test/unit/deadline_client/job_bundle/test_adaptors.py diff --git a/test/deadline_client/unit/job_bundle/test_job_bundle_loader.py b/test/unit/deadline_client/job_bundle/test_job_bundle_loader.py similarity index 100% rename from test/deadline_client/unit/job_bundle/test_job_bundle_loader.py rename to test/unit/deadline_client/job_bundle/test_job_bundle_loader.py diff --git a/test/deadline_client/unit/job_bundle/test_job_history_folders.py b/test/unit/deadline_client/job_bundle/test_job_history_folders.py similarity index 100% rename from test/deadline_client/unit/job_bundle/test_job_history_folders.py rename to test/unit/deadline_client/job_bundle/test_job_history_folders.py diff --git a/test/deadline_client/unit/job_bundle/test_job_parameters.py b/test/unit/deadline_client/job_bundle/test_job_parameters.py similarity index 100% rename from test/deadline_client/unit/job_bundle/test_job_parameters.py rename to test/unit/deadline_client/job_bundle/test_job_parameters.py diff --git a/test/deadline_client/unit/job_bundle/test_job_submission.py b/test/unit/deadline_client/job_bundle/test_job_submission.py similarity index 100% rename from test/deadline_client/unit/job_bundle/test_job_submission.py rename to test/unit/deadline_client/job_bundle/test_job_submission.py diff --git a/test/deadline_client/unit/job_bundle/test_job_template.py b/test/unit/deadline_client/job_bundle/test_job_template.py similarity index 100% rename from test/deadline_client/unit/job_bundle/test_job_template.py rename to test/unit/deadline_client/job_bundle/test_job_template.py diff --git a/test/deadline_client/unit/shared_constants.py b/test/unit/deadline_client/shared_constants.py similarity index 62% rename from test/deadline_client/unit/shared_constants.py rename to test/unit/deadline_client/shared_constants.py index 2327dd37..29732657 100644 --- a/test/deadline_client/unit/shared_constants.py +++ b/test/unit/deadline_client/shared_constants.py @@ -8,3 +8,15 @@ MOCK_STEP_ID = "step-0123456789abcdefabcdefabcdefabcd" MOCK_TASK_ID = "task-0123456789abcdefabcdefabcdefabcd" MOCK_PROFILE_NAME = "my-studio-profile" +MOCK_QUEUES_LIST = [ + { + "queueId": "queue-0123456789abcdef0123456789abcdef", + "name": "Testing Queue", + "description": "", + }, + { + "queueId": "queue-0123456789abcdef0123456789abcdeg", + "name": "Another Queue", + "description": "With a description!", + }, +] diff --git a/test/deadline_client/unit/testing_utilities.py b/test/unit/deadline_client/testing_utilities.py similarity index 100% rename from test/deadline_client/unit/testing_utilities.py rename to test/unit/deadline_client/testing_utilities.py diff --git a/test/deadline_client/unit/ui/__init__.py b/test/unit/deadline_client/ui/__init__.py similarity index 100% rename from test/deadline_client/unit/ui/__init__.py rename to test/unit/deadline_client/ui/__init__.py diff --git a/test/deadline_job_attachments/__init__.py b/test/unit/deadline_job_attachments/__init__.py similarity index 100% rename from test/deadline_job_attachments/__init__.py 
rename to test/unit/deadline_job_attachments/__init__.py diff --git a/test/deadline_job_attachments/integ/__init__.py b/test/unit/deadline_job_attachments/asset_manifests/__init__.py similarity index 100% rename from test/deadline_job_attachments/integ/__init__.py rename to test/unit/deadline_job_attachments/asset_manifests/__init__.py diff --git a/test/deadline_job_attachments/unit/asset_manifests/test_decode.py b/test/unit/deadline_job_attachments/asset_manifests/test_decode.py similarity index 100% rename from test/deadline_job_attachments/unit/asset_manifests/test_decode.py rename to test/unit/deadline_job_attachments/asset_manifests/test_decode.py diff --git a/test/deadline_job_attachments/unit/asset_manifests/test_manifest_model.py b/test/unit/deadline_job_attachments/asset_manifests/test_manifest_model.py similarity index 100% rename from test/deadline_job_attachments/unit/asset_manifests/test_manifest_model.py rename to test/unit/deadline_job_attachments/asset_manifests/test_manifest_model.py diff --git a/test/deadline_job_attachments/unit/__init__.py b/test/unit/deadline_job_attachments/asset_manifests/v2022_06_06/__init__.py similarity index 100% rename from test/deadline_job_attachments/unit/__init__.py rename to test/unit/deadline_job_attachments/asset_manifests/v2022_06_06/__init__.py diff --git a/test/deadline_job_attachments/unit/asset_manifests/v2022_06_06/test_asset_manifest.py b/test/unit/deadline_job_attachments/asset_manifests/v2022_06_06/test_asset_manifest.py similarity index 100% rename from test/deadline_job_attachments/unit/asset_manifests/v2022_06_06/test_asset_manifest.py rename to test/unit/deadline_job_attachments/asset_manifests/v2022_06_06/test_asset_manifest.py diff --git a/test/deadline_job_attachments/unit/asset_manifests/__init__.py b/test/unit/deadline_job_attachments/asset_manifests/v2023_03_03/__init__.py similarity index 100% rename from test/deadline_job_attachments/unit/asset_manifests/__init__.py rename to test/unit/deadline_job_attachments/asset_manifests/v2023_03_03/__init__.py diff --git a/test/deadline_job_attachments/unit/asset_manifests/v2023_03_03/test_asset_manifest.py b/test/unit/deadline_job_attachments/asset_manifests/v2023_03_03/test_asset_manifest.py similarity index 100% rename from test/deadline_job_attachments/unit/asset_manifests/v2023_03_03/test_asset_manifest.py rename to test/unit/deadline_job_attachments/asset_manifests/v2023_03_03/test_asset_manifest.py diff --git a/test/deadline_job_attachments/unit/asset_manifests/v2022_06_06/__init__.py b/test/unit/deadline_job_attachments/aws/__init__.py similarity index 100% rename from test/deadline_job_attachments/unit/asset_manifests/v2022_06_06/__init__.py rename to test/unit/deadline_job_attachments/aws/__init__.py diff --git a/test/deadline_job_attachments/unit/aws/test_aws_clients.py b/test/unit/deadline_job_attachments/aws/test_aws_clients.py similarity index 94% rename from test/deadline_job_attachments/unit/aws/test_aws_clients.py rename to test/unit/deadline_job_attachments/aws/test_aws_clients.py index 44c16e16..0a466919 100644 --- a/test/deadline_job_attachments/unit/aws/test_aws_clients.py +++ b/test/unit/deadline_job_attachments/aws/test_aws_clients.py @@ -7,7 +7,6 @@ get_sts_client, ) from deadline.job_attachments.aws.aws_config import ( - DEADLINE_ENDPOINT, S3_CONNECT_TIMEOUT_IN_SECS, S3_READ_TIMEOUT_IN_SECS, ) @@ -20,7 +19,6 @@ def test_get_deadline_client(boto_config): deadline_client = get_deadline_client() assert deadline_client.meta.service_model.service_name == 
"deadline" - assert deadline_client.meta.endpoint_url == DEADLINE_ENDPOINT def test_get_deadline_client_non_default_endpoint(boto_config): diff --git a/test/deadline_job_attachments/unit/aws/test_deadline.py b/test/unit/deadline_job_attachments/aws/test_deadline.py similarity index 100% rename from test/deadline_job_attachments/unit/aws/test_deadline.py rename to test/unit/deadline_job_attachments/aws/test_deadline.py diff --git a/test/deadline_job_attachments/unit/conftest.py b/test/unit/deadline_job_attachments/conftest.py similarity index 100% rename from test/deadline_job_attachments/unit/conftest.py rename to test/unit/deadline_job_attachments/conftest.py diff --git a/test/deadline_job_attachments/unit/data/boto_module/deadline/2020-08-21/service-2.json b/test/unit/deadline_job_attachments/data/boto_module/deadline/2020-08-21/service-2.json similarity index 100% rename from test/deadline_job_attachments/unit/data/boto_module/deadline/2020-08-21/service-2.json rename to test/unit/deadline_job_attachments/data/boto_module/deadline/2020-08-21/service-2.json diff --git a/test/deadline_job_attachments/unit/data/manifest_bados.json b/test/unit/deadline_job_attachments/data/manifest_bados.json similarity index 100% rename from test/deadline_job_attachments/unit/data/manifest_bados.json rename to test/unit/deadline_job_attachments/data/manifest_bados.json diff --git a/test/deadline_job_attachments/unit/data/manifest_v2022_06_06.json b/test/unit/deadline_job_attachments/data/manifest_v2022_06_06.json similarity index 100% rename from test/deadline_job_attachments/unit/data/manifest_v2022_06_06.json rename to test/unit/deadline_job_attachments/data/manifest_v2022_06_06.json diff --git a/test/deadline_job_attachments/unit/data/manifest_v2023_03_03.json b/test/unit/deadline_job_attachments/data/manifest_v2023_03_03.json similarity index 100% rename from test/deadline_job_attachments/unit/data/manifest_v2023_03_03.json rename to test/unit/deadline_job_attachments/data/manifest_v2023_03_03.json diff --git a/test/deadline_job_attachments/unit/test_asset_sync.py b/test/unit/deadline_job_attachments/test_asset_sync.py similarity index 94% rename from test/deadline_job_attachments/unit/test_asset_sync.py rename to test/unit/deadline_job_attachments/test_asset_sync.py index 7d9701df..ce49570c 100644 --- a/test/deadline_job_attachments/unit/test_asset_sync.py +++ b/test/unit/deadline_job_attachments/test_asset_sync.py @@ -4,11 +4,9 @@ import json import shutil -import sys from math import trunc from pathlib import Path from typing import Optional -from unittest import mock from unittest.mock import MagicMock, call, mock_open, patch import boto3 @@ -30,8 +28,6 @@ from deadline.job_attachments.asset_manifests.decode import decode_manifest -# TODO: Remove the skip once we support Windows for AssetSync -@pytest.mark.skipif(sys.platform == "win32", reason="Asset Sync doesn't currently support Windows") class TestAssetSync: @pytest.fixture(autouse=True) def before_test( @@ -83,14 +79,6 @@ def test_progress_logger_one_file(self) -> None: calls.append(call(1, True)) mock_progress_tracker_callback.assert_has_calls(calls) - # TODO: Remove this test once we support Windows for AssetSync - @mock.patch("sys.platform", "win32") - def test_init_fails_on_windows(self, farm_id: str) -> None: - """Asserts an error is raised when trying to create an AssetSync - instance on a Windows OS""" - with pytest.raises(NotImplementedError): - AssetSync(farm_id) - @pytest.mark.parametrize( ("file_size", "expected_output"), [ @@ 
         default_job.attachments = attachments_no_inputs
         session_dir = str(tmp_path)
         dest_dir = "assetroot-27bggh78dd2b568ab123"
-        local_root = f"{session_dir}/{dest_dir}"
+        local_root = str(Path(session_dir) / dest_dir)
 
         # WHEN
         with patch(
@@ -191,10 +179,6 @@ def test_sync_inputs_no_inputs_successful(
             )
             assert summary_statistics == expected_summary_statistics
 
-    # TODO: Mock the FS so we can test this on Windows
-    @pytest.mark.skipif(
-        sys.platform == "win32", reason="Doesn't run on Windows since we're not mocking FS"
-    )
     @pytest.mark.parametrize(
         ("job_fixture_name"),
         [
@@ -223,7 +207,7 @@ def test_sync_inputs_successful(
         default_queue.jobAttachmentSettings = s3_settings
         session_dir = str(tmp_path)
         dest_dir = "assetroot-27bggh78dd2b568ab123"
-        local_root = f"{session_dir}/{dest_dir}"
+        local_root = str(Path(session_dir) / dest_dir)
         assert job.attachments
 
         # WHEN
@@ -267,10 +251,6 @@ def test_sync_inputs_successful(
            }
        ]
 
-    # TODO: Mock the FS so we can test this on Windows
-    @pytest.mark.skipif(
-        sys.platform == "win32", reason="Doesn't run on Windows since we're not mocking FS"
-    )
     @pytest.mark.parametrize(
         ("s3_settings_fixture_name"),
         [
@@ -291,7 +271,7 @@ def test_sync_inputs_with_step_dependencies(
         default_queue.jobAttachmentSettings = s3_settings
         session_dir = str(tmp_path)
         dest_dir = "assetroot-27bggh78dd2b568ab123"
-        local_root = f"{session_dir}/{dest_dir}"
+        local_root = str(Path(session_dir) / dest_dir)
         assert default_job.attachments
 
         step_output_root = "/home/outputs_roots"
@@ -340,17 +320,13 @@ def test_sync_inputs_with_step_dependencies(
            },
        ]
 
-    # TODO: Mock the FS so we can test this on Windows
-    @pytest.mark.skipif(
-        sys.platform == "win32", reason="Doesn't run on Windows since we're not mocking FS"
-    )
     @pytest.mark.parametrize(
         ("s3_settings_fixture_name"),
         [
             ("default_job_attachment_s3_settings"),
         ],
     )
-    def test_sync_inputs_with_step_dependencies_same_root_vfs(
+    def test_sync_inputs_with_step_dependencies_same_root_vfs_on_posix(
         self,
         tmp_path: Path,
         default_queue: Queue,
@@ -367,7 +343,7 @@ def test_sync_inputs_with_step_dependencies_same_root_vfs(
         default_queue.jobAttachmentSettings = s3_settings
         session_dir = str(tmp_path)
         dest_dir = "assetroot-27bggh78dd2b568ab123"
-        local_root = f"{session_dir}/{dest_dir}"
+        local_root = str(Path(session_dir) / dest_dir)
         assert job.attachments
 
         test_manifest = decode_manifest(json.dumps(test_manifest_two))
@@ -392,6 +368,8 @@ def test_sync_inputs_with_step_dependencies_same_root_vfs(
         ) as merge_manifests_mock, patch(
             f"{deadline.__package__}.job_attachments.download.write_manifest_to_temp_file",
             return_value="tmp_manifest",
+        ), patch(
+            "sys.platform", "linux"
         ):
             mock_on_downloading_files = MagicMock(return_value=True)
 
diff --git a/test/deadline_job_attachments/unit/test_download.py b/test/unit/deadline_job_attachments/test_download.py
similarity index 100%
rename from test/deadline_job_attachments/unit/test_download.py
rename to test/unit/deadline_job_attachments/test_download.py
diff --git a/test/deadline_job_attachments/unit/test_fus3.py b/test/unit/deadline_job_attachments/test_fus3.py
similarity index 100%
rename from test/deadline_job_attachments/unit/test_fus3.py
rename to test/unit/deadline_job_attachments/test_fus3.py
diff --git a/test/deadline_job_attachments/unit/test_hash_cache.py b/test/unit/deadline_job_attachments/test_hash_cache.py
similarity index 100%
rename from test/deadline_job_attachments/unit/test_hash_cache.py
rename to test/unit/deadline_job_attachments/test_hash_cache.py
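Aside, not part of the patch: the test_asset_sync.py hunks above replace `f"{session_dir}/{dest_dir}"` with `str(Path(session_dir) / dest_dir)` so the expected `local_root` uses the OS-native separator, and the renamed `..._same_root_vfs_on_posix` test pins `sys.platform` to `"linux"` for its VFS-only code path. A minimal sketch of the path-joining point follows, using only pathlib; the `build_local_root` helper name is just for illustration.

```python
from pathlib import Path


def build_local_root(session_dir: str, dest_dir: str) -> str:
    # Path joining yields the native separator, so the expected value matches
    # what the code under test produces on both POSIX and Windows.
    return str(Path(session_dir) / dest_dir)


if __name__ == "__main__":
    # Prints "/tmp/session/assetroot-27bggh78dd2b568ab123" on POSIX and a
    # backslash-separated equivalent on Windows.
    print(build_local_root("/tmp/session", "assetroot-27bggh78dd2b568ab123"))
```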
diff --git a/test/deadline_job_attachments/unit/test_progress_tracker.py b/test/unit/deadline_job_attachments/test_progress_tracker.py
similarity index 100%
rename from test/deadline_job_attachments/unit/test_progress_tracker.py
rename to test/unit/deadline_job_attachments/test_progress_tracker.py
diff --git a/test/deadline_job_attachments/unit/test_upload.py b/test/unit/deadline_job_attachments/test_upload.py
similarity index 99%
rename from test/deadline_job_attachments/unit/test_upload.py
rename to test/unit/deadline_job_attachments/test_upload.py
index 8407a493..b266595a 100644
--- a/test/deadline_job_attachments/unit/test_upload.py
+++ b/test/unit/deadline_job_attachments/test_upload.py
@@ -385,6 +385,7 @@ def test_asset_management_windows_multi_root(
                },
                {
                    "rootPath": f"{output_d}",
+                   "osType": OperatingSystemFamily.get_os_family("windows"),
                    "outputRelativeDirectories": [
                        ".",
                    ],
diff --git a/test/deadline_job_attachments/unit/test_utils.py b/test/unit/deadline_job_attachments/test_utils.py
similarity index 100%
rename from test/deadline_job_attachments/unit/test_utils.py
rename to test/unit/deadline_job_attachments/test_utils.py
diff --git a/test/test_copyright_headers.py b/test/unit/test_copyright_headers.py
similarity index 98%
rename from test/test_copyright_headers.py
rename to test/unit/test_copyright_headers.py
index f5f04e1e..5ab09214 100644
--- a/test/test_copyright_headers.py
+++ b/test/unit/test_copyright_headers.py
@@ -11,7 +11,7 @@
 
 
 def _check_file(filename: Path) -> None:
-    with open(filename) as infile:
+    with open(filename, encoding="utf8") as infile:
         lines_read = 0
         for line in infile:
             if _copyright_header_re.search(line):