From 33f75b67bf8a926da492d7c1bdccb5cc885c3aec Mon Sep 17 00:00:00 2001
From: Mark Wiebe <399551+mwiebe@users.noreply.github.com>
Date: Mon, 18 Sep 2023 07:26:40 -0700
Subject: [PATCH] feat!: Update the Job Submit GUI to display Queue Parameters (#34)

BREAKING-CHANGE: This changes the interface that consumers of the deadline cloud client UI library use.

- Added an implementation of get_queue_parameter_definitions to deadline.client.api.
- Set the default group label for queue parameters to the queue environment name. Created a function in deadline.client.job_bundle.parameters to get the control type, so that this default could be implemented by adding the 'userInterface' property. Modified the job template parameters widget to use this function instead of duplicating the code.
- Renamed job_template_parameters_widget to openjd_parameters_widget, a name applicable to both job and queue parameters. Added a parameter_changed signal to the widget that is emitted whenever a parameter value changes, carrying a deep copy of the parameter definition with the new value in its "value" key.
- Updated the shared_job_settings_tab to move the job's properties out of "Deadline Settings" into the top group. Renamed that top group from "Description" to "Job Settings", and renamed "Deadline Settings" to "Deadline Cloud Settings".
- Added get_parameter_values and set_parameter_value functions to the shared job settings tab and the job settings tab, so that their parameters can be queried and set through general interface functions.
- Connected the parameter_changed signals from the shared job settings tab to set the job settings, and vice versa, so that when a queue parameter and a job parameter share a name, the values in the two tabs stay in sync.
- Loaded the queue parameters in a background thread so that the submission dialog opens sooner and shows a loading state.
- Wired up the deadline settings refresh to reload the queue parameters when the default queue ID changes in the settings.
- Moved the 'deadline:*' parameters from the settings dataclasses into the queue parameters dictionaries. This makes their parameter values consistent with the job bundle's end-to-end handling of them.
- Renamed FlatAssetReferences to AssetReferences, and added the ability to reference paths without making them input or output data flow. This affects job attachments: any PATH parameter with NONE dataFlow becomes an asset reference that should have path mapping applied, and therefore needs to be in one of the AssetRootGroup objects that the job attachment library deduces.
- Updated the tests for the referenced-paths ability. The job bundle submission asset refs tests pick up this case, because they include all the variations of data flow.
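For consumers of the library, the most visible breaking change is the on_create_job_bundle_callback signature, which now receives the job bundle directory as its second argument along with the list of queue parameter values. A minimal sketch of an updated callback follows; the settings dataclass and the parameterValues file layout are assumptions based on the existing job bundle conventions, not part of this change:

    from __future__ import annotations

    import os
    from typing import Any

    from deadline.client.job_bundle import deadline_yaml_dump
    from deadline.client.job_bundle.submission import AssetReferences  # renamed from FlatAssetReferences

    def on_create_job_bundle_callback(
        widget,  # the SubmitJobToDeadlineDialog instance
        job_bundle_dir: str,
        settings,  # the submitter-specific settings dataclass (illustrative)
        queue_parameters: list[dict[str, Any]],  # new: queue parameter definitions with values
        asset_references: AssetReferences,
    ) -> None:
        # Queue parameters arrive as full definitions carrying a "value" key;
        # keep just the name/value pairs when writing parameter_values.yaml.
        parameter_values = [
            {"name": p["name"], "value": p["value"]} for p in queue_parameters
        ]
        with open(
            os.path.join(job_bundle_dir, "parameter_values.yaml"), "w", encoding="utf8"
        ) as f:
            # Assumes the bundle's existing top-level "parameterValues" key.
            deadline_yaml_dump({"parameterValues": parameter_values}, f)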
Signed-off-by: Mark Wiebe <399551+mwiebe@users.noreply.github.com> --- examples/submit_job.py | 2 +- examples/upload_cancel_test.py | 1 + examples/upload_scale_test.py | 2 +- src/deadline/client/api/__init__.py | 115 +----- src/deadline/client/api/_list_apis.py | 111 ++++++ src/deadline/client/api/_queue_parameters.py | 71 ++++ src/deadline/client/api/_submit_job_bundle.py | 38 +- .../client/cli/_groups/bundle_group.py | 39 +- .../client/cli/_groups/fleet_group.py | 2 +- .../client/cli/_groups/queue_group.py | 27 +- src/deadline/client/job_bundle/parameters.py | 173 +++++++-- src/deadline/client/job_bundle/submission.py | 90 +---- src/deadline/client/ui/cli_job_submitter.py | 28 +- .../client/ui/dataclasses/__init__.py | 17 +- .../ui/dialogs/submit_job_progress_dialog.py | 39 +- .../dialogs/submit_job_to_deadline_dialog.py | 105 +++-- .../client/ui/job_bundle_submitter.py | 66 ++-- .../client/ui/widgets/job_attachments_tab.py | 8 +- .../ui/widgets/job_bundle_settings_tab.py | 49 ++- ..._widget.py => openjd_parameters_widget.py} | 290 +++++++++----- .../client/ui/widgets/path_widgets.py | 2 +- .../ui/widgets/shared_job_settings_tab.py | 363 ++++++++++++------ src/deadline/job_attachments/models.py | 3 +- src/deadline/job_attachments/upload.py | 34 +- .../test_job_attachments.py | 6 + .../api/test_job_bundle_submission.py | 27 +- .../test_job_bundle_submission_asset_refs.py | 16 +- .../deadline_client/cli/test_cli_bundle.py | 111 +++++- .../job_bundle/test_job_parameters.py | 330 +++++++++++++++- .../job_bundle/test_job_submission.py | 49 +-- .../deadline_job_attachments/test_upload.py | 26 +- 31 files changed, 1603 insertions(+), 637 deletions(-) create mode 100644 src/deadline/client/api/_list_apis.py create mode 100644 src/deadline/client/api/_queue_parameters.py rename src/deadline/client/ui/widgets/{job_template_parameters_widget.py => openjd_parameters_widget.py} (74%) diff --git a/examples/submit_job.py b/examples/submit_job.py index d7952daf..9758d517 100644 --- a/examples/submit_job.py +++ b/examples/submit_job.py @@ -40,7 +40,7 @@ def process_job_attachments(farm_id, queue_id, inputs, outputDir, deadline_clien queue_id=queue_id, job_attachment_settings=JobAttachmentS3Settings(**queue["jobAttachmentSettings"]), ) - (_, manifests) = asset_manager.hash_assets_and_create_manifest(inputs, [outputDir]) + (_, manifests) = asset_manager.hash_assets_and_create_manifest(inputs, [outputDir], []) (_, attachments) = asset_manager.upload_assets(manifests) attachments = attachments.to_dict() total = time.perf_counter() - start diff --git a/examples/upload_cancel_test.py b/examples/upload_cancel_test.py index 934c18eb..68a5f014 100644 --- a/examples/upload_cancel_test.py +++ b/examples/upload_cancel_test.py @@ -87,6 +87,7 @@ def run(): (summary_statistics_hashing, manifests) = asset_manager.hash_assets_and_create_manifest( files, [root_path / "outputs"], + [], on_preparing_to_submit=mock_on_preparing_to_submit, ) print(f"Hashing Summary Statistics:\n{summary_statistics_hashing}") diff --git a/examples/upload_scale_test.py b/examples/upload_scale_test.py index d44cafe1..2dd463e5 100644 --- a/examples/upload_scale_test.py +++ b/examples/upload_scale_test.py @@ -106,7 +106,7 @@ start = time.perf_counter() (summary_statistics_hashing, manifests) = asset_manager.hash_assets_and_create_manifest( - files, [root_path / "outputs"] + files, [root_path / "outputs"], [] ) print(f"Summary Statistics for file hashing:\n{summary_statistics_hashing}") diff --git a/src/deadline/client/api/__init__.py 
b/src/deadline/client/api/__init__.py index e04b452a..bf5d38a3 100644 --- a/src/deadline/client/api/__init__.py +++ b/src/deadline/client/api/__init__.py @@ -19,6 +19,7 @@ "list_fleets", "list_storage_profiles_for_queue", "get_queue_boto3_session", + "get_queue_parameter_definitions", "get_telemetry_client", ] @@ -35,9 +36,15 @@ get_boto3_client, get_boto3_session, get_credentials_type, - get_user_and_identity_store_id, - get_studio_id, ) +from ._list_apis import ( + list_farms, + list_queues, + list_jobs, + list_fleets, + list_storage_profiles_for_queue, +) +from ._queue_parameters import get_queue_parameter_definitions from ._submit_job_bundle import create_job_from_job_bundle, wait_for_create_job_to_complete from ._telemetry import get_telemetry_client, TelemetryClient @@ -65,107 +72,3 @@ def check_deadline_api_available(config: Optional[ConfigParser] = None) -> bool: except Exception: logger.exception("Error invoking ListFarms") return False - - -def _call_paginated_deadline_list_api(list_api, list_property_name, **kwargs): - """ - Calls a deadline:List* API repeatedly to concatenate all pages. - - Example: - deadline = get_boto3_client("deadline") - return _call_paginated_deadline_list_api(deadline.list_farms, "farms", **kwargs) - - Args: - list_api (callable): The List* API function to call, from the boto3 client. - list_property_name (str): The name of the property in the response that contains - the list. - """ - response = list_api(**kwargs) - if kwargs.get("dryRun", False): - return response - else: - result = {list_property_name: response[list_property_name]} - - while "nextToken" in response: - response = list_api(nextToken=response["nextToken"], **kwargs) - result[list_property_name].extend(response[list_property_name]) - - return result - - -def list_farms(config=None, **kwargs): - """ - Calls the deadline:ListFarms API call, applying the filter for user membership - depending on the configuration. If the response is paginated, it repeated - calls the API to get all the farms. - """ - if "principalId" not in kwargs: - user_id, _ = get_user_and_identity_store_id(config=config) - if user_id: - kwargs["principalId"] = user_id - - if "studioId" not in kwargs: - studio_id = get_studio_id(config=config) - if studio_id: - kwargs["studioId"] = studio_id - - deadline = get_boto3_client("deadline", config=config) - return _call_paginated_deadline_list_api(deadline.list_farms, "farms", **kwargs) - - -def list_queues(config=None, **kwargs): - """ - Calls the deadline:ListQueues API call, applying the filter for user membership - depending on the configuration. If the response is paginated, it repeated - calls the API to get all the queues. - """ - if "principalId" not in kwargs: - user_id, _ = get_user_and_identity_store_id(config=config) - if user_id: - kwargs["principalId"] = user_id - - deadline = get_boto3_client("deadline", config=config) - return _call_paginated_deadline_list_api(deadline.list_queues, "queues", **kwargs) - - -def list_jobs(config=None, **kwargs): - """ - Calls the deadline:ListJobs API call, applying the filter for user membership - depending on the configuration. If the response is paginated, it repeated - calls the API to get all the jobs. 
- """ - if "principalId" not in kwargs: - user_id, _ = get_user_and_identity_store_id(config=config) - if user_id: - kwargs["principalId"] = user_id - - deadline = get_boto3_client("deadline", config=config) - return _call_paginated_deadline_list_api(deadline.list_jobs, "jobs", **kwargs) - - -def list_fleets(config=None, **kwargs): - """ - Calls the deadline:ListFleets API call, applying the filter for user membership - depending on the configuration. If the response is paginated, it repeated - calls the API to get all the fleets. - """ - if "principalId" not in kwargs: - user_id, _ = get_user_and_identity_store_id(config=config) - if user_id: - kwargs["principalId"] = user_id - - deadline = get_boto3_client("deadline", config=config) - return _call_paginated_deadline_list_api(deadline.list_fleets, "fleets", **kwargs) - - -def list_storage_profiles_for_queue(config=None, **kwargs): - """ - Calls the deadline:ListStorageProfilesForQueue API call, applying the filter for user membership - depending on the configuration. If the response is paginated, it repeated - calls the API to get all the storage profiles. - """ - deadline = get_boto3_client("deadline", config=config) - - return _call_paginated_deadline_list_api( - deadline.list_storage_profiles_for_queue, "storageProfiles", **kwargs - ) diff --git a/src/deadline/client/api/_list_apis.py b/src/deadline/client/api/_list_apis.py new file mode 100644 index 00000000..eeb16d71 --- /dev/null +++ b/src/deadline/client/api/_list_apis.py @@ -0,0 +1,111 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +from ._session import ( + get_boto3_client, + get_user_and_identity_store_id, + get_studio_id, +) + + +def _call_paginated_deadline_list_api(list_api, list_property_name, **kwargs): + """ + Calls a deadline:List* API repeatedly to concatenate all pages. + + Example: + deadline = get_boto3_client("deadline") + return _call_paginated_deadline_list_api(deadline.list_farms, "farms", **kwargs) + + Args: + list_api (callable): The List* API function to call, from the boto3 client. + list_property_name (str): The name of the property in the response that contains + the list. + """ + response = list_api(**kwargs) + if kwargs.get("dryRun", False): + return response + else: + result = {list_property_name: response[list_property_name]} + + while "nextToken" in response: + response = list_api(nextToken=response["nextToken"], **kwargs) + result[list_property_name].extend(response[list_property_name]) + + return result + + +def list_farms(config=None, **kwargs): + """ + Calls the deadline:ListFarms API call, applying the filter for user membership + depending on the configuration. If the response is paginated, it repeated + calls the API to get all the farms. + """ + if "principalId" not in kwargs: + user_id, _ = get_user_and_identity_store_id(config=config) + if user_id: + kwargs["principalId"] = user_id + + if "studioId" not in kwargs: + studio_id = get_studio_id(config=config) + if studio_id: + kwargs["studioId"] = studio_id + + deadline = get_boto3_client("deadline", config=config) + return _call_paginated_deadline_list_api(deadline.list_farms, "farms", **kwargs) + + +def list_queues(config=None, **kwargs): + """ + Calls the deadline:ListQueues API call, applying the filter for user membership + depending on the configuration. If the response is paginated, it repeated + calls the API to get all the queues. 
+ """ + if "principalId" not in kwargs: + user_id, _ = get_user_and_identity_store_id(config=config) + if user_id: + kwargs["principalId"] = user_id + + deadline = get_boto3_client("deadline", config=config) + return _call_paginated_deadline_list_api(deadline.list_queues, "queues", **kwargs) + + +def list_jobs(config=None, **kwargs): + """ + Calls the deadline:ListJobs API call, applying the filter for user membership + depending on the configuration. If the response is paginated, it repeated + calls the API to get all the jobs. + """ + if "principalId" not in kwargs: + user_id, _ = get_user_and_identity_store_id(config=config) + if user_id: + kwargs["principalId"] = user_id + + deadline = get_boto3_client("deadline", config=config) + return _call_paginated_deadline_list_api(deadline.list_jobs, "jobs", **kwargs) + + +def list_fleets(config=None, **kwargs): + """ + Calls the deadline:ListFleets API call, applying the filter for user membership + depending on the configuration. If the response is paginated, it repeated + calls the API to get all the fleets. + """ + if "principalId" not in kwargs: + user_id, _ = get_user_and_identity_store_id(config=config) + if user_id: + kwargs["principalId"] = user_id + + deadline = get_boto3_client("deadline", config=config) + return _call_paginated_deadline_list_api(deadline.list_fleets, "fleets", **kwargs) + + +def list_storage_profiles_for_queue(config=None, **kwargs): + """ + Calls the deadline:ListStorageProfilesForQueue API call, applying the filter for user membership + depending on the configuration. If the response is paginated, it repeated + calls the API to get all the storage profiles. + """ + deadline = get_boto3_client("deadline", config=config) + + return _call_paginated_deadline_list_api( + deadline.list_storage_profiles_for_queue, "storageProfiles", **kwargs + ) diff --git a/src/deadline/client/api/_queue_parameters.py b/src/deadline/client/api/_queue_parameters.py new file mode 100644 index 00000000..16e9ae8f --- /dev/null +++ b/src/deadline/client/api/_queue_parameters.py @@ -0,0 +1,71 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +from __future__ import annotations + +__all__ = ["get_queue_parameter_definitions"] + +import yaml +from typing import Any + +from ._list_apis import _call_paginated_deadline_list_api +from ._session import get_boto3_client +from ..exceptions import DeadlineOperationError +from ..job_bundle.parameters import ( + get_ui_control_for_parameter_definition, + parameter_definition_difference, +) + + +def get_queue_parameter_definitions( + *, farmId: str, queueId: str, config=None +) -> list[dict[str, Any]]: + """ + This gets all the queue parameters definitions from the specified Queue. It does so + by getting all the full templates for queue environments, and then combining + them equivalently to the Deadline Cloud service logic. 
+ """ + deadline = get_boto3_client("deadline", config=config) + response = _call_paginated_deadline_list_api( + deadline.list_queue_environments, + "environments", + farmId=farmId, + queueId=queueId, + ) + queue_environments = sorted( + ( + deadline.get_queue_environment( + farmId=farmId, + queueId=queueId, + queueEnvironmentId=queue_env["queueEnvironmentId"], + ) + for queue_env in response["environments"] + ), + key=lambda queue_env: queue_env["priority"], + ) + queue_environment_templates = [ + yaml.safe_load(queue_env["template"]) for queue_env in queue_environments + ] + + queue_parameters_definitions: dict[str, dict[str, Any]] = {} + for template in queue_environment_templates: + for parameter in template["parameters"]: # TODO: change to parameterDefinitions + # If there is no group label, set it to the name of the Queue Environment + if not parameter.get("userInterface", {}).get("groupLabel"): + if "userInterface" not in parameter: + parameter["userInterface"] = { + "control": get_ui_control_for_parameter_definition(parameter) + } + parameter["userInterface"][ + "groupLabel" + ] = f"Queue Environment: {template['environment']['name']}" + existing_parameter = queue_parameters_definitions.get(parameter["name"]) + if existing_parameter: + differences = parameter_definition_difference(existing_parameter, parameter) + if differences: + raise DeadlineOperationError( + f"Job template parameter {parameter['name']} is duplicated across queue environments with mismatched fields:\n" + + " ".join(differences) + ) + else: + queue_parameters_definitions[parameter["name"]] = parameter + + return list(queue_parameters_definitions.values()) diff --git a/src/deadline/client/api/_submit_job_bundle.py b/src/deadline/client/api/_submit_job_bundle.py index 960ac6dd..68502b20 100644 --- a/src/deadline/client/api/_submit_job_bundle.py +++ b/src/deadline/client/api/_submit_job_bundle.py @@ -3,13 +3,14 @@ """ Provides the function to submit a job bundle to Amazon Deadline Cloud. 
""" +from __future__ import annotations import json import logging import time import os from configparser import ConfigParser -from typing import Any, Callable, Dict, List, Optional, Tuple, Set +from typing import Any, Callable, Dict, List, Optional, Tuple from deadline.client import api from deadline.client.exceptions import DeadlineOperationError, CreateJobWaiterCanceled @@ -17,9 +18,8 @@ from deadline.client.job_bundle.loader import read_yaml_or_json, read_yaml_or_json_object from deadline.client.job_bundle.parameters import apply_job_parameters, read_job_bundle_parameters from deadline.client.job_bundle.submission import ( - FlatAssetReferences, + AssetReferences, split_parameter_args, - upload_job_attachments, ) from deadline.job_attachments.models import ( AssetRootManifest, @@ -33,7 +33,8 @@ def create_job_from_job_bundle( job_bundle_dir: str, - job_parameters: List[Dict[str, Any]] = [], + job_parameters: list[dict[str, Any]] = [], + queue_parameter_definitions: list[dict[str, Any]] = [], config: Optional[ConfigParser] = None, hashing_progress_callback: Optional[Callable] = None, upload_progress_callback: Optional[Callable] = None, @@ -109,9 +110,15 @@ def create_job_from_job_bundle( asset_references_obj = read_yaml_or_json_object( job_bundle_dir, "asset_references", required=False ) - asset_references = FlatAssetReferences.from_dict(asset_references_obj) - - apply_job_parameters(job_parameters, job_bundle_dir, job_bundle_parameters, asset_references) + asset_references = AssetReferences.from_dict(asset_references_obj) + + apply_job_parameters( + job_parameters, + job_bundle_dir, + job_bundle_parameters, + queue_parameter_definitions, + asset_references, + ) app_parameters_formatted, job_parameters_formatted = split_parameter_args( job_bundle_parameters, job_bundle_dir ) @@ -147,8 +154,7 @@ def create_job_from_job_bundle( asset_manifests = _hash_attachments( asset_manager, - asset_references.input_filenames, - asset_references.output_directories, + asset_references, storage_profile_id, hashing_progress_callback, ) @@ -243,8 +249,7 @@ def wait_for_create_job_to_complete( def _hash_attachments( asset_manager: S3AssetManager, - input_paths: Set[str], - output_paths: Set[str], + asset_references: AssetReferences, storage_profile_id: Optional[str] = None, hashing_progress_callback: Optional[Callable] = None, config: Optional[ConfigParser] = None, @@ -261,8 +266,9 @@ def _default_update_hash_progress(hashing_metadata: Dict[str, str]) -> bool: hashing_progress_callback = _default_update_hash_progress hashing_summary, manifests = asset_manager.hash_assets_and_create_manifest( - input_paths=sorted(input_paths), - output_paths=sorted(output_paths), + input_paths=sorted(asset_references.input_filenames), + output_paths=sorted(asset_references.output_directories), + referenced_paths=sorted(asset_references.referenced_paths), storage_profile_id=storage_profile_id, hash_cache_dir=os.path.expanduser(os.path.join("~", ".deadline", "cache")), on_preparing_to_submit=hashing_progress_callback, @@ -289,9 +295,9 @@ def _default_update_upload_progress(upload_metadata: Dict[str, str]) -> bool: if not upload_progress_callback: upload_progress_callback = _default_update_upload_progress - upload_summary, attachment_settings = upload_job_attachments( - asset_manager, manifests, upload_progress_callback + upload_summary, attachment_settings = asset_manager.upload_assets( + manifests, upload_progress_callback ) api.get_telemetry_client(config=config).record_upload_summary(upload_summary) - return 
attachment_settings + return attachment_settings.to_dict() diff --git a/src/deadline/client/cli/_groups/bundle_group.py b/src/deadline/client/cli/_groups/bundle_group.py index 02c09d78..2cc9c6cf 100644 --- a/src/deadline/client/cli/_groups/bundle_group.py +++ b/src/deadline/client/cli/_groups/bundle_group.py @@ -3,6 +3,7 @@ """ All the `deadline bundle` commands. """ +from __future__ import annotations import json import logging @@ -11,7 +12,7 @@ import signal import textwrap from configparser import ConfigParser -from typing import Any, Dict, List, Optional, Set, Tuple +from typing import Any, Dict, List, Optional, Tuple import click from botocore.exceptions import ClientError # type: ignore[import] @@ -23,9 +24,8 @@ from deadline.client.job_bundle.loader import read_yaml_or_json, read_yaml_or_json_object from deadline.client.job_bundle.parameters import apply_job_parameters, read_job_bundle_parameters from deadline.client.job_bundle.submission import ( - FlatAssetReferences, + AssetReferences, split_parameter_args, - upload_job_attachments, ) from deadline.job_attachments.exceptions import AssetSyncError, AssetSyncCancelledError from deadline.job_attachments.models import ( @@ -125,6 +125,10 @@ def bundle_submit(job_bundle_dir, asset_loading_method, parameter, yes, **args): queue = deadline.get_queue(farmId=farm_id, queueId=queue_id) click.echo(f"Submitting to Queue: {queue['displayName']}") + queue_parameter_definitions = api.get_queue_parameter_definitions( + farmId=farm_id, queueId=queue_id + ) + # Read in the job template file_contents, file_type = read_yaml_or_json(job_bundle_dir, "template", required=True) @@ -145,9 +149,15 @@ def bundle_submit(job_bundle_dir, asset_loading_method, parameter, yes, **args): asset_references_obj = read_yaml_or_json_object( job_bundle_dir, "asset_references", required=False ) - asset_references = FlatAssetReferences.from_dict(asset_references_obj) - - apply_job_parameters(parameter, job_bundle_dir, job_bundle_parameters, asset_references) + asset_references = AssetReferences.from_dict(asset_references_obj) + + apply_job_parameters( + parameter, + job_bundle_dir, + job_bundle_parameters, + queue_parameter_definitions, + asset_references, + ) app_parameters_formatted, job_parameters_formatted = split_parameter_args( job_bundle_parameters, job_bundle_dir ) @@ -179,8 +189,7 @@ def bundle_submit(job_bundle_dir, asset_loading_method, parameter, yes, **args): hash_summary, asset_manifests = _hash_attachments( asset_manager, - asset_references.input_filenames, - asset_references.output_directories, + asset_references, storage_profile_id=storage_profile_id, config=config, ) @@ -299,8 +308,7 @@ def bundle_gui_submit(job_bundle_dir, **args): def _hash_attachments( asset_manager: S3AssetManager, - input_paths: Set[str], - output_paths: Set[str], + asset_references: AssetReferences, storage_profile_id: Optional[str] = None, config: Optional[ConfigParser] = None, ) -> Tuple[SummaryStatistics, List[AssetRootManifest]]: @@ -318,8 +326,9 @@ def _update_hash_progress(hashing_metadata: ProgressReportMetadata) -> bool: return continue_submission hashing_summary, manifests = asset_manager.hash_assets_and_create_manifest( - input_paths=sorted(input_paths), - output_paths=sorted(output_paths), + input_paths=sorted(asset_references.input_filenames), + output_paths=sorted(asset_references.output_directories), + referenced_paths=sorted(asset_references.referenced_paths), storage_profile_id=storage_profile_id, hash_cache_dir=os.path.expanduser(os.path.join("~", ".deadline", 
"cache")), on_preparing_to_submit=_update_hash_progress, @@ -355,12 +364,12 @@ def _update_upload_progress(upload_metadata: ProgressReportMetadata) -> bool: upload_progress.update(new_progress) return continue_submission - upload_summary, attachment_settings = upload_job_attachments( - asset_manager, manifests, _update_upload_progress + upload_summary, attachment_settings = asset_manager.upload_assets( + manifests, _update_upload_progress ) api.get_telemetry_client(config=config).record_upload_summary(upload_summary) click.echo("Upload Summary:") click.echo(textwrap.indent(str(upload_summary), " ")) - return attachment_settings + return attachment_settings.to_dict() diff --git a/src/deadline/client/cli/_groups/fleet_group.py b/src/deadline/client/cli/_groups/fleet_group.py index 75011713..de2f2097 100644 --- a/src/deadline/client/cli/_groups/fleet_group.py +++ b/src/deadline/client/cli/_groups/fleet_group.py @@ -85,7 +85,7 @@ def fleet_get(fleet_id, queue_id, **args): response = deadline.get_queue(farmId=farm_id, queueId=queue_id) queue_name = response["displayName"] - response = api._call_paginated_deadline_list_api( + response = api._list_apis._call_paginated_deadline_list_api( deadline.list_queue_fleet_associations, "queueFleetAssociations", farmId=farm_id, diff --git a/src/deadline/client/cli/_groups/queue_group.py b/src/deadline/client/cli/_groups/queue_group.py index 61b7b85a..b361d503 100644 --- a/src/deadline/client/cli/_groups/queue_group.py +++ b/src/deadline/client/cli/_groups/queue_group.py @@ -17,7 +17,7 @@ @_handle_error def cli_queue(): """ - Commands to work with Amazon Deadline Cloud Queue resources. + Commands for Amazon Deadline Cloud Queues. """ @@ -48,6 +48,31 @@ def queue_list(**args): click.echo(_cli_object_repr(structured_queue_list)) +@cli_queue.command(name="paramdefs") +@click.option("--profile", help="The AWS profile to use.") +@click.option("--farm-id", help="The Amazon Deadline Cloud Farm to use.") +@click.option("--queue-id", help="The Amazon Deadline Cloud Queue to use.") +@_handle_error +def queue_paramdefs(**args): + """ + Lists a Queue's Parameters Definitions. 
+ """ + # Get a temporary config object with the standard options handled + config = _apply_cli_options_to_config(required_options={"farm_id", "queue_id"}, **args) + + farm_id = config_file.get_setting("defaults.farm_id", config=config) + queue_id = config_file.get_setting("defaults.queue_id", config=config) + + try: + response = api.get_queue_parameter_definitions(farmId=farm_id, queueId=queue_id) + except ClientError as exc: + raise DeadlineOperationError( + f"Failed to get Queue Parameter Definitions from Deadline:\n{exc}" + ) from exc + + click.echo(_cli_object_repr(response)) + + @cli_queue.command(name="get") @click.option("--profile", help="The AWS profile to use.") @click.option("--farm-id", help="The Amazon Deadline Cloud Farm to use.") diff --git a/src/deadline/client/job_bundle/parameters.py b/src/deadline/client/job_bundle/parameters.py index 9af9c0d7..33e2bf66 100644 --- a/src/deadline/client/job_bundle/parameters.py +++ b/src/deadline/client/job_bundle/parameters.py @@ -5,6 +5,8 @@ __all__ = [ "apply_job_parameters", "read_job_bundle_parameters", + "get_ui_control_for_parameter_definition", + "parameter_definition_difference", ] import os @@ -12,14 +14,15 @@ from ..exceptions import DeadlineOperationError from .loader import read_yaml_or_json_object -from ..job_bundle.submission import FlatAssetReferences +from ..job_bundle.submission import AssetReferences def apply_job_parameters( job_parameters: list[dict[str, Any]], job_bundle_dir: str, job_bundle_parameters: list[dict[str, Any]], - asset_references: FlatAssetReferences, + queue_parameter_definitions: list[dict[str, Any]], + asset_references: AssetReferences, ) -> None: """ Modifies the provided job_bundle_parameters and asset_references to incorporate @@ -39,11 +42,21 @@ def apply_job_parameters( param_dict = {parameter["name"]: parameter["value"] for parameter in job_parameters} modified_job_parameters = param_dict.copy() + queue_param_dict = {parameter["name"]: parameter for parameter in queue_parameter_definitions} + for parameter in job_bundle_parameters: parameter_name = parameter["name"] # Skip application-specific parameters like "deadline:priority" if ":" in parameter_name: continue + if "type" not in parameter: + if parameter_name in queue_param_dict: + # Use the parameter definition from the queue if the job didn't supply one + parameter.update(queue_param_dict[parameter_name]) + else: + raise DeadlineOperationError( + f"Job Template for job bundle {job_bundle_dir}:\nJob Template parameter {parameter_name} is missing its type." + ) parameter_type = parameter["type"] # Apply the job_parameters value if available @@ -61,7 +74,7 @@ def apply_job_parameters( parameter_value = parameter.get("value", parameter.get("default")) if parameter_value is None: raise DeadlineOperationError( - f"No parameter value provided for Job Template parameter {parameter_name}, and it has no default value." + f"Job Template for job bundle {job_bundle_dir}:\nNo parameter value provided for Job Template parameter {parameter_name}, and it has no default value." ) # If it's a PATH parameter with dataFlow, add it to asset_references @@ -69,11 +82,15 @@ def apply_job_parameters( data_flow = parameter.get("dataFlow", "NONE") if data_flow not in ("NONE", "IN", "OUT", "INOUT"): raise DeadlineOperationError( - f"Job Template parameter {parameter_name} had an incorrect " + f"Job Template for job bundle {job_bundle_dir}:\nJob Template parameter {parameter_name} had an incorrect " + f"value {data_flow} for 'dataFlow'. 
Valid values are " + "['NONE', 'IN', 'OUT', 'INOUT']" ) - if data_flow != "NONE": + if data_flow == "NONE": + # This path is referenced, but its contents are not necessarily + # input or output. + asset_references.referenced_paths.add(parameter_value) + else: object_type = parameter.get("objectType") if "IN" in data_flow: @@ -116,31 +133,31 @@ def read_job_bundle_parameters(bundle_dir: str) -> list[dict[str, Any]]: bundle_dir=bundle_dir, filename="parameter_values", required=False ) - # Get the spec version of the template - schema_version: str = "" - if isinstance(template, dict): - version = template.get("specificationVersion") - if not isinstance(version, str): - raise DeadlineOperationError("Job Template's 'specificationVersion' must be a string.") - schema_version = version - if schema_version not in ["jobtemplate-2023-09"]: - raise DeadlineOperationError( - f"The Job Bundle's Job Template has an unsupported specificationVersion: {schema_version}" - ) + if not isinstance(template, dict): + raise DeadlineOperationError( + f"Job Template for job bundle {bundle_dir}:\nThe document does not contain a top-level object." + ) - # Start with the template parameters - template_parameters = {} - if isinstance(template, dict): - template_parameters = template.get("parameterDefinitions", {}) + # Get the spec version of the template + if "specificationVersion" not in template: + raise DeadlineOperationError( + f"Job Template for job bundle {bundle_dir}:\nDocument does not contain a specificationVersion." + ) + elif template.get("specificationVersion") not in ["jobtemplate-2023-09"]: + raise DeadlineOperationError( + f"Job Template for job bundle {bundle_dir}:\nDocument has an unsupported specificationVersion: {template.get('specificationVersion')}" + ) - if template_parameters: + # Start with the template parameters, converting them from a list into a dictionary + template_parameters: dict[str, dict[str, Any]] = {} + if "parameterDefinitions" in template: # parameters are a list of objects. Convert it to a map # from name -> parameter - if not isinstance(template_parameters, list): + if not isinstance(template["parameterDefinitions"], list): raise DeadlineOperationError( - f"Job parameters must be a list in a '{schema_version}' Job Template." + f"Job Template for job bundle {bundle_dir}:\nJob parameter definitions must be a list." 
) - template_parameters = {param["name"]: param for param in template_parameters} + template_parameters = {param["name"]: param for param in template["parameterDefinitions"]} # Add the parameter values where provided if parameter_values: @@ -149,15 +166,14 @@ def read_job_bundle_parameters(bundle_dir: str) -> list[dict[str, Any]]: if name in template_parameters: template_parameters[name]["value"] = parameter_value["value"] else: - if ":" in name: - # Names with a ':' are for the system using the job bundle, like Amazon Deadline Cloud - template_parameters[name] = parameter_value + # Keep the other parameter values around, as they may + # provide values for queue parameters or specific render farm + # values such as "deadline:*" + template_parameters[name] = parameter_value - # Make valueless PATH parameters with default and are not constrained + # Make valueless PATH parameters that have a default but are not constrained # by allowedValues, absolute by joining with the job bundle directory - for name in template_parameters: - parameter = template_parameters[name] - + for parameter in template_parameters.values(): if ( "value" not in parameter and parameter["type"] == "PATH" @@ -174,3 +190,98 @@ def read_job_bundle_parameters(bundle_dir: str) -> list[dict[str, Any]]: # Rearrange the dict from the template into a list return [{"name": name, **values} for name, values in template_parameters.items()] + + +_SUPPORTED_CONTROLS_FOR_TYPE = { + "STRING": {"LINE_EDIT", "MULTILINE_EDIT", "DROPDOWN_LIST", "CHECK_BOX", "HIDDEN"}, + "PATH": { + "CHOOSE_INPUT_FILE", + "CHOOSE_OUTPUT_FILE", + "CHOOSE_DIRECTORY", + "DROPDOWN_LIST", + "HIDDEN", + }, + "INT": {"SPIN_BOX", "DROPDOWN_LIST", "HIDDEN"}, + "FLOAT": {"SPIN_BOX", "DROPDOWN_LIST", "HIDDEN"}, +} + + +def get_ui_control_for_parameter_definition(param_def: dict[str, Any]) -> str: + """Returns the UI control for the given parameter definition, determining + the default if not explicitly set.""" + # If it's explicitly provided, return that + control = param_def.get("userInterface", {}).get("control") + param_type = param_def["type"] + if not control: + if "allowedValues" in param_def: + control = "DROPDOWN_LIST" + elif param_type == "STRING": + return "LINE_EDIT" + elif param_type == "PATH": + if param_def.get("objectType", "DIRECTORY") == "FILE": + if param_def.get("dataFlow", "NONE") == "OUT": + return "CHOOSE_OUTPUT_FILE" + else: + return "CHOOSE_INPUT_FILE" + else: + return "CHOOSE_DIRECTORY" + elif param_type in ("INT", "FLOAT"): + return "SPIN_BOX" + else: + raise DeadlineOperationError( + f"The job template parameter '{param_def.get('name', '')}' " + + f"specifies an unsupported type '{param_type}'." + ) + + if control not in _SUPPORTED_CONTROLS_FOR_TYPE[param_type]: + raise DeadlineOperationError( + f"The job template parameter '{param_def.get('name', '')}' " + + f"specifies an unsupported control '{control}' for its type '{param_type}'." + ) + + if control == "DROPDOWN_LIST" and "allowedValues" not in param_def: + raise DeadlineOperationError( + f"The job template parameter '{param_def.get('name', '')}' " + + "must supply 'allowedValues' if it uses a DROPDOWN_LIST control."
+ ) + + return control + + +def _parameter_definition_fields_equivalent( + lhs: dict[str, Any], + rhs: dict[str, Any], + field_name: str, + set_comparison: bool = False, +) -> bool: + lhs_value = lhs.get(field_name) + rhs_value = rhs.get(field_name) + if set_comparison and lhs_value is not None and rhs_value is not None: + return set(lhs_value) == set(rhs_value) + else: + return lhs_value == rhs_value + + +def parameter_definition_difference(lhs: dict[str, Any], rhs: dict[str, Any]) -> list[str]: + """Compares the two parameter definitions, returning a list of fields which differ. + Does not compare the userInterface properties. + """ + differences = [] + # Compare these properties as values + for name in ( + "name", + "type", + "minValue", + "maxValue", + "minLength", + "maxLength", + "dataFlow", + "objectType", + ): + if not _parameter_definition_fields_equivalent(lhs, rhs, name): + differences.append(name) + # Compare these properties as sets + for name in ("allowedValues",): + if not _parameter_definition_fields_equivalent(lhs, rhs, name, set_comparison=True): + differences.append(name) + return differences diff --git a/src/deadline/client/job_bundle/submission.py b/src/deadline/client/job_bundle/submission.py index a369bf57..f0e159d2 100644 --- a/src/deadline/client/job_bundle/submission.py +++ b/src/deadline/client/job_bundle/submission.py @@ -8,11 +8,7 @@ import dataclasses import logging import os -from typing import Any, Callable, Dict, List, Tuple, Optional - -from deadline.job_attachments.models import AssetRootManifest -from deadline.job_attachments.progress_tracker import SummaryStatistics -from deadline.job_attachments.upload import S3AssetManager +from typing import Any, Tuple, Optional from ..exceptions import DeadlineOperationError @@ -28,12 +24,17 @@ @dataclasses.dataclass -class FlatAssetReferences: - """Flat representation of a job bundle's asset references.""" +class AssetReferences: + """Holds the asset references for a job bundle.""" input_filenames: set[str] = dataclasses.field(default_factory=set) + """Filenames whose file contents are input to the job.""" input_directories: set[str] = dataclasses.field(default_factory=set) + """Directories whose contents are input to the job.""" output_directories: set[str] = dataclasses.field(default_factory=set) + """Directories whose contents are output from the job.""" + referenced_paths: set[str] = dataclasses.field(default_factory=set) + """Paths that are referenced by the job, but not necessarily input or output.""" def __init__( self, @@ -41,19 +42,12 @@ def __init__( input_filenames: Optional[set[str]] = None, input_directories: Optional[set[str]] = None, output_directories: Optional[set[str]] = None, + referenced_paths: Optional[set[str]] = None, ): - if input_filenames: - self.input_filenames = input_filenames - else: - self.input_filenames = set() - if input_directories: - self.input_directories = input_directories - else: - self.input_directories = set() - if output_directories: - self.output_directories = output_directories - else: - self.output_directories = set() + self.input_filenames = input_filenames or set() + self.input_directories = input_directories or set() + self.output_directories = output_directories or set() + self.referenced_paths = referenced_paths or set() def __bool__(self) -> bool: """Returns whether the object has any asset references.""" @@ -61,27 +55,31 @@ def __bool__(self) -> bool: bool(self.input_filenames) or bool(self.input_directories) or bool(self.output_directories) + or 
bool(self.referenced_paths) ) - def union(self, other: FlatAssetReferences): + def union(self, other: AssetReferences): """Returns the union of the asset references.""" - return FlatAssetReferences( + return AssetReferences( input_filenames=self.input_filenames.union(other.input_filenames), input_directories=self.input_directories.union(other.input_directories), output_directories=self.output_directories.union(other.output_directories), + referenced_paths=self.referenced_paths.union(other.referenced_paths), ) @classmethod - def from_dict(cls, obj: Optional[dict[str, Any]]) -> FlatAssetReferences: + def from_dict(cls, obj: Optional[dict[str, Any]]) -> AssetReferences: if obj: input_filenames = obj["assetReferences"].get("inputs", {}).get("filenames", []) input_directories = obj["assetReferences"].get("inputs", {}).get("directories", []) output_directories = obj["assetReferences"].get("outputs", {}).get("directories", []) + referenced_paths = obj["assetReferences"].get("referencedPaths", []) return cls( input_filenames=set(os.path.normpath(path) for path in input_filenames), input_directories=set(os.path.normpath(path) for path in input_directories), output_directories=set(os.path.normpath(path) for path in output_directories), + referenced_paths=set(os.path.normpath(path) for path in referenced_paths), ) else: return cls() @@ -94,34 +92,11 @@ def to_dict(self) -> dict[str, Any]: "filenames": sorted(self.input_filenames), }, "outputs": {"directories": sorted(self.output_directories)}, + "referencedPaths": sorted(self.referenced_paths), } } -def upload_job_attachments( - asset_manager: S3AssetManager, - manifests: List[AssetRootManifest], - upload_progress_callback: Callable, -) -> Tuple[SummaryStatistics, Dict[str, Any]]: - (upload_summary, attachments) = asset_manager.upload_assets( - manifests=manifests, on_uploading_assets=upload_progress_callback - ) - - # TODO: dataclasses.asdict doesn't respect the "metadata(exclude)" from dataclasses_json - # that DeadlineJobAttachments is using. Would like to consider removing - # DeadlineJobAttachments' dependency on dataclasses_json to reduce the dependency - # footprint we place inside of DCCs, and find a consistent way to handle this. - uploaded_attachments = _remove_nones(dataclasses.asdict(attachments)) - for manifest_properties in uploaded_attachments["manifests"]: - if ( - "outputRelativeDirectories" in manifest_properties - and manifest_properties["outputRelativeDirectories"] == [] - ): - del manifest_properties["outputRelativeDirectories"] - - return upload_summary, uploaded_attachments - - def split_parameter_args( job_bundle_parameters: list[dict[str, Any]], job_bundle_dir: str, @@ -172,26 +147,3 @@ def split_parameter_args( job_parameters[parameter_name] = {parameter_type: str(parameter_value)} return app_parameters, job_parameters - - -def _remove_nones(obj: Any) -> Any: - """ - Removes any fields from dicts contained - within the object whose value is None. Recursively - processes any dict or list within the object. - - Modifies obj in place, and returns it. 
- """ - if isinstance(obj, dict): - keys_to_remove = [] - for key, value in obj.items(): - if value is None: - keys_to_remove.append(key) - elif isinstance(value, (dict, list)): - _remove_nones(value) - for key in keys_to_remove: - del obj[key] - elif isinstance(obj, list): - for i in range(len(obj)): - _remove_nones(obj[i]) - return obj diff --git a/src/deadline/client/ui/cli_job_submitter.py b/src/deadline/client/ui/cli_job_submitter.py index 0442816d..bfcdf47b 100644 --- a/src/deadline/client/ui/cli_job_submitter.py +++ b/src/deadline/client/ui/cli_job_submitter.py @@ -1,4 +1,5 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +from __future__ import annotations import json import os @@ -16,7 +17,7 @@ from .dataclasses import CliJobSettings from .dialogs.submit_job_to_deadline_dialog import SubmitJobToDeadlineDialog from .widgets.cli_job_settings_tab import CliJobSettingsWidget -from ..job_bundle.submission import FlatAssetReferences +from ..job_bundle.submission import AssetReferences logger = getLogger(__name__) @@ -42,9 +43,10 @@ def show_cli_job_submitter(parent=None, f=Qt.WindowFlags()) -> None: def on_create_job_bundle_callback( widget: SubmitJobToDeadlineDialog, - settings: CliJobSettings, job_bundle_dir: str, - asset_references: FlatAssetReferences, + settings: CliJobSettings, + queue_parameters: list[dict[str, Any]], + asset_references: AssetReferences, ) -> None: """ Perform a submission when the submit button is pressed @@ -64,6 +66,7 @@ def on_create_job_bundle_callback( job_template: Dict[str, Any] = { "specificationVersion": "jobtemplate-2023-09", "name": settings.name, + "description": settings.description, "parameterDefinitions": [ { "name": "DataDir", @@ -74,6 +77,8 @@ def on_create_job_bundle_callback( } ], } + if not settings.description: + del job_template["description"] step = { "name": "CliScript", "script": { @@ -122,11 +127,9 @@ def on_create_job_bundle_callback( elif settings.file_format == "JSON": json.dump(job_template, f, sort_keys=False, indent=1) + # Filter the provided queue parameters to just their values parameters_values = [ - {"name": "deadline:priority", "value": settings.priority}, - {"name": "deadline:targetTaskRunStatus", "value": settings.initial_status}, - {"name": "deadline:maxFailedTasksCount", "value": settings.max_failed_tasks_count}, - {"name": "deadline:maxRetriesPerTask", "value": settings.max_retries_per_task}, + {"name": param["name"], "value": param["value"]} for param in queue_parameters ] with open( @@ -151,11 +154,12 @@ def on_create_job_bundle_callback( json.dump(asset_references.to_dict(), f, indent=1) __submitter_dialog = SubmitJobToDeadlineDialog( - CliJobSettingsWidget, - CliJobSettings(), - FlatAssetReferences(), - FlatAssetReferences(), - on_create_job_bundle_callback, + job_setup_widget_type=CliJobSettingsWidget, + initial_job_settings=CliJobSettings(), + initial_shared_parameter_values={}, + auto_detected_attachments=AssetReferences(), + attachments=AssetReferences(), + on_create_job_bundle_callback=on_create_job_bundle_callback, parent=parent, f=f, ) diff --git a/src/deadline/client/ui/dataclasses/__init__.py b/src/deadline/client/ui/dataclasses/__init__.py index 5c58972f..8ff717f2 100644 --- a/src/deadline/client/ui/dataclasses/__init__.py +++ b/src/deadline/client/ui/dataclasses/__init__.py @@ -3,10 +3,11 @@ """ Contains dataclasses for holding UI parameter values, used by the widgets. 
""" +from __future__ import annotations import os from dataclasses import dataclass, field -from typing import Any, Dict, List +from typing import Any @dataclass @@ -21,16 +22,10 @@ class JobBundleSettings: # pylint: disable=too-many-instance-attributes # Shared settings name: str = field(default="Job Bundle") description: str = field(default="") - initial_status: str = field(default="READY") - max_failed_tasks_count: int = field(default=100) - max_retries_per_task: int = field(default=5) - priority: int = field(default=50) - override_rez_packages: bool = field(default=False) - rez_packages: str = field(default="") # Job Bundle settings input_job_bundle_dir: str = field(default="") - parameter_values: List[Dict[str, Any]] = field(default_factory=list) + parameters: list[dict[str, Any]] = field(default_factory=list) @dataclass @@ -45,12 +40,6 @@ class CliJobSettings: # pylint: disable=too-many-instance-attributes # Shared settings name: str = field(default="CLI Job") description: str = field(default="") - initial_status: str = field(default="READY") - max_failed_tasks_count: int = field(default=20) - max_retries_per_task: int = field(default=5) - priority: int = field(default=50) - override_rez_packages: bool = field(default=False) - rez_packages: str = field(default="") # CLI job settings bash_script_contents: str = field( diff --git a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py index a6390b13..cca67498 100644 --- a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py @@ -3,8 +3,7 @@ Provides a modal dialog box for the submission progress when submitting to Amazon Deadline Cloud """ - -__all__ = ["SubmitJobProgressDialog"] +from __future__ import annotations import json import logging @@ -36,9 +35,8 @@ from deadline.client.job_bundle.loader import read_yaml_or_json, read_yaml_or_json_object from deadline.client.job_bundle.parameters import apply_job_parameters, read_job_bundle_parameters from deadline.client.job_bundle.submission import ( - FlatAssetReferences, + AssetReferences, split_parameter_args, - upload_job_attachments, ) from deadline.job_attachments.exceptions import AssetSyncCancelledError from deadline.job_attachments.models import AssetRootManifest @@ -46,6 +44,8 @@ from deadline.job_attachments.upload import S3AssetManager from deadline.job_attachments._utils import _human_readable_file_size +__all__ = ["SubmitJobProgressDialog"] + logger = logging.getLogger(__name__) @@ -80,6 +80,7 @@ def start_submission( queue_id: str, storage_profile_id: str, job_bundle_dir: str, + queue_parameters: list[dict[str, Any]], asset_manager: S3AssetManager, deadline_client: BaseClient, auto_accept: bool = False, @@ -105,6 +106,7 @@ def start_submission( self._queue_id = queue_id self._storage_profile_id = storage_profile_id self._job_bundle_dir = job_bundle_dir + self._queue_parameters = queue_parameters self._asset_manager = asset_manager self._deadline_client = deadline_client self._auto_accept = auto_accept @@ -189,9 +191,15 @@ def _start_submission(self): asset_references_obj = read_yaml_or_json_object( self._job_bundle_dir, "asset_references", required=False ) - self.asset_references = FlatAssetReferences.from_dict(asset_references_obj) - - apply_job_parameters([], self._job_bundle_dir, job_bundle_parameters, self.asset_references) + self.asset_references = AssetReferences.from_dict(asset_references_obj) + + apply_job_parameters( + [], + 
self._job_bundle_dir, + job_bundle_parameters, + self._queue_parameters, + self.asset_references, + ) app_parameters_formatted, job_parameters_formatted = split_parameter_args( job_bundle_parameters, self._job_bundle_dir @@ -218,6 +226,7 @@ def _start_submission(self): self._start_hashing( self.asset_references.input_filenames, self.asset_references.output_directories, + self.asset_references.referenced_paths, ) else: self.hashing_progress_bar.setVisible(False) @@ -226,7 +235,9 @@ def _start_submission(self): self.upload_progress_message.setVisible(False) self._start_create_job() - def _hashing_background_thread(self, input_paths: Set[str], output_paths: Set[str]) -> None: + def _hashing_background_thread( + self, input_paths: Set[str], output_paths: Set[str], referenced_paths: Set[str] + ) -> None: """ This function gets started in a background thread to start the hashing of any job attachments. @@ -242,6 +253,7 @@ def _update_hash_progress(hashing_metadata: ProgressReportMetadata) -> bool: hashing_summary, manifests = self._asset_manager.hash_assets_and_create_manifest( input_paths=sorted(input_paths), output_paths=sorted(output_paths), + referenced_paths=sorted(referenced_paths), storage_profile_id=self._storage_profile_id, hash_cache_dir=os.path.expanduser(os.path.join("~", ".deadline", "cache")), on_preparing_to_submit=_update_hash_progress, @@ -273,15 +285,14 @@ def _update_upload_progress(upload_metadata: ProgressReportMetadata) -> bool: logger.info("Uploading job attachments files...") - upload_summary, attachment_settings = upload_job_attachments( - self._asset_manager, + upload_summary, attachment_settings = self._asset_manager.upload_assets( manifests, _update_upload_progress, ) logger.info("Finished uploading job attachments files.") - self.upload_thread_succeeded.emit(upload_summary, attachment_settings) + self.upload_thread_succeeded.emit(upload_summary, attachment_settings.to_dict()) except AssetSyncCancelledError as e: # If it wasn't canceled, send the exception to the dialog if self._continue_submission: @@ -336,7 +347,9 @@ def _continue_create_job_wait() -> bool: # Send the exception to the dialog self.create_job_thread_exception.emit(e) - def _start_hashing(self, input_paths: Set[str], output_paths: Set[str]) -> None: + def _start_hashing( + self, input_paths: Set[str], output_paths: Set[str], referenced_paths: Set[str] + ) -> None: """ Starts the background hashing thread. """ @@ -344,7 +357,7 @@ def _start_hashing(self, input_paths: Set[str], output_paths: Set[str]) -> None: self.__hashing_thread = threading.Thread( target=self._hashing_background_thread, name="Amazon Deadline Cloud Hashing Background Thread", - args=(input_paths, output_paths), + args=(input_paths, output_paths, referenced_paths), ) self.__hashing_thread.start() diff --git a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py index a50c0d29..c07878ea 100644 --- a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py @@ -28,6 +28,7 @@ from deadline.job_attachments.upload import S3AssetManager from ... import api +from .. import block_signals from ...config import get_setting from ...config.config_file import str2bool from ...job_bundle import create_job_history_bundle_dir @@ -35,7 +36,7 @@ from ..widgets.job_attachments_tab import JobAttachmentsWidget from ..widgets.shared_job_settings_tab import SharedJobSettingsWidget from . 
import DeadlineConfigDialog, DeadlineLoginDialog -from ...job_bundle.submission import FlatAssetReferences +from ...job_bundle.submission import AssetReferences logger = logging.getLogger(__name__) @@ -48,21 +49,27 @@ class SubmitJobToDeadlineDialog(QDialog): pass f=Qt.Tool, a flag that tells it to do that. Args: - job_setup_widget_type: QWidget - The type of the widget for the job-specific settings. - initial_job_settings: dataclass - A dataclass containing the initial job settings + job_setup_widget_type (QWidget): The type of the widget for the job-specific settings. + initial_job_settings (dataclass): A dataclass containing the initial job settings + initial_shared_parameter_values (dict[str, Any]): A dict of parameter values {<name>: <value>, ...} + to override default queue parameter values from the queue. For example, + a Rez queue environment may have a default "" for the RezPackages parameter, but a Maya + submitter would override that default with "maya-2023" or similar. auto_detected_attachments (FlatAssetReferences): The job attachments that were automatically detected from the input document/scene file or starting job bundle. attachments: (FlatAssetReferences): The job attachments that have been added to the job by the user. on_create_job_bundle_callback: A function to call when the dialog needs to create a Job Bundle. It - is called with arguments (settings, job_bundle_dir, asset_references) + is called with arguments (widget, job_bundle_dir, settings, queue_parameters, asset_references) """ def __init__( self, + *, job_setup_widget_type: QWidget, initial_job_settings, - auto_detected_attachments: FlatAssetReferences, - attachments: FlatAssetReferences, + initial_shared_parameter_values: dict[str, Any], + auto_detected_attachments: AssetReferences, + attachments: AssetReferences, on_create_job_bundle_callback, parent=None, f=Qt.WindowFlags(), @@ -77,7 +84,11 @@ def __init__( self.create_job_response: Optional[Dict[str, Any]] = None self._build_ui( - job_setup_widget_type, initial_job_settings, auto_detected_attachments, attachments + job_setup_widget_type, + initial_job_settings, + initial_shared_parameter_values, + auto_detected_attachments, + attachments, ) self.gui_update_counter: Any = None @@ -103,16 +114,19 @@ def refresh_deadline_settings(self): and get_setting("defaults.queue_id") != "" ) - self.shared_job_settings.deadline_settings_box.refresh_setting_controls( + self.shared_job_settings.deadline_cloud_settings_box.refresh_setting_controls( self.creds_status_box.deadline_authorized ) + # If necessary, this reloads the queue parameters + self.shared_job_settings.refresh_queue_parameters() def _build_ui( self, job_setup_widget_type, initial_job_settings, - auto_detected_attachments: FlatAssetReferences, - attachments: FlatAssetReferences, + initial_shared_parameter_values, + auto_detected_attachments: AssetReferences, + attachments: AssetReferences, ): self.lyt = QVBoxLayout(self) self.lyt.setContentsMargins(5, 5, 5, 5) @@ -122,7 +136,7 @@ def _build_ui( self.tabs = QTabWidget() self.lyt.addWidget(self.tabs) - self._build_shared_job_settings_tab(initial_job_settings) + self._build_shared_job_settings_tab(initial_job_settings, initial_shared_parameter_values) self._build_job_settings_tab(job_setup_widget_type, initial_job_settings) self._build_job_attachments_tab(auto_detected_attachments, attachments) @@ -138,7 +152,7 @@ def _build_ui( self.logout_button.clicked.connect(self.on_logout) self.button_box.addButton(self.logout_button, QDialogButtonBox.ResetRole) self.settings_button =
QPushButton("Settings...") - self.settings_button.clicked.connect(self.on_settings) + self.settings_button.clicked.connect(self.on_settings_button_clicked) self.button_box.addButton(self.settings_button, QDialogButtonBox.ResetRole) self.submit_button = QPushButton("Submit") self.submit_button.clicked.connect(self.on_submit) @@ -157,14 +171,18 @@ def keyPressEvent(self, event: QKeyEvent) -> None: return super().keyPressEvent(event) - def _build_shared_job_settings_tab(self, initial_job_settings): + def _build_shared_job_settings_tab(self, initial_job_settings, initial_shared_parameter_values): self.shared_job_settings_tab = QScrollArea() self.tabs.addTab(self.shared_job_settings_tab, "Shared Job Settings") self.shared_job_settings = SharedJobSettingsWidget( - initial_settings=initial_job_settings, parent=self + initial_settings=initial_job_settings, + initial_shared_parameter_values=initial_shared_parameter_values, + parent=self, ) + self.shared_job_settings.parameter_changed.connect(self.on_shared_job_parameter_changed) self.shared_job_settings_tab.setWidget(self.shared_job_settings) self.shared_job_settings_tab.setWidgetResizable(True) + self.shared_job_settings.parameter_changed.connect(self.on_shared_job_parameter_changed) def _build_job_settings_tab(self, job_setup_widget_type, initial_job_settings): self.job_settings_tab = QScrollArea() @@ -175,9 +193,11 @@ def _build_job_settings_tab(self, job_setup_widget_type, initial_job_settings): initial_settings=initial_job_settings, parent=self ) self.job_settings_tab.setWidget(self.job_settings) + if hasattr(self.job_settings, "parameter_changed"): + self.job_settings.parameter_changed.connect(self.on_job_template_parameter_changed) def _build_job_attachments_tab( - self, auto_detected_attachments: FlatAssetReferences, attachments: FlatAssetReferences + self, auto_detected_attachments: AssetReferences, attachments: AssetReferences ): self.job_attachments_tab = QScrollArea() self.tabs.addTab(self.job_attachments_tab, "Job Attachments") @@ -187,6 +207,38 @@ def _build_job_attachments_tab( self.job_attachments_tab.setWidget(self.job_attachments) self.job_attachments_tab.setWidgetResizable(True) + def on_shared_job_parameter_changed(self, parameter: dict[str, Any]): + """ + Handles an edit to a shared job parameter, for example one of the + queue parameters. + + When a queue parameter and a job template parameter have + the same name, we update between them to keep them consistent. + """ + try: + if hasattr(self.job_settings, "set_parameter_value"): + with block_signals(self.job_settings): + self.job_settings.set_parameter_value(parameter) + except KeyError: + # If there is no corresponding job template parameter, + # just ignore it. + pass + + def on_job_template_parameter_changed(self, parameter: dict[str, Any]): + """ + Handles an edit to a job template parameter. + + When a queue parameter and a job template parameter have + the same name, we update between them to keep them consistent. + """ + try: + with block_signals(self.shared_job_settings): + self.shared_job_settings.set_parameter_value(parameter) + except KeyError: + # If there is no corresponding queue parameter, + # just ignore it. + pass + def on_login(self): DeadlineLoginDialog.login(parent=self) self.refresh_deadline_settings() @@ -201,7 +253,7 @@ def on_logout(self): # not always catch a change so force a refresh here. 
self.creds_status_box.refresh_status() - def on_settings(self): + def on_settings_button_clicked(self): if DeadlineConfigDialog.configure_settings(parent=self): self.refresh_deadline_settings() @@ -211,17 +263,19 @@ def on_save_bundle(self): """ # Retrieve all the settings into the dataclass settings = self.job_settings_type() - self.shared_job_settings.deadline_settings_box.update_settings(settings) - self.shared_job_settings.desc_box.update_settings(settings) + self.shared_job_settings.update_settings(settings) self.job_settings.update_settings(settings) - self.shared_job_settings.rez_packages_box.update_settings(settings) + + queue_parameters = self.shared_job_settings.get_parameters() asset_references = self.job_attachments.get_asset_references() # Save the bundle try: job_bundle_dir = create_job_history_bundle_dir(settings.submitter_name, settings.name) - self.on_create_job_bundle_callback(self, settings, job_bundle_dir, asset_references) + self.on_create_job_bundle_callback( + self, job_bundle_dir, settings, queue_parameters, asset_references + ) logger.info("Saved the submission as a job bundle:") logger.info(job_bundle_dir) @@ -246,10 +300,10 @@ def on_submit(self): """ # Retrieve all the settings into the dataclass settings = self.job_settings_type() - self.shared_job_settings.deadline_settings_box.update_settings(settings) - self.shared_job_settings.desc_box.update_settings(settings) + self.shared_job_settings.update_settings(settings) self.job_settings.update_settings(settings) - self.shared_job_settings.rez_packages_box.update_settings(settings) + + queue_parameters = self.shared_job_settings.get_parameters() asset_references = self.job_attachments.get_asset_references() @@ -262,7 +316,9 @@ def on_submit(self): deadline = api.get_boto3_client("deadline") job_bundle_dir = create_job_history_bundle_dir(settings.submitter_name, settings.name) - self.on_create_job_bundle_callback(self, settings, job_bundle_dir, asset_references) + self.on_create_job_bundle_callback( + self, job_bundle_dir, settings, queue_parameters, asset_references + ) farm_id = get_setting("defaults.farm_id") queue_id = get_setting("defaults.queue_id") @@ -289,6 +345,7 @@ def on_submit(self): queue_id, storage_profile_id, job_bundle_dir, + queue_parameters, asset_manager, deadline, auto_accept=str2bool(get_setting("settings.auto_accept")), diff --git a/src/deadline/client/ui/job_bundle_submitter.py b/src/deadline/client/ui/job_bundle_submitter.py index 1d11af3d..d906a29a 100644 --- a/src/deadline/client/ui/job_bundle_submitter.py +++ b/src/deadline/client/ui/job_bundle_submitter.py @@ -1,9 +1,10 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+from __future__ import annotations import json import os from logging import getLogger -from typing import Any, Dict, List, Optional +from typing import Any, Optional from PySide2.QtCore import Qt # pylint: disable=import-error from PySide2.QtWidgets import ( # pylint: disable=import-error; type: ignore @@ -12,17 +13,18 @@ QMainWindow, ) +from ..exceptions import DeadlineOperationError from ..job_bundle import deadline_yaml_dump from ..job_bundle.loader import ( parse_yaml_or_json_content, read_yaml_or_json, read_yaml_or_json_object, ) -from ..job_bundle.parameters import apply_job_parameters +from ..job_bundle.parameters import apply_job_parameters, read_job_bundle_parameters from .dataclasses import JobBundleSettings from .dialogs.submit_job_to_deadline_dialog import SubmitJobToDeadlineDialog from .widgets.job_bundle_settings_tab import JobBundleSettingsWidget -from ..job_bundle.submission import FlatAssetReferences +from ..job_bundle.submission import AssetReferences logger = getLogger(__name__) @@ -55,9 +57,10 @@ def show_job_bundle_submitter( def on_create_job_bundle_callback( widget: SubmitJobToDeadlineDialog, - settings: JobBundleSettings, job_bundle_dir: str, - asset_references: FlatAssetReferences, + settings: JobBundleSettings, + queue_parameters: list[dict[str, Any]], + asset_references: AssetReferences, ) -> None: """ Perform a submission when the submit button is pressed @@ -87,25 +90,30 @@ def on_create_job_bundle_callback( elif file_type == "JSON": json.dump(template, f, indent=1) - parameters_values: List[Dict[str, Any]] = [ - {"name": "deadline:priority", "value": settings.priority}, - {"name": "deadline:targetTaskRunStatus", "value": settings.initial_status}, - {"name": "deadline:maxFailedTasksCount", "value": settings.max_failed_tasks_count}, - {"name": "deadline:maxRetriesPerTask", "value": settings.max_retries_per_task}, - ] - if asset_references: with open( os.path.join(job_bundle_dir, "asset_references.yaml"), "w", encoding="utf8" ) as f: deadline_yaml_dump(asset_references.to_dict(), f) - job_bundle_parameters = widget.job_settings.job_bundle_parameters - - parameters_values.extend(settings.parameter_values) + # First filter the queue parameters to exclude any from the job template, + # then extend it with the job template parameters. 
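+        # For example (hypothetical values), if both the queue and the job template
+        # define RezPackages, the job template's value wins:
+        #   queue_parameters:    [..., {"name": "RezPackages", "value": ""}]
+        #   settings.parameters: [{"name": "RezPackages", "value": "maya-2023"}, ...]
+        #   parameters_values:   [..., {"name": "RezPackages", "value": "maya-2023"}]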
+ job_parameter_names = {param["name"] for param in settings.parameters} + parameters_values: list[dict[str, Any]] = [ + {"name": param["name"], "value": param["value"]} + for param in queue_parameters + if param["name"] not in job_parameter_names + ] + parameters_values.extend( + {"name": param["name"], "value": param["value"]} for param in settings.parameters + ) apply_job_parameters( - parameters_values, job_bundle_dir, job_bundle_parameters, FlatAssetReferences() + parameters_values, + job_bundle_dir, + settings.parameters, + queue_parameters, + AssetReferences(), ) with open(os.path.join(job_bundle_dir, "parameter_values.yaml"), "w", encoding="utf8") as f: @@ -117,18 +125,32 @@ def on_create_job_bundle_callback( asset_references_obj = ( read_yaml_or_json_object(input_job_bundle_dir, "asset_references", False) or {} ) - asset_references = FlatAssetReferences.from_dict(asset_references_obj) + asset_references = AssetReferences.from_dict(asset_references_obj) name = "Job Bundle Submission" if template: name = template.get("name", name) + if not os.path.isdir(input_job_bundle_dir): + raise DeadlineOperationError(f"Input Job Bundle Dir is not valid: {input_job_bundle_dir}") + initial_settings = JobBundleSettings(input_job_bundle_dir=input_job_bundle_dir, name=name) + initial_settings.parameters = read_job_bundle_parameters(input_job_bundle_dir) + + # Populate the initial queue parameter values based on the job template parameter values + initial_shared_parameter_values = {} + for parameter in initial_settings.parameters: + if "default" in parameter or "value" in parameter: + initial_shared_parameter_values[parameter["name"]] = parameter.get( + "value", parameter.get("default") + ) + submitter_dialog = SubmitJobToDeadlineDialog( - JobBundleSettingsWidget, - JobBundleSettings(input_job_bundle_dir=input_job_bundle_dir, name=name), - asset_references, - FlatAssetReferences(), - on_create_job_bundle_callback, + job_setup_widget_type=JobBundleSettingsWidget, + initial_job_settings=initial_settings, + initial_shared_parameter_values=initial_shared_parameter_values, + auto_detected_attachments=asset_references, + attachments=AssetReferences(), + on_create_job_bundle_callback=on_create_job_bundle_callback, parent=parent, f=f, ) diff --git a/src/deadline/client/ui/widgets/job_attachments_tab.py b/src/deadline/client/ui/widgets/job_attachments_tab.py index 8bfaea0b..e671ae7a 100644 --- a/src/deadline/client/ui/widgets/job_attachments_tab.py +++ b/src/deadline/client/ui/widgets/job_attachments_tab.py @@ -23,7 +23,7 @@ QMessageBox, ) -from ...job_bundle.submission import FlatAssetReferences +from ...job_bundle.submission import AssetReferences from .. import block_signals logger = getLogger(__name__) @@ -43,8 +43,8 @@ class JobAttachmentsWidget(QWidget): def __init__( self, - auto_detected_attachments: FlatAssetReferences, - attachments: FlatAssetReferences, + auto_detected_attachments: AssetReferences, + attachments: AssetReferences, parent: QWidget = None, ) -> None: super().__init__(parent=parent) @@ -284,7 +284,7 @@ def _remove_selected_output_directories(self) -> None: f"The selected directories from the auto-detected list ({len(unremoved_dirs)} items) were not removed.", ) - def get_asset_references(self) -> FlatAssetReferences: + def get_asset_references(self) -> AssetReferences: """ Creates an asset_references object that can be saved as the asset_references.json|yaml file in a Job Bundle. 
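+
+        For example (a sketch; this mirrors how job_bundle_submitter.py above
+        writes the file):
+
+            refs = widget.get_asset_references()
+            with open(os.path.join(job_bundle_dir, "asset_references.yaml"), "w", encoding="utf8") as f:
+                deadline_yaml_dump(refs.to_dict(), f)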
diff --git a/src/deadline/client/ui/widgets/job_bundle_settings_tab.py b/src/deadline/client/ui/widgets/job_bundle_settings_tab.py index 744126a1..50e25c38 100644 --- a/src/deadline/client/ui/widgets/job_bundle_settings_tab.py +++ b/src/deadline/client/ui/widgets/job_bundle_settings_tab.py @@ -3,24 +3,32 @@ """ UI widgets for the Scene Settings tab. """ -import os +from __future__ import annotations +from typing import Any + +from PySide2.QtCore import Signal # type: ignore from PySide2.QtWidgets import QVBoxLayout, QWidget # type: ignore -from ...job_bundle import read_job_bundle_parameters from ..dataclasses import JobBundleSettings -from .job_template_parameters_widget import JobTemplateParametersWidget +from .openjd_parameters_widget import OpenJDParametersWidget class JobBundleSettingsWidget(QWidget): """ Widget containing job setup specific to CLI jobs. + Signals: + parameter_changed: This is sent whenever a parameter value in the widget changes. The message + is a copy of the parameter definition with the "value" key containing the new value. + Args: initial_settings (CliJobSettings): dataclass containing the job-specific settings. parent: The parent Qt Widget. """ + parameter_changed = Signal(dict) + def __init__(self, initial_settings: JobBundleSettings, parent=None): super().__init__(parent=parent) @@ -29,24 +37,35 @@ def __init__(self, initial_settings: JobBundleSettings, parent=None): def _build_ui(self, initial_settings: JobBundleSettings): layout = QVBoxLayout(self) - if not os.path.isdir(initial_settings.input_job_bundle_dir): - raise RuntimeError( - f"Input Job Bundle Dir is not valid: {initial_settings.input_job_bundle_dir}" - ) - self.input_job_bundle_dir = initial_settings.input_job_bundle_dir - self.job_bundle_parameters = read_job_bundle_parameters( - initial_settings.input_job_bundle_dir - ) - self.job_template_parameters_widget = JobTemplateParametersWidget( - self.job_bundle_parameters, parent=self + self.parameters_widget = OpenJDParametersWidget( + parameter_definitions=initial_settings.parameters, parent=self + ) + layout.addWidget(self.parameters_widget) + self.parameters_widget.parameter_changed.connect( + lambda message: self.parameter_changed.emit(message) ) - layout.addWidget(self.job_template_parameters_widget) def update_settings(self, settings: JobBundleSettings): """ Update a settings object with the latest values. """ settings.input_job_bundle_dir = self.input_job_bundle_dir - settings.parameter_values = self.job_template_parameters_widget.get_parameter_values() + settings.parameters = self.parameters_widget.get_parameters() + + def get_parameters(self): + """ + Returns a list of OpenJD parameter definition dicts with + a "value" key filled from the widget. + """ + return self.parameters_widget.get_parameters() + + def set_parameter_value(self, parameter: dict[str, Any]): + """ + Given an OpenJD parameter definition with a "value" key, + set the parameter value in the widget. + + If the parameter value cannot be set, raises a KeyError. 
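+
+        For example (hypothetical parameter), a caller could sync a queue
+        parameter into this widget with:
+
+            widget.set_parameter_value({"name": "RezPackages", "value": "maya-2023"})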
+ """ + self.parameters_widget.set_parameter_value(parameter) diff --git a/src/deadline/client/ui/widgets/job_template_parameters_widget.py b/src/deadline/client/ui/widgets/openjd_parameters_widget.py similarity index 74% rename from src/deadline/client/ui/widgets/job_template_parameters_widget.py rename to src/deadline/client/ui/widgets/openjd_parameters_widget.py index 3673b571..5e3d0a1c 100644 --- a/src/deadline/client/ui/widgets/job_template_parameters_widget.py +++ b/src/deadline/client/ui/widgets/openjd_parameters_widget.py @@ -1,12 +1,14 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. - """ UI widgets for the Scene Settings tab. """ +from __future__ import annotations + import os from typing import Any, Dict, List +from copy import deepcopy -from PySide2.QtCore import QRegularExpression, Qt # type: ignore +from PySide2.QtCore import QRegularExpression, Qt, Signal # type: ignore from PySide2.QtGui import QValidator from PySide2.QtWidgets import ( # type: ignore QCheckBox, @@ -25,106 +27,129 @@ ) from ...job_bundle.job_template import ControlType -from ..widgets.path_widgets import ( +from ...job_bundle.parameters import get_ui_control_for_parameter_definition +from .path_widgets import ( DirectoryPickerWidget, InputFilePickerWidget, OutputFilePickerWidget, ) -from ..widgets.spinbox_widgets import DecimalMode, FloatDragSpinBox, IntDragSpinBox +from .spinbox_widgets import DecimalMode, FloatDragSpinBox, IntDragSpinBox -class JobTemplateParametersWidget(QWidget): +class OpenJDParametersWidget(QWidget): """ - Widget that takes the set of parameters from a job template, and generated - a UI form to edit them with. + Widget that takes the set of Open Job Description parameters, for example from a job template or a queue, + and generates a UI form to edit them. Open Job Description has optional UI metadata for each parameter specified under "userInterface". + Signals: + parameter_changed: This is sent whenever a parameter value in the widget changes. The message + is a copy of the parameter definition with the "value" key containing the new value. + Args: - initial_job_parameters (Dict[str, Any]): Open Job Description parameters block. + parameter_definitions (List[Dict[str, Any]]): A list of Open Job Description parameter definitions. + async_loading_state (str): A message to show its async loading state. Cannot provide both this + message and the parameter_definitions. parent: The parent Qt Widget. """ - def __init__(self, job_parameters: List[Dict[str, Any]], parent=None): + parameter_changed = Signal(dict) + + def __init__( + self, + *, + parameter_definitions: List[Dict[str, Any]] = [], + async_loading_state: str = "", + parent=None, + ): super().__init__(parent=parent) - self._build_ui(job_parameters) + self.rebuild_ui( + parameter_definitions=parameter_definitions, async_loading_state=async_loading_state + ) + + def rebuild_ui( + self, *, parameter_definitions: list[dict[str, Any]] = [], async_loading_state: str = "" + ): + """ + Rebuilds the widget's UI to the new parameter_definitions, or to display the + async_loading_state message. + """ + if parameter_definitions and async_loading_state: + raise RuntimeError( + "Constructing or updating an OpenJD parameters widget in the " + + "async_loading_state requires an empty parameter_definitions list." 
+ ) - def _build_ui(self, job_parameters: List[Dict[str, Any]]): - layout = QVBoxLayout(self) + layout = self.layout() + if isinstance(layout, QVBoxLayout): + for index in reversed(range(layout.count())): + child = layout.takeAt(index) + if child.widget(): + child.widget().deleteLater() + elif child.layout(): + child.layout().deleteLater() + else: + layout = QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) - self.controls: List[Any] = [] + self.controls: dict[str, Any] = {} + + if async_loading_state: + loading = QLabel(async_loading_state, self) + loading.setAlignment(Qt.AlignCenter) + loading.setMinimumSize(100, 30) + layout.addWidget(loading) + layout.addItem(QSpacerItem(0, 0, QSizePolicy.Minimum, QSizePolicy.Expanding)) + self.async_loading_state = async_loading_state + return + else: + self.async_loading_state = "" + need_spacer = True control_map = { - ControlType.LINE_EDIT: _JobTemplateLineEditWidget, - ControlType.MULTILINE_EDIT: _JobTemplateMultiLineEditWidget, - ControlType.INT_SPIN_BOX: _JobTemplateIntSpinBoxWidget, - ControlType.FLOAT_SPIN_BOX: _JobTemplateFloatSpinBoxWidget, - ControlType.DROPDOWN_LIST: _JobTemplateDropdownListWidget, - ControlType.CHOOSE_INPUT_FILE: _JobTemplateInputFileWidget, - ControlType.CHOOSE_OUTPUT_FILE: _JobTemplateOutputFileWidget, - ControlType.CHOOSE_DIRECTORY: _JobTemplateDirectoryWidget, - ControlType.CHECK_BOX: _JobTemplateCheckBoxWidget, - ControlType.HIDDEN: _JobTemplateHiddenWidget, + ControlType.LINE_EDIT.name: _JobTemplateLineEditWidget, + ControlType.MULTILINE_EDIT.name: _JobTemplateMultiLineEditWidget, + ControlType.DROPDOWN_LIST.name: _JobTemplateDropdownListWidget, + ControlType.CHOOSE_INPUT_FILE.name: _JobTemplateInputFileWidget, + ControlType.CHOOSE_OUTPUT_FILE.name: _JobTemplateOutputFileWidget, + ControlType.CHOOSE_DIRECTORY.name: _JobTemplateDirectoryWidget, + ControlType.CHECK_BOX.name: _JobTemplateCheckBoxWidget, + ControlType.HIDDEN.name: _JobTemplateHiddenWidget, } - for parameter in job_parameters: + for parameter in parameter_definitions: # Skip application-specific parameters like "deadline:priority" if ":" in parameter["name"]: continue - try: - control_type_name = "" - if "userInterface" in parameter: - control_type_name = parameter["userInterface"].get("control", "") - - # If not explicitly provided, determine the default control type name based on the OPENJD specification - if not control_type_name: - if parameter.get("allowedValues"): - control_type_name = "DROPDOWN_LIST" - else: - if parameter["type"] == "STRING": - control_type_name = "LINE_EDIT" - elif parameter["type"] == "PATH": - if parameter.get("objectType") == "FILE": - if parameter.get("dataFlow") == "OUT": - control_type_name = "CHOOSE_OUTPUT_FILE" - else: - control_type_name = "CHOOSE_INPUT_FILE" - else: - control_type_name = "CHOOSE_DIRECTORY" - elif parameter["type"] in ["INT", "FLOAT"]: - control_type_name = "SPIN_BOX" - - if control_type_name == "SPIN_BOX": - control_type_name = f"{parameter['type']}_{control_type_name}" - - control_type = ControlType[control_type_name] - except KeyError: - raise RuntimeError( - f"Job Template parameter {parameter['name']} specifies unsupported control type {control_type_name}." 
- ) + control_type_name = get_ui_control_for_parameter_definition(parameter) - if "userInterface" in parameter: - group_label = parameter["userInterface"].get("groupLabel", "") + if parameter["type"] == "INT" and control_type_name == "SPIN_BOX": + control_widget = _JobTemplateIntSpinBoxWidget + elif parameter["type"] == "FLOAT" and control_type_name == "SPIN_BOX": + control_widget = _JobTemplateFloatSpinBoxWidget else: - group_label = "" + control_widget = control_map[control_type_name] - control_widget = control_map[control_type] - self.controls.append(control_widget(self, parameter)) + group_label = parameter.get("userInterface", {}).get("groupLabel", "") - if control_type != ControlType.HIDDEN: + control = control_widget(self, parameter) + self.controls[control.name()] = control + control.connect_parameter_changed(lambda message: self.parameter_changed.emit(message)) + + if control_type_name != ControlType.HIDDEN.name: if group_label: group_layout = self.findChild(_JobTemplateGroupLayout, group_label) if not group_layout: group_layout = _JobTemplateGroupLayout(self, group_label) group_layout.setObjectName(group_label) layout.addWidget(group_layout) - group_layout.layout().addWidget(self.controls[-1]) + group_layout.layout().addWidget(control) else: - layout.addWidget(self.controls[-1]) + layout.addWidget(control) if control_widget.IS_VERTICAL_EXPANDING: # Turn off the spacer at the end, as there's already a stretchy control @@ -133,8 +158,26 @@ def _build_ui(self, job_parameters: List[Dict[str, Any]]): if need_spacer: layout.addItem(QSpacerItem(0, 0, QSizePolicy.Minimum, QSizePolicy.Expanding)) - def get_parameter_values(self): - return [{"name": control.name(), "value": control.value()} for control in self.controls] + def get_parameters(self): + """ + Returns a list of OpenJD parameter definition dicts with + a "value" key filled from the widget. + """ + parameter_values = [] + for control in self.controls.values(): + parameter = deepcopy(control.job_template_parameter) + parameter["value"] = control.value() + parameter_values.append(parameter) + return parameter_values + + def set_parameter_value(self, parameter: dict[str, Any]): + """ + Given an OpenJD parameter definition with a "value" key, + set the parameter value in the widget. + + If the parameter value cannot be set, raises a KeyError. 
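+
+        For example (hypothetical name), assuming "ImageWidth" is one of the
+        parameter definitions this widget was built with:
+
+            widget.set_parameter_value({"name": "ImageWidth", "value": 1920})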
+ """ + self.controls[parameter["name"]].set_value(parameter["value"]) def _get_parameter_label(parameter): @@ -222,6 +265,12 @@ def __init__(self, parent, parameter): value = parameter.get("value", parameter.get("default", self.OPENJD_DEFAULT_VALUE)) self.set_value(value) + def name(self): + return self.job_template_parameter["name"] + + def type(self): + return self.job_template_parameter["type"] + class _JobTemplateLineEditWidget(_JobTemplateWidget): OPENJD_CONTROL_TYPE: ControlType = ControlType.LINE_EDIT @@ -257,15 +306,22 @@ def _build_ui(self, parameter): for widget in (self.label, self.edit_control): widget.setToolTip(parameter["description"]) - def name(self): - return self.job_template_parameter["name"] - def value(self): return self.edit_control.text() def set_value(self, value): self.edit_control.setText(value) + def _handle_text_changed(self, text, callback): + message = deepcopy(self.job_template_parameter) + message["value"] = text + callback(message) + + def connect_parameter_changed(self, callback): + self.edit_control.textChanged.connect( + lambda text: self._handle_text_changed(text, callback) + ) + class _JobTemplateMultiLineEditWidget(_JobTemplateWidget): OPENJD_CONTROL_TYPE: ControlType = ControlType.MULTILINE_EDIT @@ -281,6 +337,7 @@ def _build_ui(self, parameter): layout.setContentsMargins(0, 0, 0, 0) self.label = QLabel(_get_parameter_label(parameter)) self.edit_control = QTextEdit(self) + self.edit_control.setAcceptRichText(False) if os.name == "nt": font_family = "Consolas" elif os.name == "darwin": @@ -303,15 +360,22 @@ def _build_ui(self, parameter): for widget in (self.label, self.edit_control): widget.setToolTip(parameter["description"]) - def name(self): - return self.job_template_parameter["name"] - def value(self): return self.edit_control.toPlainText() def set_value(self, value): self.edit_control.setPlainText(value) + def _handle_text_changed(self, text, callback): + message = deepcopy(self.job_template_parameter) + message["value"] = text + callback(message) + + def connect_parameter_changed(self, callback): + self.edit_control.textChanged.connect( + lambda: self._handle_text_changed(self.value(), callback) + ) + class _JobTemplateIntSpinBoxWidget(_JobTemplateWidget): OPENJD_CONTROL_TYPE: ControlType = ControlType.INT_SPIN_BOX @@ -374,15 +438,22 @@ def _build_ui(self, parameter): for widget in (self.label, self.edit_control): widget.setToolTip(parameter["description"]) - def name(self): - return self.job_template_parameter["name"] - def value(self): return self.edit_control.value() def set_value(self, value): self.edit_control.setValue(value) + def _handle_value_changed(self, value, callback): + message = deepcopy(self.job_template_parameter) + message["value"] = value + callback(message) + + def connect_parameter_changed(self, callback): + self.edit_control.valueChanged.connect( + lambda value: self._handle_value_changed(value, callback) + ) + class _JobTemplateFloatSpinBoxWidget(_JobTemplateWidget): OPENJD_CONTROL_TYPE: ControlType = ControlType.FLOAT_SPIN_BOX @@ -452,15 +523,22 @@ def _build_ui(self, parameter): for widget in (self.label, self.edit_control): widget.setToolTip(parameter["description"]) - def name(self): - return self.job_template_parameter["name"] - def value(self): return self.edit_control.value() def set_value(self, value): self.edit_control.setValue(value) + def _handle_value_changed(self, value, callback): + message = deepcopy(self.job_template_parameter) + message["value"] = value + callback(message) + + def 
connect_parameter_changed(self, callback): + self.edit_control.valueChanged.connect( + lambda value: self._handle_value_changed(value, callback) + ) + class _JobTemplateDropdownListWidget(_JobTemplateWidget): OPENJD_CONTROL_TYPE: ControlType = ControlType.DROPDOWN_LIST @@ -496,9 +574,6 @@ def _build_ui(self, parameter): for widget in (self.label, self.edit_control): widget.setToolTip(parameter["description"]) - def name(self): - return self.job_template_parameter["name"] - def value(self): return self.edit_control.currentData() @@ -507,6 +582,16 @@ def set_value(self, value): if index >= 0: self.edit_control.setCurrentIndex(index) + def _handle_index_changed(self, value, callback): + message = deepcopy(self.job_template_parameter) + message["value"] = value + callback(message) + + def connect_parameter_changed(self, callback): + self.edit_control.currentIndexChanged.connect( + lambda _: self._handle_index_changed(self.value(), callback) + ) + class _JobTemplateBaseFileWidget(_JobTemplateWidget): OPENJD_TYPES: List[str] = ["PATH"] @@ -555,15 +640,22 @@ def _build_ui(self, parameter): for widget in (self.label, self.edit_control): widget.setToolTip(parameter["description"]) - def name(self): - return self.job_template_parameter["name"] - def value(self): return self.edit_control.text() def set_value(self, value): self.edit_control.setText(value) + def _handle_path_changed(self, value, callback): + message = deepcopy(self.job_template_parameter) + message["value"] = value + callback(message) + + def connect_parameter_changed(self, callback): + self.edit_control.path_changed.connect( + lambda path: self._handle_path_changed(path, callback) + ) + class _JobTemplateInputFileWidget(_JobTemplateBaseFileWidget): OPENJD_CONTROL_TYPE: ControlType = ControlType.CHOOSE_INPUT_FILE @@ -600,15 +692,22 @@ def _build_ui(self, parameter): for widget in (self.label, self.edit_control): widget.setToolTip(parameter["description"]) - def name(self): - return self.job_template_parameter["name"] - def value(self): return self.edit_control.text() def set_value(self, value): self.edit_control.setText(value) + def _handle_path_changed(self, value, callback): + message = deepcopy(self.job_template_parameter) + message["value"] = value + callback(message) + + def connect_parameter_changed(self, callback): + self.edit_control.path_changed.connect( + lambda path: self._handle_path_changed(path, callback) + ) + # These are the permitted sets of values that can be in a string job parameter 'allowedValues' # when the user interface control is CHECK_BOX. 
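+# For example (hypothetical parameter), a CHECK_BOX control expects a STRING
+# parameter whose allowedValues is such a pair:
+#   {"name": "Watermark", "type": "STRING", "allowedValues": ["true", "false"],
+#    "userInterface": {"control": "CHECK_BOX", "label": "Watermark"}}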
@@ -658,9 +757,6 @@ def _build_ui(self, parameter: Dict[str, Any]) -> None:
         for widget in (self.label, self.edit_control):
             widget.setToolTip(parameter["description"])
 
-    def name(self) -> str:
-        return self.job_template_parameter["name"]
-
     def value(self) -> str:
         if self.edit_control.isChecked():
             return self.true_value
@@ -673,6 +769,16 @@ def set_value(self, value: str) -> None:
         else:
             self.edit_control.setChecked(False)
 
+    def _handle_value_changed(self, value, callback):
+        message = deepcopy(self.job_template_parameter)
+        message["value"] = value
+        callback(message)
+
+    def connect_parameter_changed(self, callback):
+        self.edit_control.stateChanged.connect(
+            lambda _: self._handle_value_changed(self.value(), callback)
+        )
+
 
 class _JobTemplateHiddenWidget(_JobTemplateWidget):
     OPENJD_CONTROL_TYPE: ControlType = ControlType.HIDDEN
@@ -692,15 +798,15 @@ def __init__(self, parent: QWidget, parameter: Dict[str, Any]):
     def _build_ui(self, parameter: Dict[str, Any]) -> None:
         pass
 
-    def name(self) -> str:
-        return self.job_template_parameter["name"]
-
     def value(self) -> Any:
         return self._value
 
     def set_value(self, value: Any) -> None:
         self._value = value
 
+    def connect_parameter_changed(self, callback):
+        pass
+
 
 class _JobTemplateGroupLayout(QGroupBox):
     def __init__(self, parent: QWidget, group_name: str):
diff --git a/src/deadline/client/ui/widgets/path_widgets.py b/src/deadline/client/ui/widgets/path_widgets.py
index 2e81784f..a6d82e1e 100644
--- a/src/deadline/client/ui/widgets/path_widgets.py
+++ b/src/deadline/client/ui/widgets/path_widgets.py
@@ -17,7 +17,7 @@
 
 class _FileWidget(QWidget):
-    # Emitted when the directory changes
+    # Emitted when the file changes
     path_changed = Signal(str)
 
     def __init__(
diff --git a/src/deadline/client/ui/widgets/shared_job_settings_tab.py b/src/deadline/client/ui/widgets/shared_job_settings_tab.py
index 22cb26b2..4b1dcc7f 100644
--- a/src/deadline/client/ui/widgets/shared_job_settings_tab.py
+++ b/src/deadline/client/ui/widgets/shared_job_settings_tab.py
@@ -3,22 +3,20 @@
 """
 A UI Widget containing the render setup tab
 """
+from __future__ import annotations
+
 import sys
 import threading
 from typing import Any, Dict, Optional
 
-from PySide2.QtCore import Qt, Signal  # type: ignore
+from PySide2.QtCore import Signal  # type: ignore
 from PySide2.QtWidgets import (  # type: ignore
-    QCheckBox,
     QComboBox,
     QFormLayout,
-    QGridLayout,
     QGroupBox,
     QHBoxLayout,
     QLabel,
     QLineEdit,
-    QSizePolicy,
-    QSpacerItem,
     QSpinBox,
     QVBoxLayout,
     QWidget,
@@ -27,29 +25,156 @@
 from ... import api
 from ...config import get_setting
 from .. import CancelationFlag
+from .openjd_parameters_widget import OpenJDParametersWidget
+from ...api import get_queue_parameter_definitions
 
 
 class SharedJobSettingsWidget(QWidget):  # pylint: disable=too-few-public-methods
     """
     Widget that holds Job setup shared across all job types.
+
+    Signals:
+        parameter_changed: This is sent whenever a parameter value in the widget changes. The message
+            is a copy of the parameter definition with the "value" key containing the new value.
+
+    Args:
+        initial_settings: dataclass containing the job-specific settings.
+        initial_shared_parameter_values (dict[str, Any]): A dict of parameter values
+            {<parameter name>: <value>, ...} to override default queue parameter values from the
+            queue. For example, a Rez queue environment may have a default of "" for the
+            RezPackages parameter, but a Maya submitter would override that default with
+            "maya-2023" or similar.
+        parent: The parent Qt Widget.
""" - def __init__(self, initial_settings, parent=None): - super().__init__(parent) + parameter_changed = Signal(dict) + + # Emitted when the background refresh thread catches an exception, + # provides (operation_name, BaseException) + _background_exception = Signal(str, BaseException) + + # Emitted when an async queue parameters loading thread completes, + # provides (refresh_id, queue_parameters) + _queue_parameters_update = Signal(int, list) + + def __init__( + self, *, initial_settings, initial_shared_parameter_values: dict[str, Any], parent=None + ): + super().__init__(parent=parent) layout = QVBoxLayout(self) - self.desc_box = SubmissionDescriptionWidget(initial_settings, self) - layout.addWidget(self.desc_box) - self.deadline_settings_box = DeadlineSettingsWidget(initial_settings, self) - layout.addWidget(self.deadline_settings_box) + # This is a dictionary {: } containing values to + # override the queue parameter defaults. + self.initial_shared_parameter_values = initial_shared_parameter_values + + self.shared_job_properties_box = SharedJobPropertiesWidget( + initial_settings=initial_settings, parent=self + ) + layout.addWidget(self.shared_job_properties_box) + + self.deadline_cloud_settings_box = DeadlineCloudSettingsWidget(parent=self) + layout.addWidget(self.deadline_cloud_settings_box) + + self.queue_parameters_box = OpenJDParametersWidget( + async_loading_state="Loading Queue Environments...", parent=self + ) + layout.addWidget(self.queue_parameters_box) + self.queue_parameters_box.parameter_changed.connect( + lambda message: self.parameter_changed.emit(message) + ) + + self.__refresh_queue_parameters_thread: Optional[threading.Thread] = None + self.__refresh_queue_parameters_id = 0 + self.canceled = CancelationFlag() + self.destroyed.connect(self.canceled.set_canceled) + self._queue_parameters_update.connect(self._handle_queue_parameters_update) + self._background_exception.connect(self._handle_background_queue_parameters_exception) + self._start_load_queue_parameters_thread() + + # Set any "deadline:*" parameters, like deadline:priority. + # The queue parameters will be set asynchronously by the background thread. + for name, value in initial_shared_parameter_values.items(): + if name.startswith("deadline:"): + self.set_parameter_value({"name": name, "value": value}) + + def refresh_queue_parameters(self): + """ + If the default queue id has changed, refresh the queue parameters. + """ + queue_id = get_setting("defaults.queue_id") + if self.queue_parameters_box.async_loading_state or queue_id != self.queue_id: + self.queue_parameters_box.rebuild_ui( + async_loading_state="Reloading Queue Environments..." + ) + self._start_load_queue_parameters_thread() + + def _handle_background_queue_parameters_exception(self, e): + self.queue_parameters_box.rebuild_ui( + async_loading_state="Error Loading Queue Environments." + ) + + def _start_load_queue_parameters_thread(self): + """ + Starts a background thread to load the queue parameters. 
+ """ + self.farm_id = farm_id = get_setting("defaults.farm_id") + self.queue_id = queue_id = get_setting("defaults.queue_id") + self.__refresh_queue_parameters_id += 1 + self.__refresh_queue_parameters_thread = threading.Thread( + target=self._load_queue_parameters_thread_function, + name="Amazon Deadline Cloud Load Queue Parameters Thread", + args=(self.__refresh_queue_parameters_id, farm_id, queue_id), + ) + self.__refresh_queue_parameters_thread.start() + + def _handle_queue_parameters_update(self, refresh_id, queue_parameters): + # Apply the refresh if it's still for the latest call + if refresh_id == self.__refresh_queue_parameters_id: + # Apply the initial queue parameter values + for parameter in queue_parameters: + if parameter["name"] in self.initial_shared_parameter_values: + parameter["value"] = self.initial_shared_parameter_values[parameter["name"]] + self.queue_parameters_box.rebuild_ui(parameter_definitions=queue_parameters) + + def _load_queue_parameters_thread_function(self, refresh_id: int, farm_id: str, queue_id: str): + """ + This function gets started in a background thread to refresh the list. + """ + try: + queue_parameters = get_queue_parameter_definitions(farmId=farm_id, queueId=queue_id) + if not self.canceled: + self._queue_parameters_update.emit(refresh_id, queue_parameters) + except BaseException as e: + if not self.canceled: + self._background_exception.emit("Load Queue Parameters", e) + + def update_settings(self, settings): + self.shared_job_properties_box.update_settings(settings) + + def get_parameters(self): + """ + Returns a list of OpenJD parameter definition dicts with + a "value" key filled from the widget. + """ + queue_parameters = self.queue_parameters_box.get_parameters() + deadline_shared_job_parameters = self.shared_job_properties_box.get_parameters() + + return queue_parameters + deadline_shared_job_parameters - self.rez_packages_box = InstallationRequirementsWidget(initial_settings, self) - layout.addWidget(self.rez_packages_box) + def set_parameter_value(self, parameter: dict[str, Any]): + """ + Given an OpenJD parameter definition with a "value" key, + set the parameter value in the widget. - layout.addItem(QSpacerItem(0, 0, QSizePolicy.Minimum, QSizePolicy.Expanding)) + If the parameter value cannot be set, raises a KeyError. + """ + if parameter["name"].startswith("deadline:"): + self.shared_job_properties_box.set_parameter_value(parameter) + else: + self.queue_parameters_box.set_parameter_value(parameter) -class SubmissionDescriptionWidget(QGroupBox): # pylint: disable=too-few-public-methods +class SharedJobPropertiesWidget(QGroupBox): # pylint: disable=too-few-public-methods """ UI element to hold top level description components of the submission @@ -58,14 +183,15 @@ class SubmissionDescriptionWidget(QGroupBox): # pylint: disable=too-few-public- - `description: str` The description of the Job to submit. 
""" - def __init__(self, initial_settings, parent=None): - super().__init__("Description", parent) + def __init__(self, *, initial_settings, parent=None): + super().__init__("Job Properties", parent=parent) self._build_ui() self._load_initial_settings(initial_settings) def _build_ui(self): self.layout = QFormLayout(self) + self.layout.setFieldGrowthPolicy(QFormLayout.AllNonFixedFieldsGrow) self.sub_name_edit = QLineEdit() self.layout.addRow("Name", self.sub_name_edit) @@ -74,9 +200,98 @@ def _build_ui(self): self.desc_edit = QLineEdit() self.layout.addRow(self.desc_label, self.desc_edit) + self.priority_box_label = QLabel("Priority") + self.priority_box = QSpinBox(parent=self) + self.layout.addRow(self.priority_box_label, self.priority_box) + + self.initial_status_box_label = QLabel("Initial State") + self.initial_status_box = QComboBox(parent=self) + self.initial_status_box.addItems(["READY", "SUSPENDED"]) + self.layout.addRow(self.initial_status_box_label, self.initial_status_box) + + self.max_failed_tasks_count_box_label = QLabel("Maximum Failed Tasks Count") + self.max_failed_tasks_count_box_label.setToolTip( + "Maximum number of Tasks that can fail before the Job will be marked as failed." + ) + self.max_failed_tasks_count_box = QSpinBox(parent=self) + self.max_failed_tasks_count_box.setRange(0, 2147483647) + self.layout.addRow(self.max_failed_tasks_count_box_label, self.max_failed_tasks_count_box) + + self.max_retries_per_task_box_label = QLabel("Maximum Retries Per Task") + self.max_retries_per_task_box_label.setToolTip( + "Maximum number of times that a Task will retry before it's marked as failed." + ) + self.max_retries_per_task_box = QSpinBox(parent=self) + self.max_retries_per_task_box.setRange(0, 2147483647) + self.layout.addRow(self.max_retries_per_task_box_label, self.max_retries_per_task_box) + def _load_initial_settings(self, settings): self.sub_name_edit.setText(settings.name) self.desc_edit.setText(settings.description) + self.initial_status_box.setCurrentText("READY") + self.max_failed_tasks_count_box.setValue(20) + self.max_retries_per_task_box.setValue(5) + self.priority_box.setValue(50) + + def set_parameter_value(self, parameter: dict[str, Any]): + """ + Given an OpenJD parameter definition with a "value" key, + set the parameter value in the widget. + + If the parameter value cannot be set, raises a KeyError. + """ + parameter_name = parameter["name"] + if parameter_name == "deadline:targetTaskRunStatus": + self.initial_status_box.setCurrentText(parameter["value"]) + elif parameter_name == "deadline:maxFailedTasksCount": + self.max_failed_tasks_count_box.setValue(parameter["value"]) + elif parameter_name == "deadline:maxRetriesPerTask": + self.max_retries_per_task_box.setValue(parameter["value"]) + elif parameter_name == "deadline:priority": + self.priority_box.setValue(parameter["value"]) + else: + raise KeyError(parameter_name) + + def get_parameters(self): + """ + Returns a list of OpenJD parameter definition dicts with + a "value" key filled from the widget. 
+ """ + return [ + { + "name": "deadline:targetTaskRunStatus", + "type": "STRING", + "userInterface": { + "control": "DROPDOWN_LIST", + "label": "Initial State", + }, + "allowedValues": ["READY", "SUSPENDED"], + "value": self.initial_status_box.currentText(), + }, + { + "name": "deadline:maxFailedTasksCount", + "description": "Maximum number of Tasks that can fail before the Job will be marked as failed.", + "type": "INT", + "userInterface": { + "control": "SPIN_BOX", + "label": "Maximum Failed Tasks Count", + }, + "minValue": 0, + "value": self.max_failed_tasks_count_box.value(), + }, + { + "name": "deadline:maxRetriesPerTask", + "description": "Maximum number of times that a Task will retry before it's marked as failed.", + "type": "INT", + "userInterface": { + "control": "SPIN_BOX", + "label": "Maximum Retries Per Task", + }, + "minValue": 0, + "value": self.max_retries_per_task_box.value(), + }, + {"name": "deadline:priority", "type": "INT", "value": self.priority_box.value()}, + ] def update_settings(self, settings): """ @@ -86,19 +301,18 @@ def update_settings(self, settings): settings.description = self.desc_edit.text() -class DeadlineSettingsWidget(QGroupBox): +class DeadlineCloudSettingsWidget(QGroupBox): """ UI component for the Deadline Render Manager. """ - def __init__(self, initial_settings, parent: Optional[QWidget] = None): - super().__init__("Deadline Settings", parent=parent) + def __init__(self, *, parent: Optional[QWidget] = None): + super().__init__("Deadline Cloud Settings", parent=parent) self.deadline_settings: Dict[str, Any] = {"counter": -1} - self.lyt = QFormLayout(self) - self.lyt.setFieldGrowthPolicy(QFormLayout.AllNonFixedFieldsGrow) + self.layout = QFormLayout(self) + self.layout.setFieldGrowthPolicy(QFormLayout.AllNonFixedFieldsGrow) self._build_ui() - self._load_initial_settings(initial_settings) def _set_enabled_with_label(self, prop_name: str, enabled: bool): """Enable/disable a control w/ its label""" @@ -111,36 +325,11 @@ def _build_ui(self): """ self.farm_box_label = QLabel("Farm") self.farm_box = DeadlineFarmDisplay() - self.lyt.addRow(self.farm_box_label, self.farm_box) + self.layout.addRow(self.farm_box_label, self.farm_box) self.queue_box_label = QLabel("Queue") self.queue_box = DeadlineQueueDisplay() - self.lyt.addRow(self.queue_box_label, self.queue_box) - - self.initial_status_box_label = QLabel("Initial State") - self.initial_status_box = QComboBox(parent=self) - self.initial_status_box.addItems(["READY", "SUSPENDED"]) - self.lyt.addRow(self.initial_status_box_label, self.initial_status_box) - - self.max_failed_tasks_count_box_label = QLabel("Maximum Failed Tasks Count") - self.max_failed_tasks_count_box_label.setToolTip( - "Maximum number of Tasks that can fail before the Job will be marked as failed." - ) - self.max_failed_tasks_count_box = QSpinBox(parent=self) - self.max_failed_tasks_count_box.setRange(0, 2147483647) - self.lyt.addRow(self.max_failed_tasks_count_box_label, self.max_failed_tasks_count_box) - - self.max_retries_per_task_box_label = QLabel("Maximum Retries Per Task") - self.max_retries_per_task_box_label.setToolTip( - "Maximum number of times that a Task will retry before it's marked as failed." 
- ) - self.max_retries_per_task_box = QSpinBox(parent=self) - self.max_retries_per_task_box.setRange(0, 2147483647) - self.lyt.addRow(self.max_retries_per_task_box_label, self.max_retries_per_task_box) - - self.priority_box_label = QLabel("Priority") - self.priority_box = QSpinBox(parent=self) - self.lyt.addRow(self.priority_box_label, self.priority_box) + self.layout.addRow(self.queue_box_label, self.queue_box) def refresh_setting_controls(self, deadline_authorized): """ @@ -155,70 +344,6 @@ def refresh_setting_controls(self, deadline_authorized): self.farm_box.refresh(deadline_authorized) self.queue_box.refresh(deadline_authorized) - def _load_initial_settings(self, settings): - self.initial_status_box.setCurrentText(settings.initial_status) - self.max_failed_tasks_count_box.setValue(settings.max_failed_tasks_count) - self.max_retries_per_task_box.setValue(settings.max_retries_per_task) - self.priority_box.setValue(settings.priority) - - def update_settings(self, settings) -> None: - """ - Updates an Amazon Deadline Cloud settings object with the latest values. - - The settings object should be a dataclass with: - initial_status: str (or enum of base str) - max_failed_tasks_count: int - max_retries_per_task: int - priority: int - """ - settings.initial_status = self.initial_status_box.currentText() - settings.max_failed_tasks_count = self.max_failed_tasks_count_box.value() - settings.max_retries_per_task = self.max_retries_per_task_box.value() - settings.priority = self.priority_box.value() - - -class InstallationRequirementsWidget(QGroupBox): # pylint: disable=too-few-public-methods - """ - UI element to hold list of Installation Requirements - - The settings object should be a dataclass with: - - `override_rez_packages: bool` - - `rez_packages: str` - """ - - def __init__(self, initial_settings, parent=None): - super().__init__("Installation Requirements", parent) - - self._build_ui() - self._load_initial_settings(initial_settings) - - def _build_ui(self): - self.layout = QGridLayout(self) - - self.requirements_chck = QCheckBox("Override Rez Packages", self) - self.requirements_edit = QLineEdit(self) - self.layout.addWidget(self.requirements_chck, 4, 0) - self.layout.addWidget(self.requirements_edit, 4, 1) - self.requirements_chck.stateChanged.connect(self.enable_requirements_override_changed) - - def _load_initial_settings(self, settings): - self.requirements_chck.setChecked(settings.override_rez_packages) - self.requirements_edit.setEnabled(settings.override_rez_packages) - self.requirements_edit.setText(settings.rez_packages) - - def update_settings(self, settings): - """ - Update a given instance of scene settings with updated values. 
- """ - settings.rez_packages = self.requirements_edit.text() - settings.override_rez_packages = self.requirements_chck.isChecked() - - def enable_requirements_override_changed(self, state): - """ - Set the enabled/disabled status of the requirements override text box - """ - self.requirements_edit.setEnabled(state == Qt.Checked) - class _DeadlineNamedResourceDisplay(QWidget): """ @@ -240,8 +365,8 @@ class _DeadlineNamedResourceDisplay(QWidget): # provides (refresh_id, id, name, description) _item_update = Signal(int, str, str, str) - def __init__(self, resource_name, setting_name, parent=None): - super().__init__(parent) + def __init__(self, *, resource_name, setting_name, parent=None): + super().__init__(parent=parent) self.__refresh_thread = None self.__refresh_id = 0 @@ -329,7 +454,7 @@ def _refresh_thread_function(self, refresh_id: int): class DeadlineFarmDisplay(_DeadlineNamedResourceDisplay): - def __init__(self, parent=None): + def __init__(self, *, parent=None): super().__init__(resource_name="Farm", setting_name="defaults.farm_id", parent=parent) def get_item(self): @@ -343,7 +468,7 @@ def get_item(self): class DeadlineQueueDisplay(_DeadlineNamedResourceDisplay): - def __init__(self, parent=None): + def __init__(self, *, parent=None): super().__init__(resource_name="Queue", setting_name="defaults.queue_id", parent=parent) def get_item(self): @@ -362,7 +487,7 @@ class DeadlineStorageProfileNameDisplay(_DeadlineNamedResourceDisplay): MAC_OS = "Macos" LINUX_OS = "Linux" - def __init__(self, parent=None): + def __init__(self, *, parent=None): super().__init__( resource_name="Storage Profile Name", setting_name="settings.storage_profile_id", diff --git a/src/deadline/job_attachments/models.py b/src/deadline/job_attachments/models.py index def30c9d..d9c27f36 100644 --- a/src/deadline/job_attachments/models.py +++ b/src/deadline/job_attachments/models.py @@ -36,12 +36,13 @@ class AssetRootManifest: @dataclass class AssetRootGroup: - """Represents lists of input and output files grouped under the same root""" + """Represents lists of input files, output files and path references grouped under the same root""" file_system_location_name: Optional[str] = None root_path: str = "" inputs: Set[Path] = field(default_factory=set) outputs: Set[Path] = field(default_factory=set) + references: Set[Path] = field(default_factory=set) @dataclass diff --git a/src/deadline/job_attachments/upload.py b/src/deadline/job_attachments/upload.py index 0d275d6e..328df9ef 100644 --- a/src/deadline/job_attachments/upload.py +++ b/src/deadline/job_attachments/upload.py @@ -564,6 +564,7 @@ def _get_asset_groups( self, input_paths: set[str], output_paths: set[str], + referenced_paths: set[str], local_type_locations: dict[str, str] = {}, shared_type_locations: dict[str, str] = {}, ) -> list[AssetRootGroup]: @@ -577,6 +578,8 @@ def _get_asset_groups( - The given `local_type_locations` paths can each form a group based on its root path. In other words, if there are paths relative to any of the `local_type_locations` paths, they are grouped together as one. + - The referenced paths may have no files or directories associated, but they always live + relative to one of the AssetRootGroup objects returned. """ groupings: dict[str, AssetRootGroup] = {} @@ -585,10 +588,11 @@ def _get_asset_groups( for _path in input_paths: # Need to use absolute to not resolve symlinks, but need normpath to get rid of relative paths, i.e. '..' 
abs_path = Path(os.path.normpath(Path(_path).absolute())) - if not abs_path.exists() or abs_path.is_dir(): - logger.warning( - f"Skipping uploading input as it either doesn't exist or is a directory: {abs_path}" - ) + if not abs_path.exists(): + logger.warning(f"Skipping uploading input as it doesn't exist: {abs_path}") + continue + if abs_path.is_dir(): + logger.warning(f"Skipping uploading input as it is a directory: {abs_path}") continue # Skips the upload if the path is relative to any of the File System Location @@ -622,10 +626,28 @@ def _get_asset_groups( ) groupings[matched_root].outputs.add(abs_path) + for _path in referenced_paths: + abs_path = Path(os.path.normpath(Path(_path).absolute())) + + # Skips the reference if the path is relative to any of the File System Location + # of SHARED type that was set in the Job. + if any(_is_relative_to(abs_path, shared) for shared in shared_type_locations): + continue + # If the path is relative to any of the File System Location of LOCAL type, + # groups the references into a single group using the path of that location. + matched_root = self._find_matched_root_from_local_type_locations( + groupings=groupings, + abs_path=abs_path, + local_type_locations=local_type_locations, + ) + groupings[matched_root].references.add(abs_path) + # Finally, build the list of asset root groups for asset_group in groupings.values(): common_path: Path = Path( - os.path.commonpath(list(asset_group.inputs | asset_group.outputs)) + os.path.commonpath( + list(asset_group.inputs | asset_group.outputs | asset_group.references) + ) ) if common_path.is_file(): common_path = common_path.parent @@ -733,6 +755,7 @@ def hash_assets_and_create_manifest( self, input_paths: list[str], output_paths: list[str], + referenced_paths: list[str], storage_profile_id: Optional[str] = None, hash_cache_dir: Optional[str] = None, on_preparing_to_submit: Optional[Callable[[Any], bool]] = None, @@ -766,6 +789,7 @@ def hash_assets_and_create_manifest( asset_groups: list[AssetRootGroup] = self._get_asset_groups( {ip_path for ip_path in input_paths if ip_path}, {op_path for op_path in output_paths if op_path}, + {rf_path for rf_path in referenced_paths if rf_path}, local_type_locations, shared_type_locations, ) diff --git a/test/integ/deadline_job_attachments/test_job_attachments.py b/test/integ/deadline_job_attachments/test_job_attachments.py index 8c4bbb95..bedd44c0 100644 --- a/test/integ/deadline_job_attachments/test_job_attachments.py +++ b/test/integ/deadline_job_attachments/test_job_attachments.py @@ -136,6 +136,7 @@ def upload_input_files_assets_not_in_cas(job_attachment_test: JobAttachmentTest) (_, manifests) = asset_manager.hash_assets_and_create_manifest( input_paths=[str(job_attachment_test.SCENE_MA_PATH)], output_paths=[str(job_attachment_test.OUTPUT_PATH)], + referenced_paths=[], hash_cache_dir=str(job_attachment_test.hash_cache_dir), on_preparing_to_submit=mock_on_preparing_to_submit, ) @@ -202,6 +203,7 @@ def upload_input_files_one_asset_in_cas( (_, manifests) = asset_manager.hash_assets_and_create_manifest( input_paths=input_paths, output_paths=[str(job_attachment_test.OUTPUT_PATH)], + referenced_paths=[], hash_cache_dir=str(job_attachment_test.hash_cache_dir), on_preparing_to_submit=mock_on_preparing_to_submit, ) @@ -281,6 +283,7 @@ def test_upload_input_files_all_assets_in_cas( (_, manifests) = asset_manager.hash_assets_and_create_manifest( input_paths=input_paths, output_paths=[str(job_attachment_test.OUTPUT_PATH)], + referenced_paths=[], 
hash_cache_dir=str(job_attachment_test.hash_cache_dir), on_preparing_to_submit=mock_on_preparing_to_submit, ) @@ -1028,6 +1031,7 @@ def upload_input_files_no_input_paths( (_, manifests) = asset_manager.hash_assets_and_create_manifest( input_paths=[], output_paths=[str(job_attachment_test.OUTPUT_PATH)], + referenced_paths=[], hash_cache_dir=str(job_attachment_test.hash_cache_dir), on_preparing_to_submit=mock_on_preparing_to_submit, ) @@ -1087,6 +1091,7 @@ def test_upload_input_files_no_download_paths(job_attachment_test: JobAttachment (_, manifests) = asset_manager.hash_assets_and_create_manifest( input_paths=[str(job_attachment_test.SCENE_MA_PATH)], output_paths=[], + referenced_paths=[], hash_cache_dir=str(job_attachment_test.hash_cache_dir), on_preparing_to_submit=mock_on_preparing_to_submit, ) @@ -1191,6 +1196,7 @@ def test_upload_bucket_wrong_account(external_bucket: str, job_attachment_test: (_, manifests) = asset_manager.hash_assets_and_create_manifest( input_paths=[str(job_attachment_test.SCENE_MA_PATH)], output_paths=[str(job_attachment_test.OUTPUT_PATH)], + referenced_paths=[], hash_cache_dir=str(job_attachment_test.hash_cache_dir), on_preparing_to_submit=mock_on_preparing_to_submit, ) diff --git a/test/unit/deadline_client/api/test_job_bundle_submission.py b/test/unit/deadline_client/api/test_job_bundle_submission.py index d7ae2db8..2ea4771b 100644 --- a/test/unit/deadline_client/api/test_job_bundle_submission.py +++ b/test/unit/deadline_client/api/test_job_bundle_submission.py @@ -15,13 +15,14 @@ from deadline.client import api, config from deadline.client.api import _submit_job_bundle -from deadline.client.job_bundle import submission +from deadline.client.job_bundle.submission import AssetReferences from deadline.job_attachments.models import ( AssetLoadingMethod, Attachments, ManifestProperties, OperatingSystemFamily, ) +from deadline.job_attachments.upload import S3AssetManager from deadline.job_attachments.progress_tracker import SummaryStatistics from ..shared_constants import ( @@ -434,7 +435,7 @@ def test_create_job_from_job_bundle_job_attachments( ) as client_mock, patch.object(_submit_job_bundle.api, "get_queue_boto3_session"), patch.object( api._submit_job_bundle, "_hash_attachments" ) as mock_hash_attachments, patch.object( - submission.S3AssetManager, "upload_assets" + S3AssetManager, "upload_assets" ) as mock_upload_assets, patch.object( _submit_job_bundle.api, "get_telemetry_client" ): @@ -490,14 +491,17 @@ def fake_upload_callback(metadata: Dict[str, Any]) -> bool: mock_hash_attachments.assert_called_once_with( ANY, - set( - [ - os.path.join(temp_assets_dir, "asset-1.txt"), - os.path.join(temp_assets_dir, os.path.normpath("somedir/asset-2.txt")), - os.path.join(temp_assets_dir, os.path.normpath("somedir/asset-3.bat")), - ] + AssetReferences( + input_filenames=set( + [ + os.path.join(temp_assets_dir, "asset-1.txt"), + os.path.join(temp_assets_dir, os.path.normpath("somedir/asset-2.txt")), + os.path.join(temp_assets_dir, os.path.normpath("somedir/asset-3.bat")), + ] + ), + output_directories=set([os.path.join(temp_assets_dir, "somedir")]), + referenced_paths=set(), ), - set([os.path.join(temp_assets_dir, "somedir")]), MOCK_STORAGE_PROFILE_ID, fake_hashing_callback, ) @@ -579,7 +583,7 @@ def test_create_job_from_job_bundle_with_single_asset_file( ) as client_mock, patch.object(_submit_job_bundle.api, "get_queue_boto3_session"), patch.object( api._submit_job_bundle, "_hash_attachments" ) as mock_hash_attachments, patch.object( - submission.S3AssetManager, 
"upload_assets" + S3AssetManager, "upload_assets" ) as mock_upload_assets, patch.object( _submit_job_bundle.api, "get_telemetry_client" ): @@ -641,8 +645,7 @@ def fake_upload_callback(metadata: Dict[str, Any]) -> bool: mock_hash_attachments.assert_called_once_with( ANY, - set([os.path.join(temp_assets_dir, "asset-1.txt")]), - set(), + AssetReferences(input_filenames=set([os.path.join(temp_assets_dir, "asset-1.txt")])), MOCK_STORAGE_PROFILE_ID, fake_hashing_callback, ) diff --git a/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py b/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py index 9ef4439e..d2cda0f0 100644 --- a/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py +++ b/test/unit/deadline_client/api/test_job_bundle_submission_asset_refs.py @@ -10,7 +10,6 @@ from deadline.client import api, config from deadline.client.api import _submit_job_bundle -from deadline.client.job_bundle import submission from deadline.job_attachments.models import ( Attachments, AssetLoadingMethod, @@ -18,6 +17,7 @@ ManifestProperties, OperatingSystemFamily, ) +from deadline.job_attachments.upload import S3AssetManager from deadline.job_attachments.progress_tracker import SummaryStatistics from ..shared_constants import MOCK_FARM_ID, MOCK_QUEUE_ID @@ -136,9 +136,9 @@ def test_create_job_from_job_bundle_with_all_asset_ref_variants( with patch.object(_submit_job_bundle.api, "get_boto3_session"), patch.object( _submit_job_bundle.api, "get_boto3_client" ) as client_mock, patch.object(_submit_job_bundle.api, "get_queue_boto3_session"), patch.object( - submission.S3AssetManager, "hash_assets_and_create_manifest" + S3AssetManager, "hash_assets_and_create_manifest" ) as mock_hash_assets, patch.object( - submission.S3AssetManager, "upload_assets" + S3AssetManager, "upload_assets" ) as mock_upload_assets, patch.object( _submit_job_bundle.api, "get_telemetry_client" ): @@ -268,9 +268,19 @@ def test_create_job_from_job_bundle_with_all_asset_ref_variants( "./file/inside", ] ) + referenced_paths = sorted( + os.path.normpath(os.path.abspath(p)) + for p in [ + os.path.join(temp_assets_dir, "./dir/inside/asset-dir-dirnone"), + os.path.join(temp_assets_dir, "./dir/inside/asset-dir-dirnonedefault"), + os.path.join(temp_assets_dir, "file/inside/asset-dir-filenonedefault.txt"), + os.path.join(temp_assets_dir, "file/inside/asset-dir-filenone.txt"), + ] + ) mock_hash_assets.assert_called_once_with( input_paths=input_paths, output_paths=output_paths, + referenced_paths=referenced_paths, storage_profile_id="", hash_cache_dir=os.path.expanduser(os.path.join("~", ".deadline", "cache")), on_preparing_to_submit=ANY, diff --git a/test/unit/deadline_client/cli/test_cli_bundle.py b/test/unit/deadline_client/cli/test_cli_bundle.py index 2c346b0c..69e46715 100644 --- a/test/unit/deadline_client/cli/test_cli_bundle.py +++ b/test/unit/deadline_client/cli/test_cli_bundle.py @@ -13,6 +13,7 @@ import pytest from deadline.client import config +from deadline.client.api import _queue_parameters from deadline.client.cli import main from deadline.client.cli._groups import bundle_group from deadline.client.config.config_file import set_setting @@ -28,6 +29,65 @@ MOCK_QUEUE_ID, ) +MOCK_LIST_QUEUE_ENVIRONMENTS_RESPONSE = { + "environments": [ + {"queueEnvironmentId": "queueenv-123", "name": "First Env", "priority": 2}, + {"queueEnvironmentId": "queueenv-234", "name": "Second Env", "priority": 1}, + ] +} + +MOCK_QUEUE_ENV_TEMPLATE_1 = """ +specificationVersion: 'jobtemplate-2023-09' +parameters: 
+- name: RezPackages
+  type: STRING
+  description: Choose which rez packages to install for the render.
+  default: ""
+  userInterface:
+    control: LINE_EDIT
+    label: Rez Packages
+environment:
+  name: Rez Non-Final
+  script:
+    actions:
+      onEnter:
+        command: "say-hello"
+"""
+
+MOCK_QUEUE_ENV_TEMPLATE_2 = """
+specificationVersion: 'jobtemplate-2023-09'
+parameters:
+- name: IntParam
+  type: INT
+  default: "1"
+  userInterface:
+    control: SPIN_BOX
+    label: Int Param
+environment:
+  name: Int Param Env
+  script:
+    actions:
+      onEnter:
+        command: "say-hello"
+"""
+
+MOCK_GET_QUEUE_ENVIRONMENTS_RESPONSES = [
+    {
+        "queueEnvironmentId": "queueenv-123",
+        "name": "Rez Non-Final",
+        "priority": 1,
+        "templateType": "YAML",
+        "template": MOCK_QUEUE_ENV_TEMPLATE_1,
+    },
+    {
+        "queueEnvironmentId": "queueenv-234",
+        "name": "Int Param Env",
+        "priority": 1,
+        "templateType": "YAML",
+        "template": MOCK_QUEUE_ENV_TEMPLATE_2,
+    },
+]
+
 
 def test_cli_bundle_submit(fresh_deadline_config, temp_job_bundle_dir):
     """
@@ -50,14 +110,24 @@ def test_cli_bundle_submit(fresh_deadline_config, temp_job_bundle_dir):
             f.write(MOCK_PARAMETERS_CASES["TEMPLATE_ONLY_JSON"][1])
 
     with patch.object(bundle_group, "get_boto3_client") as get_boto3_client_mock, patch.object(
+        _queue_parameters, "get_boto3_client"
+    ) as qp_boto3_client_mock, patch.object(
         bundle_group, "_hash_attachments", return_value=[]
-    ), patch.object(bundle_group, "get_queue_boto3_session"), patch.object(
+    ), patch.object(
+        bundle_group, "get_queue_boto3_session"
+    ), patch.object(
         bundle_group, "_upload_attachments"
     ), patch.object(
         bundle_group.api, "get_telemetry_client"
     ):
         get_boto3_client_mock().create_job.return_value = MOCK_CREATE_JOB_RESPONSE
         get_boto3_client_mock().get_job.return_value = MOCK_GET_JOB_RESPONSE
+        qp_boto3_client_mock().list_queue_environments.return_value = (
+            MOCK_LIST_QUEUE_ENVIRONMENTS_RESPONSE
+        )
+        qp_boto3_client_mock().get_queue_environment.side_effect = (
+            MOCK_GET_QUEUE_ENVIRONMENTS_RESPONSES
+        )
 
         runner = CliRunner()
         result = runner.invoke(main, ["bundle", "submit", temp_job_bundle_dir])
@@ -111,7 +181,7 @@ def test_cli_bundle_explicit_parameters(fresh_deadline_config):
             ],
         )
 
-        session_mock.assert_called_once_with(profile_name="NonDefaultProfileName")
+        session_mock.assert_called_with(profile_name="NonDefaultProfileName")
         session_mock().client().create_job.assert_called_once_with(
             farmId=MOCK_FARM_ID,
             queueId=MOCK_QUEUE_ID,
@@ -168,21 +238,31 @@ def test_cli_bundle_asset_load_method(fresh_deadline_config, temp_job_bundle_dir
         attachment_mock.total_bytes = 0
         attachment_mock.total_files.return_value = 0
 
-    with patch.object(bundle_group, "get_boto3_client") as get_boto3_client_mock, patch.object(
+    with patch.object(bundle_group, "get_boto3_client") as bundle_boto3_client_mock, patch.object(
+        _queue_parameters, "get_boto3_client"
+    ) as qp_boto3_client_mock, patch.object(
         bundle_group, "_hash_attachments", return_value=(attachment_mock, {})
-    ), patch.object(bundle_group, "_upload_attachments", return_value={}), patch.object(
+    ), patch.object(
+        bundle_group, "_upload_attachments", return_value={}
+    ), patch.object(
         bundle_group.api, "get_boto3_session"
     ), patch.object(
         bundle_group, "get_queue_boto3_session"
     ), patch.object(
         bundle_group.api, "get_telemetry_client"
     ):
-        get_boto3_client_mock().create_job.return_value = MOCK_CREATE_JOB_RESPONSE
-        get_boto3_client_mock().get_job.return_value = MOCK_GET_JOB_RESPONSE
-        get_boto3_client_mock().get_queue.return_value = {
+        bundle_boto3_client_mock().create_job.return_value = 
MOCK_CREATE_JOB_RESPONSE + bundle_boto3_client_mock().get_job.return_value = MOCK_GET_JOB_RESPONSE + bundle_boto3_client_mock().get_queue.return_value = { "displayName": "Test Queue", "jobAttachmentSettings": {"s3BucketName": "mock", "rootPrefix": "root"}, } + qp_boto3_client_mock().list_queue_environments.return_value = ( + MOCK_LIST_QUEUE_ENVIRONMENTS_RESPONSE + ) + qp_boto3_client_mock().get_queue_environment.side_effect = ( + MOCK_GET_QUEUE_ENVIRONMENTS_RESPONSES + ) params = ["bundle", "submit", temp_job_bundle_dir] @@ -199,7 +279,8 @@ def test_cli_bundle_asset_load_method(fresh_deadline_config, temp_job_bundle_dir else config.get_setting("defaults.job_attachments_file_system") ) - get_boto3_client_mock().create_job.assert_called_with( + assert temp_job_bundle_dir in result.output + bundle_boto3_client_mock().create_job.assert_called_with( farmId=MOCK_FARM_ID, queueId=MOCK_QUEUE_ID, parameters=MOCK_PARAMETERS_CASES["TEMPLATE_ONLY_JSON"][2]["parameters"], # type: ignore @@ -207,7 +288,6 @@ def test_cli_bundle_asset_load_method(fresh_deadline_config, temp_job_bundle_dir templateType="JSON", attachments={"assetLoadingMethod": expected_loading_method}, ) - assert temp_job_bundle_dir in result.output assert MOCK_CREATE_JOB_RESPONSE["jobId"] in result.output assert MOCK_GET_JOB_RESPONSE["lifecycleStatusMessage"] in result.output assert result.exit_code == 0 @@ -397,8 +477,12 @@ def test_cli_bundle_reject_upload_confirmation(fresh_deadline_config, temp_job_b json.dump(data, f) with patch.object(bundle_group, "get_boto3_client") as get_boto3_client_mock, patch.object( + _queue_parameters, "get_boto3_client" + ) as qp_boto3_client_mock, patch.object( bundle_group, "_hash_attachments", return_value=[SummaryStatistics(), "test"] - ), patch.object(bundle_group, "_upload_attachments") as upload_attachments_mock, patch.object( + ), patch.object( + bundle_group, "_upload_attachments" + ) as upload_attachments_mock, patch.object( bundle_group.api, "get_boto3_session" ), patch.object( bundle_group, "get_queue_boto3_session" @@ -409,6 +493,13 @@ def test_cli_bundle_reject_upload_confirmation(fresh_deadline_config, temp_job_b "displayName": "Test Queue", "jobAttachmentSettings": {"s3BucketName": "mock", "rootPrefix": "root"}, } + qp_boto3_client_mock().list_queue_environments.return_value = ( + MOCK_LIST_QUEUE_ENVIRONMENTS_RESPONSE + ) + qp_boto3_client_mock().get_queue_environment.side_effect = ( + MOCK_GET_QUEUE_ENVIRONMENTS_RESPONSES + ) + set_setting("settings.auto_accept", "false") runner = CliRunner() result = runner.invoke( diff --git a/test/unit/deadline_client/job_bundle/test_job_parameters.py b/test/unit/deadline_client/job_bundle/test_job_parameters.py index 241b6772..aaeec5bc 100644 --- a/test/unit/deadline_client/job_bundle/test_job_parameters.py +++ b/test/unit/deadline_client/job_bundle/test_job_parameters.py @@ -85,8 +85,336 @@ def test_apply_job_parameters_parameter_without_value( """ with pytest.raises(exceptions.DeadlineOperationError) as excinfo: parameters.apply_job_parameters( - job_parameters, job_bundle_dir, job_bundle_parameters, asset_references + job_parameters, job_bundle_dir, job_bundle_parameters, [], asset_references ) assert "TestParameterName" in str(excinfo) assert "no default value" in str(excinfo).lower() assert "no parameter value" in str(excinfo).lower() + + +@pytest.mark.parametrize( + "parameter_def, expected_control", + [ + # All the defaults + ({"name": "X", "type": "STRING"}, "LINE_EDIT"), + ({"name": "X", "type": "PATH"}, "CHOOSE_DIRECTORY"), + ({"name": "X", 
"type": "INT"}, "SPIN_BOX"), + ({"name": "X", "type": "FLOAT"}, "SPIN_BOX"), + # When there is an allowedValues list + ({"name": "X", "type": "STRING", "allowedValues": ["A", "B"]}, "DROPDOWN_LIST"), + ({"name": "X", "type": "PATH", "allowedValues": ["/A", "/B"]}, "DROPDOWN_LIST"), + ({"name": "X", "type": "INT", "allowedValues": [1, 2]}, "DROPDOWN_LIST"), + ({"name": "X", "type": "FLOAT", "allowedValues": [1.0, 2.5]}, "DROPDOWN_LIST"), + # Variations for PATH parameters + ({"name": "X", "type": "PATH", "objectType": "FILE"}, "CHOOSE_INPUT_FILE"), + ( + {"name": "X", "type": "PATH", "objectType": "FILE", "dataFlow": "NONE"}, + "CHOOSE_INPUT_FILE", + ), + ( + {"name": "X", "type": "PATH", "objectType": "FILE", "dataFlow": "IN"}, + "CHOOSE_INPUT_FILE", + ), + ( + {"name": "X", "type": "PATH", "objectType": "FILE", "dataFlow": "INOUT"}, + "CHOOSE_INPUT_FILE", + ), + ( + {"name": "X", "type": "PATH", "objectType": "FILE", "dataFlow": "OUT"}, + "CHOOSE_OUTPUT_FILE", + ), + ({"name": "X", "type": "PATH", "objectType": "DIRECTORY"}, "CHOOSE_DIRECTORY"), + # When the control is specified explicitly for STRING + ({"name": "X", "type": "STRING", "userInterface": {"control": "LINE_EDIT"}}, "LINE_EDIT"), + ( + {"name": "X", "type": "STRING", "userInterface": {"control": "MULTILINE_EDIT"}}, + "MULTILINE_EDIT", + ), + ( + { + "name": "X", + "type": "STRING", + "userInterface": {"control": "DROPDOWN_LIST"}, + "allowedValues": ["A", "B"], + }, + "DROPDOWN_LIST", + ), + ( + { + "name": "X", + "type": "STRING", + "userInterface": {"control": "CHECK_BOX"}, + "allowedValues": ["true", "false"], + }, + "CHECK_BOX", + ), + ({"name": "X", "type": "STRING", "userInterface": {"control": "HIDDEN"}}, "HIDDEN"), + # When the control is specified explicitly for PATH + ( + {"name": "X", "type": "PATH", "userInterface": {"control": "CHOOSE_INPUT_FILE"}}, + "CHOOSE_INPUT_FILE", + ), + ( + { + "name": "X", + "type": "PATH", + "objectType": "FILE", + "dataFlow": "NONE", + "userInterface": {"control": "CHOOSE_INPUT_FILE"}, + }, + "CHOOSE_INPUT_FILE", + ), + ( + { + "name": "X", + "type": "PATH", + "objectType": "FILE", + "dataFlow": "NONE", + "userInterface": {"control": "CHOOSE_OUTPUT_FILE"}, + }, + "CHOOSE_OUTPUT_FILE", + ), + ( + { + "name": "X", + "type": "PATH", + "objectType": "FILE", + "dataFlow": "IN", + "userInterface": {"control": "CHOOSE_INPUT_FILE"}, + }, + "CHOOSE_INPUT_FILE", + ), + ( + { + "name": "X", + "type": "PATH", + "objectType": "FILE", + "dataFlow": "INOUT", + "userInterface": {"control": "CHOOSE_INPUT_FILE"}, + }, + "CHOOSE_INPUT_FILE", + ), + ( + { + "name": "X", + "type": "PATH", + "objectType": "FILE", + "dataFlow": "INOUT", + "userInterface": {"control": "CHOOSE_OUTPUT_FILE"}, + }, + "CHOOSE_OUTPUT_FILE", + ), + ( + { + "name": "X", + "type": "PATH", + "objectType": "FILE", + "dataFlow": "OUT", + "userInterface": {"control": "CHOOSE_OUTPUT_FILE"}, + }, + "CHOOSE_OUTPUT_FILE", + ), + ( + { + "name": "X", + "type": "PATH", + "objectType": "DIRECTORY", + "userInterface": {"control": "CHOOSE_DIRECTORY"}, + }, + "CHOOSE_DIRECTORY", + ), + ( + { + "name": "X", + "type": "PATH", + "userInterface": {"control": "DROPDOWN_LIST"}, + "allowedValues": ["/A", "/B"], + }, + "DROPDOWN_LIST", + ), + ({"name": "X", "type": "PATH", "userInterface": {"control": "HIDDEN"}}, "HIDDEN"), + # When the control is specified explicitly for INT + ({"name": "X", "type": "INT", "userInterface": {"control": "SPIN_BOX"}}, "SPIN_BOX"), + ( + { + "name": "X", + "type": "INT", + "userInterface": {"control": "DROPDOWN_LIST"}, + 
"allowedValues": [1, 2], + }, + "DROPDOWN_LIST", + ), + ({"name": "X", "type": "INT", "userInterface": {"control": "HIDDEN"}}, "HIDDEN"), + # When the control is specified explicitly for FLOAT + ({"name": "X", "type": "FLOAT", "userInterface": {"control": "SPIN_BOX"}}, "SPIN_BOX"), + ( + { + "name": "X", + "type": "FLOAT", + "userInterface": {"control": "DROPDOWN_LIST"}, + "allowedValues": [1, 2], + }, + "DROPDOWN_LIST", + ), + ({"name": "X", "type": "FLOAT", "userInterface": {"control": "HIDDEN"}}, "HIDDEN"), + ], +) +def test_ui_control_for_parameter_definition(parameter_def, expected_control): + """Test that the correct UI control for a parameter definition is returned.""" + assert parameters.get_ui_control_for_parameter_definition(parameter_def) == expected_control + + +@pytest.mark.parametrize( + "parameter_def", + [ + # Unsupported type + ({"name": "X", "type": "UNSUPPORTED"}), + # Dropdown list requires allowedValues + ({"name": "X", "type": "STRING", "userInterface": {"control": "DROPDOWN_LIST"}}), + ({"name": "X", "type": "PATH", "userInterface": {"control": "DROPDOWN_LIST"}}), + ({"name": "X", "type": "INT", "userInterface": {"control": "DROPDOWN_LIST"}}), + ({"name": "X", "type": "FLOAT", "userInterface": {"control": "DROPDOWN_LIST"}}), + # Supported controls, but not for the STRING type + ({"name": "X", "type": "STRING", "userInterface": {"control": "CHOOSE_INPUT_FILE"}}), + ({"name": "X", "type": "STRING", "userInterface": {"control": "CHOOSE_OUTPUT_FILE"}}), + ({"name": "X", "type": "STRING", "userInterface": {"control": "CHOOSE_DIRECTORY"}}), + ({"name": "X", "type": "STRING", "userInterface": {"control": "SPIN_BOX"}}), + # Supported controls, but not for the PATH type + ({"name": "X", "type": "PATH", "userInterface": {"control": "LINE_EDIT"}}), + ({"name": "X", "type": "PATH", "userInterface": {"control": "MULTILINE_EDIT"}}), + ({"name": "X", "type": "PATH", "userInterface": {"control": "CHECK_BOX"}}), + ({"name": "X", "type": "PATH", "userInterface": {"control": "SPIN_BOX"}}), + # Supported controls, but not for the INT type + ({"name": "X", "type": "INT", "userInterface": {"control": "LINE_EDIT"}}), + ({"name": "X", "type": "INT", "userInterface": {"control": "MULTILINE_EDIT"}}), + ({"name": "X", "type": "INT", "userInterface": {"control": "CHECK_BOX"}}), + ({"name": "X", "type": "INT", "userInterface": {"control": "CHOOSE_INPUT_FILE"}}), + ({"name": "X", "type": "INT", "userInterface": {"control": "CHOOSE_OUTPUT_FILE"}}), + ({"name": "X", "type": "INT", "userInterface": {"control": "CHOOSE_DIRECTORY"}}), + # Supported controls, but not for the FLOAT type + ({"name": "X", "type": "FLOAT", "userInterface": {"control": "LINE_EDIT"}}), + ({"name": "X", "type": "FLOAT", "userInterface": {"control": "MULTILINE_EDIT"}}), + ({"name": "X", "type": "FLOAT", "userInterface": {"control": "CHECK_BOX"}}), + ({"name": "X", "type": "FLOAT", "userInterface": {"control": "CHOOSE_INPUT_FILE"}}), + ({"name": "X", "type": "FLOAT", "userInterface": {"control": "CHOOSE_OUTPUT_FILE"}}), + ({"name": "X", "type": "FLOAT", "userInterface": {"control": "CHOOSE_DIRECTORY"}}), + ], +) +def test_ui_control_for_parameter_definition_errors(parameter_def): + """Test that an error is raised with incorrect parameter definition controls.""" + with pytest.raises(exceptions.DeadlineOperationError): + parameters.get_ui_control_for_parameter_definition(parameter_def) + + +@pytest.mark.parametrize( + "parameter1, parameter2, expected_difference", + [ + # Cases where they match + ({"name": "X", "type": "STRING"}, 
{"name": "X", "type": "STRING"}, []), + ( + {"name": "X", "type": "STRING"}, + {"name": "X", "type": "STRING", "userInterface": {"control": "HIDDEN"}}, + [], + ), + ( + {"name": "X", "type": "STRING", "allowedValues": ["A", "B"]}, + {"name": "X", "type": "STRING", "allowedValues": ["B", "A"]}, + [], + ), + # Different name + ({"name": "X", "type": "STRING"}, {"name": "Y", "type": "STRING"}, ["name"]), + # Different type + ({"name": "X", "type": "STRING"}, {"name": "X", "type": "PATH"}, ["type"]), + # Different minValue + ( + {"name": "X", "type": "INT", "minValue": 3}, + {"name": "X", "type": "INT", "minValue": 2}, + ["minValue"], + ), + # Different maxValue + ( + {"name": "X", "type": "INT", "maxValue": 3}, + {"name": "X", "type": "INT", "maxValue": 2}, + ["maxValue"], + ), + # Different minLength + ( + {"name": "X", "type": "STRING", "minLength": 3}, + {"name": "X", "type": "STRING", "minLength": 2}, + ["minLength"], + ), + # Different maxLength + ( + {"name": "X", "type": "STRING", "maxLength": 3}, + {"name": "X", "type": "STRING", "maxLength": 2}, + ["maxLength"], + ), + # Different dataFlow + ( + { + "name": "X", + "type": "PATH", + "dataFlow": "NONE", + }, + { + "name": "X", + "type": "PATH", + "dataFlow": "IN", + }, + ["dataFlow"], + ), + ( + { + "name": "X", + "type": "PATH", + "dataFlow": "IN", + }, + { + "name": "X", + "type": "PATH", + }, + ["dataFlow"], + ), + # Different objectType + ( + { + "name": "X", + "type": "PATH", + "objectType": "FILE", + }, + { + "name": "X", + "type": "PATH", + "objectType": "DIRECTORY", + }, + ["objectType"], + ), + # Different allowedValues + ( + {"name": "X", "type": "STRING", "allowedValues": ["A", "B"]}, + {"name": "X", "type": "STRING", "allowedValues": ["B", "C"]}, + ["allowedValues"], + ), + # Many differences + ( + { + "name": "X", + "type": "PATH", + "dataFlow": "IN", + "objectType": "FILE", + "minLength": 3, + "maxLength": 5, + }, + { + "name": "Y", + "type": "STRING", + "allowedValues": ["B", "C"], + "minLength": 2, + }, + ["name", "type", "minLength", "maxLength", "dataFlow", "objectType", "allowedValues"], + ), + ], +) +def test_parameter_definition_difference(parameter1, parameter2, expected_difference): + """Test that parameter_definition_difference returns expected differences.""" + assert parameters.parameter_definition_difference(parameter1, parameter2) == expected_difference diff --git a/test/unit/deadline_client/job_bundle/test_job_submission.py b/test/unit/deadline_client/job_bundle/test_job_submission.py index ace94dd7..9cb908e3 100644 --- a/test/unit/deadline_client/job_bundle/test_job_submission.py +++ b/test/unit/deadline_client/job_bundle/test_job_submission.py @@ -5,14 +5,11 @@ arguments to call CreateJob with. 
""" from __future__ import annotations -from typing import Any, Dict, Tuple -from unittest.mock import Mock +from typing import Any, Dict import pytest from deadline.client.job_bundle import submission -from deadline.job_attachments.models import AssetLoadingMethod, Attachments -from deadline.job_attachments.progress_tracker import SummaryStatistics MOCK_FARM_ID = "farm-0123456789abcdef0123456789abcdef" MOCK_QUEUE_ID = "queue-0123456789abcdef0123456789abcdef" @@ -23,7 +20,7 @@ [ pytest.param( {}, - submission.FlatAssetReferences(), + submission.AssetReferences(), ), pytest.param( { @@ -32,7 +29,7 @@ "outputs": {"directories": []}, } }, - submission.FlatAssetReferences(), + submission.AssetReferences(), ), pytest.param( { @@ -44,7 +41,7 @@ "outputs": {"directories": ["output_dir_1"]}, } }, - submission.FlatAssetReferences( + submission.AssetReferences( input_filenames=set(["input_file_1.txt", "input_file_2.txt"]), output_directories=set(["output_dir_1"]), ), @@ -59,7 +56,7 @@ "outputs": {"directories": ["output_dir_1"]}, } }, - submission.FlatAssetReferences( + submission.AssetReferences( input_filenames=set( [ "input_file_1.txt", @@ -74,51 +71,19 @@ ) def test_flatten_asset_references( assets_input: Dict[str, Any], - expected_output: submission.FlatAssetReferences, + expected_output: submission.AssetReferences, ) -> None: """ Test that FlatAssetReferences.from_dict creates a FlatAssetReferences object with all of the filenames/directories from an input. """ - response = submission.FlatAssetReferences.from_dict(assets_input) + response = submission.AssetReferences.from_dict(assets_input) assert response.input_filenames == expected_output.input_filenames assert response.input_directories == expected_output.input_directories assert response.output_directories == expected_output.output_directories -def test_upload_job_attachments() -> None: - """ - Test that upload_job_attachments starts the assets upload and returns the - attachment settings. 
-    """
-
-    def fake_progress_callback():
-        pass
-
-    expected_output: Tuple[SummaryStatistics, Dict[str, Any]] = (
-        SummaryStatistics(),
-        {
-            "manifests": [],
-            "assetLoadingMethod": AssetLoadingMethod.PRELOAD,
-        },
-    )
-
-    asset_manager = Mock()
-    asset_manager.upload_assets.return_value = [
-        SummaryStatistics(),
-        Attachments(),
-    ]
-
-    summary, response = submission.upload_job_attachments(asset_manager, [], fake_progress_callback)
-
-    asset_manager.upload_assets.assert_called_once_with(
-        manifests=[], on_uploading_assets=fake_progress_callback
-    )
-
-    assert (summary, response) == expected_output
-
-
 def test_split_parameter_args() -> None:
     """
-    Tests that split_parameter_args parses the input job bundle paramters
+    Tests that split_parameter_args parses the input job bundle parameters
diff --git a/test/unit/deadline_job_attachments/test_upload.py b/test/unit/deadline_job_attachments/test_upload.py
index ce82986c..57f5f989 100644
--- a/test/unit/deadline_job_attachments/test_upload.py
+++ b/test/unit/deadline_job_attachments/test_upload.py
@@ -174,6 +174,7 @@ def test_asset_management(
                 str(output_dir2),
                 "",
             ],
+            referenced_paths=[],
             hash_cache_dir=str(cache_dir),
             on_preparing_to_submit=mock_on_preparing_to_submit,
         )
@@ -336,6 +337,7 @@ def test_asset_management_windows_multi_root(
         ) = asset_manager.hash_assets_and_create_manifest(
             input_paths=[input_c, input_d],
             output_paths=[output_d],
+            referenced_paths=[],
             hash_cache_dir=cache_dir,
             on_preparing_to_submit=mock_on_preparing_to_submit,
         )
@@ -506,6 +508,7 @@ def test_asset_management_many_inputs(
         ) = asset_manager.hash_assets_and_create_manifest(
             input_paths=input_files,
             output_paths=[str(Path(asset_root).joinpath("outputs"))],
+            referenced_paths=[],
             hash_cache_dir=cache_dir,
             on_preparing_to_submit=mock_on_preparing_to_submit,
         )
@@ -656,6 +659,7 @@ def test_asset_management_many_inputs_with_same_hash(
         ) = asset_manager.hash_assets_and_create_manifest(
             input_paths=input_files,
             output_paths=[str(Path(asset_root).joinpath("outputs"))],
+            referenced_paths=[],
             hash_cache_dir=cache_dir,
             on_preparing_to_submit=mock_on_preparing_to_submit,
         )
@@ -772,6 +776,7 @@ def mock_hash_file(file_path: str):
         ) = asset_manager.hash_assets_and_create_manifest(
             input_paths=[already_uploaded_file, not_yet_uploaded_file],
             output_paths=[],
+            referenced_paths=[],
             hash_cache_dir=cache_dir,
             on_preparing_to_submit=mock_on_preparing_to_submit,
         )
@@ -908,6 +913,7 @@ def test_asset_management_no_outputs_large_number_of_inputs_already_uploaded(
         ) = asset_manager.hash_assets_and_create_manifest(
             input_paths=input_files,
             output_paths=[],
+            referenced_paths=[],
             hash_cache_dir=cache_dir,
             on_preparing_to_submit=mock_on_preparing_to_submit,
         )
@@ -1010,6 +1016,7 @@ def test_asset_management_no_inputs(
         ) = asset_manager.hash_assets_and_create_manifest(
             input_paths=[],
             output_paths=[output_dir],
+            referenced_paths=[],
             hash_cache_dir=cache_dir,
             on_preparing_to_submit=mock_on_preparing_to_submit,
         )
@@ -1188,7 +1195,7 @@ def test_asset_management_manifest_version_not_implemented(self, farm_id, queue_
             test_file = tmpdir.join("test.txt")
             test_file.write("test")
             asset_manager.hash_assets_and_create_manifest(
-                [test_file], [], hash_cache_dir=cache_dir
+                [test_file], [], [], hash_cache_dir=cache_dir
             )
 
     @mock_sts
@@ -1446,6 +1453,7 @@ def test_asset_management_input_not_exists(self, farm_id, queue_id, tmpdir, capl
         ) = asset_manager.hash_assets_and_create_manifest(
             input_paths=[input_not_exist, scene_file],
             output_paths=[str(Path(asset_root).joinpath("outputs"))],
+            referenced_paths=[],
             hash_cache_dir=cache_dir,
on_preparing_to_submit=mock_on_preparing_to_submit, ) @@ -1456,10 +1464,7 @@ def test_asset_management_input_not_exists(self, farm_id, queue_id, tmpdir, capl ) # Then - assert ( - "Skipping uploading input as it either doesn't exist or is a directory: " - in caplog.text - ) + assert "Skipping uploading input as it doesn't exist: " in caplog.text assert_progress_report_last_callback( num_input_files=1, @@ -1565,6 +1570,7 @@ def test_manage_assets_with_symlinks( ) = asset_manager.hash_assets_and_create_manifest( input_paths=[str(symlink_input_path)], output_paths=[str(symlink_output_path)], + referenced_paths=[], hash_cache_dir=str(cache_dir), on_preparing_to_submit=mock_on_preparing_to_submit, ) @@ -1725,11 +1731,12 @@ def test_get_file_system_locations_by_type( ) @patch.object(Path, "exists", return_value=True) @pytest.mark.parametrize( - "input_paths, output_paths, local_type_locations, shared_type_locations, expected_result", + "input_paths, output_paths, referenced_paths, local_type_locations, shared_type_locations, expected_result", [ ( set(), # input paths set(), # output paths + set(), # referenced paths {}, # File System Location (LOCAL type) {}, # File System Location (SHARED type) [], @@ -1737,6 +1744,7 @@ def test_get_file_system_locations_by_type( ( {"/home/username/docs/inputs/input1.txt"}, # input paths {"/home/username/docs/outputs"}, # output paths + set(), # referenced paths {"/home/username/movie1": "Movie 1 - Local"}, # File System Location (LOCAL type) {}, # File System Location (SHARED type) [ @@ -1754,6 +1762,7 @@ def test_get_file_system_locations_by_type( ( {"/home/username/movie1/inputs/input1.txt"}, # input paths {"/home/username/movie1/outputs"}, # output paths + set(), # referenced paths {"/home/username/movie1": "Movie 1 - Local"}, # File System Location (LOCAL type) {}, # File System Location (SHARED type) [ @@ -1772,6 +1781,7 @@ def test_get_file_system_locations_by_type( ( {"/mnt/shared/movie1/something.txt"}, # input paths {"/home/username/movie1/outputs"}, # output paths + set(), # referenced paths {"/home/username/movie1": "Movie 1 - Local"}, # File System Location (LOCAL type) {"/mnt/shared/movie1": "Movi 1 - Shared"}, # File System Location (SHARED type) [ @@ -1798,6 +1808,7 @@ def test_get_file_system_locations_by_type( "/home/username/movie1/outputs1", "/home/username/movie1/outputs2", }, # output paths + {"/home/username/movie1/outputs1/referenced/path"}, # referenced paths {"/home/username/movie1": "Movie 1 - Local"}, # File System Location (LOCAL type) {"/mnt/shared/movie1": "Movi 1 - Shared"}, # File System Location (SHARED type) [ @@ -1812,6 +1823,7 @@ def test_get_file_system_locations_by_type( Path("/home/username/movie1/outputs1"), Path("/home/username/movie1/outputs2"), }, + references={Path("/home/username/movie1/outputs1/referenced/path")}, ), AssetRootGroup( root_path="/home/username", @@ -1832,6 +1844,7 @@ def test_get_asset_groups( queue_id: str, input_paths: Set[str], output_paths: Set[str], + referenced_paths: Set[str], local_type_locations: Dict[str, str], shared_type_locations: Dict[str, str], expected_result: List[AssetRootGroup], @@ -1844,6 +1857,7 @@ def test_get_asset_groups( result = asset_manager._get_asset_groups( input_paths, output_paths, + referenced_paths, local_type_locations, shared_type_locations, )
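
---

The parametrized cases in test_ui_control_for_parameter_definition pin down how get_ui_control_for_parameter_definition picks a widget for each parameter type. The following is a minimal sketch reconstructed from those test expectations, not the library source; guess_default_control is a hypothetical name.

# Sketch reconstructed from the test expectations; not the library implementation.
from typing import Any, Dict


def guess_default_control(param: Dict[str, Any]) -> str:
    # An explicit userInterface.control wins (the real function additionally
    # validates that the control is compatible with the parameter type, which
    # is what test_ui_control_for_parameter_definition_errors exercises).
    explicit = param.get("userInterface", {}).get("control")
    if explicit:
        return explicit
    # Any type with an allowedValues list defaults to a dropdown.
    if "allowedValues" in param:
        return "DROPDOWN_LIST"
    param_type = param["type"]
    if param_type == "STRING":
        return "LINE_EDIT"
    if param_type in ("INT", "FLOAT"):
        return "SPIN_BOX"
    if param_type == "PATH":
        if param.get("objectType") == "FILE":
            # Only dataFlow=OUT files get a save-style chooser; NONE, IN,
            # INOUT, and unspecified all open an existing file.
            if param.get("dataFlow") == "OUT":
                return "CHOOSE_OUTPUT_FILE"
            return "CHOOSE_INPUT_FILE"
        return "CHOOSE_DIRECTORY"
    raise ValueError(f"Unsupported parameter type: {param_type}")


# Matches the parametrized rows above, e.g.:
assert guess_default_control({"name": "X", "type": "PATH"}) == "CHOOSE_DIRECTORY"
assert (
    guess_default_control({"name": "X", "type": "INT", "allowedValues": [1, 2]})
    == "DROPDOWN_LIST"
)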
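The test_parameter_definition_difference cases also imply a field-by-field comparison used to decide when a queue parameter and a job parameter are "the same". This sketch infers the field order and the order-insensitive allowedValues check from the expected outputs; it is illustrative, and sketch_parameter_difference is a hypothetical name.

# Sketch of the comparison the difference tests exercise; inferred, not copied.
from typing import Any, Dict, List

_COMPARED_FIELDS = [
    "name", "type", "minValue", "maxValue", "minLength", "maxLength",
    "dataFlow", "objectType",
]


def sketch_parameter_difference(a: Dict[str, Any], b: Dict[str, Any]) -> List[str]:
    diffs = [f for f in _COMPARED_FIELDS if a.get(f) != b.get(f)]
    # allowedValues compares as a set: ["A", "B"] matches ["B", "A"].
    if set(a.get("allowedValues", [])) != set(b.get("allowedValues", [])):
        diffs.append("allowedValues")
    # userInterface is presentation-only and is ignored, which is why a
    # definition with a HIDDEN control still compares equal in the tests.
    return diffs


assert sketch_parameter_difference(
    {"name": "X", "type": "STRING", "allowedValues": ["A", "B"]},
    {"name": "X", "type": "STRING", "allowedValues": ["B", "A"]},
) == []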
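Finally, a usage sketch for the FlatAssetReferences-to-AssetReferences rename and the new referenced_paths plumbing seen throughout these tests. The referenced_paths field name is assumed from the argument added to hash_assets_and_create_manifest; the paths and the asset_manager setup are placeholders.

# Sketch of submitting referenced (dataFlow=NONE) paths; names partly assumed.
from deadline.client.job_bundle.submission import AssetReferences

refs = AssetReferences(
    input_filenames={"/proj/scene.blend"},
    output_directories={"/proj/renders"},
    # PATH parameters with dataFlow NONE are never uploaded or downloaded,
    # but still land in an asset root group so path mapping applies on the
    # worker.
    referenced_paths={"/proj/shared/textures"},
)

# hash_assets_and_create_manifest now takes the referenced paths as its third
# positional argument (see the examples/ and test_upload.py hunks):
# (_, manifests) = asset_manager.hash_assets_and_create_manifest(
#     sorted(refs.input_filenames),
#     sorted(refs.output_directories),
#     sorted(refs.referenced_paths),
# )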