diff --git a/src/deadline/client/api/__init__.py b/src/deadline/client/api/__init__.py index 9a2597a6..86336960 100644 --- a/src/deadline/client/api/__init__.py +++ b/src/deadline/client/api/__init__.py @@ -16,6 +16,7 @@ "list_queues", "list_jobs", "list_fleets", + "list_storage_profiles_for_queue", "get_queue_boto3_session", ] @@ -152,3 +153,21 @@ def list_fleets(config=None, **kwargs): deadline = get_boto3_client("deadline", config=config) return _call_paginated_deadline_list_api(deadline.list_fleets, "fleets", **kwargs) + + +def list_storage_profiles_for_queue(config=None, **kwargs): + """ + Calls the deadline:ListStorageProfilesForQueue API call, applying the filter for user membership + depending on the configuration. If the response is paginated, it repeatedly + calls the API to get all the storage profiles. + """ + if "principalId" not in kwargs: + user_id, _ = get_user_and_identity_store_id(config=config) + if user_id: + kwargs["principalId"] = user_id + + deadline = get_boto3_client("deadline", config=config) + + return _call_paginated_deadline_list_api( + deadline.list_storage_profiles_for_queue, "storageProfiles", **kwargs + ) diff --git a/src/deadline/client/api/_submit_job_bundle.py b/src/deadline/client/api/_submit_job_bundle.py index 7d186f8d..455e63f2 100644 --- a/src/deadline/client/api/_submit_job_bundle.py +++ b/src/deadline/client/api/_submit_job_bundle.py @@ -99,6 +99,10 @@ def create_job_from_job_bundle( "templateType": file_type, } + storage_profile_id = get_setting("defaults.storage_profile_id", config=config) + if storage_profile_id: + create_job_args["storageProfileId"] = storage_profile_id + # The job parameters job_bundle_parameters = read_job_bundle_parameters(job_bundle_dir) diff --git a/src/deadline/client/cli/groups/bundle_group.py b/src/deadline/client/cli/groups/bundle_group.py index 42232aa7..2f845464 100644 --- a/src/deadline/client/cli/groups/bundle_group.py +++ b/src/deadline/client/cli/groups/bundle_group.py @@ -123,6 +123,10 @@ 
def bundle_submit(job_bundle_dir, asset_loading_method, parameter, **args): "templateType": file_type, } + storage_profile_id = get_setting("defaults.storage_profile_id", config=config) + if storage_profile_id: + create_job_args["storageProfileId"] = storage_profile_id + # The job parameters job_bundle_parameters = read_job_bundle_parameters(job_bundle_dir) diff --git a/src/deadline/client/cli/groups/config_group.py b/src/deadline/client/cli/groups/config_group.py index 68d32ce5..73036902 100644 --- a/src/deadline/client/cli/groups/config_group.py +++ b/src/deadline/client/cli/groups/config_group.py @@ -27,6 +27,9 @@ def cli_config(): defaults.queue_id: The default queue ID to use for job submissions or CLI operations. + defaults.storage_profile_id: + The default storage profile ID to use for job submission or CLI operations. + settings.job_history_dir: The directory in which to create new job bundles for submitting to Amazon Deadline Cloud, to produce a history of job submissions. diff --git a/src/deadline/client/config/config_file.py b/src/deadline/client/config/config_file.py index 1b32b03e..178884a6 100644 --- a/src/deadline/client/config/config_file.py +++ b/src/deadline/client/config/config_file.py @@ -73,6 +73,11 @@ "depend": "defaults.farm_id", "section_format": "{}", }, + "defaults.storage_profile_id": { + "default": "", + "depend": "defaults.queue_id", + "section_format": "{}", + }, "settings.auto_accept": { "default": "false", }, diff --git a/src/deadline/client/ui/dialogs/deadline_config_dialog.py b/src/deadline/client/ui/dialogs/deadline_config_dialog.py index 5bcea903..f98bbd67 100644 --- a/src/deadline/client/ui/dialogs/deadline_config_dialog.py +++ b/src/deadline/client/ui/dialogs/deadline_config_dialog.py @@ -10,6 +10,7 @@ __all__ = ["DeadlineConfigDialog"] +import sys import threading from configparser import ConfigParser from logging import getLogger, root @@ -152,6 +153,9 @@ class DeadlineWorkstationConfigWidget(QWidget): # Emitted when an async 
refresh_queues_list thread completes, # provides (aws_profile_name, farm_id, [(queue_id, queue_name), ...]) _queue_list_update = Signal(str, str, list) + # Emitted when an async refresh_storage_profiles_name_list thread completes, + # provides (aws_profile_name, farm_id, queue_id, [storage_profile_id, ...]) + _storage_profile_list_update = Signal(str, str, list) # This signal is sent when any background refresh thread catches an exception, # provides (operation_name, BaseException) _background_exception = Signal(str, BaseException) @@ -238,6 +242,18 @@ def _build_farm_settings_ui(self, group, layout): self.default_queue_box.background_exception.connect(self.handle_background_exception) layout.addRow(default_queue_box_label, self.default_queue_box) + self.default_storage_profile_box = DeadlineStorageProfileNameListComboBox(parent=group) + default_storage_profile_box_label = self.labels["defaults.storage_profile_id"] = QLabel( + "Default Storage Profile" + ) + self.default_storage_profile_box.box.currentIndexChanged.connect( + self.default_storage_profile_name_changed + ) + self.default_storage_profile_box.background_exception.connect( + self.handle_background_exception + ) + layout.addRow(default_storage_profile_box_label, self.default_storage_profile_box) + def _build_general_settings_ui(self, group, layout): self.auto_accept = self._init_checkbox_setting( group, layout, "settings.auto_accept", "Auto Accept Confirmation Prompts" @@ -360,6 +376,7 @@ def _fill_aws_profiles_box(self): def refresh_lists(self): self.default_farm_box.refresh_list() self.default_queue_box.refresh_list() + self.default_storage_profile_box.refresh_list() def refresh(self): """ @@ -372,6 +389,7 @@ def refresh(self): config_file.set_setting(setting_name, value, self.config) self.default_farm_box.set_config(self.config) self.default_queue_box.set_config(self.config) + self.default_storage_profile_box.set_config(self.config) with block_signals(self.aws_profiles_box): aws_profile_name = 
config_file.get_setting( @@ -404,6 +422,7 @@ def refresh(self): refresh_callback() self.default_queue_box.refresh_selected_id() + self.default_storage_profile_box.refresh_selected_id() # Put an orange box around the labels for any settings that are changed for setting_name, label in self.labels.items(): @@ -447,6 +466,7 @@ def aws_profile_changed(self, value): self.changes["defaults.aws_profile_name"] = value self.default_farm_box.clear_list() self.default_queue_box.clear_list() + self.default_storage_profile_box.clear_list() self.refresh() def job_history_dir_changed(self): @@ -462,6 +482,7 @@ def default_farm_changed(self, index): self.changes["defaults.farm_id"] = self.default_farm_box.box.itemData(index) self.refresh() self.default_queue_box.refresh_list() + self.default_storage_profile_box.refresh_list() def deadline_endpoint_url_edited(self): deadline_endpoint_url = self.deadline_endpoint_url_edit.text() @@ -475,6 +496,13 @@ def deadline_endpoint_url_edited(self): def default_queue_changed(self, index): self.changes["defaults.queue_id"] = self.default_queue_box.box.itemData(index) self.refresh() + self.default_storage_profile_box.refresh_list() + + def default_storage_profile_name_changed(self, index): + self.changes["defaults.storage_profile_id"] = self.default_storage_profile_box.box.itemData( + index + ) + self.refresh() class _DeadlineResourceListComboBox(QWidget): @@ -621,3 +649,47 @@ def list_resources(self, config: Optional[ConfigParser]): return sorted([(item["displayName"], item["queueId"]) for item in response["queues"]]) else: return [] + + +class DeadlineStorageProfileNameListComboBox(_DeadlineResourceListComboBox): + WINDOWS_OS = "windows" + MAC_OS = "macos" + LINUX_OS = "linux" + + def __init__(self, parent=None): + super().__init__( + resource_name="Storage Profile", + setting_name="defaults.storage_profile_id", + parent=parent, + ) + + def list_resources(self, config: Optional[ConfigParser]): + default_farm_id = 
config_file.get_setting("defaults.farm_id", config=config) + default_queue_id = config_file.get_setting("defaults.queue_id", config=config) + if default_farm_id and default_queue_id: + response = api.list_storage_profiles_for_queue( + config=config, farmId=default_farm_id, queueId=default_queue_id + ) + storage_profiles = response.get("storageProfiles", []) + return sorted( + (item["displayName"], item["storageProfileId"]) + for item in storage_profiles + if self._get_current_os() == item["osFamily"] + ) + else: + return [] + + def _get_current_os(self) -> str: + """ + Get a string specifying what the OS is, following the format the Deadline storage profile API expects. + """ + if sys.platform.startswith("linux"): + return self.LINUX_OS + + if sys.platform.startswith("darwin"): + return self.MAC_OS + + if sys.platform.startswith("win"): + return self.WINDOWS_OS + + return "Unknown" diff --git a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py index 56f2e484..fb1ac784 100644 --- a/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_progress_dialog.py @@ -71,6 +71,7 @@ class SubmitJobProgressDialog(QDialog): def start_submission( farm_id: str, queue_id: str, + storage_profile_id: str, job_bundle_dir: str, asset_manager: S3AssetManager, deadline_client: BaseClient, @@ -86,6 +87,8 @@ def start_submission( Args: farm_id (str): Id of the farm to submit to queue_id (str): Id of the queue to submit to + storage_profile_id (str): Id of the storage profile to associate + with the job. job_bundle_dir (str): Path to the folder containing the job bundle to submit. 
asset_manager (S3AssetManager): A job attachments S3AssetManager @@ -100,6 +103,7 @@ def start_submission( job_progress_dialog = SubmitJobProgressDialog( farm_id, queue_id, + storage_profile_id, job_bundle_dir, asset_manager, deadline_client, @@ -113,6 +117,7 @@ def __init__( self, farm_id: str, queue_id: str, + storage_profile_id: str, job_bundle_dir: str, asset_manager: S3AssetManager, deadline_client: BaseClient, @@ -124,6 +129,7 @@ def __init__( self._farm_id = farm_id self._queue_id = queue_id + self._storage_profile_id = storage_profile_id self._job_bundle_dir = job_bundle_dir self._asset_manager = asset_manager self._deadline_client = deadline_client @@ -201,6 +207,9 @@ def _start_submission(self): self._create_job_args["template"] = file_contents self._create_job_args["templateType"] = file_type + if self._storage_profile_id: + self._create_job_args["storageProfileId"] = self._storage_profile_id + # The job parameters job_bundle_parameters = read_job_bundle_parameters(self._job_bundle_dir) diff --git a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py index cd0ddb5c..26a43aa7 100644 --- a/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py +++ b/src/deadline/client/ui/dialogs/submit_job_to_deadline_dialog.py @@ -261,6 +261,7 @@ def on_submit(self): farm_id = get_setting("defaults.farm_id") queue_id = get_setting("defaults.queue_id") + storage_profile_id = get_setting("defaults.storage_profile_id") queue = deadline.get_queue(farmId=farm_id, queueId=queue_id) @@ -281,6 +282,7 @@ def on_submit(self): self.create_job_response = SubmitJobProgressDialog.start_submission( farm_id, queue_id, + storage_profile_id, job_bundle_dir, asset_manager, deadline, diff --git a/src/deadline/client/ui/widgets/shared_job_settings_tab.py b/src/deadline/client/ui/widgets/shared_job_settings_tab.py index cf660617..080810c0 100644 --- 
a/src/deadline/client/ui/widgets/shared_job_settings_tab.py +++ b/src/deadline/client/ui/widgets/shared_job_settings_tab.py @@ -3,6 +3,7 @@ """ A UI Widget containing the render setup tab """ +import sys import threading from typing import Any, Dict, Optional @@ -358,3 +359,51 @@ def get_item(self): return (response["queueId"], response["displayName"], response["description"]) else: return ("", "", "") + + +class DeadlineStorageProfileNameDisplay(_DeadlineNamedResourceDisplay): + WINDOWS_OS = "Windows" + MAC_OS = "Macos" + LINUX_OS = "Linux" + + def __init__(self, parent=None): + super().__init__( + resource_name="Storage Profile Name", + setting_name="defaults.storage_profile_id", + parent=parent, + ) + + def get_item(self): + farm_id = get_setting("defaults.farm_id") + queue_id = get_setting("defaults.queue_id") + storage_profile_id = get_setting(self.setting_name) + + if farm_id and queue_id and storage_profile_id: + deadline = api.get_boto3_client("deadline") + response = deadline.list_storage_profiles_for_queue(farmId=farm_id, queueId=queue_id) + farm_storage_profiles = response.get("storageProfiles", {}) + + if farm_storage_profiles: + storage_profile = [ + (item["storageProfileId"], item["displayName"], item["osFamily"]) + for item in farm_storage_profiles + if storage_profile_id == item["storageProfileId"] + ] + return storage_profile[0] + + return ("", "", "") + + def _get_default_storage_profile_name(self) -> str: + """ + Get a string specifying what the OS is, following the format the Deadline storage profile API expects. 
+ """ + if sys.platform.startswith("linux"): + return self.LINUX_OS + + if sys.platform.startswith("darwin"): + return self.MAC_OS + + if sys.platform.startswith("win"): + return self.WINDOWS_OS + + return "" diff --git a/test/deadline_client/unit/api/test_api_storage_profile.py b/test/deadline_client/unit/api/test_api_storage_profile.py new file mode 100644 index 00000000..a2ad8dd5 --- /dev/null +++ b/test/deadline_client/unit/api/test_api_storage_profile.py @@ -0,0 +1,94 @@ +# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. + +""" +tests the deadline.client.api functions relating to storage profiles +""" + +from unittest.mock import patch + +import pytest + +from deadline.client import api + +STORAGE_PROFILES_LIST = [ + { + "storageProfileId": "sp-0123456789abcdef0123456789abcdef", + "osFamily": "windows", + "displayName": "Testing storage profile", + }, + { + "storageProfileId": "sp-0123456789abcdef0123456789abcdeg", + "osFamily": "macos", + "displayName": "Another storage profile", + }, + { + "storageProfileId": "sp-0123756789abcdef0123456789abcdeg", + "osFamily": "linux", + "displayName": "Third storage profile", + }, + { + "storageProfileId": "sp-0123456789abcdef012a456789abcdeg", + "osFamily": "linux", + "displayName": "storage profile six", + }, + { + "storageProfileId": "sp-0123456789abcdef0123450789abcaeg", + "osFamily": "macos", + "displayName": "storage profile", + }, +] + + +def test_list_storage_profiles_for_queue_paginated(fresh_deadline_config): + """Confirm api.list_storage_profiles_for_queue concatenates multiple pages""" + with patch.object(api._session, "get_boto3_session") as session_mock: + session_mock().client("deadline").list_storage_profiles_for_queue.side_effect = [ + {"storageProfiles": STORAGE_PROFILES_LIST[:2], "nextToken": "abc"}, + {"storageProfiles": STORAGE_PROFILES_LIST[2:3], "nextToken": "def"}, + {"storageProfiles": STORAGE_PROFILES_LIST[3:]}, + ] + + # Call the API + storage_profiles = 
api.list_storage_profiles_for_queue() + + assert storage_profiles["storageProfiles"] == STORAGE_PROFILES_LIST + + +@pytest.mark.parametrize("pass_principal_id_filter", [True, False]) +@pytest.mark.parametrize("user_identities", [True, False]) +def test_list_storage_profiles_for_queue_principal_id( + fresh_deadline_config, pass_principal_id_filter, user_identities +): + """Confirm api.list_storage_profiles_for_queue sets the principalId parameter appropriately""" + + with patch.object(api._session, "get_boto3_session") as session_mock: + session_mock().client("deadline").list_storage_profiles_for_queue.side_effect = [ + {"storageProfiles": STORAGE_PROFILES_LIST}, + ] + if user_identities: + session_mock()._session.get_scoped_config.return_value = { + "studio_id": "studioid", + "user_id": "userid", + "identity_store_id": "idstoreid", + } + + # Call the API + if pass_principal_id_filter: + storage_profiles = api.list_storage_profiles_for_queue(principalId="otheruserid") + else: + storage_profiles = api.list_storage_profiles_for_queue() + + assert storage_profiles["storageProfiles"] == STORAGE_PROFILES_LIST + + if pass_principal_id_filter: + session_mock().client( + "deadline" + ).list_storage_profiles_for_queue.assert_called_once_with(principalId="otheruserid") + elif user_identities: + session_mock().client( + "deadline" + ).list_storage_profiles_for_queue.assert_called_once_with(principalId="userid") + else: + session_mock().client( + "deadline" + ).list_storage_profiles_for_queue.assert_called_once_with() diff --git a/test/deadline_client/unit/api/test_job_bundle_submission.py b/test/deadline_client/unit/api/test_job_bundle_submission.py index 8009393c..688517af 100644 --- a/test/deadline_client/unit/api/test_job_bundle_submission.py +++ b/test/deadline_client/unit/api/test_job_bundle_submission.py @@ -20,7 +20,12 @@ from deadline.job_attachments.progress_tracker import SummaryStatistics from deadline.job_attachments.utils import AssetLoadingMethod, 
OperatingSystemFamily -from ..shared_constants import MOCK_BUCKET_NAME, MOCK_FARM_ID, MOCK_QUEUE_ID +from ..shared_constants import ( + MOCK_BUCKET_NAME, + MOCK_FARM_ID, + MOCK_STORAGE_PROFILE_ID, + MOCK_QUEUE_ID, +) MOCK_GET_QUEUE_RESPONSE = { "queueId": MOCK_QUEUE_ID, @@ -272,6 +277,8 @@ def test_create_job_from_job_bundle( config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) + config.set_setting("defaults.storage_profile_id", MOCK_STORAGE_PROFILE_ID) + # Write the template to the job bundle with open( os.path.join(temp_job_bundle_dir, f"template.{job_template_type.lower()}"), @@ -305,6 +312,7 @@ def test_create_job_from_job_bundle( queueId=MOCK_QUEUE_ID, template=job_template, templateType=job_template_type, + storageProfileId=MOCK_STORAGE_PROFILE_ID, **expected_create_job_parameters_dict, ) @@ -434,6 +442,7 @@ def test_create_job_from_job_bundle_job_attachments( config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) + config.set_setting("defaults.storage_profile_id", MOCK_STORAGE_PROFILE_ID) # Write a JSON template with open(os.path.join(temp_job_bundle_dir, "template.json"), "w", encoding="utf8") as f: @@ -490,6 +499,7 @@ def fake_upload_callback(metadata: Dict[str, Any]) -> bool: queueId=MOCK_QUEUE_ID, template=ANY, templateType=ANY, + storageProfileId=MOCK_STORAGE_PROFILE_ID, attachments={ "manifests": [], "assetLoadingMethod": AssetLoadingMethod.PRELOAD, @@ -514,6 +524,7 @@ def test_create_job_from_job_bundle_with_empty_asset_references( config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) + config.set_setting("defaults.storage_profile_id", MOCK_STORAGE_PROFILE_ID) # Write the template to the job bundle with open( @@ -543,6 +554,7 @@ def test_create_job_from_job_bundle_with_empty_asset_references( queueId=MOCK_QUEUE_ID, template=job_template, templateType=job_template_type, + 
storageProfileId=MOCK_STORAGE_PROFILE_ID, priority=50, ) @@ -582,6 +594,7 @@ def test_create_job_from_job_bundle_with_single_asset_file( config.set_setting("defaults.farm_id", MOCK_FARM_ID) config.set_setting("defaults.queue_id", MOCK_QUEUE_ID) + config.set_setting("defaults.storage_profile_id", MOCK_STORAGE_PROFILE_ID) # Write a JSON template with open(os.path.join(temp_job_bundle_dir, "template.json"), "w", encoding="utf8") as f: @@ -625,6 +638,7 @@ def fake_upload_callback(metadata: Dict[str, Any]) -> bool: queueId=MOCK_QUEUE_ID, template=ANY, templateType=ANY, + storageProfileId=MOCK_STORAGE_PROFILE_ID, attachments={ "manifests": [ { diff --git a/test/deadline_client/unit/cli/test_cli_config.py b/test/deadline_client/unit/cli/test_cli_config.py index 1b0cc624..739a54c6 100644 --- a/test/deadline_client/unit/cli/test_cli_config.py +++ b/test/deadline_client/unit/cli/test_cli_config.py @@ -1,7 +1,7 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. """ -Tests for the CLI list-farms command. +Tests for the CLI config command. 
""" import importlib @@ -34,7 +34,7 @@ def test_cli_config_show_defaults(fresh_deadline_config): assert fresh_deadline_config in result.output # Assert the expected number of settings - assert len(settings.keys()) == 8 + assert len(settings.keys()) == 9 for setting_name in settings.keys(): assert setting_name in result.output @@ -94,6 +94,7 @@ def test_cli_config_show_modified_config(fresh_deadline_config): config.set_setting("settings.deadline_endpoint_url", "https://some-url-value") config.set_setting("defaults.farm_id", "farm-82934h23k4j23kjh") config.set_setting("defaults.queue_id", "queue-389348u234jhk34") + config.set_setting("defaults.storage_profile_id", "sp-12345abcde12345") config.set_setting("settings.auto_accept", "False") config.set_setting("settings.log_level", "DEBUG") diff --git a/test/deadline_client/unit/config/test_config_file.py b/test/deadline_client/unit/config/test_config_file.py index fb4d47ee..4c2fcbcf 100644 --- a/test/deadline_client/unit/config/test_config_file.py +++ b/test/deadline_client/unit/config/test_config_file.py @@ -47,6 +47,7 @@ def test_config_settings_hierarchy(fresh_deadline_config): # First set some settings that apply to the defaults, changing the # hierarchy from queue inwards. 
config.set_setting("settings.deadline_endpoint_url", "nondefault-endpoint-url") + config.set_setting("defaults.storage_profile_id", "storage-profile-for-queue-default") config.set_setting("defaults.queue_id", "queue-for-farm-default") config.set_setting("defaults.farm_id", "farm-for-profile-default") config.set_setting("defaults.aws_profile_name", "NonDefaultProfile") @@ -56,6 +57,7 @@ def test_config_settings_hierarchy(fresh_deadline_config): assert config.get_setting("settings.deadline_endpoint_url") == DEFAULT_DEADLINE_ENDPOINT_URL assert config.get_setting("defaults.farm_id") == "" assert config.get_setting("defaults.queue_id") == "" + assert config.get_setting("defaults.storage_profile_id") == "" # Switch back to the default profile, and check the next layer of the onion config.set_setting("defaults.aws_profile_name", "") @@ -63,10 +65,18 @@ def test_config_settings_hierarchy(fresh_deadline_config): assert config.get_setting("defaults.farm_id") == "farm-for-profile-default" # The queue id is still default assert config.get_setting("defaults.queue_id") == "" + # The storage profile id is still default + assert config.get_setting("defaults.storage_profile_id") == "" # Switch back to the default farm config.set_setting("defaults.farm_id", "") assert config.get_setting("defaults.queue_id") == "queue-for-farm-default" + # Storage profile needs "profile - farm_id - queue_id" so it should still be empty + assert config.get_setting("defaults.storage_profile_id") == "" + + # Switch to default farm and default queue + config.set_setting("defaults.queue_id", "") + assert config.get_setting("defaults.storage_profile_id") == "storage-profile-for-queue-default" def test_config_get_setting_nonexistant(fresh_deadline_config): diff --git a/test/deadline_client/unit/shared_constants.py b/test/deadline_client/unit/shared_constants.py index db2a775a..2327dd37 100644 --- a/test/deadline_client/unit/shared_constants.py +++ b/test/deadline_client/unit/shared_constants.py @@ -3,6 
+3,7 @@ MOCK_FARM_ID = "farm-0123456789abcdefabcdefabcdefabcd" MOCK_QUEUE_ID = "queue-0123456789abcdefabcdefabcdefabcd" MOCK_BUCKET_NAME = "deadline-job-attachments-mock-bucket" +MOCK_STORAGE_PROFILE_ID = "sp-0123456789abcdefabcdefabcdefabcd" MOCK_JOB_ID = "job-0123456789abcdefabcdefabcdefabcd" MOCK_STEP_ID = "step-0123456789abcdefabcdefabcdefabcd" MOCK_TASK_ID = "task-0123456789abcdefabcdefabcdefabcd"