diff --git a/.github/workflows/pr-ci.yaml b/.github/workflows/pr-ci.yaml index 5f9ba4c2..6f578ed9 100644 --- a/.github/workflows/pr-ci.yaml +++ b/.github/workflows/pr-ci.yaml @@ -10,7 +10,7 @@ jobs: AWS_DEFAULT_REGION: us-east-1 strategy: matrix: - python: [ 3.7, 3.8, 3.9, "3.10" ] + python: [ 3.8, 3.9, "3.10" ] os: [ubuntu-latest, macos-latest, windows-latest] runs-on: ${{ matrix.os }} steps: diff --git a/.pylintrc b/.pylintrc index 2b4fb647..13ea7479 100644 --- a/.pylintrc +++ b/.pylintrc @@ -9,7 +9,6 @@ persistent=yes disable= missing-docstring, # not everything needs a docstring fixme, # work in progress - bad-continuation, # clashes with black duplicate-code, # finds dupes between tests and plugins too-few-public-methods, # triggers when inheriting ungrouped-imports, # clashes with isort diff --git a/requirements.txt b/requirements.txt index f3dd1a16..fd8d22f4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,18 +1,18 @@ # better interactive session, debugger -ipython>=7.7.0 -ipdb>=0.12 +ipython>=8.0.0 +ipdb>=0.13 # testing tools -pylint==2.8.3 -coverage>=4.5.4 -pytest>=6.0.0 -pytest-cov>=2.7.1 -pytest-random-order>=1.0.4 -hypothesis>=4.32.3 -pytest-localserver>=0.5.0 +pylint==3.0.1 +coverage>=7.3.2 +pytest>=7.4.2 +pytest-cov>=4.1.0 +pytest-random-order>=1.1.0 +hypothesis>=6.87.1 +pytest-localserver>=0.8.0 # commit hooks -pre-commit>=1.18.1 +pre-commit>=3.4.0 # packaging -twine>=3.1.0 +twine>=4.0.2 diff --git a/setup.py b/setup.py index 505c8dd5..2950663a 100644 --- a/setup.py +++ b/setup.py @@ -70,8 +70,6 @@ def find_version(*file_paths): "Topic :: Software Development :: Code Generators", "Operating System :: OS Independent", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", diff --git a/src/rpdk/core/boto_helpers.py b/src/rpdk/core/boto_helpers.py index c1dfa21a..67aad6cf 100644 --- a/src/rpdk/core/boto_helpers.py +++ b/src/rpdk/core/boto_helpers.py @@ -39,9 +39,7 @@ def get_temporary_credentials(session, key_names=BOTO_CRED_KEYS, role_arn=None): region_name=session.region_name, ) if role_arn: - session_name = "CloudFormationContractTest-{:%Y%m%d%H%M%S}".format( - datetime.now() - ) + session_name = f"CloudFormationContractTest-{datetime.now():%Y%m%d%H%M%S}" try: response = sts_client.assume_role( RoleArn=role_arn, RoleSessionName=session_name, DurationSeconds=900 ) @@ -54,7 +52,7 @@ def get_temporary_credentials(session, key_names=BOTO_CRED_KEYS, role_arn=None): role_arn, ) raise DownstreamError() from Exception( - "Could not assume specified role '{}'".format(role_arn) + f"Could not assume specified role '{role_arn}'" ) temp = response["Credentials"] creds = (temp["AccessKeyId"], temp["SecretAccessKey"], temp["SessionToken"]) diff --git a/src/rpdk/core/build_image.py b/src/rpdk/core/build_image.py index 630e8342..7be29d45 100644 --- a/src/rpdk/core/build_image.py +++ b/src/rpdk/core/build_image.py @@ -53,7 +53,9 @@ def setup_subparser(subparsers, parents): parser.add_argument("--image-name", help="Image name") parser.add_argument( "--executable", - help="The relative path to the handler executable" - " that will be built into the docker image" - " (ie target/myjar.jar)", + help=( + "The relative path to the handler executable" + " that will be built into the docker image" + " (ie target/myjar.jar)" + ), ) diff --git a/src/rpdk/core/contract/hook_client.py
b/src/rpdk/core/contract/hook_client.py index 782ec51b..31fe5251 100644 --- a/src/rpdk/core/contract/hook_client.py +++ b/src/rpdk/core/contract/hook_client.py @@ -378,9 +378,9 @@ def is_update_invocation_point(invocation_point): def assert_time(self, start_time, end_time, action): timeout_in_seconds = self._timeout_in_seconds - assert end_time - start_time <= timeout_in_seconds, ( - "Handler %r timed out." % action - ) + assert ( + end_time - start_time <= timeout_in_seconds + ), f"Handler {action!r} timed out." def _make_payload( self, @@ -472,7 +472,7 @@ def call_and_assert( **kwargs, ): if assert_status not in [HookStatus.SUCCESS, HookStatus.FAILED]: - raise ValueError("Assert status {} not supported.".format(assert_status)) + raise ValueError(f"Assert status {assert_status} not supported.") status, response = self.call(invocation_point, target, target_model, **kwargs) if assert_status == HookStatus.SUCCESS: diff --git a/src/rpdk/core/contract/resource_client.py b/src/rpdk/core/contract/resource_client.py index 3427fd6b..77a219d2 100644 --- a/src/rpdk/core/contract/resource_client.py +++ b/src/rpdk/core/contract/resource_client.py @@ -125,8 +125,8 @@ def path_exists(document, path): _prop, _resolved_path, _parent = traverse(document, path) except LookupError: return False - else: - return True + + return True def prune_properties_from_model(model, paths): @@ -282,7 +282,6 @@ def has_only_writable_identifiers(self): ) def assert_write_only_property_does_not_exist(self, resource_model): - error_list = [] if self.write_only_paths: for write_only_property in self.write_only_paths: @@ -290,10 +289,9 @@ def assert_write_only_property_does_not_exist(self, resource_model): if val: error_list.append(write_only_property[1]) assertion_error_message = ( - "The model MUST NOT return properties defined as " - "writeOnlyProperties in the resource schema " - "\n Write only properties in resource model : %s \n Output Resource Model : %s \n" - % (error_list, resource_model) + "The model MUST NOT return properties defined as writeOnlyProperties" + " in the resource schema \n Write only properties in resource model :" + f" {error_list} \n Output Resource Model : {resource_model} \n" ) assert not any(error_list), assertion_error_message @@ -302,13 +300,13 @@ def get_metadata(self): properties = self._schema["properties"] except KeyError: return set() - else: - return { - prop - for prop in properties.keys() - if "insertionOrder" in properties[prop] - and properties[prop]["insertionOrder"] == "false" - } + + return { + prop + for prop in properties.keys() + if "insertionOrder" in properties[prop] + and properties[prop]["insertionOrder"] == "false" + } @property def strategy(self): @@ -456,11 +454,10 @@ def compare(self, inputs, outputs): def compare_model(self, inputs, outputs, path=()): assertion_error_message = ( - "All properties specified in the request MUST " - "be present in the model returned, and they MUST" - " match exactly, with the exception of properties" - " defined as writeOnlyProperties in the resource schema \n Request Model : %s \n Returned Model : %s \n" - % (inputs, outputs) + "All properties specified in the request MUST be present in the model" + " returned, and they MUST match exactly, with the exception of properties" + " defined as writeOnlyProperties in the resource schema \n Request Model :" + f" {inputs} \n Returned Model : {outputs} \n" ) try: if isinstance(inputs, dict): @@ -488,13 +485,9 @@ def compare_model(self, inputs, outputs, path=()): else: if inputs[key] != outputs[key]: 
assertion_error_message = ( - "%s Value for property %s in Request Model(%s) and Response Model(%s) does not match" - % ( - assertion_error_message, - key, - inputs[key], - outputs[key], - ) + f"{assertion_error_message} Value for property {key} in" + f" Request Model({inputs[key]}) and Response" + f" Model({outputs[key]}) does not match" ) assert inputs[key] == outputs[key], assertion_error_message else: @@ -612,9 +605,9 @@ def assert_time(self, start_time, end_time, action): if action in (Action.READ, Action.LIST) else self._timeout_in_seconds * 2 ) - assert end_time - start_time <= timeout_in_seconds, ( - "Handler %r timed out." % action - ) + assert ( + end_time - start_time <= timeout_in_seconds + ), f"Handler {action!r} timed out." @staticmethod def assert_primary_identifier(primary_identifier_paths, resource_model): @@ -646,8 +639,8 @@ def is_primary_identifier_equal( ) except KeyError as e: raise AssertionError( - "The primaryIdentifier returned in every progress event must\ - match the primaryIdentifier passed into the request" + "The primaryIdentifier returned in every progress event must " + "match the primaryIdentifier passed into the request" ) from e @staticmethod @@ -662,8 +655,8 @@ def get_primary_identifier(primary_identifier_path, model): return pid_list except KeyError as e: raise AssertionError( - "The primaryIdentifier returned in every progress event must\ - match the primaryIdentifier passed into the request \n" + "The primaryIdentifier returned in every progress event must " + "match the primaryIdentifier passed into the request \n" ) from e def _make_payload( @@ -761,7 +754,7 @@ def call_and_assert( if not self.has_required_handlers(): raise ValueError("Create/Read/Delete handlers are required") if assert_status not in [OperationStatus.SUCCESS, OperationStatus.FAILED]: - raise ValueError("Assert status {} not supported.".format(assert_status)) + raise ValueError(f"Assert status {assert_status} not supported.") status, response = self.call(action, current_model, previous_model, **kwargs) if assert_status == OperationStatus.SUCCESS: @@ -857,7 +850,7 @@ def validate_model_contain_tags(self, inputs): if key == tag_property_name: return True else: - raise assertion_error_message + raise AssertionError(assertion_error_message) except Exception as exception: raise AssertionError(assertion_error_message) from exception return False diff --git a/src/rpdk/core/contract/suite/hook/hook_handler_commons.py b/src/rpdk/core/contract/suite/hook/hook_handler_commons.py index a50dee41..b8add5be 100644 --- a/src/rpdk/core/contract/suite/hook/hook_handler_commons.py +++ b/src/rpdk/core/contract/suite/hook/hook_handler_commons.py @@ -24,9 +24,8 @@ def test_hook_success(hook_client, invocation_point, target, target_model): if HookClient.is_update_invocation_point(invocation_point): raise ValueError( - "Invocation point {} not supported for this testing operation".format( - invocation_point - ) + f"Invocation point {invocation_point} not supported for this testing" + " operation" ) _status, response, _error_code = hook_client.call_and_assert( @@ -39,9 +38,8 @@ def test_hook_success(hook_client, invocation_point, target, target_model): def test_update_hook_success(hook_client, invocation_point, target, target_model): if not HookClient.is_update_invocation_point(invocation_point): raise ValueError( - "Invocation point {} not supported for testing UPDATE hook operation".format( - invocation_point - ) + f"Invocation point {invocation_point} not supported for testing UPDATE hook" + " 
operation" ) _status, response, _error_code = hook_client.call_and_assert( @@ -85,7 +83,10 @@ def test_hook_handlers_failed(hook_client, invocation_point): @failed_event( error_code=HandlerErrorCode.UnsupportedTarget, - msg="A hook handler MUST return FAILED with a UnsupportedTarget error code if the target is not supported", + msg=( + "A hook handler MUST return FAILED with a UnsupportedTarget error code if the" + " target is not supported" + ), ) def test_hook_unsupported_target(hook_client, invocation_point): if not hook_client.handler_has_wildcard_targets(invocation_point): diff --git a/src/rpdk/core/contract/suite/resource/contract_asserts.py b/src/rpdk/core/contract/suite/resource/contract_asserts.py index 978c7713..bf813850 100644 --- a/src/rpdk/core/contract/suite/resource/contract_asserts.py +++ b/src/rpdk/core/contract/suite/resource/contract_asserts.py @@ -15,9 +15,11 @@ def response_contains_resource_model_equal_updated_model( assert response["resourceModel"] == { **current_resource_model, **update_resource_model, - }, "All properties specified in the update request MUST be present in the \ - model returned, and they MUST match exactly, with the exception of \ - properties defined as writeOnlyProperties in the resource schema" + }, ( + "All properties specified in the update request MUST be present in the " + "model returned, and they MUST match exactly, with the exception of " + "properties defined as writeOnlyProperties in the resource schema" + ) @decorate() @@ -35,8 +37,10 @@ def response_contains_unchanged_primary_identifier( resource_client.primary_identifier_paths, current_resource_model, response["resourceModel"], - ), "PrimaryIdentifier returned in every progress event must match \ - the primaryIdentifier passed into the request" + ), ( + "PrimaryIdentifier returned in every progress event must match the" + " primaryIdentifier passed into the request" + ) @decorate(after=False) diff --git a/src/rpdk/core/contract/suite/resource/handler_commons.py b/src/rpdk/core/contract/suite/resource/handler_commons.py index eedfe70e..bc4fc430 100644 --- a/src/rpdk/core/contract/suite/resource/handler_commons.py +++ b/src/rpdk/core/contract/suite/resource/handler_commons.py @@ -28,13 +28,14 @@ def test_create_success(resource_client, current_resource_model): @failed_event( error_code=HandlerErrorCode.AlreadyExists, - msg="A create handler MUST NOT create multiple resources given\ - the same idempotency token", + msg=( + "A create handler MUST NOT create multiple resources given the same " + "idempotency token" + ), ) def test_create_failure_if_repeat_writeable_id(resource_client, current_resource_model): LOG.debug( - "at least one identifier is writeable; " - "performing duplicate-CREATE-failed test" + "at least one identifier is writeable; performing duplicate-CREATE-failed test" ) # Should fail, because different clientRequestToken for the same # resource model means that the same resource is trying to be @@ -59,8 +60,10 @@ def test_read_success(resource_client, current_resource_model): @failed_event( error_code=HandlerErrorCode.NotFound, - msg="A read handler MUST return FAILED with a NotFound error code\ - if the resource does not exist", + msg=( + "A read handler MUST return FAILED with a NotFound error code if the " + "resource does not exist" + ), ) def test_read_failure_not_found( resource_client, @@ -114,16 +117,11 @@ def error_test_model_in_list(resource_client, current_resource_model, message): ) if resource_model_primary_identifier != current_model_primary_identifier: 
assertion_error_message = ( - "%s \n Resource Model primary identifier %s does not match with " - "Current Resource Model primary identifier %s \n Resource Model : %s" - " \n Currrent Model : %s " - % ( - message, - resource_model_primary_identifier[0], - current_model_primary_identifier[0], - resource_model, - current_resource_model, - ) + f"{message} \n Resource Model primary identifier" + f" {resource_model_primary_identifier[0]} does not match with Current" + " Resource Model primary identifier" + f" {current_model_primary_identifier[0]} \n Resource Model :" + f" {resource_model} \n Current Model : {current_resource_model} " ) return assertion_error_message return assertion_error_message @@ -145,8 +143,10 @@ def test_update_success(resource_client, update_resource_model, current_resource @failed_event( error_code=HandlerErrorCode.NotFound, - msg="An update handler MUST return FAILED with a NotFound error code\ - if the resource did not exist prior to the update request", + msg=( + "An update handler MUST return FAILED with a NotFound error code if the " + "resource did not exist prior to the update request" + ), ) def test_update_failure_not_found(resource_client, current_resource_model): update_model = resource_client.generate_update_example(current_resource_model) @@ -165,8 +165,10 @@ def test_delete_success(resource_client, current_resource_model): @failed_event( error_code=HandlerErrorCode.NotFound, - msg="A delete hander MUST return FAILED with a NotFound error code\ - if the resource did not exist prior to the delete request", + msg=( + "A delete handler MUST return FAILED with a NotFound error code if the " + "resource did not exist prior to the delete request" + ), ) def test_delete_failure_not_found(resource_client, current_resource_model): _status, _response, error_code = resource_client.call_and_assert( diff --git a/src/rpdk/core/contract/suite/resource/handler_delete.py b/src/rpdk/core/contract/suite/resource/handler_delete.py index 6ea3c0c0..42cdaad7 100644 --- a/src/rpdk/core/contract/suite/resource/handler_delete.py +++ b/src/rpdk/core/contract/suite/resource/handler_delete.py @@ -44,10 +44,10 @@ def deleted_resource(resource_client): # a failed status is allowed if the error code is NotFound if status == OperationStatus.FAILED: error_code = resource_client.assert_failed(status, response) - assert ( - error_code == HandlerErrorCode.NotFound - ), "A delete hander MUST return FAILED with a NotFound error code\ - if the resource did not exist prior to the delete request" + assert error_code == HandlerErrorCode.NotFound, ( + "A delete handler MUST return FAILED with a NotFound error code " + "if the resource did not exist prior to the delete request" + ) else: resource_client.assert_success(status, response) @@ -67,10 +67,10 @@ def contract_delete_list(resource_client, deleted_resource): # remove the model from the list, however.
deleted_model, _request = deleted_resource - assert not test_model_in_list( - resource_client, deleted_model - ), "A list operation MUST NOT return the primaryIdentifier \ - of any deleted resource instance" + assert not test_model_in_list(resource_client, deleted_model), ( + "A list operation MUST NOT return the primaryIdentifier of any deleted" + " resource instance" + ) @pytest.mark.delete diff --git a/src/rpdk/core/contract/suite/resource/handler_update.py b/src/rpdk/core/contract/suite/resource/handler_update.py index 654b57f5..74edddb6 100644 --- a/src/rpdk/core/contract/suite/resource/handler_update.py +++ b/src/rpdk/core/contract/suite/resource/handler_update.py @@ -63,15 +63,11 @@ def contract_update_read(updated_resource, resource_client): resource_client.primary_identifier_paths, updated_model ) assertion_error_message = ( - "The primaryIdentifier returned must match " - "the primaryIdentifier passed into the request " - "Create Model primary identifier %s does not match with Update Model primary identifier %s \n Create Model : %s \n Update Model : %s " - % ( - create_primary_identifiers, - update_primary_identifiers, - _created_model, - updated_model, - ) + "The primaryIdentifier returned must match the primaryIdentifier passed into" + " the request Create Model primary identifier" + f" {create_primary_identifiers} does not match with Update Model primary" + f" identifier {update_primary_identifiers} \n Create Model :" + f" {_created_model} \n Update Model : {updated_model} " ) assert resource_client.is_primary_identifier_equal( resource_client.primary_identifier_paths, _created_model, updated_model @@ -96,8 +92,10 @@ def contract_update_list(updated_resource, resource_client): ) = updated_resource assert resource_client.is_primary_identifier_equal( resource_client.primary_identifier_paths, _created_model, updated_model - ), "The primaryIdentifier returned must match\ - the primaryIdentifier passed into the request" + ), ( + "The primaryIdentifier returned must match the primaryIdentifier passed" + " into the request" + ) assert test_model_in_list(resource_client, updated_model), error_test_model_in_list( resource_client, updated_model, diff --git a/src/rpdk/core/contract/suite/resource/handler_update_invalid.py b/src/rpdk/core/contract/suite/resource/handler_update_invalid.py index 85eff212..7e7766c7 100644 --- a/src/rpdk/core/contract/suite/resource/handler_update_invalid.py +++ b/src/rpdk/core/contract/suite/resource/handler_update_invalid.py @@ -16,8 +16,10 @@ def contract_update_without_create(resource_client): @failed_event( error_code=HandlerErrorCode.NotFound, - msg="An update handler MUST return FAILED with a NotFound error code\ - if the resource did not exist prior to the update request", + msg=( + "An update handler MUST return FAILED with a NotFound error code if the" + " resource did not exist prior to the update request" + ), ) def test_update_without_create(resource_client): create_request = resource_client.generate_create_example() @@ -25,9 +27,8 @@ def test_update_without_create(resource_client): _status, response, _error = resource_client.call_and_assert( Action.UPDATE, OperationStatus.FAILED, update_request, create_request ) - assert response[ - "message" - ], "The progress event MUST return an error message\ - when the status is failed" + assert response["message"], ( + "The progress event MUST return an error message when the status is" " failed" + ) return _error diff --git a/src/rpdk/core/contract/type_configuration.py 
b/src/rpdk/core/contract/type_configuration.py index 209b2574..b93e3c94 100644 --- a/src/rpdk/core/contract/type_configuration.py +++ b/src/rpdk/core/contract/type_configuration.py @@ -33,7 +33,7 @@ def get_type_configuration(typeconfigloc): type_config_file_path, ) raise InvalidProjectError( - "Type configuration file '%s' is invalid" % type_config_file_path + f"Type configuration file '{type_config_file_path}' is invalid" ) from json_decode_error except FileNotFoundError: LOG.debug( diff --git a/src/rpdk/core/data_loaders.py b/src/rpdk/core/data_loaders.py index f3077979..38a6d79b 100644 --- a/src/rpdk/core/data_loaders.py +++ b/src/rpdk/core/data_loaders.py @@ -62,7 +62,7 @@ def get_schema_store(schema_search_path): for schema_fname in schema_fnames: schema_path = os.path.join(schema_search_path, schema_fname) if schema_path.endswith(".json"): - with open(schema_path, "r") as schema_f: + with open(schema_path, "r", encoding="utf-8") as schema_f: schema = json.load(schema_f) if "$id" in schema: schema_store[schema["$id"]] = schema @@ -91,21 +91,24 @@ def make_resource_validator_with_additional_properties_check(): dependencies = schema["definitions"]["validations"]["dependencies"] properties_check = { "properties": { - "$comment": "An object cannot have both defined and undefined \ -properties; therefore, patternProperties is not allowed when properties is specified.\ - Provider should mark additionalProperties as false if the \ -property is of object type and has properties defined \ -in it.", + "$comment": ( + "An object cannot have both defined and undefined properties;" + " therefore, patternProperties is not allowed when properties is" + " specified. Provider should mark additionalProperties as false if the" + " property is of object type and has properties defined in it." + ), "not": {"required": ["patternProperties"]}, "required": ["additionalProperties"], } } pattern_properties_check = { "patternProperties": { - "$comment": "An object cannot have both defined and undefined \ -properties; therefore, properties is not allowed when patternProperties is specified. \ -Provider should mark additionalProperties as false if the property is of object type \ -and has patternProperties defined in it.", + "$comment": ( + "An object cannot have both defined and undefined properties;" + " therefore, properties is not allowed when patternProperties is" + " specified. Provider should mark additionalProperties as false if the" + " property is of object type and has patternProperties defined in it." 
+ ), "not": {"required": ["properties"]}, "required": ["additionalProperties"], } @@ -130,8 +133,7 @@ def get_file_base_uri(file): name = file.name except AttributeError: LOG.error( - "Resource spec has no filename associated, " - "relative references may not work" + "Resource spec has no filename associated, relative references may not work" ) name = STDIN_NAME @@ -165,7 +167,8 @@ def load_resource_spec(resource_spec_file): # pylint: disable=R # noqa: C901 for property_name, property_details in schema.get("properties", {}).items(): if property_name[0].islower(): LOG.warning( - "CloudFormation properties don't usually start with lowercase letters: %s", + "CloudFormation properties don't usually start with lowercase" + " letters: %s", property_name, ) try: @@ -231,7 +234,8 @@ def load_resource_spec(resource_spec_file): # pylint: disable=R # noqa: C901 & property_keywords ): LOG.warning( - "Incorrect JSON schema keyword(s) %s for type: %s for property: %s", + "Incorrect JSON schema keyword(s) %s for type: %s for" + " property: %s", type_specific_keywords - allowed_keywords & property_keywords, property_type, @@ -259,8 +263,9 @@ def load_resource_spec(resource_spec_file): # pylint: disable=R # noqa: C901 for enum in nested_lookup("enum", resource_spec): if len(enum) > 15: LOG.warning( - "Consider not manually maintaining large constantly evolving enums like \ -instance types, lambda runtimes, partitions, regions, availability zones, etc. that get outdated quickly: %s", + "Consider not manually maintaining large constantly evolving enums like" + " instance types, lambda runtimes, partitions, regions, availability" + " zones, etc. that get outdated quickly: %s", enum, ) @@ -284,8 +289,9 @@ def load_resource_spec(resource_spec_file): # pylint: disable=R # noqa: C901 } & set(map(str.lower, resource_spec.get("properties", []))) if list_options: LOG.warning( - "LIST API inputs like MaxResults, MaxRecords, MaxItems, NextToken, NextMarker, NextPageToken, PageToken, and Filters are not resource properties. \ -%s should not be present in resource schema", + "LIST API inputs like MaxResults, MaxRecords, MaxItems, NextToken," + " NextMarker, NextPageToken, PageToken, and Filters are not resource" + " properties. %s should not be present in resource schema", list_options, ) @@ -302,7 +308,8 @@ def load_resource_spec(resource_spec_file): # pylint: disable=R # noqa: C901 ) if read_only_properties_intersection: LOG.warning( - "readOnlyProperties cannot be specified by customers and should not overlap with writeOnlyProperties, createOnlyProperties, or required: %s", + "readOnlyProperties cannot be specified by customers and should not overlap" + " with writeOnlyProperties, createOnlyProperties, or required: %s", read_only_properties_intersection, ) @@ -318,10 +325,10 @@ def load_resource_spec(resource_spec_file): # pylint: disable=R # noqa: C901 additional_properties_validator.validate(resource_spec) except ValidationError as e: LOG.warning( - "[Warning] Resource spec validation would fail from next \ -major version. Provider should mark additionalProperties as false if the \ -property is of object type and has properties or patternProperties defined \ -in it. Please fix the warnings: %s", + "[Warning] Resource spec validation would fail from next major version." + " Provider should mark additionalProperties as false if the property is of" + " object type and has properties or patternProperties defined in it. 
Please" + " fix the warnings: %s", str(e), ) @@ -331,19 +338,21 @@ def load_resource_spec(resource_spec_file): # pylint: disable=R # noqa: C901 and primary_id not in create_only_properties ): LOG.warning( - "Property 'primaryIdentifier' - %s must be specified \ -as either readOnly or createOnly", + "Property 'primaryIdentifier' - %s must be specified as either readOnly" + " or createOnly", primary_id, ) if conditional_create_only_properties & create_only_properties: raise SpecValidationError( - "createOnlyProperties and conditionalCreateOnlyProperties MUST NOT have common properties" + "createOnlyProperties and conditionalCreateOnlyProperties MUST NOT have" + " common properties" ) if conditional_create_only_properties & read_only_properties: raise SpecValidationError( - "readOnlyProperties and conditionalCreateOnlyProperties MUST NOT have common properties" + "readOnlyProperties and conditionalCreateOnlyProperties MUST NOT have" + " common properties" ) if "tagging" not in resource_spec: @@ -388,7 +397,8 @@ def load_hook_spec(hook_spec_file): # pylint: disable=R # noqa: C901 if hook_spec.get("properties"): raise SpecValidationError( - "Hook types do not support 'properties' directly. Properties must be specified in the 'typeConfiguration' section." + "Hook types do not support 'properties' directly. Properties must be" + " specified in the 'typeConfiguration' section." ) validator = make_hook_validator() @@ -403,17 +413,20 @@ def load_hook_spec(hook_spec_file): # pylint: disable=R # noqa: C901 for permission in handler["permissions"]: if "cloudformation:*" in permission: raise SpecValidationError( - f"Wildcards for cloudformation are not allowed for hook handler permissions: '{permission}'" + "Wildcards for cloudformation are not allowed for hook handler" + f" permissions: '{permission}'" ) if permission in blocked_handler_permissions: raise SpecValidationError( - f"Permission is not allowed for hook handler permissions: '{permission}'" + "Permission is not allowed for hook handler permissions:" + f" '{permission}'" ) for target_name in handler["targetNames"]: if "*?" in target_name: raise SpecValidationError( - f"Wildcard pattern '*?' is not allowed in target name: '{target_name}'" + "Wildcard pattern '*?' is not allowed in target name:" + f" '{target_name}'" ) try: diff --git a/src/rpdk/core/extensions.py b/src/rpdk/core/extensions.py index e6d5b061..bb6a718c 100644 --- a/src/rpdk/core/extensions.py +++ b/src/rpdk/core/extensions.py @@ -4,7 +4,8 @@ def _check_command_name_collision(subparsers, command_name): if command_name in subparsers.choices: raise RuntimeError( - f'"{command_name}" is already registered as an extension. Please use a different name.' + f'"{command_name}" is already registered as an extension. Please use a' + " different name." 
) diff --git a/src/rpdk/core/filters.py b/src/rpdk/core/filters.py index e9f4ab6e..d97d23b9 100644 --- a/src/rpdk/core/filters.py +++ b/src/rpdk/core/filters.py @@ -27,7 +27,7 @@ def parse_resource_type(resource_type): """ segments = resource_type.split("::") if len(segments) != 3: - raise ValueError("Resource type '{}' is invalid".format(resource_type)) + raise ValueError(f"Resource type '{resource_type}' is invalid") return segments @@ -135,4 +135,4 @@ def package_prefix(full_package_name): package_segments = full_package_name.rpartition(".") if package_segments[0]: return package_segments[0] - raise ValueError("Package name '{}' is invalid".format(full_package_name)) + raise ValueError(f"Package name '{full_package_name}' is invalid") diff --git a/src/rpdk/core/fragment/generator.py b/src/rpdk/core/fragment/generator.py index 6d240b2a..26287d8f 100644 --- a/src/rpdk/core/fragment/generator.py +++ b/src/rpdk/core/fragment/generator.py @@ -221,11 +221,11 @@ def __build_resources(raw_fragments): "type": raw_fragments["Resources"][resource]["Type"] } resources_properties = {} - for resource in raw_resources: + for resource, resource_value in raw_resources.items(): type_object = {"type": "object", "properties": {}} type_object["properties"]["Type"] = { "type": "string", - "const": raw_resources[resource]["type"], + "const": resource_value["type"], } type_object["properties"]["Properties"] = {"type": "object"} resources_properties[resource] = type_object @@ -247,8 +247,8 @@ def __build_parameters(raw_fragments): "description": description, } parameter_properties = {} - for raw_param in raw_parameters: - description = raw_parameters[raw_param]["description"] + for raw_param, raw_param_value in raw_parameters.items(): + description = raw_param_value["description"] type_name = "object" properties = {"Type": {"type": "string"}} required = ["Type"] diff --git a/src/rpdk/core/fragment/lint_warning_printer.py b/src/rpdk/core/fragment/lint_warning_printer.py index f0cb8765..d2518c90 100644 --- a/src/rpdk/core/fragment/lint_warning_printer.py +++ b/src/rpdk/core/fragment/lint_warning_printer.py @@ -21,7 +21,7 @@ def print_cfn_lint_warnings(fragment_dir): ) for lint_warning in lint_warnings: print( - "\t{} (from rule {})".format(lint_warning.message, lint_warning.rule), + f"\t{lint_warning.message} (from rule {lint_warning.rule})", ) diff --git a/src/rpdk/core/fragment/module_fragment_reader.py b/src/rpdk/core/fragment/module_fragment_reader.py index 01040da9..39cdffca 100644 --- a/src/rpdk/core/fragment/module_fragment_reader.py +++ b/src/rpdk/core/fragment/module_fragment_reader.py @@ -24,7 +24,7 @@ def _load_fragment(fragment_file): return load_yaml(__first_pass_syntax_check(f.read())) except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e: raise FragmentValidationError( - "Fragment file '{}' is invalid: {}".format(fragment_file, str(e)) + f"Fragment file '{fragment_file}' is invalid: {str(e)}" ) from e @@ -37,12 +37,12 @@ def _get_fragment_file(fragment_dir): all_fragment_files.append(os.path.join(root, f)) if len(all_fragment_files) == 0: raise FragmentValidationError( - f"No module fragment files found in the fragments folder ending in one of {ALLOWED_EXTENSIONS}" + "No module fragment files found in the fragments folder ending in one of" + f" {ALLOWED_EXTENSIONS}" ) if len(all_fragment_files) > 1: raise FragmentValidationError( - "A Module can only consist of a " - "single template file, but there are " + "A Module can only consist of a single template file, but there are " + 
str(len(all_fragment_files)) + ": " + str(all_fragment_files) diff --git a/src/rpdk/core/hook/init_hook.py b/src/rpdk/core/hook/init_hook.py index d43b6b33..8967d6f7 100644 --- a/src/rpdk/core/hook/init_hook.py +++ b/src/rpdk/core/hook/init_hook.py @@ -91,7 +91,7 @@ def validate_type_name(value): return value LOG.debug("'%s' did not match '%s'", value, HOOK_TYPE_NAME_REGEX) raise WizardValidationError( - "Please enter a value matching '{}'".format(HOOK_TYPE_NAME_REGEX) + f"Please enter a value matching '{HOOK_TYPE_NAME_REGEX}'" ) @@ -101,7 +101,7 @@ def __init__(self, choices): self.max = len(self.choices) pretty = "\n".join( - "[{}] {}".format(i, choice) for i, choice in enumerate(self.choices, 1) + f"[{i}] {choice}" for i, choice in enumerate(self.choices, 1) ) self.message = ( "Select a language for code generation:\n" diff --git a/src/rpdk/core/init.py b/src/rpdk/core/init.py index 11141a42..712bbfba 100644 --- a/src/rpdk/core/init.py +++ b/src/rpdk/core/init.py @@ -51,7 +51,7 @@ def validate_type_name(value): return value LOG.debug("'%s' did not match '%s'", value, TYPE_NAME_REGEX) raise WizardValidationError( - "Please enter a resource type name matching '{}'".format(TYPE_NAME_REGEX) + f"Please enter a resource type name matching '{TYPE_NAME_REGEX}'" ) @@ -61,7 +61,7 @@ def __init__(self, choices): self.max = len(self.choices) pretty = "\n".join( - "[{}] {}".format(i, choice) for i, choice in enumerate(self.choices, 1) + f"[{i}] {choice}" for i, choice in enumerate(self.choices, 1) ) self.message = ( "Select a language for code generation:\n" diff --git a/src/rpdk/core/invoke.py b/src/rpdk/core/invoke.py index 2df756f6..42d776d8 100644 --- a/src/rpdk/core/invoke.py +++ b/src/rpdk/core/invoke.py @@ -152,14 +152,18 @@ def _setup_invoke_subparser(subparser): "--max-reinvoke", type=int, default=None, - help="Maximum number of IN_PROGRESS re-invocations allowed before " - "exiting. If not specified, will continue to " - "re-invoke until terminal status is reached.", + help=( + "Maximum number of IN_PROGRESS re-invocations allowed before " + "exiting. If not specified, will continue to " + "re-invoke until terminal status is reached." + ), ) subparser.add_argument( "--docker-image", - help="Docker image name to run. If specified, invoke will use docker instead " - "of SAM", + help=( + "Docker image name to run. If specified, invoke will use docker instead " + "of SAM" + ), ) @@ -182,7 +186,10 @@ def setup_subparser(subparsers, parents): hook_parser.add_argument( "action_invocation_point", choices=list(HookInvocationPoint.__members__), - help="The provisioning action invocation point, i.e. which hook handler to invoke.", + help=( + "The provisioning action invocation point, i.e. which hook handler to" + " invoke." + ), ) _setup_invoke_subparser(hook_parser) diff --git a/src/rpdk/core/jsonutils/flattener.py b/src/rpdk/core/jsonutils/flattener.py index 70cd4626..705e1bf9 100644 --- a/src/rpdk/core/jsonutils/flattener.py +++ b/src/rpdk/core/jsonutils/flattener.py @@ -34,7 +34,6 @@ def flatten_schema(self): return self._schema_map def _walk(self, sub_schema, property_path): - # have we already seen this path? 
if property_path in self._schema_map: return {"$ref": property_path} @@ -79,9 +78,7 @@ def _flatten_ref_type(self, ref_path): ref_parts = fragment_decode(ref_path) except ValueError as e: # pylint: disable=W0707 - raise FlatteningError( - "Invalid ref at path '{}': {}".format(ref_path, str(e)) - ) + raise FlatteningError(f"Invalid ref at path '{ref_path}': {str(e)}") ref_schema, ref_parts, _ref_parent = self._find_subschema_by_ref(ref_parts) return self._walk(ref_schema, ref_parts) @@ -160,7 +157,6 @@ def _flatten_combiners(self, sub_schema, path): pass else: for i, nested_schema in enumerate(schema_array): - ref_path = path + (arr_key, i) ref_path_is_used = ref_path in self._schema_map walked_schema = self._walk(nested_schema, ref_path) @@ -187,4 +183,4 @@ def _find_subschema_by_ref(self, ref_path): return traverse(self._full_schema, ref_path) except (LookupError, ValueError): # pylint: disable=W0707 - raise FlatteningError("Invalid ref: {}".format(ref_path)) + raise FlatteningError(f"Invalid ref: {ref_path}") diff --git a/src/rpdk/core/jsonutils/pointer.py b/src/rpdk/core/jsonutils/pointer.py index c7dec219..5e88f0ee 100644 --- a/src/rpdk/core/jsonutils/pointer.py +++ b/src/rpdk/core/jsonutils/pointer.py @@ -93,7 +93,7 @@ def fragment_decode(pointer, prefix="#", output=tuple): decoded = (part_decode(unquote(segment)) for segment in segments) actual = next(decoded) if prefix != actual: - raise ValueError("Expected prefix '{}', but was '{}'".format(prefix, actual)) + raise ValueError(f"Expected prefix '{prefix}', but was '{actual}'") return output(decoded) @@ -117,5 +117,5 @@ def fragment_list(segments, prefix="properties", output=list): decoded = (part_decode(unquote(segment)) for segment in segments) actual = next(decoded) if prefix != actual: - raise ValueError("Expected prefix '{}', but was '{}'".format(prefix, actual)) + raise ValueError(f"Expected prefix '{prefix}', but was '{actual}'") return output(decoded) diff --git a/src/rpdk/core/jsonutils/renamer.py b/src/rpdk/core/jsonutils/renamer.py index 3bec22a6..c3ce4e22 100644 --- a/src/rpdk/core/jsonutils/renamer.py +++ b/src/rpdk/core/jsonutils/renamer.py @@ -9,7 +9,7 @@ def __init__(self, renames=None): self.renames = renames if renames else {} # this generator never completes self.names = ( - name for name in ("schema{}".format(i) for i in count()) + name for name in (f"schema{i}" for i in count()) ) # pragma: no cover def items(self): diff --git a/src/rpdk/core/jsonutils/resolver.py b/src/rpdk/core/jsonutils/resolver.py index e7ee5e05..8b55c374 100644 --- a/src/rpdk/core/jsonutils/resolver.py +++ b/src/rpdk/core/jsonutils/resolver.py @@ -70,8 +70,7 @@ def _get_model_name_from_ref(self, ref_path): return class_name raise ModelResolverError( - "Model name conflict. " - f"'{class_name}' found at {dupe_path} and {ref_path}" + f"Model name conflict.
'{class_name}' found at {dupe_path} and {ref_path}" ) def resolve_models(self): @@ -218,9 +217,7 @@ def base_class_from_ref(ref_path): return uppercase_first_letter(elem.rpartition("/")[2]) raise ModelResolverError( - "Could not create a valid class from schema at '{}'".format( - fragment_encode(ref_path) - ) + f"Could not create a valid class from schema at '{fragment_encode(ref_path)}'" ) diff --git a/src/rpdk/core/module/init_module.py b/src/rpdk/core/module/init_module.py index 130ca109..ba82f491 100644 --- a/src/rpdk/core/module/init_module.py +++ b/src/rpdk/core/module/init_module.py @@ -44,5 +44,5 @@ def validate_type_name(value): return value LOG.debug("'%s' did not match '%s'", value, MODULE_TYPE_NAME_REGEX) raise WizardValidationError( - "Please enter a value matching '{}'".format(MODULE_TYPE_NAME_REGEX) + f"Please enter a value matching '{MODULE_TYPE_NAME_REGEX}'" ) diff --git a/src/rpdk/core/plugin_base.py b/src/rpdk/core/plugin_base.py index d90ba708..35d75d2c 100644 --- a/src/rpdk/core/plugin_base.py +++ b/src/rpdk/core/plugin_base.py @@ -23,7 +23,6 @@ def _module_name(self): return self.MODULE_NAME def _setup_jinja_env(self, **options): - if "loader" not in options: # Try loading module with PEP 451 loaders spec = importlib.util.find_spec(self._module_name) @@ -64,7 +63,8 @@ class ExtensionPlugin(ABC): def command_name(self): if not self.COMMAND_NAME: raise RuntimeError( - "Set COMMAND_NAME to the command you want to extend cfn with: `cfn COMMAND_NAME`." + "Set COMMAND_NAME to the command you want to extend cfn with: `cfn" + " COMMAND_NAME`." ) return self.COMMAND_NAME diff --git a/src/rpdk/core/project.py b/src/rpdk/core/project.py index ae36784c..5b5c1b4e 100644 --- a/src/rpdk/core/project.py +++ b/src/rpdk/core/project.py @@ -122,7 +122,7 @@ def escape_markdown(string): if not string: return string if string[0] in MARKDOWN_RESERVED_CHARACTERS: - return "\\{}".format(string) + return f"\\{string}" return string @@ -181,7 +181,7 @@ def schema_filename(self): @property def configuration_schema_filename(self): - return "{}-configuration.json".format(self.hypenated_name) + return f"{self.hypenated_name}-configuration.json" @property def schema_path(self): @@ -303,7 +303,6 @@ def _write(f): self.safewrite(self.schema_path, _write) def _write_example_inputs(self): - shutil.rmtree(self.example_inputs_path, ignore_errors=True) self.example_inputs_path.mkdir(exist_ok=True) @@ -445,7 +444,8 @@ def load_configuration_schema(self): def write_configuration_schema(self, path): LOG.debug( - "Writing type configuration resource specification from resource specification: %s", + "Writing type configuration resource specification from resource" + " specification: %s", path, ) @@ -551,7 +551,8 @@ def load(self): self.load_settings() except FileNotFoundError as e: self._raise_invalid_project( - f"Project file {self.settings_path} not found. Have you run 'init' or in a wrong directory?", + f"Project file {self.settings_path} not found. 
Have you run 'init' or" + " in a wrong directory?", e, ) @@ -688,7 +689,8 @@ def _add_resources_content_to_zip(self, zip_file): cli_metadata = self._plugin.get_plugin_information(self) except AttributeError: LOG.debug( - "Version info is not available for plugins, not writing to metadata file" + "Version info is not available for plugins, not writing to metadata" + " file" ) cli_metadata["cli-version"] = __version__ zip_file.writestr(CFN_METADATA_FILENAME, json.dumps(cli_metadata)) @@ -724,9 +726,7 @@ def _add_hooks_content_to_zip( if target_info: zip_file.writestr(TARGET_INFO_FILENAME, json.dumps(target_info, indent=4)) for target_name, info in target_info.items(): - filename = "{}.json".format( - "-".join(s.lower() for s in target_name.split("::")) - ) + filename = f'{"-".join(s.lower() for s in target_name.split("::"))}.json' content = json.dumps(info.get("Schema", {}), indent=4).encode("utf-8") zip_file.writestr(TARGET_SCHEMAS_FOLDER + "/" + filename, content) LOG.debug("%s found. Writing to package.", filename) @@ -737,7 +737,8 @@ def _add_hooks_content_to_zip( cli_metadata = self._plugin.get_plugin_information(self) except AttributeError: LOG.debug( - "Version info is not available for plugins, not writing to metadata file" + "Version info is not available for plugins, not writing to metadata" + " file" ) cli_metadata["cli-version"] = __version__ zip_file.writestr(CFN_METADATA_FILENAME, json.dumps(cli_metadata)) @@ -829,7 +830,8 @@ def generate_docs(self): def generate_image_build_config(self): if not hasattr(self._plugin, "generate_image_build_config"): raise InvalidProjectError( - f"Plugin for the {self.runtime} runtime does not support building an image" + f"Plugin for the {self.runtime} runtime does not support building an" + " image" ) return self._plugin.generate_image_build_config(self) @@ -927,13 +929,14 @@ def _set_docs_properties( # noqa: C901 def __join(item1, item2): if not item1 or item2 == item1: return item2 - return "{}, {}".format(item1, item2) + return f"{item1}, {item2}" def __set_property_type(prop_type, single_type=True): nonlocal prop # mark down formatting of the target value - used for complex objects # ($ref) and arrays of such objects + # pylint: disable=unnecessary-lambda-assignment markdown_lambda = ( lambda fname, name: f'{name}' # noqa: B950, C0301 ) @@ -943,7 +946,6 @@ def __set_property_type(prop_type, single_type=True): # primitives should not occur for circular ref; type_json = type_yaml = type_longform = BASIC_TYPE_MAPPINGS[prop_type] elif prop_type == "array": - # lambdas to reuse formatting markdown_json = ( lambda markdown_value: f"[ {markdown_value}, ... 
]" @@ -1113,10 +1115,10 @@ def _upload( except ClientError as e: LOG.debug("Registering type resulted in unknown ClientError", exc_info=e) raise DownstreamError("Unknown CloudFormation error") from e - else: - self._wait_for_registration( - cfn_client, response["RegistrationToken"], set_default - ) + + self._wait_for_registration( + cfn_client, response["RegistrationToken"], set_default + ) @staticmethod def _wait_for_registration(cfn_client, registration_token, set_default): diff --git a/src/rpdk/core/resource/init_resource.py b/src/rpdk/core/resource/init_resource.py index 8e5b3328..f87e4615 100644 --- a/src/rpdk/core/resource/init_resource.py +++ b/src/rpdk/core/resource/init_resource.py @@ -68,7 +68,7 @@ def validate_type_name(value): return value LOG.debug("'%s' did not match '%s'", value, RESOURCE_TYPE_NAME_REGEX) raise WizardValidationError( - "Please enter a value matching '{}'".format(RESOURCE_TYPE_NAME_REGEX) + f"Please enter a value matching '{RESOURCE_TYPE_NAME_REGEX}'" ) @@ -78,7 +78,7 @@ def __init__(self, choices): self.max = len(self.choices) pretty = "\n".join( - "[{}] {}".format(i, choice) for i, choice in enumerate(self.choices, 1) + f"[{i}] {choice}" for i, choice in enumerate(self.choices, 1) ) self.message = ( "Select a language for code generation:\n" diff --git a/src/rpdk/core/submit.py b/src/rpdk/core/submit.py index 27364c6a..8d6acefd 100644 --- a/src/rpdk/core/submit.py +++ b/src/rpdk/core/submit.py @@ -52,8 +52,10 @@ def setup_subparser(subparsers, parents): "--no-role", action="store_false", dest="use_role", - help="Register the type without an explicit execution role " - "(Will not be able to invoke AWS APIs).", + help=( + "Register the type without an explicit execution role " + "(Will not be able to invoke AWS APIs)." + ), ) nodocker_group = parser.add_mutually_exclusive_group() diff --git a/src/rpdk/core/test.py b/src/rpdk/core/test.py index 5d64e755..3fefa31a 100644 --- a/src/rpdk/core/test.py +++ b/src/rpdk/core/test.py @@ -168,7 +168,7 @@ def get_overrides(root, region_name, endpoint_url, role_arn, profile_name): try: RESOURCE_OVERRIDES_VALIDATOR.validate(overrides_raw) except ValidationError as e: - LOG.warning("Override file invalid: %s\n" "No overrides will be applied", e) + LOG.warning("Override file invalid: %s\nNo overrides will be applied", e) return empty_override() overrides = empty_override() @@ -205,7 +205,7 @@ def get_hook_overrides(root, region_name, endpoint_url, role_arn, profile_name): try: HOOK_OVERRIDES_VALIDATOR.validate(overrides_raw) except ValidationError as e: - LOG.warning("Override file invalid: %s\n" "No overrides will be applied", e) + LOG.warning("Override file invalid: %s\nNo overrides will be applied", e) return empty_hook_override() overrides = empty_hook_override() @@ -461,27 +461,38 @@ def setup_subparser(subparsers, parents): parser.add_argument( "--log-group-name", - help="The log group to which contract tests lambda handler logs will be delivered. " - "Specified log group doesn't have to exist as long as log-role-arn specified has logs:CreateLogGroup " - "permission. Need to be used together with --log-role-arn", + help=( + "The log group to which contract tests lambda handler logs will be" + " delivered. Specified log group doesn't have to exist as long as" + " log-role-arn specified has logs:CreateLogGroup permission. Need to be" + " used together with --log-role-arn" + ), ) parser.add_argument( "--log-role-arn", - help="Role for delivering contract tests lambda handler logs. 
Need to be used together with --log-group-name", + help=( + "Role for delivering contract tests lambda handler logs. Need to be used" + " together with --log-group-name" + ), ) parser.add_argument("passed_to_pytest", nargs="*", help=SUPPRESS) parser.add_argument( "--docker-image", - help="Docker image name to run. If specified, invoke will use docker instead " - "of SAM", + help=( + "Docker image name to run. If specified, invoke will use docker instead " + "of SAM" + ), ) parser.add_argument( "--typeconfig", - help="typeConfiguration file to use. Default: '~/.cfn-cli/typeConfiguration.json.'", + help=( + "typeConfiguration file to use. Default:" + " '~/.cfn-cli/typeConfiguration.json.'" + ), ) @@ -505,17 +516,12 @@ def _sam_arguments(parser): parser.add_argument( "--region", default=DEFAULT_REGION, - help=( - "The region used for temporary credentials " f"(Default: {DEFAULT_REGION})" - ), + help=f"The region used for temporary credentials (Default: {DEFAULT_REGION})", ) parser.add_argument( "--profile", default=DEFAULT_PROFILE, - help=( - "The profile used for temporary credentials " - f"(Default: {DEFAULT_PROFILE})" - ), + help=f"The profile used for temporary credentials (Default: {DEFAULT_PROFILE})", ) diff --git a/src/rpdk/core/type_schema_loader.py b/src/rpdk/core/type_schema_loader.py index 764671e7..2771da28 100644 --- a/src/rpdk/core/type_schema_loader.py +++ b/src/rpdk/core/type_schema_loader.py @@ -69,8 +69,9 @@ def load_type_info(self, type_names, local_schemas=None, local_info=None): if "Schema" in target_info: if target_info["Schema"] != schemas[type_name]: raise InvalidTypeSchemaError( - f"Duplicate conflicting schemas for '{type_name}' target type in 'target-info.json' " - f"file and 'target-schemas' directory. " + f"Duplicate conflicting schemas for '{type_name}'" + " target type in 'target-info.json' file and" + " 'target-schemas' directory. " ) else: target_info["Schema"] = schemas[type_name] @@ -86,12 +87,14 @@ def load_type_info(self, type_names, local_schemas=None, local_info=None): ) elif self.local_only: LOG.warning( - "Attempting to load local type info %s with incorrect configuration. Local target schema file or " - "'target-info.json' are required to load local target info", + "Attempting to load local type info %s with incorrect" + " configuration. 
Local target schema file or 'target-info.json' are" + " required to load local target info", type_name, ) raise InvalidTypeSchemaError( - "Local type schema or 'target-info.json' are required to load local type info" + "Local type schema or 'target-info.json' are required to load local" + " type info" ) else: target_info.update( @@ -175,7 +178,8 @@ def _validate_and_load_local_schemas(self, local_schemas): schemas = self.load_type_schemas(local_schemas) else: raise InvalidTypeSchemaError( - "Local Schemas must be either list of schemas to load or mapping of type names to schemas" + "Local Schemas must be either list of schemas to load or mapping of" + " type names to schemas" ) return schemas @@ -221,7 +225,7 @@ def load_type_schema_from_uri(self, schema_uri): @staticmethod def load_type_schema_from_file(schema_path): try: - with open(schema_path, "r") as file: + with open(schema_path, "r", encoding="utf-8") as file: return TypeSchemaLoader.load_type_schema_from_json(file.read()) except FileNotFoundError as e: LOG.debug("Target schema file '%s' not found", schema_path, exc_info=e) @@ -253,7 +257,8 @@ def _get_type_schema_from_s3(self, bucket, key): return self.load_type_schema_from_json(type_schema) except ClientError as err: LOG.debug( - "Getting S3 object in bucket '%s' with key '%s' resulted in unknown ClientError", + "Getting S3 object in bucket '%s' with key '%s' resulted in unknown" + " ClientError", bucket, key, exc_info=err, diff --git a/src/rpdk/core/upload.py b/src/rpdk/core/upload.py index 827c52f7..546a0cde 100644 --- a/src/rpdk/core/upload.py +++ b/src/rpdk/core/upload.py @@ -58,7 +58,7 @@ def _wait_for_stack(self, stack_id, waiter_name, success_msg): stack_id, ) raise UploadError( - "Failed to create or update the '{}' stack".format(stack_id) + f"Failed to create or update the '{stack_id}' stack" ) from e LOG.info(success_msg) @@ -94,7 +94,7 @@ def _create_or_update_stack(self, template, stack_name): Capabilities=["CAPABILITY_IAM"], ) except self.cfn_client.exceptions.AlreadyExistsException: - LOG.info("%s already exists. " "Attempting to update", stack_name) + LOG.info("%s already exists. 
Attempting to update", stack_name) try: result = self.cfn_client.update_stack( **args, Capabilities=["CAPABILITY_IAM"] @@ -107,7 +107,7 @@ def _create_or_update_stack(self, template, stack_name): stack_id = stack_name else: LOG.debug( - "%s stack update " "resulted in unknown ClientError", + "%s stack update resulted in unknown ClientError", stack_name, exc_info=e, ) @@ -117,11 +117,11 @@ def _create_or_update_stack(self, template, stack_name): self._wait_for_stack( stack_id, "stack_update_complete", - "{} stack is up to date".format(stack_name), + f"{stack_name} stack is up to date", ) except ClientError as e: LOG.debug( - "%s stack create " "resulted in unknown ClientError", + "%s stack create resulted in unknown ClientError", stack_name, exc_info=e, ) @@ -131,7 +131,7 @@ def _create_or_update_stack(self, template, stack_name): self._wait_for_stack( stack_id, "stack_create_complete", - "{} stack was successfully created".format(stack_name), + f"{stack_name} stack was successfully created", ) return stack_id @@ -150,9 +150,7 @@ def create_or_update_role(self, template_path, resource_type): ) # pylint: disable=W0707 raise InvalidProjectError() - stack_id = self._create_or_update_stack( - template, "{}-role-stack".format(resource_type) - ) + stack_id = self._create_or_update_stack(template, f"{resource_type}-role-stack") return self._get_stack_output(stack_id, EXECUTION_ROLE_ARN_OUTPUT_NAME) def upload(self, file_prefix, fileobj): @@ -164,7 +162,7 @@ def upload(self, file_prefix, fileobj): ) timestamp = datetime.utcnow().isoformat(timespec="seconds").replace(":", "-") - key = "{}-{}.zip".format(file_prefix, timestamp) + key = f"{file_prefix}-{timestamp}.zip" LOG.debug("Uploading to '%s/%s'...", self.bucket_name, key) try: @@ -175,7 +173,7 @@ def upload(self, file_prefix, fileobj): LOG.debug("Upload complete") - return "s3://{0}/{1}".format(self.bucket_name, key) + return f"s3://{self.bucket_name}/{key}" def get_log_delivery_role_arn(self): return self.log_delivery_role_arn diff --git a/src/rpdk/core/utils/init_utils.py b/src/rpdk/core/utils/init_utils.py index dc1fdede..6394e151 100644 --- a/src/rpdk/core/utils/init_utils.py +++ b/src/rpdk/core/utils/init_utils.py @@ -25,13 +25,13 @@ def init_artifact_type(args=None): except WizardValidationError as error: print_error(error) artifact_type = input_with_validation( - "Do you want to develop a new {}?.".format(INPUT_TYPES_STRING), + f"Do you want to develop a new {INPUT_TYPES_STRING}?.", validate_artifact_type, ) else: artifact_type = input_with_validation( - "Do you want to develop a new {}?.".format(INPUT_TYPES_STRING), + f"Do you want to develop a new {INPUT_TYPES_STRING}?.", validate_artifact_type, ) @@ -68,9 +68,7 @@ def validate_artifact_type(value): return ARTIFACT_TYPE_MODULE if value.lower() in VALID_HOOKS_REPRESENTATION: return ARTIFACT_TYPE_HOOK - raise WizardValidationError( - "Please enter a value matching {}".format(INPUT_TYPES_STRING) - ) + raise WizardValidationError(f"Please enter a value matching {INPUT_TYPES_STRING}") def validate_yes(value): diff --git a/tests/contract/test_hook_client.py b/tests/contract/test_hook_client.py index a22e2f84..e9269c0a 100644 --- a/tests/contract/test_hook_client.py +++ b/tests/contract/test_hook_client.py @@ -56,7 +56,10 @@ "handlers": {"create": {}, "delete": {}, "read": {}}, } -HOOK_CONFIGURATION = '{"CloudFormationConfiguration": {"HookConfiguration": {"Properties": {"key": "value"}}}}' +HOOK_CONFIGURATION = ( + '{"CloudFormationConfiguration": {"HookConfiguration": {"Properties": {"key":' 
+ ' "value"}}}}' +) HOOK_TARGET_INFO = { "My::Example::Resource": { @@ -346,7 +349,6 @@ def test_get_handler_target_multiple_targets(hook_client): def test_get_handler_target_no_targets(hook_client): - schema = {"handlers": {"preCreate": {"permissions": []}}} hook_client._update_schema(schema) TestCase().assertFalse( @@ -734,7 +736,9 @@ def test_call_docker(): return_value=ACCOUNT, ) patch_docker = patch("rpdk.core.contract.hook_client.docker", autospec=True) - with patch_sesh as mock_create_sesh, patch_docker as mock_docker, patch_creds, patch_config: + with patch_sesh as mock_create_sesh, patch_docker as mock_docker, ( + patch_creds + ), patch_config: with patch_account: mock_client = mock_docker.from_env.return_value mock_sesh = mock_create_sesh.return_value @@ -750,8 +754,7 @@ def test_call_docker(): ) hook_client._type_name = HOOK_TYPE_NAME response_str = ( - "__CFN_HOOK_START_RESPONSE__" - '{"hookStatus": "SUCCESS"}__CFN_HOOK_END_RESPONSE__' + '__CFN_HOOK_START_RESPONSE__{"hookStatus": "SUCCESS"}__CFN_HOOK_END_RESPONSE__' ) mock_client.containers.run.return_value = str.encode(response_str) with patch_creds, patch_config: @@ -900,7 +903,8 @@ def test_call_and_assert_failed(hook_client): mock_client = hook_client._client mock_client.invoke.return_value = { "Payload": StringIO( - '{"hookStatus": "FAILED","errorCode": "NotFound", "message": "I have failed you"}' + '{"hookStatus": "FAILED","errorCode": "NotFound", "message": "I have failed' + ' you"}' ) } with patch_creds, patch_config: diff --git a/tests/contract/test_resource_client.py b/tests/contract/test_resource_client.py index e29a2cdf..5182ac03 100644 --- a/tests/contract/test_resource_client.py +++ b/tests/contract/test_resource_client.py @@ -407,8 +407,8 @@ def test_error_test_model_in_list(resource_client): resource_client, current_resource_model, "" ) assert ( - "abc123 does not match with Current Resource Model primary identifier xyz456" - in assertion_error_message + "abc123 does not match with Current Resource Model primary identifier" + " xyz456" in assertion_error_message ) @@ -1323,7 +1323,8 @@ def test_call_and_assert_fails(resource_client_no_handler): ) except ValueError: LOG.debug( - "Value Error Exception is expected when required CRD handlers are not present" + "Value Error Exception is expected when required CRD handlers are not" + " present" ) diff --git a/tests/contract/test_type_configuration.py b/tests/contract/test_type_configuration.py index 6c4b4528..7a022129 100644 --- a/tests/contract/test_type_configuration.py +++ b/tests/contract/test_type_configuration.py @@ -12,9 +12,15 @@ TYPE_CONFIGURATION_INVALID = '{"Credentials" :{"ApiKey": "123", xxxx}}' -HOOK_CONFIGURATION_TEST_SETTING = '{"CloudFormationConfiguration": {"HookConfiguration": {"Properties": {"Credentials" :{"ApiKey": "123", "ApplicationKey": "123"}}}}}' +HOOK_CONFIGURATION_TEST_SETTING = ( + '{"CloudFormationConfiguration": {"HookConfiguration": {"Properties":' + ' {"Credentials" :{"ApiKey": "123", "ApplicationKey": "123"}}}}}' +) -HOOK_CONFIGURATION_INVALID = '{"CloudFormationConfiguration": {"TypeConfiguration": {"Properties": {"Credentials" :{"ApiKey": "123", "ApplicationKey": "123"}}}}}' +HOOK_CONFIGURATION_INVALID = ( + '{"CloudFormationConfiguration": {"TypeConfiguration": {"Properties":' + ' {"Credentials" :{"ApiKey": "123", "ApplicationKey": "123"}}}}}' +) def setup_function(): diff --git a/tests/jsonutils/test_inliner.py b/tests/jsonutils/test_inliner.py index b4eadb68..33c656ae 100644 --- a/tests/jsonutils/test_inliner.py +++ 
b/tests/jsonutils/test_inliner.py @@ -113,7 +113,7 @@ def test_refinliner_remote_refs_on_filesystem_are_inlined(tmpdir): filename = tmpdir.mkdir("bar").join("remote.json") with filename.open("w", encoding="utf-8") as f: json.dump(remote, f) - base_uri = "file://{}/foo/".format(tmpdir.strpath) + base_uri = f"file://{tmpdir.strpath}/foo/" ref = "../bar/remote.json#/nested/bar" inliner = make_inliner( {"type": "object", "properties": {"foo": {"$ref": ref}}}, base_uri=base_uri diff --git a/tests/test_cli.py b/tests/test_cli.py index 2d3153f0..7aed3432 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -130,7 +130,7 @@ def test_main_unhandled_exception_before_logging(capsys): def test_main_unhandled_exception_after_logging(capsys): def raise_exception(_args): - raise Exception + raise Exception # pylint: disable=broad-exception-raised def setup_subparser(subparsers, parents): parser = subparsers.add_parser("fail", parents=parents) diff --git a/tests/test_command_commons.py b/tests/test_command_commons.py index 075fc664..09ba0c81 100644 --- a/tests/test_command_commons.py +++ b/tests/test_command_commons.py @@ -10,7 +10,7 @@ "command", ["init", "generate", "submit", "validate", "test", "invoke"] ) def test_command_help(capsys, command): - with patch("rpdk.core.{0}.{0}".format(command), autospec=True) as mock_func: + with patch(f"rpdk.core.{command}.{command}", autospec=True) as mock_func: with pytest.raises(SystemExit) as excinfo: main(args_in=[command, "--help"]) assert excinfo.value.code != EXIT_UNHANDLED_EXCEPTION @@ -21,7 +21,7 @@ def test_command_help(capsys, command): @pytest.mark.parametrize("command", ["invoke"]) def test_command_with_required_params(capsys, command): - with patch("rpdk.core.{0}.{0}".format(command), autospec=True) as mock_func: + with patch(f"rpdk.core.{command}.{command}", autospec=True) as mock_func: with pytest.raises(SystemExit) as excinfo: main(args_in=[command]) assert excinfo.value.code != EXIT_UNHANDLED_EXCEPTION @@ -34,7 +34,7 @@ def test_command_with_required_params(capsys, command): def test_command_default(command): mock_project = Mock(spec=Project) with patch( - "rpdk.core.{0}.Project".format(command), + f"rpdk.core.{command}.Project", autospec=True, return_value=mock_project, ): diff --git a/tests/test_data_loaders.py b/tests/test_data_loaders.py index 98a64e80..265514c4 100644 --- a/tests/test_data_loaders.py +++ b/tests/test_data_loaders.py @@ -151,7 +151,8 @@ def test_load_resource_spec_conditionally_create_only_match_create_only(): load_resource_spec(json_s(schema)) assert ( str(excinfo.value) - == "createOnlyProperties and conditionalCreateOnlyProperties MUST NOT have common properties" + == "createOnlyProperties and conditionalCreateOnlyProperties MUST NOT have" + " common properties" ) @@ -169,7 +170,8 @@ def test_load_resource_spec_conditionally_create_only_match_read_only(): load_resource_spec(json_s(schema)) assert ( str(excinfo.value) - == "readOnlyProperties and conditionalCreateOnlyProperties MUST NOT have common properties" + == "readOnlyProperties and conditionalCreateOnlyProperties MUST NOT have common" + " properties" ) diff --git a/tests/test_extensions.py b/tests/test_extensions.py index c9443e77..7a787d7e 100644 --- a/tests/test_extensions.py +++ b/tests/test_extensions.py @@ -6,7 +6,7 @@ class ExtensionTest(TestCase): - def test_setup_subparsers(self): # pylint: disable=no-self-use + def test_setup_subparsers(self): expeted_command_name = "expected-command-name" mock_extension = MagicMock() @@ -56,5 +56,6 @@ def 
test_setup_subparsers_should_raise_error_when_collision_occur(self): assert ( str(context.exception) - == '"command-name" is already registered as an extension. Please use a different name.' + == '"command-name" is already registered as an extension. Please use a' + " different name." ) diff --git a/tests/test_project.py b/tests/test_project.py index 58e66e7a..d9337ca5 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -59,7 +59,6 @@ "java8", "java11", "go1.x", - "python3.7", "python3.8", "python3.9", "dotnetcore2.1", @@ -1174,7 +1173,7 @@ def create_hook_input_file(base): def _get_target_schema_filename(target_name): - return "{}.json".format("-".join(s.lower() for s in target_name.split("::"))) + return f'{"-".join(s.lower() for s in target_name.split("::"))}.json' def create_target_schema_file(base, target_schema): @@ -1235,7 +1234,7 @@ def test_submit_dry_run(project, is_type_configuration_available): # fmt: on mock_temp.assert_not_called() - mock_path.assert_called_with("{}.zip".format(project.hypenated_name)) + mock_path.assert_called_with(f"{project.hypenated_name}.zip") mock_plugin.package.assert_called_once_with(project, ANY) mock_upload.assert_not_called() @@ -1336,7 +1335,7 @@ def test_submit_dry_run_modules(project): # fmt: on mock_temp.assert_not_called() - mock_path.assert_called_with("{}.zip".format(project.hypenated_name)) + mock_path.assert_called_with(f"{project.hypenated_name}.zip") mock_plugin.package.assert_not_called() mock_upload.assert_not_called() @@ -1402,7 +1401,7 @@ def test_submit_dry_run_hooks(project): # fmt: on mock_temp.assert_not_called() - mock_path.assert_called_with("{}.zip".format(project.hypenated_name)) + mock_path.assert_called_with(f"{project.hypenated_name}.zip") mock_plugin.package.assert_called_once_with(project, ANY) mock_upload.assert_not_called() @@ -1528,7 +1527,7 @@ def test_submit_dry_run_hooks_with_target_info(project, session): # fmt: on mock_temp.assert_not_called() - mock_path.assert_called_with("{}.zip".format(project.hypenated_name)) + mock_path.assert_called_with(f"{project.hypenated_name}.zip") mock_plugin.package.assert_called_once_with(project, ANY) mock_upload.assert_not_called() @@ -1833,7 +1832,7 @@ def test__upload_good_path_create_role_and_set_default_hook(project): @pytest.mark.parametrize( - ("use_role,expected_additional_args"), + "use_role,expected_additional_args", [(True, {"ExecutionRoleArn": "someArn"}), (False, {})], ) def test__upload_good_path_skip_role_creation( @@ -1888,7 +1887,7 @@ def test__upload_good_path_skip_role_creation( @pytest.mark.parametrize( - ("use_role,expected_additional_args"), + "use_role,expected_additional_args", [(True, {"ExecutionRoleArn": "someArn"}), (False, {})], ) def test__upload_good_path_skip_role_creation_hook( @@ -2353,11 +2352,11 @@ def test__load_target_info_for_hooks(project): "primaryIdentifier": ["/properties/Name"], "additionalProperties": False, }, - "ProvisioningType": test_type_info[target_name]["ProvisioningType"], + "ProvisioningType": target_value["ProvisioningType"], "IsCfnRegistrySupportedType": True, "SchemaFileAvailable": True, } - for target_name in test_type_info + for target_name, target_value in test_type_info.items() }, ) @@ -2505,11 +2504,11 @@ def test__load_target_info_for_hooks_local_only(project): "primaryIdentifier": ["/properties/Name"], "additionalProperties": False, }, - "ProvisioningType": test_type_info[target_name]["ProvisioningType"], + "ProvisioningType": target_value["ProvisioningType"], "IsCfnRegistrySupportedType": True, 
"SchemaFileAvailable": True, } - for target_name in test_type_info + for target_name, target_value in test_type_info.items() }, ) @@ -2528,7 +2527,9 @@ def test__load_target_info_for_hooks_local_only(project): patch_is_file = patch("os.path.isfile", return_value=True) # pylint: disable=line-too-long,confusing-with-statement - with patch_sdk as mock_sdk, patch_loader as mock_loader, patch_is_dir, patch_list_dir, patch_path_is_file, patch_is_file: + with patch_sdk as mock_sdk, patch_loader as mock_loader, ( + patch_is_dir + ), patch_list_dir, patch_path_is_file, patch_is_file: mock_sdk.return_value.region_name = "us-east-1" mock_sdk.return_value.client.side_effect = [MagicMock(), MagicMock()] project.target_info_path.open.return_value.__enter__.return_value = StringIO( diff --git a/tests/test_type_name_resolver.py b/tests/test_type_name_resolver.py index 45d61133..ffd25baa 100644 --- a/tests/test_type_name_resolver.py +++ b/tests/test_type_name_resolver.py @@ -35,7 +35,9 @@ def list_types_result(type_names): { "Type": "RESOURCE", "TypeName": type_name, - "TypeArn": f'arn:aws:cloudformation:us-east-1:123456789012:type/resource/{type_name.replace("::", "-")}', + "TypeArn": ( + f'arn:aws:cloudformation:us-east-1:123456789012:type/resource/{type_name.replace("::", "-")}' + ), } for type_name in type_names ] @@ -249,6 +251,6 @@ def test_resolve_type_names_locally_no_local_info(resolver): def test_create_list_types_request(type_names, expected): req = TypeNameResolver._create_list_types_request(type_names) if not expected: - assert req == {} + assert not req else: assert req == {"Filters": {"TypeNamePrefix": expected}} diff --git a/tests/test_type_schema_loader.py b/tests/test_type_schema_loader.py index 03c74c74..e416e16c 100644 --- a/tests/test_type_schema_loader.py +++ b/tests/test_type_schema_loader.py @@ -36,14 +36,10 @@ def get_test_schema(type_name): TEST_TARGET_SCHEMA_BUCKET = "TestTargetSchemaBucket" TEST_TARGET_SCHEMA_KEY = "test-target-schema.json" -TEST_TARGET_SCHEMA_FILE_PATH = "/files/{}".format(TEST_TARGET_SCHEMA_KEY) -TEST_TARGET_SCHEMA_FILE_URI = "file://{}".format(TEST_TARGET_SCHEMA_FILE_PATH) -TEST_S3_TARGET_SCHEMA_URI = "s3://{}/{}".format( - TEST_TARGET_SCHEMA_BUCKET, TEST_TARGET_SCHEMA_KEY -) -TEST_HTTPS_TARGET_SCHEMA_URI = "https://{}.s3.us-west-2.amazonaws.com/{}".format( - TEST_TARGET_SCHEMA_BUCKET, TEST_TARGET_SCHEMA_KEY -) +TEST_TARGET_SCHEMA_FILE_PATH = f"/files/{TEST_TARGET_SCHEMA_KEY}" +TEST_TARGET_SCHEMA_FILE_URI = f"file://{TEST_TARGET_SCHEMA_FILE_PATH}" +TEST_S3_TARGET_SCHEMA_URI = f"s3://{TEST_TARGET_SCHEMA_BUCKET}/{TEST_TARGET_SCHEMA_KEY}" +TEST_HTTPS_TARGET_SCHEMA_URI = f"https://{TEST_TARGET_SCHEMA_BUCKET}.s3.us-west-2.amazonaws.com/{TEST_TARGET_SCHEMA_KEY}" # pylint: disable=C0103 @@ -57,7 +53,9 @@ def get_test_type_info(type_name, visibility, provisioning_type): "TargetName": type_name, "TargetType": "RESOURCE", "Type": "RESOURCE", - "Arn": f'arn:aws:cloudformation:us-east-1:12345678902:type:resource:{type_name.replace("::", "-")}', + "Arn": ( + f'arn:aws:cloudformation:us-east-1:12345678902:type:resource:{type_name.replace("::", "-")}' + ), "IsDefaultVersion": True, "Description": "Test Schema", "ProvisioningType": provisioning_type, @@ -69,7 +67,9 @@ def get_test_type_info(type_name, visibility, provisioning_type): def describe_type_result(type_name, visibility, provisioning_type): return { - "Arn": f'arn:aws:cloudformation:us-east-1:12345678902:type:resource:{type_name.replace("::", "-")}', + "Arn": ( + 
f'arn:aws:cloudformation:us-east-1:12345678902:type:resource:{type_name.replace("::", "-")}' + ), "Type": "RESOURCE", "TypeName": type_name, "IsDefaultVersion": True, @@ -374,8 +374,8 @@ def test_load_type_info_invalid_local_schemas(loader): loader.load_type_info(type_names, local_schemas=0) assert ( - "Local Schemas must be either list of schemas to load or mapping of type names to schemas" - in str(excinfo.value) + "Local Schemas must be either list of schemas to load or mapping of type names" + " to schemas" in str(excinfo.value) ) @@ -474,7 +474,9 @@ def test_load_type_schemas(loader): mock_path_is_file.assert_any_call(TEST_TARGET_SCHEMA_FILE_PATH) mock_load_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH) - mock_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH, "r") + mock_file.assert_called_with( + TEST_TARGET_SCHEMA_FILE_PATH, "r", encoding="utf-8" + ) mock_get_from_url.assert_called_with(TEST_HTTPS_TARGET_SCHEMA_URI) mock_get_request.assert_called_with(TEST_HTTPS_TARGET_SCHEMA_URI, timeout=60) @@ -555,7 +557,7 @@ def test_load_type_schema_from_file(loader): assert_dict_equals(TEST_TARGET_SCHEMA, type_schema) mock_path_is_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH) mock_load_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH) - mock_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH, "r") + mock_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH, "r", encoding="utf-8") def test_load_type_schema_from_file_file_not_found(loader): @@ -574,7 +576,7 @@ def test_load_type_schema_from_file_file_not_found(loader): mock_path_is_file.assert_has_calls(calls=[call(TEST_TARGET_SCHEMA_FILE_PATH)]) mock_load_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH) - mock_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH, "r") + mock_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH, "r", encoding="utf-8") assert excinfo.value.__cause__ is e @@ -593,7 +595,7 @@ def test_load_type_schema_from_file_uri(loader): assert_dict_equals(TEST_TARGET_SCHEMA, type_schema) mock_load_from_uri.assert_called_with(TEST_TARGET_SCHEMA_FILE_URI) mock_load_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH) - mock_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH, "r") + mock_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH, "r", encoding="utf-8") def test_load_type_schema_from_file_uri_file_not_found(loader): @@ -613,7 +615,7 @@ def test_load_type_schema_from_file_uri_file_not_found(loader): mock_load_from_uri.assert_called_with(TEST_TARGET_SCHEMA_FILE_URI) mock_load_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH) - mock_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH, "r") + mock_file.assert_called_with(TEST_TARGET_SCHEMA_FILE_PATH, "r", encoding="utf-8") assert excinfo.value.__cause__ is e @@ -796,8 +798,8 @@ def test_load_type_schemas_invalid_schema_format(loader): "ftp://unsupportedurlschema.com/test-schema.json" ) assert ( - "Provided schema is invalid or not supported: ftp://unsupportedurlschema.com/test-schema.json" - in str(excinfo.value) + "Provided schema is invalid or not supported:" + " ftp://unsupportedurlschema.com/test-schema.json" in str(excinfo.value) ) diff --git a/tests/test_upload.py b/tests/test_upload.py index 72f5ae43..19ed8c84 100644 --- a/tests/test_upload.py +++ b/tests/test_upload.py @@ -177,7 +177,7 @@ def test_upload_s3_success(uploader): mock_stack.assert_called_once_with(ANY, INFRA_STACK_NAME) mock_time.utcnow.assert_called_once_with() - expected_key = "{}-2004-11-17T20-54-33.zip".format(CONTENTS_UTF8) + expected_key = 
f"{CONTENTS_UTF8}-2004-11-17T20-54-33.zip" uploader.s3_client.upload_fileobj.assert_called_once_with( fileobj, BUCKET_OUTPUT_VALUE, expected_key ) diff --git a/tests/utils.py b/tests/utils.py index 918b2765..9071e717 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -58,7 +58,7 @@ def random_type_name(): - return "Test::{0}::{1}".format(*sample(NAMES, 2)) + return f"Test::{NAMES[0]}::{NAMES[1]}" def random_name():