From 134285ef2134965dd0255e6605e22b6be46e6e84 Mon Sep 17 00:00:00 2001
From: Mark Chappell
Date: Sat, 17 Dec 2022 11:34:20 +0100
Subject: [PATCH] [6.0.0] s3_object - Remove support for creation/deletion of
 buckets. (#1302)

[6.0.0] s3_object - Remove support for creation/deletion of buckets.

SUMMARY

We previously deprecated support for creation/deletion of buckets using the
s3_object module (s3_bucket performs the action much better). That support
was slated for removal in 6.0.0.

Fixes #1112

ISSUE TYPE

Feature Pull Request

COMPONENT NAME

s3_object

ADDITIONAL INFORMATION

Reviewed-by: GomathiselviS
Reviewed-by: Alina Buzachis
---
 .../1112-s3_object-delete-create.yml          |   4 +
 plugins/modules/s3_object.py                  | 307 ++++--------------
 .../targets/s3_object/tasks/copy_object.yml   |  21 +-
 .../tasks/copy_object_acl_disabled_bucket.yml |   6 -
 .../targets/s3_object/tasks/delete_bucket.yml |   6 +-
 .../targets/s3_object/tasks/main.yml          |  82 +----
 tests/sanity/ignore-2.11.txt                  |   1 -
 tests/sanity/ignore-2.12.txt                  |   1 -
 tests/sanity/ignore-2.13.txt                  |   1 -
 tests/sanity/ignore-2.14.txt                  |   1 -
 tests/sanity/ignore-2.15.txt                  |   1 -
 tests/unit/plugins/modules/test_s3_object.py  |  24 --
 12 files changed, 101 insertions(+), 354 deletions(-)
 create mode 100644 changelogs/fragments/1112-s3_object-delete-create.yml

diff --git a/changelogs/fragments/1112-s3_object-delete-create.yml b/changelogs/fragments/1112-s3_object-delete-create.yml
new file mode 100644
index 00000000000..5b1ac95e0ee
--- /dev/null
+++ b/changelogs/fragments/1112-s3_object-delete-create.yml
@@ -0,0 +1,4 @@
+removed_features:
+- s3_object - support for creating and deleting buckets using the ``s3_object`` module has been removed.
+  S3 buckets can be created and deleted using the ``amazon.aws.s3_bucket`` module
+  (https://github.com/ansible-collections/amazon.aws/issues/1112).
diff --git a/plugins/modules/s3_object.py b/plugins/modules/s3_object.py
index 0117ea682f1..26fb97e38af 100644
--- a/plugins/modules/s3_object.py
+++ b/plugins/modules/s3_object.py
@@ -11,8 +11,6 @@
   - This module allows the user to manage the objects and directories within S3 buckets. Includes
     support for creating and deleting objects and directories, retrieving objects as files or
     strings, generating download links and copying objects that are already stored in Amazon S3.
-  - Support for creating or deleting S3 buckets with this module has been deprecated and will be
-    removed in release 6.0.0.
   - S3 buckets can be created or deleted using the M(amazon.aws.s3_bucket) module.
   - Compatible with AWS, DigitalOcean, Ceph, Walrus, FakeS3 and StorageGRID.
   - When using non-AWS services, I(endpoint_url) should be specified.
@@ -80,14 +78,12 @@
     - 'C(getstr): download object as string'
     - 'C(list): list keys'
     - 'C(create): create bucket directories'
-    - 'C(delete): delete bucket directories'
     - 'C(delobj): delete object'
    - 'C(copy): copy object that is already stored in another bucket'
-    - Support for creating and deleting buckets has been deprecated and will
-      be removed in release 6.0.0. To create and manage the bucket itself
-      please use the M(amazon.aws.s3_bucket) module.
+    - Support for creating and deleting buckets was removed in release 6.0.0.
+      To create and manage the bucket itself, please use the M(amazon.aws.s3_bucket) module.
     required: true
-    choices: ['get', 'put', 'delete', 'create', 'geturl', 'getstr', 'delobj', 'list', 'copy']
+    choices: ['get', 'put', 'create', 'geturl', 'getstr', 'delobj', 'list', 'copy']
     type: str
   object:
     description:
@@ -240,6 +236,7 @@
   - Support for the C(S3_URL) environment variable has been deprecated and will be removed
     in a release after 2024-12-01, please use the I(endpoint_url) parameter or the C(AWS_URL)
     environment variable.
+  - Support for creating and deleting buckets was removed in release 6.0.0.
 extends_documentation_fragment:
   - amazon.aws.aws
   - amazon.aws.ec2
@@ -528,71 +525,29 @@ def is_local_object_latest(s3, bucket, obj, version=None, local_file=None):
 def bucket_check(module, s3, bucket, validate=True):
     try:
         s3.head_bucket(Bucket=bucket)
-    except is_boto3_error_code("404"):
-        return False
+    except is_boto3_error_code("404") as e:
+        if validate:
+            raise S3ObjectFailure(
+                f"Bucket '{bucket}' not found (during bucket_check). "
+                "Support for automatically creating buckets was removed in release 6.0.0. "
+                "The amazon.aws.s3_bucket module can be used to create buckets.",
+                e,
+            )
     except is_boto3_error_code("403") as e:  # pylint: disable=duplicate-except
         if validate:
-            module.fail_json_aws(
+            raise S3ObjectFailure(
+                f"Permission denied accessing bucket '{bucket}' (during bucket_check).",
                 e,
-                msg="Failed while looking up bucket (during bucket_check) %s."
-                % bucket,
             )
     except (
         botocore.exceptions.BotoCoreError,
         botocore.exceptions.ClientError,
     ) as e:  # pylint: disable=duplicate-except
         raise S3ObjectFailure(
-            "Failed while looking up bucket (during bucket_check) %s."
+            "Failed while looking up bucket '%s' (during bucket_check)."
             % bucket,
             e,
         )
-    return True
-
-
-def create_bucket(module, s3, bucket, location=None):
-    module.deprecate(
-        "Support for creating S3 buckets using the s3_object module"
-        " has been deprecated. Please use the ``s3_bucket`` module"
-        " instead.",
-        version="6.0.0",
-        collection_name="amazon.aws",
-    )
-    if module.check_mode:
-        module.exit_json(
-            msg="CREATE operation skipped - running in check mode",
-            changed=True,
-        )
-    configuration = {}
-    if location not in ("us-east-1", None):
-        configuration["LocationConstraint"] = location
-    try:
-        if len(configuration) > 0:
-            s3.create_bucket(
-                Bucket=bucket, CreateBucketConfiguration=configuration
-            )
-        else:
-            s3.create_bucket(Bucket=bucket)
-        if module.params.get("permission"):
-            # Wait for the bucket to exist before setting ACLs
-            s3.get_waiter("bucket_exists").wait(Bucket=bucket)
-        for acl in module.params.get("permission"):
-            AWSRetry.jittered_backoff(
-                max_delay=120, catch_extra_error_codes=["NoSuchBucket"]
-            )(s3.put_bucket_acl)(ACL=acl, Bucket=bucket)
-    except is_boto3_error_code(IGNORE_S3_DROP_IN_EXCEPTIONS):
-        module.warn(
-            "PutBucketAcl is not implemented by your storage provider. Set the permission parameters to the empty list to avoid this warning"
-        )
-    except (
-        botocore.exceptions.BotoCoreError,
-        botocore.exceptions.ClientError,
-    ) as e:  # pylint: disable=duplicate-except
-        raise S3ObjectFailure(
-            "Failed while creating bucket or setting acl (check that you have CreateBucket and PutBucketAcl permission).",
-            e,
-        )
-    if bucket:
-        return True
 
 
 def paginated_list(s3, **pagination_params):
@@ -643,38 +598,6 @@ def list_keys(module, s3, bucket, prefix, marker, max_keys):
     )
 
 
-def delete_bucket(module, s3, bucket):
-    module.deprecate(
-        "Support for deleting S3 buckets using the s3_object module"
-        " has been deprecated. Please use the ``s3_bucket`` module"
-        " instead.",
-        version="6.0.0",
-        collection_name="amazon.aws",
-    )
-    if module.check_mode:
-        module.exit_json(
-            msg="DELETE operation skipped - running in check mode",
-            changed=True,
-        )
-    try:
-        exists = bucket_check(module, s3, bucket)
-        if not exists:
-            return False
-        # if there are contents then we need to delete them before we can delete the bucket
-        for keys in paginated_versioned_list_with_fallback(s3, Bucket=bucket):
-            if keys:
-                s3.delete_objects(Bucket=bucket, Delete={"Objects": keys})
-        s3.delete_bucket(Bucket=bucket)
-        return True
-    except is_boto3_error_code("NoSuchBucket"):
-        return False
-    except (
-        botocore.exceptions.ClientError,
-        botocore.exceptions.BotoCoreError,
-    ) as e:  # pylint: disable=duplicate-except
-        raise S3ObjectFailure("Failed while deleting bucket %s." % bucket, e)
-
-
 def delete_key(module, s3, bucket, obj):
     if module.check_mode:
         module.exit_json(
@@ -1297,23 +1220,14 @@ def s3_object_do_put(module, connection, s3_vars):
             % (s3_vars["src"])
         )
 
-    keyrtn = None
-    if s3_vars["bucketrtn"]:
-        keyrtn = key_check(
-            module,
-            connection,
-            s3_vars["bucket"],
-            s3_vars["object"],
-            version=s3_vars["version"],
-            validate=s3_vars["validate"],
-        )
-    else:
-        # If the bucket doesn't exist we should create it.
-        # only use valid bucket acls for create_bucket function
-        s3_vars["permission"] = s3_vars["bucket_acl"]
-        create_bucket(
-            module, connection, s3_vars["bucket"], s3_vars["location"]
-        )
+    keyrtn = key_check(
+        module,
+        connection,
+        s3_vars["bucket"],
+        s3_vars["object"],
+        version=s3_vars["version"],
+        validate=s3_vars["validate"],
+    )
 
     # the content will be uploaded as a byte string, so we must encode it first
     bincontent = get_binary_content(s3_vars)
@@ -1376,93 +1290,44 @@ def s3_object_do_delobj(module, connection, s3_vars):
         module.fail_json(msg="Bucket parameter is required.")
 
 
-def s3_object_do_delete(module, connection, s3_vars):
-    if not s3_vars.get("bucket"):
-        module.fail_json(msg="Bucket parameter is required.")
-    elif s3_vars["bucket"] and delete_bucket(
-        module, connection, s3_vars["bucket"]
-    ):
-        # Delete an entire bucket, including all objects in the bucket
-        module.exit_json(
-            msg="Bucket %s and all keys have been deleted."
-            % s3_vars["bucket"],
-            changed=True,
-        )
-
-
 def s3_object_do_list(module, connection, s3_vars):
-    # If the bucket does not exist then bail out
-    if not s3_vars.get("bucketrtn"):
-        module.fail_json(
-            msg="Target bucket (%s) cannot be found" % s3_vars["bucket"]
-        )
-    else:
-        list_keys(
-            module,
-            connection,
-            s3_vars["bucket"],
-            s3_vars["prefix"],
-            s3_vars["marker"],
-            s3_vars["max_keys"],
-        )
+    # missing buckets are caught by bucket_check in validate_bucket
+    list_keys(
+        module,
+        connection,
+        s3_vars["bucket"],
+        s3_vars["prefix"],
+        s3_vars["marker"],
+        s3_vars["max_keys"],
+    )
 
 
 def s3_object_do_create(module, connection, s3_vars):
-    # if both creating a bucket and putting an object in it, acls for the bucket and/or the object may be specified
-    # these were separated above into the variables bucket_acl and object_acl
-    if s3_vars["bucket"] and not s3_vars["object"]:
-        if s3_vars["bucketrtn"]:
-            module.exit_json(msg="Bucket already exists.", changed=False)
-        # only use valid bucket acls when creating the bucket
-        s3_vars["permission"] = s3_vars["bucket_acl"]
+    # acls for the directory key may be specified; these were separated
+    # above into the variable object_acl
+    if not s3_vars["object"].endswith("/"):
+        s3_vars["object"] = s3_vars["object"] + "/"
+
+    if key_check(
+        module, connection, s3_vars["bucket"], s3_vars["object"]
+    ):
         module.exit_json(
-            msg="Bucket created successfully",
-            changed=create_bucket(
-                module, connection, s3_vars["bucket"], s3_vars["location"]
-            ),
+            msg="Bucket %s and key %s already exists."
+            % (s3_vars["bucket"], s3_vars["object"]),
+            changed=False,
         )
-    if s3_vars["bucket"] and s3_vars["object"]:
-        if not s3_vars["object"].endswith("/"):
-            s3_vars["object"] = s3_vars["object"] + "/"
-
-        if s3_vars["bucketrtn"]:
-            if key_check(
-                module, connection, s3_vars["bucket"], s3_vars["object"]
-            ):
-                module.exit_json(
-                    msg="Bucket %s and key %s already exists."
-                    % (s3_vars["bucket"], s3_vars["object"]),
-                    changed=False,
-                )
-            if not s3_vars["acl_disabled"]:
-                # setting valid object acls for the create_dirkey function
-                s3_vars["permission"] = s3_vars["object_acl"]
-            create_dirkey(
-                module,
-                connection,
-                s3_vars["bucket"],
-                s3_vars["object"],
-                s3_vars["encrypt"],
-                s3_vars["expiry"],
-            )
-        else:
-            # only use valid bucket acls for the create_bucket function
-            s3_vars["permission"] = s3_vars["bucket_acl"]
-            create_bucket(
-                module, connection, s3_vars["bucket"], s3_vars["location"]
-            )
-            if not s3_vars["acl_disabled"]:
-                # only use valid object acls for the create_dirkey function
-                s3_vars["permission"] = s3_vars["object_acl"]
-            create_dirkey(
-                module,
-                connection,
-                s3_vars["bucket"],
-                s3_vars["object"],
-                s3_vars["encrypt"],
-                s3_vars["expiry"],
-            )
+    if not s3_vars["acl_disabled"]:
+        # setting valid object acls for the create_dirkey function
+        s3_vars["permission"] = s3_vars["object_acl"]
+    create_dirkey(
+        module,
+        connection,
+        s3_vars["bucket"],
+        s3_vars["object"],
+        s3_vars["encrypt"],
+        s3_vars["expiry"],
+    )
 
 
 def s3_object_do_geturl(module, connection, s3_vars):
@@ -1541,16 +1406,7 @@ def s3_object_do_getstr(module, connection, s3_vars):
 def s3_object_do_copy(module, connection, s3_vars):
-    # if copying an object in a bucket yet to be created, acls for the bucket and/or the object may be specified
-    # these were separated into the variables bucket_acl and object_acl above
-    d_etag = None
-    if s3_vars["bucketrtn"]:
-        d_etag = get_etag(connection, s3_vars["bucket"], s3_vars["object"])
-    else:
-        # If the bucket doesn't exist we should create it.
-        # only use valid bucket acls for create_bucket function
-        s3_vars["permission"] = s3_vars["bucket_acl"]
-        create_bucket(
-            module, connection, s3_vars["bucket"], s3_vars["location"]
-        )
+    # acls for the copied object may be specified; these were separated
+    # into the variable object_acl above
+    d_etag = get_etag(connection, s3_vars["bucket"], s3_vars["object"])
     if not s3_vars["acl_disabled"]:
         # only use valid object acls for the copy operation
         s3_vars["permission"] = s3_vars["object_acl"]
@@ -1579,12 +1435,6 @@ def populate_facts(module, **variable_dict):
         "bucket-owner-read",
         "bucket-owner-full-control",
     ]
-    variable_dict["bucket_canned_acl"] = [
-        "private",
-        "public-read",
-        "public-read-write",
-        "authenticated-read",
-    ]
 
     if variable_dict["validate_bucket_name"]:
         validate_bucket_name(variable_dict["bucket"])
@@ -1649,29 +1499,23 @@ def populate_facts(module, **variable_dict):
 def validate_bucket(module, s3, var_dict):
-    exists = bucket_check(module, s3, var_dict["bucket"])
+    bucket_check(module, s3, var_dict["bucket"], validate=var_dict["validate"])
 
-    if exists:
-        try:
-            ownership_controls = s3.get_bucket_ownership_controls(
-                Bucket=var_dict["bucket"]
-            )["OwnershipControls"]
-            if ownership_controls.get("Rules"):
-                object_ownership = ownership_controls["Rules"][0][
-                    "ObjectOwnership"
-                ]
-                if object_ownership == "BucketOwnerEnforced":
-                    var_dict["acl_disabled"] = True
-        # if bucket ownership controls are not found
-        except botocore.exceptions.ClientError:
-            pass
+    try:
+        ownership_controls = s3.get_bucket_ownership_controls(
+            Bucket=var_dict["bucket"]
+        )["OwnershipControls"]
+        if ownership_controls.get("Rules"):
+            object_ownership = ownership_controls["Rules"][0][
+                "ObjectOwnership"
+            ]
+            if object_ownership == "BucketOwnerEnforced":
+                var_dict["acl_disabled"] = True
+    # if bucket ownership controls are not found
+    except botocore.exceptions.ClientError:
+        pass
 
     if not var_dict["acl_disabled"]:
-        var_dict["bucket_acl"] = [
-            acl
-            for acl in var_dict.get("permission")
-            if acl in var_dict["bucket_canned_acl"]
-        ]
         var_dict["object_acl"] = [
             acl
             for acl in var_dict.get("permission")
@@ -1680,27 +1524,13 @@ def validate_bucket(module, s3, var_dict):
         error_acl = [
             acl
             for acl in var_dict.get("permission")
-            if (
-                acl not in var_dict["bucket_canned_acl"]
-                and acl not in var_dict["object_canned_acl"]
-            )
+            if acl not in var_dict["object_canned_acl"]
         ]
         if error_acl:
            module.fail_json(
                msg="Unknown permission specified: %s" % error_acl
            )
 
-    var_dict["bucketrtn"] = bucket_check(
-        module, s3, var_dict["bucket"], validate=var_dict["validate"]
-    )
-
-    if (
-        var_dict["validate"]
-        and var_dict["mode"] not in ("create", "put", "delete", "copy")
-        and not var_dict["bucketrtn"]
-    ):
-        module.fail_json(msg="Source bucket cannot be found.")
-
     return var_dict
 
 
@@ -1723,7 +1553,6 @@ def main():
         choices=[
             "get",
             "put",
-            "delete",
             "create",
             "geturl",
             "getstr",
@@ -1763,6 +1592,7 @@ def main():
     required_if = [
         ["ceph", True, ["endpoint_url"]],
         ["mode", "put", ["object"]],
+        ["mode", "create", ["object"]],
         ["mode", "get", ["dest", "object"]],
         ["mode", "getstr", ["object"]],
         ["mode", "geturl", ["object"]],
@@ -1792,7 +1622,6 @@ def main():
         "get": s3_object_do_get,
         "put": s3_object_do_put,
         "delobj": s3_object_do_delobj,
-        "delete": s3_object_do_delete,
         "list": s3_object_do_list,
         "create": s3_object_do_create,
         "geturl": s3_object_do_geturl,
diff --git a/tests/integration/targets/s3_object/tasks/copy_object.yml b/tests/integration/targets/s3_object/tasks/copy_object.yml
index aff38eba1bc..44e306bca30 100644
--- a/tests/integration/targets/s3_object/tasks/copy_object.yml
+++ b/tests/integration/targets/s3_object/tasks/copy_object.yml
@@ -6,9 +6,14 @@
       dst: "{{ bucket_name }}-copydst"
 
   - name: create bucket source
-    s3_object:
-      bucket: "{{ copy_bucket.src }}"
-      mode: create
+    s3_bucket:
+      name: "{{ copy_bucket.src }}"
+      state: present
+
+  - name: create bucket destination
+    s3_bucket:
+      name: "{{ copy_bucket.dst }}"
+      state: present
 
   - name: Create content
     set_fact:
@@ -26,7 +31,9 @@
     retries: 3
     delay: 3
     register: put_result
-    until: "put_result.msg == 'PUT operation complete'"
+    until:
+      - '"not found" not in put_result.msg'
+    ignore_errors: true
 
   - name: Copy the content of the source bucket into dest bucket
     s3_object:
@@ -36,6 +43,12 @@
       copy_src:
         bucket: "{{ copy_bucket.src }}"
         object: source.txt
+    retries: 3
+    delay: 3
+    register: put_result
+    until:
+      - '"not found" not in put_result.msg'
+    ignore_errors: true
 
   - name: Get the content copied into {{ copy_bucket.dst }}
     s3_object:
diff --git a/tests/integration/targets/s3_object/tasks/copy_object_acl_disabled_bucket.yml b/tests/integration/targets/s3_object/tasks/copy_object_acl_disabled_bucket.yml
index fe2e86f9133..4f9bdb6df6f 100644
--- a/tests/integration/targets/s3_object/tasks/copy_object_acl_disabled_bucket.yml
+++ b/tests/integration/targets/s3_object/tasks/copy_object_acl_disabled_bucket.yml
@@ -124,9 +124,3 @@
         name: "{{ bucket_name }}-acl-disabled"
         state: absent
       register: delete_result
-
-    - name: Ensure bucket deletion
-      assert:
-        that:
-          - delete_result is changed
-          - delete_result is not failed
diff --git a/tests/integration/targets/s3_object/tasks/delete_bucket.yml b/tests/integration/targets/s3_object/tasks/delete_bucket.yml
index d285c7a9571..1c6f4892743 100644
--- a/tests/integration/targets/s3_object/tasks/delete_bucket.yml
+++ b/tests/integration/targets/s3_object/tasks/delete_bucket.yml
@@ -19,7 +19,7 @@
     ignore_errors: true
 
   - name: delete the bucket
-    s3_object:
-      bucket: "{{ item }}"
-      mode: delete
+    s3_bucket:
+      name: "{{ item }}"
+      state: absent
     ignore_errors: true
diff --git a/tests/integration/targets/s3_object/tasks/main.yml b/tests/integration/targets/s3_object/tasks/main.yml
index 308aaff30dc..81157ae5ce5 100644
--- a/tests/integration/targets/s3_object/tasks/main.yml
+++ b/tests/integration/targets/s3_object/tasks/main.yml
@@ -31,50 +31,16 @@
       set_fact:
         content: "{{ lookup('password', '/dev/null chars=ascii_letters,digits,hexdigits,punctuation') }}"
 
-    - name: test create bucket without permissions
-      module_defaults: { group/aws: {} }
-      s3_object:
-        bucket: "{{ bucket_name }}"
-        mode: create
-      register: result
-      ignore_errors: true
-
-    - assert:
-        that:
-          - result is failed
-          - "result.msg != 'MODULE FAILURE'"
-
-    - name: test create bucket with an invalid name
-      s3_object:
-        bucket: "{{ bucket_name }}-"
-        mode: create
-      register: result
-      ignore_errors: true
-
-    - assert:
-        that:
-          - result is failed
-
     - name: test create bucket
-      s3_object:
-        bucket: "{{ bucket_name }}"
-        mode: create
+      s3_bucket:
+        name: "{{ bucket_name }}"
+        state: present
      register: result
 
     - assert:
        that:
          - result is changed
 
-    - name: trying to create a bucket name that already exists
-      s3_object:
-        bucket: "{{ bucket_name }}"
-        mode: create
-      register: result
-
-    - assert:
-        that:
-          - result is not changed
-
     - name: Create local upload.txt
       copy:
         content: "{{ content }}"
@@ -582,9 +548,9 @@
         delay: 3
 
     - name: test delete bucket
-      s3_object:
-        bucket: "{{ bucket_name }}"
-        mode: delete
+      s3_bucket:
+        name: "{{ bucket_name }}"
+        state: absent
       register: result
       retries: 3
       delay: 3
@@ -594,36 +560,6 @@
     - assert:
         that:
          - result is changed
 
-    - name: test create a bucket with a dot in the name
-      s3_object:
-        bucket: "{{ bucket_name_with_dot }}"
-        mode: create
-      register: result
-
-    - assert:
-        that:
-          - result is changed
-
-    - name: test delete a bucket with a dot in the name
-      s3_object:
-        bucket: "{{ bucket_name_with_dot }}"
-        mode: delete
-      register: result
-
-    - assert:
-        that:
-          - result is changed
-
-    - name: test delete a nonexistent bucket
-      s3_object:
-        bucket: "{{ bucket_name_with_dot }}"
-        mode: delete
-      register: result
-
-    - assert:
-        that:
-          - result is not changed
-
     - name: make tempfile 4 GB for OSX
       command:
         _raw_params: "dd if=/dev/zero of={{ remote_tmp_dir }}/largefile bs=1m count=4096"
@@ -637,9 +573,9 @@
     - name: test multipart download - platform specific
       block:
        - name: make a bucket to upload the file
-         s3_object:
-           bucket: "{{ bucket_name }}"
-           mode: create
+         s3_bucket:
+           name: "{{ bucket_name }}"
+           state: present
 
        - name: upload the file to the bucket
          s3_object:
diff --git a/tests/sanity/ignore-2.11.txt b/tests/sanity/ignore-2.11.txt
index 52ef952085c..8002f467265 100644
--- a/tests/sanity/ignore-2.11.txt
+++ b/tests/sanity/ignore-2.11.txt
@@ -2,4 +2,3 @@ plugins/modules/ec2_vpc_dhcp_option.py pylint:collection-deprecated-version # ht
 plugins/modules/ec2_vpc_endpoint_info.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1179
 plugins/modules/route53.py validate-modules:parameter-state-invalid-choice # route53_info needs improvements before we can deprecate this
 plugins/modules/route53_health_check.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1111
-plugins/modules/s3_object.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1112
diff --git a/tests/sanity/ignore-2.12.txt b/tests/sanity/ignore-2.12.txt
index 52ef952085c..8002f467265 100644
--- a/tests/sanity/ignore-2.12.txt
+++ b/tests/sanity/ignore-2.12.txt
@@ -2,4 +2,3 @@ plugins/modules/ec2_vpc_dhcp_option.py pylint:collection-deprecated-version # ht
 plugins/modules/ec2_vpc_endpoint_info.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1179
 plugins/modules/route53.py validate-modules:parameter-state-invalid-choice # route53_info needs improvements before we can deprecate this
 plugins/modules/route53_health_check.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1111
-plugins/modules/s3_object.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1112
diff --git a/tests/sanity/ignore-2.13.txt b/tests/sanity/ignore-2.13.txt
index 52ef952085c..8002f467265 100644
--- a/tests/sanity/ignore-2.13.txt
+++ b/tests/sanity/ignore-2.13.txt
@@ -2,4 +2,3 @@ plugins/modules/ec2_vpc_dhcp_option.py pylint:collection-deprecated-version # ht
 plugins/modules/ec2_vpc_endpoint_info.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1179
 plugins/modules/route53.py validate-modules:parameter-state-invalid-choice # route53_info needs improvements before we can deprecate this
 plugins/modules/route53_health_check.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1111
-plugins/modules/s3_object.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1112
diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt
index 52ef952085c..8002f467265 100644
--- a/tests/sanity/ignore-2.14.txt
+++ b/tests/sanity/ignore-2.14.txt
@@ -2,4 +2,3 @@ plugins/modules/ec2_vpc_dhcp_option.py pylint:collection-deprecated-version # ht
 plugins/modules/ec2_vpc_endpoint_info.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1179
 plugins/modules/route53.py validate-modules:parameter-state-invalid-choice # route53_info needs improvements before we can deprecate this
 plugins/modules/route53_health_check.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1111
-plugins/modules/s3_object.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1112
diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt
index 52ef952085c..8002f467265 100644
--- a/tests/sanity/ignore-2.15.txt
+++ b/tests/sanity/ignore-2.15.txt
@@ -2,4 +2,3 @@ plugins/modules/ec2_vpc_dhcp_option.py pylint:collection-deprecated-version # ht
 plugins/modules/ec2_vpc_endpoint_info.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1179
 plugins/modules/route53.py validate-modules:parameter-state-invalid-choice # route53_info needs improvements before we can deprecate this
 plugins/modules/route53_health_check.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1111
-plugins/modules/s3_object.py pylint:collection-deprecated-version # https://github.com/ansible-collections/amazon.aws/issues/1112
diff --git a/tests/unit/plugins/modules/test_s3_object.py b/tests/unit/plugins/modules/test_s3_object.py
index 2d5985d7cb1..1208bc8673d 100644
--- a/tests/unit/plugins/modules/test_s3_object.py
+++ b/tests/unit/plugins/modules/test_s3_object.py
@@ -79,30 +79,6 @@ def test_s3_object_do_delobj_failure_noobj(m_delete_key):
     module.fail_json.assert_called_with(msg="object parameter is required")
 
 
-@patch(module_name + ".delete_bucket")
-def test_s3_object_do_delete_success(m_delete_bucket):
-    module = MagicMock()
-    s3 = MagicMock()
-    var_dict = {"bucket": "a987e6b6026ab04e4717"}
-    s3_object.s3_object_do_delete(module, s3, var_dict)
-    assert m_delete_bucket.call_count == 1
-    module.exit_json.assert_called_with(
-        msg="Bucket a987e6b6026ab04e4717 and all keys have been deleted.",
-        changed=True,
-    )
-
-
-@patch(module_name + ".delete_bucket")
-def test_s3_object_do_delete_failure_nobucket(m_delete_bucket):
-    module = MagicMock()
-    s3 = MagicMock()
-
-    var_dict = {}
-    s3_object.s3_object_do_delete(module, s3, var_dict)
-    assert m_delete_bucket.call_count == 0
-    module.fail_json.assert_called_with(msg="Bucket parameter is required.")
-
-
 @patch(module_name + ".paginated_list")
 @patch(module_name + ".list_keys")
 def test_s3_object_do_list_success(m_paginated_list, m_list_keys):
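
For playbooks that still use the removed modes, the replacement is M(amazon.aws.s3_bucket): whole-bucket lifecycle moves to s3_bucket, while directory keys stay with s3_object (and C(mode=create) now requires I(object), per the ``required_if`` change above). A minimal migration sketch; the bucket name is illustrative:

```yaml
# Hypothetical migration sketch; "my-example-bucket" is an illustrative name.
- name: Create the bucket (was s3_object with mode=create and no object)
  amazon.aws.s3_bucket:
    name: my-example-bucket
    state: present

- name: Create a directory key inside the bucket (mode=create now requires object)
  amazon.aws.s3_object:
    bucket: my-example-bucket
    object: /fragments/  # a trailing slash is appended automatically if omitted
    mode: create

- name: Delete the bucket and all of its keys (was s3_object with mode=delete)
  amazon.aws.s3_bucket:
    name: my-example-bucket
    state: absent
    force: true  # s3_bucket option: delete all keys (and versions) first
```

Unlike the removed ``mode=delete``, s3_bucket only empties a non-empty bucket when I(force=true) is set, so existing playbooks that relied on the implicit key deletion need that option during migration.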