From bf72ed5892b9fa86ec918e8f6a063287c7306113 Mon Sep 17 00:00:00 2001
From: Bikouo Aubin <79859644+abikouo@users.noreply.github.com>
Date: Thu, 21 Mar 2024 12:23:48 +0100
Subject: [PATCH] s3_object - fix issue when copying object with provided
 metadata (#2018)

s3_object - fix issue when copying object with provided metadata

SUMMARY
Fixes #1991

ISSUE TYPE
Bugfix Pull Request

COMPONENT NAME
s3_object

Reviewed-by: Helen Bailey
Reviewed-by: Mandar Kulkarni

(cherry picked from commit 9a159338223026b95d103773c05080bb9f9bc06f)
---
 ...0314-s3_object-copy-mode-with-metadata.yml |  3 +
 plugins/modules/s3_object.py                  |  9 ++-
 tests/integration/targets/s3_object/aliases   |  1 +
 .../targets/s3_object/tasks/copy_object.yml   | 71 ++++++++++++++++++-
 .../targets/s3_object/tasks/main.yml          |  4 +-
 5 files changed, 84 insertions(+), 4 deletions(-)
 create mode 100644 changelogs/fragments/20240314-s3_object-copy-mode-with-metadata.yml

diff --git a/changelogs/fragments/20240314-s3_object-copy-mode-with-metadata.yml b/changelogs/fragments/20240314-s3_object-copy-mode-with-metadata.yml
new file mode 100644
index 00000000000..4f12794474f
--- /dev/null
+++ b/changelogs/fragments/20240314-s3_object-copy-mode-with-metadata.yml
@@ -0,0 +1,3 @@
+---
+bugfixes:
+  - s3_object - Fix the issue when copying an object with overriding metadata. (https://github.com/ansible-collections/amazon.aws/issues/1991).
diff --git a/plugins/modules/s3_object.py b/plugins/modules/s3_object.py
index 2c4ebe9c3c8..2cd897c89b1 100644
--- a/plugins/modules/s3_object.py
+++ b/plugins/modules/s3_object.py
@@ -315,7 +315,9 @@
     object: /my/desired/key.txt
     src: /usr/local/myfile.txt
     mode: put
-    metadata: 'Content-Encoding=gzip,Cache-Control=no-cache'
+    metadata:
+      Content-Encoding: gzip
+      Cache-Control: no-cache
 
 - name: PUT/upload with custom headers
   amazon.aws.s3_object:
@@ -1314,6 +1316,11 @@ def copy_object_to_bucket(module, s3, bucket, obj, encrypt, metadata, validate,
                 metadata,
             )
         )
+        if metadata:
+            # 'MetadataDirective' specifies whether the metadata is copied from the source object or replaced
+            # with the metadata provided in the request. The default value is 'COPY', so when the user provides
+            # metadata it must be set to 'REPLACE'.
+            params.update({"MetadataDirective": "REPLACE"})
         s3.copy_object(aws_retry=True, **params)
         put_object_acl(module, s3, bucket, obj)
         # Tags
diff --git a/tests/integration/targets/s3_object/aliases b/tests/integration/targets/s3_object/aliases
index d34fac48dad..2a1c5ccb6df 100644
--- a/tests/integration/targets/s3_object/aliases
+++ b/tests/integration/targets/s3_object/aliases
@@ -1,3 +1,4 @@
 cloud/aws
 aws_s3
 s3_object_info
+time=12m
diff --git a/tests/integration/targets/s3_object/tasks/copy_object.yml b/tests/integration/targets/s3_object/tasks/copy_object.yml
index 9ae36b9527b..994733d81dd 100644
--- a/tests/integration/targets/s3_object/tasks/copy_object.yml
+++ b/tests/integration/targets/s3_object/tasks/copy_object.yml
@@ -1,5 +1,12 @@
 ---
-- block:
+- vars:
+    withmeta_data:
+      something: exists
+      version: "1.0.2"
+    metacopy_data:
+      name: metacopy
+      version: "1.0.3"
+  block:
     - name: define bucket name used for tests
       ansible.builtin.set_fact:
         copy_bucket:
@@ -142,6 +149,68 @@
           - result is not changed
           - result.msg == "Key this_key_does_not_exist.txt does not exist in bucket "+copy_bucket.src+"."
 
+    # Copy with metadata
+    - name: Set fact for bucket name
+      ansible.builtin.set_fact:
+        bucket_name: "{{ copy_bucket.dst }}"
+
+    - name: Create test bucket
+      amazon.aws.s3_bucket:
+        name: "{{ bucket_name }}"
+        state: present
+
+    - name: Create test object
+      amazon.aws.s3_object:
+        bucket: "{{ bucket_name }}"
+        object: nometa
+        mode: put
+        content: "some content"
+
+    - name: Copy and add metadata
+      amazon.aws.s3_object:
+        bucket: "{{ bucket_name }}"
+        object: metacopy
+        mode: copy
+        copy_src:
+          bucket: "{{ bucket_name }}"
+          object: nometa
+        metadata: "{{ metacopy_data }}"
+
+    - name: Create test object with metadata
+      amazon.aws.s3_object:
+        bucket: "{{ bucket_name }}"
+        object: withmeta
+        mode: put
+        content: "another content"
+        metadata: "{{ withmeta_data }}"
+
+    - name: Copy and preserve metadata
+      amazon.aws.s3_object:
+        bucket: "{{ bucket_name }}"
+        object: copywithmeta
+        mode: copy
+        copy_src:
+          bucket: "{{ bucket_name }}"
+          object: withmeta
+
+    - name: Get objects info
+      amazon.aws.s3_object_info:
+        bucket_name: "{{ bucket_name }}"
+        object_name: "{{ item }}"
+      loop:
+        - nometa
+        - metacopy
+        - withmeta
+        - copywithmeta
+      register: obj_info
+
+    - assert:
+        that:
+          - obj_info.results | selectattr('item', 'equalto', 'nometa') | map(attribute='object_info.0.object_data.metadata') | first == {}
+          - obj_info.results | selectattr('item', 'equalto', 'withmeta') | map(attribute='object_info.0.object_data.metadata') | first == withmeta_data
+          - obj_info.results | selectattr('item', 'equalto', 'metacopy') | map(attribute='object_info.0.object_data.metadata') | first == metacopy_data
+          - obj_info.results | selectattr('item', 'equalto', 'copywithmeta') | map(attribute='object_info.0.object_data.metadata') | first == withmeta_data
+
   always:
     - ansible.builtin.include_tasks: delete_bucket.yml
       with_items:
diff --git a/tests/integration/targets/s3_object/tasks/main.yml b/tests/integration/targets/s3_object/tasks/main.yml
index ed65fe31f16..7a8a585de1f 100644
--- a/tests/integration/targets/s3_object/tasks/main.yml
+++ b/tests/integration/targets/s3_object/tasks/main.yml
@@ -837,8 +837,6 @@
         that:
           - binary_files.results[0].stat.checksum == binary_files.results[1].stat.checksum
 
-    - ansible.builtin.include_tasks: copy_object.yml
-    - ansible.builtin.include_tasks: copy_object_acl_disabled_bucket.yml
     - name: Run tagging tests
       block:
         # ============================================================
@@ -1074,6 +1072,8 @@
            - (result.tags | length) == 0
 
     - ansible.builtin.include_tasks: copy_recursively.yml
+    - ansible.builtin.include_tasks: copy_object.yml
+    - ansible.builtin.include_tasks: copy_object_acl_disabled_bucket.yml
   always:
     - name: delete temporary files
       file:
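
Note on the underlying API behaviour the fix relies on: boto3's copy_object only honours a supplied Metadata mapping when MetadataDirective is set to "REPLACE"; with the default "COPY" directive the destination keeps the source object's metadata and the supplied mapping is silently ignored, which is the symptom reported in #1991. A minimal standalone sketch of that behaviour follows; the bucket and key names are illustrative only and not taken from the patch:

    import boto3

    s3 = boto3.client("s3")

    # Copy "nometa" to "metacopy" while attaching new user metadata.
    # Without MetadataDirective="REPLACE", the Metadata argument below
    # would be ignored and the copy would keep the source's metadata.
    s3.copy_object(
        Bucket="example-bucket",  # illustrative bucket name
        Key="metacopy",
        CopySource={"Bucket": "example-bucket", "Key": "nometa"},
        Metadata={"name": "metacopy", "version": "1.0.3"},
        MetadataDirective="REPLACE",
    )

    # head_object returns the user metadata actually stored on the copy.
    print(s3.head_object(Bucket="example-bucket", Key="metacopy")["Metadata"])

Because the module only sets the directive when the user passes metadata (the new "if metadata:" branch), plain copies continue to preserve the source object's metadata, as exercised by the "Copy and preserve metadata" test case above.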