From f01b0310c3410b6a7b9832a8e2a0435127e20152 Mon Sep 17 00:00:00 2001 From: Mark Chappell Date: Sat, 9 Jul 2022 22:13:36 +0200 Subject: [PATCH] s3_sync - Improve error handling when testing for existing files (#1330) s3_sync - Improve error handling when testing for existing files SUMMARY fixes: #58 Simplifies handling of '404' codes (use is_boto3_error_code) Assumes 403 files need updating (it's the best we can do, and mimics aws cli) Allows Boto3 exceptions to fall through to the outer try/except clause and cleanly fail rather than rethrowing them as an Exception() ISSUE TYPE Feature Pull Request COMPONENT NAME s3_sync ADDITIONAL INFORMATION Reviewed-by: Joseph Torcasso (cherry picked from commit 40984a403c150b31cd58236028ae0d0945c161f0) --- changelogs/fragments/58-s3_sync.yml | 2 ++ plugins/modules/s3_sync.py | 14 +++++-------- .../targets/s3_sync/tasks/main.yml | 20 +++++++++---------- 3 files changed, 17 insertions(+), 19 deletions(-) create mode 100644 changelogs/fragments/58-s3_sync.yml diff --git a/changelogs/fragments/58-s3_sync.yml b/changelogs/fragments/58-s3_sync.yml new file mode 100644 index 00000000000..892dd23a411 --- /dev/null +++ b/changelogs/fragments/58-s3_sync.yml @@ -0,0 +1,2 @@ +minor_changes: +- s3_sync - improves error handling during ``HEAD`` operation to compare existing files (https://github.com/ansible-collections/community.aws/issues/58). 
diff --git a/plugins/modules/s3_sync.py b/plugins/modules/s3_sync.py index 75c653f5712..78177416fcd 100644 --- a/plugins/modules/s3_sync.py +++ b/plugins/modules/s3_sync.py @@ -269,6 +269,7 @@ # import module snippets from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule +from ansible_collections.amazon.aws.plugins.module_utils.core import is_boto3_error_code # the following function, calculate_multipart_etag, is from tlastowka @@ -431,17 +432,12 @@ def head_s3(s3, bucket, s3keys): retkeys = [] for entry in s3keys: retentry = entry.copy() - # don't modify the input dict try: retentry['s3_head'] = s3.head_object(Bucket=bucket, Key=entry['s3_path']) - except botocore.exceptions.ClientError as err: - if (hasattr(err, 'response') and - 'ResponseMetadata' in err.response and - 'HTTPStatusCode' in err.response['ResponseMetadata'] and - str(err.response['ResponseMetadata']['HTTPStatusCode']) == '404'): - pass - else: - raise Exception(err) + # 404 (Missing) - File doesn't exist, we'll need to upload + # 403 (Denied) - Sometimes we can write but not read, assume we'll need to upload + except is_boto3_error_code(['404', '403']): + pass retkeys.append(retentry) return retkeys diff --git a/tests/integration/targets/s3_sync/tasks/main.yml b/tests/integration/targets/s3_sync/tasks/main.yml index 667a5b34c4b..4587e6fbec0 100644 --- a/tests/integration/targets/s3_sync/tasks/main.yml +++ b/tests/integration/targets/s3_sync/tasks/main.yml @@ -44,7 +44,7 @@ dd if=/dev/zero of=test4.txt bs=1M count=10 args: chdir: "{{ output_dir }}/s3_sync" - + - name: Sync files with remote bucket s3_sync: bucket: "{{ test_bucket }}" @@ -66,7 +66,7 @@ - output.changed - output.name == "{{ test_bucket_2 }}" - not output.requester_pays - + - name: Sync files with remote bucket using glacier storage class s3_sync: bucket: "{{ test_bucket_2 }}" @@ -77,7 +77,7 @@ - assert: that: - output is changed - + # ============================================================ - name: 
Sync files already present @@ -99,7 +99,7 @@ - assert: that: - output is not changed - + # ============================================================ - name: Create a third s3_bucket s3_bucket: @@ -112,7 +112,7 @@ - output.changed - output.name == "{{ test_bucket_3 }}" - not output.requester_pays - + - name: Sync individual file with remote bucket s3_sync: bucket: "{{ test_bucket_3 }}" @@ -162,9 +162,9 @@ object: "{{ obj }}" with_items: "{{ objects.s3_keys }}" loop_control: - loop_var: obj + loop_var: obj ignore_errors: true - + - name: list test_bucket_2 objects aws_s3: bucket: "{{ test_bucket_2 }}" @@ -179,9 +179,9 @@ object: "{{ obj }}" with_items: "{{ objects.s3_keys }}" loop_control: - loop_var: obj + loop_var: obj ignore_errors: true - + - name: list test_bucket_3 objects aws_s3: bucket: "{{ test_bucket_3 }}" @@ -196,7 +196,7 @@ object: "{{ obj }}" with_items: "{{ objects.s3_keys }}" loop_control: - loop_var: obj + loop_var: obj ignore_errors: true - name: Ensure all buckets are deleted