s3_object - fix regression related to leading / in object key names (#1549)

s3_object - fix regression related to leading / in object key names

SUMMARY
fixes #1548
S3 object key names should not include a leading / (https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html). However, for historical reasons we have supported them (we simply pruned the leading / out), and this pruning was dropped in 6.0.0 without warning.
For the sake of simplifying things and moving closer to the actual AWS resources, deprecate pruning out the leading / (arbitrarily modifying inputs and resource names tends to lead to strange edge cases).
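In practical terms the fix restores the pre-6.0.0 normalization: a single leading / is stripped from the object parameter before it is passed to the S3 API calls. A minimal sketch of that behaviour, using an illustrative helper name (in the module itself the logic lives in populate_params, shown in the diff below):

def normalize_object_key(key):
    """Drop a single leading '/' from an S3 object key, if present."""
    # Mirrors the restored pruning: "/prefix/file.txt" -> "prefix/file.txt".
    if key.startswith("/"):
        key = key[1:]
    return key

assert normalize_object_key("/prefix/file.txt") == "prefix/file.txt"
assert normalize_object_key("prefix/file.txt") == "prefix/file.txt"

The commented-out deprecation hooks in the diff show how this pruning is intended to be phased out in a release after 2025-12-01.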
ISSUE TYPE

Bugfix Pull Request

COMPONENT NAME
s3_object
ADDITIONAL INFORMATION
Regression of ansible/ansible#30576 / ansible/ansible#30579

Reviewed-by: Alina Buzachis
(cherry picked from commit b53f7d7)
tremble authored and patchback[bot] committed May 16, 2023
1 parent 6698cdd commit 1b302f5
Showing 6 changed files with 94 additions and 22 deletions.
4 changes: 4 additions & 0 deletions changelogs/fragments/1548-s3_object-leading-slash.yml
@@ -0,0 +1,4 @@
bugfixes:
- s3_object - fixes regression related to objects with a leading ``/`` (https://github.com/ansible-collections/amazon.aws/issues/1548).
# deprecated_features:
# - s3_object - support for passing object keys with a leading ``/`` has been deprecated and will be removed in a release after 2025-12-01 (https://github.com/ansible-collections/amazon.aws/pull/1549).
63 changes: 51 additions & 12 deletions plugins/modules/s3_object.py
@@ -89,8 +89,13 @@
type: str
object:
description:
- Keyname of the object inside the bucket.
- Key name of the object inside the bucket.
- Can be used to create "virtual directories", see examples.
- Object key names should not include the leading C(/), see
U(https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html) for more
information.
# - Support for passing the leading C(/) has been deprecated and will be removed
# in a release after 2025-12-01.
type: str
sig_v4:
description:
@@ -399,15 +404,18 @@
- prefix1/key2
"""

import base64
import copy
import io
import mimetypes
import os
import io
from ssl import SSLError
import base64
import time

from ssl import SSLError

try:
# Beware, S3 is a "special" case, it sometimes catches botocore exceptions and
# re-raises them as boto3 exceptions.
import boto3
import botocore
except ImportError:
pass # Handled by AnsibleAWSModule
@@ -457,6 +465,7 @@ def key_check(module, s3, bucket, obj, version=None, validate=True):
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
boto3.exceptions.Boto3Error,
) as e: # pylint: disable=duplicate-except
raise S3ObjectFailure(f"Failed while looking up object (during key check) {obj}.", e)

@@ -525,6 +534,7 @@ def bucket_check(module, s3, bucket, validate=True):
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
boto3.exceptions.Boto3Error,
) as e: # pylint: disable=duplicate-except
raise S3ObjectFailure(
f"Failed while looking up bucket '{bucket}' (during bucket_check).",
@@ -571,6 +581,7 @@ def list_keys(module, s3, bucket, prefix, marker, max_keys):
except (
botocore.exceptions.ClientError,
botocore.exceptions.BotoCoreError,
boto3.exceptions.Boto3Error,
) as e:
raise S3ObjectFailure(f"Failed while listing the keys in the bucket {bucket}", e)

@@ -587,6 +598,7 @@ def delete_key(module, s3, bucket, obj):
except (
botocore.exceptions.ClientError,
botocore.exceptions.BotoCoreError,
boto3.exceptions.Boto3Error,
) as e:
raise S3ObjectFailure(f"Failed while trying to delete {obj}.", e)

@@ -607,6 +619,7 @@ def put_object_acl(module, s3, bucket, obj, params=None):
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
boto3.exceptions.Boto3Error,
) as e: # pylint: disable=duplicate-except
raise S3ObjectFailure(f"Failed while creating object {obj}.", e)

@@ -747,6 +760,7 @@ def upload_s3file(
except (
botocore.exceptions.ClientError,
botocore.exceptions.BotoCoreError,
boto3.exceptions.Boto3Error,
) as e:
raise S3ObjectFailure("Unable to complete PUT operation.", e)

@@ -786,6 +800,7 @@ def download_s3file(module, s3, bucket, obj, dest, retries, version=None):
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
boto3.exceptions.Boto3Error,
) as e: # pylint: disable=duplicate-except
raise S3ObjectFailure(f"Could not find the key {obj}.", e)

@@ -797,6 +812,7 @@ def download_s3file(module, s3, bucket, obj, dest, retries, version=None):
except (
botocore.exceptions.ClientError,
botocore.exceptions.BotoCoreError,
boto3.exceptions.Boto3Error,
) as e:
# actually fail on last pass through the loop.
if x >= retries:
@@ -830,6 +846,7 @@ def download_s3str(module, s3, bucket, obj, version=None):
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
boto3.exceptions.Boto3Error,
) as e: # pylint: disable=duplicate-except
raise S3ObjectFailure(f"Failed while getting contents of object {obj} as a string.", e)

@@ -852,6 +869,7 @@ def get_download_url(module, s3, bucket, obj, expiry, tags=None, changed=True):
except (
botocore.exceptions.ClientError,
botocore.exceptions.BotoCoreError,
boto3.exceptions.Boto3Error,
) as e:
raise S3ObjectFailure("Failed while getting download url.", e)

@@ -867,6 +885,7 @@ def put_download_url(s3, bucket, obj, expiry):
except (
botocore.exceptions.ClientError,
botocore.exceptions.BotoCoreError,
boto3.exceptions.Boto3Error,
) as e:
raise S3ObjectFailure("Unable to generate presigned URL", e)

@@ -937,6 +956,7 @@ def copy_object_to_bucket(module, s3, bucket, obj, encrypt, metadata, validate,
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
boto3.exceptions.Boto3Error,
) as e: # pylint: disable=duplicate-except
raise S3ObjectFailure(
f"Failed while copying object {obj} from bucket {module.params['copy_src'].get('Bucket')}.",
@@ -981,6 +1001,7 @@ def wait_tags_are_applied(module, s3, bucket, obj, expected_tags_dict, version=N
except (
botocore.exceptions.ClientError,
botocore.exceptions.BotoCoreError,
boto3.exceptions.Boto3Error,
) as e:
raise S3ObjectFailure("Failed to get object tags.", e)

@@ -1006,6 +1027,7 @@ def ensure_tags(client, module, bucket, obj):
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
boto3.exceptions.Boto3Error,
) as e: # pylint: disable=duplicate-except
raise S3ObjectFailure("Failed to get object tags.", e)

@@ -1029,6 +1051,7 @@ def ensure_tags(client, module, bucket, obj):
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
boto3.exceptions.Boto3Error,
) as e:
raise S3ObjectFailure("Failed to update object tags.", e)
else:
@@ -1037,6 +1060,7 @@ def ensure_tags(client, module, bucket, obj):
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
boto3.exceptions.Boto3Error,
) as e:
raise S3ObjectFailure("Failed to delete object tags.", e)

@@ -1325,9 +1349,9 @@ def s3_object_do_copy(module, connection, connection_v4, s3_vars):
)


def populate_facts(module, **variable_dict):
for k, v in module.params.items():
variable_dict[k] = v
def populate_params(module):
# Copy the parameters dict, we shouldn't be directly modifying it.
variable_dict = copy.deepcopy(module.params)

if variable_dict["validate_bucket_name"]:
validate_bucket_name(variable_dict["bucket"])
@@ -1347,8 +1371,19 @@ def populate_facts(module, **variable_dict):
variable_dict["overwrite"] = "never"

# Bucket deletion does not require obj. Prevents ambiguity with delobj.
if variable_dict["object"] and variable_dict.get("mode") == "delete":
module.fail_json(msg="Parameter obj cannot be used with mode=delete")
if variable_dict["object"]:
if variable_dict.get("mode") == "delete":
module.fail_json(msg="Parameter object cannot be used with mode=delete")
obj = variable_dict["object"]
# If the object starts with / remove the leading character
if obj.startswith("/"):
obj = obj[1:]
variable_dict["object"] = obj
# module.deprecate(
# "Support for passing object key names with a leading '/' has been deprecated.",
# date="2025-12-01",
# collection_name="amazon.aws",
# )

variable_dict["validate"] = not variable_dict["ignore_nonexistent_bucket"]
variable_dict["acl_disabled"] = False
@@ -1480,10 +1515,14 @@ def main():
try:
s3 = module.client("s3", retry_decorator=retry_decorator, **extra_params)
s3_v4 = module.client("s3", retry_decorator=retry_decorator, **extra_params_v4)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
except (
botocore.exceptions.ClientError,
botocore.exceptions.BotoCoreError,
boto3.exceptions.Boto3Error,
) as e:
module.fail_json_aws(e, msg="Failed to connect to AWS")

s3_object_params = populate_facts(module, **module.params)
s3_object_params = populate_params(module)
s3_object_params.update(validate_bucket(module, s3, s3_object_params))

func_mapping = {
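Most of the remaining hunks in this file widen the exception handling: because S3 sometimes catches botocore exceptions and re-raises them as boto3 exceptions (see the comment added next to the imports above), every call site now also catches boto3.exceptions.Boto3Error. A self-contained sketch of the pattern, using a simplified stand-in for the module's S3ObjectFailure exception and an illustrative helper name:

import boto3
import botocore

class S3ObjectFailure(Exception):
    """Simplified stand-in for the module's S3ObjectFailure exception."""

def head_object_safely(client, bucket, key):
    # Catch both the botocore and the boto3 exception hierarchies, mirroring
    # the pattern applied to every S3 call site in this commit.
    try:
        return client.head_object(Bucket=bucket, Key=key)
    except (
        botocore.exceptions.BotoCoreError,
        botocore.exceptions.ClientError,
        boto3.exceptions.Boto3Error,
    ) as e:
        raise S3ObjectFailure(f"Failed while looking up object {key}.", e)

The hunks that follow come from the collection's integration and unit tests, which exercise object keys both with and without the leading /.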
@@ -98,7 +98,7 @@
- permission_result is changed
- upload_file_result is not failed
- '"PutObjectAcl operation : The bucket does not allow ACLs." in permission_result.warnings'
- '"Virtual directory /test_directory/ created" in permission_result.msg'
- '"Virtual directory test_directory/ created" in permission_result.msg'

always:

5 changes: 2 additions & 3 deletions tests/integration/targets/s3_object/tasks/delete_bucket.yml
@@ -1,9 +1,8 @@
- name: delete bucket at the end of Integration tests
block:
- name: list bucket object
s3_object:
bucket: "{{ item }}"
mode: list
s3_object_info:
bucket_name: "{{ item }}"
register: objects
ignore_errors: true

26 changes: 24 additions & 2 deletions tests/integration/targets/s3_object/tasks/main.yml
@@ -269,6 +269,27 @@
that:
- upload_file.stat.checksum == download_file.stat.checksum

- name: test get object (absolute path)
s3_object:
bucket: "{{ bucket_name }}"
mode: get
dest: "{{ remote_tmp_dir }}/download-2.txt"
object: /delete.txt
retries: 3
delay: 3
register: result
until: "result.msg == 'GET operation complete'"

- name: stat the file so we can compare the checksums
stat:
path: "{{ remote_tmp_dir }}/download-2.txt"
get_checksum: yes
register: download_file

- assert:
that:
- upload_file.stat.checksum == download_file.stat.checksum

- name: test get with overwrite=different and identical files
s3_object:
bucket: "{{ bucket_name }}"
@@ -624,13 +645,14 @@
that:
- result is not changed

# Public objects aren't allowed by default
- name: fail to upload the file to the bucket with an ACL
s3_object:
bucket: "{{ bucket_name_acl }}"
mode: put
src: "{{ tmpdir.path }}/upload.txt"
src: "{{ remote_tmp_dir }}/upload.txt"
object: file-with-permissions.txt
permission: private
permission: public-read
ignore_nonexistent_bucket: True
register: upload_private
ignore_errors: True
16 changes: 12 additions & 4 deletions tests/unit/plugins/modules/test_s3_object.py
@@ -95,7 +95,7 @@ def test_s3_object_do_list_success(m_paginated_list, m_list_keys):


@patch(utils + ".get_aws_connection_info")
def test_populate_facts(m_get_aws_connection_info):
def test_populate_params(m_get_aws_connection_info):
module = MagicMock()
m_get_aws_connection_info.return_value = (
"us-east-1",
@@ -143,10 +143,18 @@ def test_populate_facts(m_get_aws_connection_info):
"validate_certs": True,
"version": None,
}
result = s3_object.populate_facts(module)
result = s3_object.populate_params(module)
for k, v in module.params.items():
assert result[k] == v

module.params.update({"object": "example.txt", "mode": "get"})
result = s3_object.populate_params(module)
assert result["object"] == "example.txt"

module.params.update({"object": "/example.txt", "mode": "get"})
result = s3_object.populate_params(module)
assert result["object"] == "example.txt"

module.params.update({"object": "example.txt", "mode": "delete"})
result = s3_object.populate_facts(module)
module.fail_json.assert_called_with(msg="Parameter obj cannot be used with mode=delete")
result = s3_object.populate_params(module)
module.fail_json.assert_called_with(msg="Parameter object cannot be used with mode=delete")
