aws_s3: add S3 object tagging feature (#335)

Reviewed-by: https://github.com/apps/ansible-zuul
alinabuzachis authored Jun 14, 2021
1 parent 0cdc47b commit 714b3a8
Showing 3 changed files with 385 additions and 19 deletions.
2 changes: 2 additions & 0 deletions changelogs/fragments/335-aws_s3-tagging-object-feature.yaml
@@ -0,0 +1,2 @@
+minor_changes:
+  - aws_s3 - add ``tags`` and ``purge_tags`` features for an S3 object (https://github.com/ansible-collections/amazon.aws/pull/335)
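The fragment above records the new options. As a minimal playbook sketch of the feature (not part of this commit; bucket, key, and tag values are hypothetical):

    - name: Upload an object and tag it
      amazon.aws.aws_s3:
        bucket: my-example-bucket
        object: /reports/summary.csv
        src: /tmp/summary.csv
        mode: put
        tags:
          environment: staging
          owner: data-team

With the default purge_tags=true, the dict above becomes the object's complete tag set.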
156 changes: 137 additions & 19 deletions plugins/modules/aws_s3.py
@@ -155,9 +155,23 @@
     description:
       - KMS key id to use when encrypting objects using I(encryption=aws:kms). Ignored if I(encryption) is not C(aws:kms).
     type: str
+  tags:
+    description:
+      - Tags dict to apply to the S3 object.
+    type: dict
+    version_added: 2.0.0
+  purge_tags:
+    description:
+      - Whether to remove existing tags on the S3 object that are not listed in I(tags).
+      - To remove all tags, set I(tags) to an empty dictionary in conjunction with this.
+    type: bool
+    default: True
+    version_added: 2.0.0
 requirements: [ "boto3", "botocore" ]
 author:
   - "Lester Wade (@lwade)"
   - "Sloane Hertel (@s-hertel)"
+  - "Alina Buzachis (@alinabuzachis)"
 extends_documentation_fragment:
 - amazon.aws.aws
 - amazon.aws.ec2
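Building on the option documentation above, a sketch of additive tagging (again with hypothetical names): disabling purge_tags merges the requested tags into whatever is already on the object, and combining it with overwrite=never updates tags on an existing key without re-uploading the body (see the keyrtn branch near the end of this diff):

    - name: Add a tag to an existing object, keeping its current tags
      amazon.aws.aws_s3:
        bucket: my-example-bucket
        object: /reports/summary.csv
        src: /tmp/summary.csv
        mode: put
        overwrite: never
        tags:
          owner: data-team
        purge_tags: false

Conversely, tags: {} with purge_tags left enabled strips every tag from the object.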
@@ -302,6 +316,7 @@
 import io
 from ssl import SSLError
 import base64
+import time
 
 try:
     import botocore
@@ -318,6 +333,8 @@
 from ..module_utils.ec2 import AWSRetry
 from ..module_utils.ec2 import boto3_conn
 from ..module_utils.ec2 import get_aws_connection_info
+from ..module_utils.ec2 import ansible_dict_to_boto3_tag_list
+from ..module_utils.ec2 import boto3_tag_list_to_ansible_dict
 from ..module_utils.s3 import HAS_MD5
 from ..module_utils.s3 import calculate_etag
 from ..module_utils.s3 import calculate_etag_content
@@ -468,7 +485,7 @@ def delete_key(module, s3, bucket, obj):
module.fail_json_aws(e, msg="Failed while trying to delete %s." % obj)


def create_dirkey(module, s3, bucket, obj, encrypt):
def create_dirkey(module, s3, bucket, obj, encrypt, expiry):
if module.check_mode:
module.exit_json(msg="PUT operation skipped - running in check mode", changed=True)
try:
@@ -485,7 +502,20 @@ def create_dirkey(module, s3, bucket, obj, encrypt):
module.warn("PutObjectAcl is not implemented by your storage provider. Set the permissions parameters to the empty list to avoid this warning")
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e: # pylint: disable=duplicate-except
module.fail_json_aws(e, msg="Failed while creating object %s." % obj)
module.exit_json(msg="Virtual directory %s created in bucket %s" % (obj, bucket), changed=True)

# Tags
tags, changed = ensure_tags(s3, module, bucket, obj)

try:
url = s3.generate_presigned_url(ClientMethod='put_object',
Params={'Bucket': bucket, 'Key': obj},
ExpiresIn=expiry)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Unable to generate presigned URL")

url = put_download_url(module, s3, bucket, obj, expiry)

module.exit_json(msg="Virtual directory %s created in bucket %s" % (obj, bucket), url=url, tags=tags, changed=True)


def path_check(path):
@@ -560,13 +590,13 @@ def upload_s3file(module, s3, bucket, obj, expiry, metadata, encrypt, headers, s
module.warn("PutObjectAcl is not implemented by your storage provider. Set the permission parameters to the empty list to avoid this warning")
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e: # pylint: disable=duplicate-except
module.fail_json_aws(e, msg="Unable to set object ACL")
try:
url = s3.generate_presigned_url(ClientMethod='put_object',
Params={'Bucket': bucket, 'Key': obj},
ExpiresIn=expiry)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Unable to generate presigned URL")
module.exit_json(msg="PUT operation complete", url=url, changed=True)

# Tags
tags, changed = ensure_tags(s3, module, bucket, obj)

url = put_download_url(module, s3, bucket, obj, expiry)

module.exit_json(msg="PUT operation complete", url=url, tags=tags, changed=True)


def download_s3file(module, s3, bucket, obj, dest, retries, version=None):
@@ -624,16 +654,26 @@ def download_s3str(module, s3, bucket, obj, version=None, validate=True):
         module.fail_json_aws(e, msg="Failed while getting contents of object %s as a string." % obj)
 
 
-def get_download_url(module, s3, bucket, obj, expiry, changed=True):
+def get_download_url(module, s3, bucket, obj, expiry, tags=None, changed=True):
     try:
         url = s3.generate_presigned_url(ClientMethod='get_object',
                                         Params={'Bucket': bucket, 'Key': obj},
                                         ExpiresIn=expiry)
-        module.exit_json(msg="Download url:", url=url, expiry=expiry, changed=changed)
+        module.exit_json(msg="Download url:", url=url, tags=tags, expiry=expiry, changed=changed)
     except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
         module.fail_json_aws(e, msg="Failed while getting download url.")
 
 
+def put_download_url(module, s3, bucket, obj, expiry):
+    try:
+        url = s3.generate_presigned_url(ClientMethod='put_object',
+                                        Params={'Bucket': bucket, 'Key': obj},
+                                        ExpiresIn=expiry)
+    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
+        module.fail_json_aws(e, msg="Unable to generate presigned URL")
+    return url
+
+
 def is_fakes3(s3_url):
     """ Return True if s3_url has scheme fakes3:// """
     if s3_url is not None:
@@ -675,6 +715,80 @@ def get_s3_connection(module, aws_connect_kwargs, location, rgw, s3_url, sig_4=F
     return boto3_conn(**params)
 
 
+def get_current_object_tags_dict(s3, bucket, obj, version=None):
+    try:
+        if version:
+            current_tags = s3.get_object_tagging(Bucket=bucket, Key=obj, VersionId=version).get('TagSet')
+        else:
+            current_tags = s3.get_object_tagging(Bucket=bucket, Key=obj).get('TagSet')
+    except is_boto3_error_code('NoSuchTagSet'):
+        return {}
+    except is_boto3_error_code('NoSuchTagSetError'):  # pylint: disable=duplicate-except
+        return {}
+
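+    # boto3 returns the TagSet as a list of {'Key': ..., 'Value': ...} pairs; flatten it to a plain dict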
+    return boto3_tag_list_to_ansible_dict(current_tags)
+
+
+@AWSRetry.jittered_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket', 'OperationAborted'])
+def put_object_tagging(s3, bucket, obj, tags):
+    s3.put_object_tagging(Bucket=bucket, Key=obj, Tagging={'TagSet': ansible_dict_to_boto3_tag_list(tags)})
+
+
+@AWSRetry.jittered_backoff(max_delay=120, catch_extra_error_codes=['NoSuchBucket', 'OperationAborted'])
+def delete_object_tagging(s3, bucket, obj):
+    s3.delete_object_tagging(Bucket=bucket, Key=obj)
+
+
+def wait_tags_are_applied(module, s3, bucket, obj, expected_tags_dict, version=None):
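+    # Poll up to 12 times, 5 seconds apart (roughly a minute), until the live tags match the expected set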
+    for dummy in range(0, 12):
+        try:
+            current_tags_dict = get_current_object_tags_dict(s3, bucket, obj, version)
+        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
+            module.fail_json_aws(e, msg="Failed to get object tags.")
+        if current_tags_dict != expected_tags_dict:
+            time.sleep(5)
+        else:
+            return current_tags_dict
+
+    module.fail_json(msg="Object tags failed to apply in the expected time.",
+                     requested_tags=expected_tags_dict, live_tags=current_tags_dict)
+
+
+def ensure_tags(client, module, bucket, obj):
+    tags = module.params.get("tags")
+    purge_tags = module.params.get("purge_tags")
+    changed = False
+
+    try:
+        current_tags_dict = get_current_object_tags_dict(client, bucket, obj)
+    except is_boto3_error_code(IGNORE_S3_DROP_IN_EXCEPTIONS):
+        module.warn("GetObjectTagging is not implemented by your storage provider. Set the permission parameters to the empty list to avoid this warning.")
+    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:  # pylint: disable=duplicate-except
+        module.fail_json_aws(e, msg="Failed to get object tags.")
+    else:
+        if tags is not None:
+            if not purge_tags:
+                # Ensure existing tags that aren't updated by desired tags remain
+                current_copy = current_tags_dict.copy()
+                current_copy.update(tags)
+                tags = current_copy
+            if current_tags_dict != tags:
+                if tags:
+                    try:
+                        put_object_tagging(client, bucket, obj, tags)
+                    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
+                        module.fail_json_aws(e, msg="Failed to update object tags.")
+                else:
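+                    # tags={} with purge_tags enabled: remove every tag from the object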
+                    if purge_tags:
+                        try:
+                            delete_object_tagging(client, bucket, obj)
+                        except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
+                            module.fail_json_aws(e, msg="Failed to delete object tags.")
+                current_tags_dict = wait_tags_are_applied(module, client, bucket, obj, tags)
+                changed = True
+    return current_tags_dict, changed
+
+
 def main():
     argument_spec = dict(
         bucket=dict(required=True),
@@ -700,7 +814,9 @@ def main():
         content=dict(),
         content_base64=dict(),
         ignore_nonexistent_bucket=dict(default=False, type='bool'),
-        encryption_kms_key_id=dict()
+        encryption_kms_key_id=dict(),
+        tags=dict(type='dict'),
+        purge_tags=dict(type='bool', default=True),
     )
     module = AnsibleAWSModule(
         argument_spec=argument_spec,
@@ -831,9 +947,9 @@ def main():
         # these were separated into the variables bucket_acl and object_acl above
 
         if content is None and content_base64 is None and src is None:
-            module.fail_json('Either content, content_base64 or src must be specified for PUT operations')
+            module.fail_json(msg='Either content, content_base64 or src must be specified for PUT operations')
         if src is not None and not path_check(src):
-            module.fail_json('Local object "%s" does not exist for PUT operation' % (src))
+            module.fail_json(msg='Local object "%s" does not exist for PUT operation' % (src))
 
         keyrtn = None
         if bucketrtn:
@@ -853,8 +969,9 @@

         if keyrtn and overwrite != 'always':
             if overwrite == 'never' or etag_compare(module, s3, bucket, obj, version=version, local_file=src, content=bincontent):
-                # Return the download URL for the existing object
-                get_download_url(module, s3, bucket, obj, expiry, changed=False)
+                # Return the download URL for the existing object and ensure tags are updated
+                tags, tags_update = ensure_tags(s3, module, bucket, obj)
+                get_download_url(module, s3, bucket, obj, expiry, tags, changed=tags_update)
 
         # only use valid object acls for the upload_s3file function
         module.params['permission'] = object_acl
@@ -915,14 +1032,14 @@
             else:
                 # setting valid object acls for the create_dirkey function
                 module.params['permission'] = object_acl
-                create_dirkey(module, s3, bucket, dirobj, encrypt)
+                create_dirkey(module, s3, bucket, dirobj, encrypt, expiry)
         else:
             # only use valid bucket acls for the create_bucket function
             module.params['permission'] = bucket_acl
             created = create_bucket(module, s3, bucket, location)
             # only use valid object acls for the create_dirkey function
             module.params['permission'] = object_acl
-            create_dirkey(module, s3, bucket, dirobj, encrypt)
+            create_dirkey(module, s3, bucket, dirobj, encrypt, expiry)
 
     # Support for grabbing the time-expired URL for an object in S3/Walrus.
     if mode == 'geturl':
@@ -931,7 +1048,8 @@

         keyrtn = key_check(module, s3, bucket, obj, version=version, validate=validate)
         if keyrtn:
-            get_download_url(module, s3, bucket, obj, expiry)
+            tags = get_current_object_tags_dict(s3, bucket, obj, version=version)
+            get_download_url(module, s3, bucket, obj, expiry, tags)
         else:
             module.fail_json(msg="Key %s does not exist." % obj)
