aws_s3 copy to object (#359)
``aws_s3`` copy to object

SUMMARY

Add an option to the aws_s3 module to copy an object that is already stored on Amazon S3.

Closes: #42
ISSUE TYPE


Feature Pull Request

COMPONENT NAME

aws_s3
ADDITIONAL INFORMATION



- name: copy from source to destination
  aws_s3:
      bucket: "{{ dest }}"
      mode: copy
      object: destination.txt
      copy_src:
        bucket: "{{ src }}"
        object: source.txt
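
The copy_src dictionary also takes an optional version_id, and encrypt/encryption_mode behave as they do for put. A minimal sketch combining both (the version ID below is only a placeholder):

- name: copy a specific version from source to destination, encrypted at rest
  aws_s3:
      bucket: "{{ dest }}"
      mode: copy
      object: destination.txt
      encrypt: true
      encryption_mode: AES256
      copy_src:
        bucket: "{{ src }}"
        object: source.txt
        version_id: "EXAMPLE-VERSION-ID"  # placeholder, not a real version ID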

Reviewed-by: Alina Buzachis <None>
Reviewed-by: Jill R <None>
Reviewed-by: Gonéri Le Bouder <[email protected]>
abikouo authored Jul 8, 2021
1 parent 922514a commit 3591ca2
Showing 6 changed files with 260 additions and 60 deletions.
2 changes: 2 additions & 0 deletions changelogs/fragments/359-aws_s3-add-copy-mode.yml
@@ -0,0 +1,2 @@
minor_changes:
- aws_s3 - new mode to copy an object that is already stored in another bucket (https://github.com/ansible-collections/amazon.aws/pull/359).
111 changes: 102 additions & 9 deletions plugins/modules/aws_s3.py
@@ -13,7 +13,8 @@
short_description: manage objects in S3.
description:
    - This module allows the user to manage S3 buckets and the objects within them. Includes support for creating and
      deleting both objects and buckets, retrieving objects as files or strings and generating download links.
      deleting both objects and buckets, retrieving objects as files or strings, generating download links and
      copying an object that is already stored in Amazon S3.
options:
  bucket:
    description:
@@ -26,7 +27,7 @@
    type: path
  encrypt:
    description:
      - When set for PUT mode, asks for server-side encryption.
      - When set for PUT/COPY mode, asks for server-side encryption.
    default: true
    type: bool
  encryption_mode:
@@ -58,15 +59,15 @@
    type: int
  metadata:
    description:
      - Metadata for PUT operation, as a dictionary of C(key=value) and C(key=value,key=value).
      - Metadata for PUT/COPY operation, as a dictionary of C(key=value) and C(key=value,key=value).
    type: dict
  mode:
    description:
      - Switches the module behaviour between C(put) (upload), C(get) (download), C(geturl) (return download url, Ansible 1.3+),
        C(getstr) (download object as string (1.3+)), C(list) (list keys, Ansible 2.0+), C(create) (bucket), C(delete) (bucket),
        and delobj (delete object, Ansible 2.0+).
        C(delobj) (delete object, Ansible 2.0+) and C(copy) (copy an object that is already stored in another bucket).
    required: true
    choices: ['get', 'put', 'delete', 'create', 'geturl', 'getstr', 'delobj', 'list']
    choices: ['get', 'put', 'delete', 'create', 'geturl', 'getstr', 'delobj', 'list', 'copy']
    type: str
  object:
    description:
@@ -167,7 +168,27 @@
    type: bool
    default: True
    version_added: 2.0.0
requirements: [ "boto3", "botocore" ]
  copy_src:
    description:
      - The source details of the object to copy.
      - Required if I(mode) is C(copy).
    type: dict
    version_added: 2.0.0
    suboptions:
      bucket:
        type: str
        description:
          - The name of the source bucket.
        required: true
      object:
        type: str
        description:
          - Key name of the source object.
        required: true
      version_id:
        type: str
        description:
          - Version ID of the source object.
author:
  - "Lester Wade (@lwade)"
  - "Sloane Hertel (@s-hertel)"
@@ -277,6 +298,15 @@
    bucket: mybucket
    object: /my/desired/key.txt
    mode: delobj

- name: Copy an object already stored in another bucket
  amazon.aws.aws_s3:
    bucket: mybucket
    object: /my/desired/key.txt
    mode: copy
    copy_src:
      bucket: srcbucket
      object: /source/key.txt
'''

RETURN = '''
@@ -674,6 +704,52 @@ def put_download_url(module, s3, bucket, obj, expiry):
    return url


def copy_object_to_bucket(module, s3, bucket, obj, encrypt, metadata, validate, d_etag):
    if module.check_mode:
        module.exit_json(msg="COPY operation skipped - running in check mode", changed=True)
    try:
        params = {'Bucket': bucket, 'Key': obj}
        bucketsrc = {'Bucket': module.params['copy_src'].get('bucket'), 'Key': module.params['copy_src'].get('object')}
        version = None
        if module.params['copy_src'].get('version_id') is not None:
            version = module.params['copy_src'].get('version_id')
            bucketsrc.update({'VersionId': version})
        keyrtn = key_check(module, s3, bucketsrc['Bucket'], bucketsrc['Key'], version=version, validate=validate)
        if keyrtn:
            s_etag = get_etag(s3, bucketsrc['Bucket'], bucketsrc['Key'], version=version)
            if s_etag == d_etag:
                # Tags
                tags, changed = ensure_tags(s3, module, bucket, obj)
                if not changed:
                    module.exit_json(msg="ETag from source and destination are the same", changed=False)
            else:
                params.update({'CopySource': bucketsrc})
                if encrypt:
                    params['ServerSideEncryption'] = module.params['encryption_mode']
                if module.params['encryption_kms_key_id'] and module.params['encryption_mode'] == 'aws:kms':
                    params['SSEKMSKeyId'] = module.params['encryption_kms_key_id']
                if metadata:
                    params['Metadata'] = {}
                    # determine object metadata and extra arguments
                    for option in metadata:
                        extra_args_option = option_in_extra_args(option)
                        if extra_args_option is not None:
                            params[extra_args_option] = metadata[option]
                        else:
                            params['Metadata'][option] = metadata[option]

                copy_result = s3.copy_object(**params)
                for acl in module.params.get('permission'):
                    s3.put_object_acl(ACL=acl, Bucket=bucket, Key=obj)
                # Tags
                tags, changed = ensure_tags(s3, module, bucket, obj)
        else:
            # fail early: otherwise the exit_json below would reference an undefined 'tags'
            module.fail_json(msg="Key %s does not exist in bucket %s." % (bucketsrc['Key'], bucketsrc['Bucket']))
    except is_boto3_error_code(IGNORE_S3_DROP_IN_EXCEPTIONS):
        module.warn("PutObjectAcl is not implemented by your storage provider. Set the permissions parameters to the empty list to avoid this warning")
    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(e, msg="Failed while copying object %s from bucket %s." % (obj, module.params['copy_src'].get('bucket')))
    module.exit_json(msg="Object copied from bucket %s to bucket %s." % (bucketsrc['Bucket'], bucket), tags=tags, changed=True)


def is_fakes3(s3_url):
    """ Return True if s3_url has scheme fakes3:// """
    if s3_url is not None:
@@ -800,7 +876,7 @@ def main():
        marker=dict(default=""),
        max_keys=dict(default=1000, type='int', no_log=False),
        metadata=dict(type='dict'),
        mode=dict(choices=['get', 'put', 'delete', 'create', 'geturl', 'getstr', 'delobj', 'list'], required=True),
        mode=dict(choices=['get', 'put', 'delete', 'create', 'geturl', 'getstr', 'delobj', 'list', 'copy'], required=True),
        object=dict(),
        permission=dict(type='list', elements='str', default=['private']),
        version=dict(default=None),
@@ -817,14 +893,16 @@
        encryption_kms_key_id=dict(),
        tags=dict(type='dict'),
        purge_tags=dict(type='bool', default=True),
        copy_src=dict(type='dict', options=dict(bucket=dict(required=True), object=dict(required=True), version_id=dict())),
    )
    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[['mode', 'put', ['object']],
                     ['mode', 'get', ['dest', 'object']],
                     ['mode', 'getstr', ['object']],
                     ['mode', 'geturl', ['object']]],
                     ['mode', 'geturl', ['object']],
                     ['mode', 'copy', ['copy_src']]],
        mutually_exclusive=[['content', 'content_base64', 'src']],
    )

@@ -918,7 +996,7 @@ def main():
    # First, we check to see if the bucket exists, we get "bucket" returned.
    bucketrtn = bucket_check(module, s3, bucket, validate=validate)

    if validate and mode not in ('create', 'put', 'delete', 'copy') and not bucketrtn:
        module.fail_json(msg="Source bucket cannot be found.")

    if mode == 'get':
@@ -1067,6 +1145,21 @@
        else:
            module.fail_json(msg="Key %s does not exist." % obj)

    if mode == 'copy':
        # if copying an object in a bucket yet to be created, acls for the bucket and/or the object may be specified
        # these were separated into the variables bucket_acl and object_acl above
        d_etag = None
        if bucketrtn:
            d_etag = get_etag(s3, bucket, obj)
        else:
            # If the bucket doesn't exist we should create it.
            # only use valid bucket acls for create_bucket function
            module.params['permission'] = bucket_acl
            create_bucket(module, s3, bucket, location)
        # only use valid object acls for the copy operation
        module.params['permission'] = object_acl
        copy_object_to_bucket(module, s3, bucket, obj, encrypt, metadata, validate, d_etag)

    module.exit_json(failed=False)


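As the new copy branch in main() above shows, the destination bucket does not have to exist: it is created first with the bucket-valid ACLs split out of the permission option, and the object-valid ACLs are then applied to the copied object. A hedged sketch of that flow (bucket names are placeholders):

- name: copy into a destination bucket that the module creates on the fly
  amazon.aws.aws_s3:
    bucket: my-new-dest-bucket        # placeholder; created if absent
    object: destination.txt
    mode: copy
    permission: ['private']           # valid for both bucket creation and the object copy
    copy_src:
      bucket: srcbucket
      object: source.txt
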
1 change: 1 addition & 0 deletions tests/integration/targets/aws_s3/defaults/main.yml
@@ -2,3 +2,4 @@
# defaults file for s3
bucket_name: '{{ resource_prefix | hash("md5") }}'
bucket_name_acl: "{{ bucket_name + '-with-acl' }}"
bucket_name_with_dot: "{{ bucket_name + '.bucket' }}"
119 changes: 119 additions & 0 deletions tests/integration/targets/aws_s3/tasks/copy_object.yml
@@ -0,0 +1,119 @@
- block:
- name: define bucket name used for tests
set_fact:
copy_bucket:
src: "{{ bucket_name }}-copysrc"
dst: "{{ bucket_name }}-copydst"

- name: create bucket source
aws_s3:
bucket: "{{ copy_bucket.src }}"
mode: create

- name: Create content
set_fact:
content: "{{ lookup('password', '/dev/null chars=ascii_letters,digits,hexdigits,punctuation') }}"

    - name: Put content in the source bucket
aws_s3:
bucket: "{{ copy_bucket.src }}"
mode: put
content: "{{ content }}"
object: source.txt
tags:
ansible_release: '2.0.0'
ansible_team: cloud
retries: 3
delay: 3
register: put_result
until: "put_result.msg == 'PUT operation complete'"

- name: Copy the content of the source bucket into dest bucket
aws_s3:
bucket: "{{ copy_bucket.dst }}"
mode: copy
object: destination.txt
copy_src:
bucket: "{{ copy_bucket.src }}"
object: source.txt

- name: Get the content copied into {{ copy_bucket.dst }}
aws_s3:
bucket: "{{ copy_bucket.dst }}"
mode: getstr
object: destination.txt
register: copy_content

    - name: assert that the content matches the source
assert:
that:
- content == copy_content.contents

- name: Get the download url for object copied into {{ copy_bucket.dst }}
aws_s3:
bucket: "{{ copy_bucket.dst }}"
mode: geturl
object: destination.txt
register: copy_url

- name: assert that tags are the same in the destination bucket
assert:
that:
- put_result.tags == copy_url.tags

- name: Copy the same content from the source bucket into dest bucket (idempotency)
aws_s3:
bucket: "{{ copy_bucket.dst }}"
mode: copy
object: destination.txt
copy_src:
bucket: "{{ copy_bucket.src }}"
object: source.txt
register: copy_idempotency

- name: assert that no change was made
assert:
that:
- copy_idempotency is not changed
- "copy_idempotency.msg == 'ETag from source and destination are the same'"

- name: Copy object with tags
aws_s3:
bucket: "{{ copy_bucket.dst }}"
mode: copy
object: destination.txt
tags:
ansible_release: "2.0.1"
copy_src:
bucket: "{{ copy_bucket.src }}"
object: source.txt
register: copy_result

- name: assert that tags were updated
assert:
that:
- copy_result is changed
- copy_result.tags['ansible_release'] == '2.0.1'

- name: Copy object with tags (idempotency)
aws_s3:
bucket: "{{ copy_bucket.dst }}"
mode: copy
object: destination.txt
tags:
ansible_release: "2.0.1"
copy_src:
bucket: "{{ copy_bucket.src }}"
object: source.txt
register: copy_result

- name: assert that no change was made
assert:
that:
- copy_result is not changed

always:
- include_tasks: delete_bucket.yml
with_items:
- "{{ copy_bucket.dst }}"
- "{{ copy_bucket.src }}"
24 changes: 24 additions & 0 deletions tests/integration/targets/aws_s3/tasks/delete_bucket.yml
@@ -0,0 +1,24 @@
- name: delete the bucket at the end of the integration tests
block:
    - name: list bucket objects
aws_s3:
bucket: "{{ item }}"
mode: list
register: objects
ignore_errors: true

- name: remove objects from bucket
aws_s3:
bucket: "{{ item }}"
mode: delobj
object: "{{ obj }}"
with_items: "{{ objects.s3_keys }}"
loop_control:
loop_var: obj
ignore_errors: true

- name: delete the bucket
aws_s3:
bucket: "{{ item }}"
mode: delete
      ignore_errors: true