Skip to content

Commit

Permalink
fix chunk_size calculation by using boto3 S3 Transfer defaults (ansible-collections#273)
Browse files Browse the repository at this point in the history

* fix chunk_size calculation by using boto3 S3 Transport defaults since defaults are used also for the upload function
* implemented some integration tests for s3_sync
* added changelog fragment
  • Loading branch information
GiuseppeChiesa-TomTom authored Nov 3, 2020
1 parent d02886f commit a159cd7
Show file tree
Hide file tree
Showing 8 changed files with 125 additions and 4 deletions.
2 changes: 2 additions & 0 deletions changelogs/fragments/273-fix-s3sync-etag-calculation.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
bugfixes:
- s3_sync - fix chunk_size calculation (https://github.com/ansible-collections/community.aws/issues/272)
7 changes: 3 additions & 4 deletions plugins/modules/s3_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,10 @@

try:
    import botocore
    from boto3.s3.transfer import TransferConfig
    # Match the chunk size boto3 itself uses for multipart uploads, so the
    # locally computed multipart ETag agrees with what S3 reports.
    DEFAULT_CHUNK_SIZE = TransferConfig().multipart_chunksize
except ImportError:
    # boto3/botocore missing: fall back to S3's minimum multipart part size
    # (5 MiB). The missing-dependency error itself is raised later by
    # AnsibleAWSModule, so no action is needed here.
    DEFAULT_CHUNK_SIZE = 5 * 1024 * 1024

from ansible.module_utils._text import to_text
Expand Down Expand Up @@ -270,10 +273,6 @@
#
# You should have received a copy of the GNU General Public License
# along with calculate_multipart_etag. If not, see <http://www.gnu.org/licenses/>.

DEFAULT_CHUNK_SIZE = 5 * 1024 * 1024


def calculate_multipart_etag(source_path, chunk_size=DEFAULT_CHUNK_SIZE):
"""
calculates a multipart upload etag for amazon s3
Expand Down
3 changes: 3 additions & 0 deletions tests/integration/targets/s3_sync/aliases
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
cloud/aws
shippable/aws/group1

1 change: 1 addition & 0 deletions tests/integration/targets/s3_sync/files/test1.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
test1
2 changes: 2 additions & 0 deletions tests/integration/targets/s3_sync/files/test2.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
---
test2: example
3 changes: 3 additions & 0 deletions tests/integration/targets/s3_sync/files/test3.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"test3": "value"
}
3 changes: 3 additions & 0 deletions tests/integration/targets/s3_sync/meta/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
dependencies:
- prepare_tests
- setup_ec2
108 changes: 108 additions & 0 deletions tests/integration/targets/s3_sync/tasks/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
---
# Integration tests for s3_sync: create a bucket, sync fixture files,
# verify idempotency and checksum-based change detection, then exercise
# the documented "all the options" example. The bucket is always removed
# in the `always` section, even on failure.
- name: S3 bucket creation
  collections:
    - amazon.aws
    - community.general
  module_defaults:
    group/aws:
      aws_access_key: '{{ aws_access_key }}'
      aws_secret_key: '{{ aws_secret_key }}'
      security_token: '{{ security_token | default(omit) }}'
      region: '{{ aws_region }}'
  block:
    # ============================================================
    - name: Create simple s3_bucket
      s3_bucket:
        name: "{{ resource_prefix }}-testbucket-ansible"
        state: present
      register: output

    - assert:
        that:
          - output.changed
          # Concatenate inside the expression instead of embedding Jinja2
          # delimiters in the condition (deprecated in Ansible).
          - output.name == resource_prefix + '-testbucket-ansible'
          - not output.requester_pays

    # ============================================================
    - name: Prepare fixtures folder
      file:
        path: "{{ output_dir }}/s3_sync"
        state: directory
        mode: "0755"

    - name: Prepare files to sync
      copy:
        src: "{{ item }}"
        dest: "{{ output_dir }}/s3_sync/{{ item }}"
        mode: preserve
      with_items:
        - test1.txt
        - test2.yml
        - test3.json

    # 10 MiB file: larger than the default multipart chunk size, so the
    # multipart ETag code path is actually exercised.
    - name: Prepare file with size bigger than chunk size
      shell: |
        dd if=/dev/zero of=test4.txt bs=1M count=10
      args:
        chdir: "{{ output_dir }}/s3_sync"

    - name: Sync files with remote bucket
      s3_sync:
        bucket: "{{ resource_prefix }}-testbucket-ansible"
        file_root: "{{ output_dir }}/s3_sync"
      register: output

    - assert:
        that:
          - output is changed

    # ============================================================
    # Re-running the same sync must be a no-op (idempotency).
    - name: Sync files already present
      s3_sync:
        bucket: "{{ resource_prefix }}-testbucket-ansible"
        file_root: "{{ output_dir }}/s3_sync"
      register: output

    - assert:
        that:
          - output is not changed

    # ============================================================
    # checksum strategy compares local multipart ETags against S3's;
    # unchanged files must not be re-uploaded.
    - name: Sync files with etag calculation
      s3_sync:
        bucket: "{{ resource_prefix }}-testbucket-ansible"
        file_root: "{{ output_dir }}/s3_sync"
        file_change_strategy: checksum
      register: output

    - assert:
        that:
          - output is not changed

    # ============================================================
    # DOCUMENTATION EXAMPLES
    # ============================================================
    - name: all the options
      s3_sync:
        bucket: "{{ resource_prefix }}-testbucket-ansible"
        file_root: "{{ output_dir }}/s3_sync"
        mime_map:
          .yml: application/text
          .json: application/text
        key_prefix: config_files/web
        file_change_strategy: force
        permission: public-read
        cache_control: "public, max-age=31536000"
        include: "*"
        exclude: "*.txt,.*"
      register: output

    - assert:
        that:
          - output is changed

  always:
    - name: Ensure all buckets are deleted
      s3_bucket:
        name: "{{ item }}"
        state: absent
        force: true
      ignore_errors: true
      with_items:
        - "{{ resource_prefix }}-testbucket-ansible"

0 comments on commit a159cd7

Please sign in to comment.