Expose --metadata-directive for s3 commands #1188

Merged: 3 commits, Mar 5, 2015

8 changes: 8 additions & 0 deletions CHANGELOG.rst
@@ -2,6 +2,14 @@
CHANGELOG
=========

Next Release (TBD)
==================

* feature:``aws s3``: Add support for ``--metadata-directive`` that allows
metadata to be copied or replaced for single part copies.
  (`issue 1188 <https://github.com/aws/aws-cli/pull/1188>`__)


1.7.13
======

6 changes: 6 additions & 0 deletions awscli/customizations/s3/fileinfo.py
@@ -243,6 +243,11 @@ def _handle_object_params(self, params):
if self.parameters['expires']:
params['expires'] = self.parameters['expires'][0]

def _handle_metadata_directive(self, params):
if self.parameters['metadata_directive']:
params['metadata_directive'] = \
self.parameters['metadata_directive'][0]

def upload(self, payload=None):
"""
Redirects the file to the multipart upload function if the file is
@@ -294,6 +299,7 @@ def copy(self):
params = {'endpoint': self.endpoint, 'bucket': bucket,
'copy_source': copy_source, 'key': key}
self._handle_object_params(params)
self._handle_metadata_directive(params)
response_data, http = operate(self.service, 'CopyObject', params)

def delete(self):
2 changes: 1 addition & 1 deletion awscli/customizations/s3/s3handler.py
@@ -63,7 +63,7 @@ def __init__(self, session, params, result_queue=None,
'content_language': None, 'expires': None,
'grants': None, 'only_show_errors': False,
'is_stream': False, 'paths_type': None,
'expected_size': None}
'expected_size': None, 'metadata_directive': None}
self.params['region'] = params['region']
for key in self.params.keys():
if key in params:
27 changes: 25 additions & 2 deletions awscli/customizations/s3/subcommands.py
@@ -216,6 +216,28 @@
}


METADATA_DIRECTIVE = {
'name': 'metadata-directive', 'nargs': 1, 'choices': ['COPY', 'REPLACE'],
'help_text': (
'Specifies whether the metadata is copied from the source object '
'or replaced with metadata provided when copying S3 objects. '
'Note that if the object is copied over in parts, the source '
'object\'s metadata will not be copied over, no matter the value for '
'``--metadata-directive``, and instead the desired metadata values '
'must be specified as parameters on the command line. '
'Valid values are ``COPY`` and ``REPLACE``. If this parameter is not '
'specified, ``COPY`` will be used by default. If ``REPLACE`` is used, '
'the copied object will only have the metadata values that were'
' specified by the CLI command. Note that if you are '
'using any of the following parameters: ``--content-type``, '
'``--content-language``, ``--content-encoding``, '
'``--content-disposition``, ``--cache-control``, or ``--expires``, you '
'will need to specify ``--metadata-directive REPLACE`` for '
'non-multipart copies if you want the copied objects to have the '
'specified metadata values.')
}
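
As a quick illustration of how the new option is used from the command line (the bucket and key names below are placeholders, not values taken from this change):

    # Keep the source object's metadata on the copy (the default behavior):
    aws s3 cp s3://mybucket/source.txt s3://mybucket/dest.txt --metadata-directive COPY

    # Replace the metadata: the copied object gets only the values given on the command line:
    aws s3 cp s3://mybucket/source.txt s3://mybucket/dest.txt \
        --metadata-directive REPLACE --content-type text/plain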


INDEX_DOCUMENT = {'name': 'index-document',
'help_text': (
'A suffix that is appended to a request that is for '
@@ -530,7 +552,7 @@ class CpCommand(S3TransferCommand):
"or <S3Path> <S3Path>"
ARG_TABLE = [{'name': 'paths', 'nargs': 2, 'positional_arg': True,
'synopsis': USAGE}] + TRANSFER_ARGS + \
[EXPECTED_SIZE, RECURSIVE]
[METADATA_DIRECTIVE, EXPECTED_SIZE, RECURSIVE]
EXAMPLES = BasicCommand.FROM_FILE('s3/cp.rst')


@@ -541,7 +563,8 @@ class MvCommand(S3TransferCommand):
USAGE = "<LocalPath> <S3Path> or <S3Path> <LocalPath> " \
"or <S3Path> <S3Path>"
ARG_TABLE = [{'name': 'paths', 'nargs': 2, 'positional_arg': True,
'synopsis': USAGE}] + TRANSFER_ARGS + [RECURSIVE]
'synopsis': USAGE}] + TRANSFER_ARGS + [METADATA_DIRECTIVE,
RECURSIVE]
EXAMPLES = BasicCommand.FROM_FILE('s3/mv.rst')


51 changes: 51 additions & 0 deletions tests/integration/customizations/s3/test_plugin.py
@@ -26,6 +26,7 @@
import socket
import tempfile
import shutil
import copy

import botocore.session
from botocore.exceptions import ClientError
@@ -35,6 +36,7 @@
from awscli.testutils import aws as _aws
from tests.unit.customizations.s3 import create_bucket as _create_bucket
from awscli.customizations.s3.transferconfig import DEFAULTS
from awscli.customizations.scalarparse import add_scalar_parsers


@contextlib.contextmanager
@@ -523,6 +525,55 @@ def test_copy_large_file_signature_v4(self):
self.assert_no_errors(p)
self.assertTrue(self.key_exists(bucket_name, key_name='foo.txt'))

def test_copy_metadata_directive(self):
# Use the same style of parsing as the CLI session. This is needed
# for comparing the Expires timestamp.
add_scalar_parsers(self.session)
bucket_name = self.create_bucket()
original_key = 'foo.txt'
new_key = 'bar.txt'
metadata = {
'ContentType': 'foo',
'ContentDisposition': 'foo',
'ContentEncoding': 'foo',
'ContentLanguage': 'foo',
'CacheControl': '90',
'Expires': '0'
}
self.put_object(bucket_name, original_key, contents='foo',
extra_args=metadata)
p = aws('s3 cp s3://%s/%s s3://%s/%s' %
(bucket_name, original_key, bucket_name, new_key))
self.assert_no_errors(p)
response = self.head_object(bucket_name, new_key)
# The copied object should have the source object's metadata.
metadata_ref = copy.copy(metadata)
metadata_ref['Expires'] = 'Thu, 01 Jan 1970 00:00:00 GMT'
for name, value in metadata_ref.items():
self.assertEqual(response[name], value)

# Use REPLACE to wipe out all of the metadata.
p = aws('s3 cp s3://%s/%s s3://%s/%s --metadata-directive REPLACE' %
(bucket_name, original_key, bucket_name, new_key))
self.assert_no_errors(p)
response = self.head_object(bucket_name, new_key)
# Make sure all of the original metadata is gone.
for name, value in metadata_ref.items():
self.assertNotEqual(response.get(name), value)

# Use REPLACE to wipe out all of the metadata but include a new
# metadata value.
p = aws('s3 cp s3://%s/%s s3://%s/%s --metadata-directive REPLACE '
'--content-type bar' %
(bucket_name, original_key, bucket_name, new_key))
self.assert_no_errors(p)
response = self.head_object(bucket_name, new_key)
# Make sure the content type metadata is included
self.assertEqual(response['ContentType'], 'bar')
# Make sure all of the original metadata is gone.
for name, value in metadata_ref.items():
self.assertNotEqual(response.get(name), value)


class TestSync(BaseS3CLICommand):
def test_sync_with_plus_chars_paginate(self):
27 changes: 27 additions & 0 deletions tests/unit/customizations/s3/test_cp_command.py
@@ -101,6 +101,33 @@ def test_website_redirect_ignore_paramfile(self):
'http://someserver'
)

def test_metadata_directive_copy(self):
self.parsed_responses = [
{"ContentLength": "100", "LastModified": "00:00:00Z"},
{'ETag': '"foo-1"'},
]
cmdline = ('%s s3://bucket/key.txt s3://bucket/key2.txt'
' --metadata-directive REPLACE' % self.prefix)
self.run_cmd(cmdline, expected_rc=0)
self.assertEqual(len(self.operations_called), 2,
self.operations_called)
self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
self.assertEqual(self.operations_called[1][1]['metadata_directive'],
'REPLACE')

def test_no_metadata_directive_for_non_copy(self):
full_path = self.files.create_file('foo.txt', 'mycontent')
cmdline = '%s %s s3://bucket --metadata-directive REPLACE' % \
(self.prefix, full_path)
self.parsed_responses = \
[{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
self.run_cmd(cmdline, expected_rc=0)
self.assertEqual(len(self.operations_called), 1,
self.operations_called)
self.assertEqual(self.operations_called[0][0].name, 'PutObject')
self.assertNotIn('metadata_directive', self.operations_called[0][1])


if __name__ == "__main__":
unittest.main()
28 changes: 28 additions & 0 deletions tests/unit/customizations/s3/test_mv_command.py
@@ -55,6 +55,34 @@ def test_website_redirect_ignore_paramfile(self):
'http://someserver'
)

def test_metadata_directive_copy(self):
self.parsed_responses = [
{"ContentLength": "100", "LastModified": "00:00:00Z"},
{'ETag': '"foo-1"'},
{'ETag': '"foo-2"'}
]
cmdline = ('%s s3://bucket/key.txt s3://bucket/key2.txt'
' --metadata-directive REPLACE' % self.prefix)
self.run_cmd(cmdline, expected_rc=0)
self.assertEqual(len(self.operations_called), 3,
self.operations_called)
self.assertEqual(self.operations_called[0][0].name, 'HeadObject')
self.assertEqual(self.operations_called[1][0].name, 'CopyObject')
self.assertEqual(self.operations_called[2][0].name, 'DeleteObject')
self.assertEqual(self.operations_called[1][1]['metadata_directive'],
'REPLACE')

def test_no_metadata_directive_for_non_copy(self):
full_path = self.files.create_file('foo.txt', 'mycontent')
cmdline = '%s %s s3://bucket --metadata-directive REPLACE' % \
(self.prefix, full_path)
self.parsed_responses = \
[{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
self.run_cmd(cmdline, expected_rc=0)
self.assertEqual(len(self.operations_called), 1,
self.operations_called)
self.assertEqual(self.operations_called[0][0].name, 'PutObject')
self.assertNotIn('metadata_directive', self.operations_called[0][1])

if __name__ == "__main__":
unittest.main()
5 changes: 3 additions & 2 deletions tests/unit/test_completer.py
@@ -78,7 +78,8 @@
'--content-disposition', '--source-region',
'--content-encoding', '--content-language',
'--expires', '--grants', '--only-show-errors',
'--expected-size', '--page-size']
'--expected-size', '--page-size',
'--metadata-directive']
+ GLOBALOPTS)),
('aws s3 cp --quiet -', -1, set(['--no-guess-mime-type', '--dryrun',
'--recursive', '--content-type',
@@ -88,7 +89,7 @@
'--expires', '--website-redirect', '--acl',
'--storage-class', '--sse',
'--exclude', '--include',
'--source-region',
'--source-region', '--metadata-directive',
'--grants', '--only-show-errors',
'--expected-size', '--page-size']
+ GLOBALOPTS)),