diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c0c3970c130b..41780e75ce50 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -7,6 +7,9 @@ Next Release (TBD) * bugfix:``aws opsworks register``: Support ``--no-verify-ssl`` argument for the ``aws opsworks register`` command (`issue 1632 `__) +* feature:``s3``: Add support for Server-Side Encryption with KMS + and Server-Side Encryption with Customer-Provided Keys. + (`issue 1623 `__) 1.9.7 diff --git a/awscli/arguments.py b/awscli/arguments.py index fddac4cbf702..0504d59cd500 100644 --- a/awscli/arguments.py +++ b/awscli/arguments.py @@ -206,7 +206,8 @@ class CustomArgument(BaseCLIArgument): def __init__(self, name, help_text='', dest=None, default=None, action=None, required=None, choices=None, nargs=None, cli_type_name=None, group_name=None, positional_arg=False, - no_paramfile=False, argument_model=None, synopsis=''): + no_paramfile=False, argument_model=None, synopsis='', + const=None): self._name = name self._help = help_text self._dest = dest @@ -214,6 +215,7 @@ def __init__(self, name, help_text='', dest=None, default=None, self._action = action self._required = required self._nargs = nargs + self._const = const self._cli_type_name = cli_type_name self._group_name = group_name self._positional_arg = positional_arg @@ -275,6 +277,8 @@ def add_to_parser(self, parser): kwargs['required'] = self._required if self._nargs is not None: kwargs['nargs'] = self._nargs + if self._const is not None: + kwargs['const'] = self._const parser.add_argument(cli_name, **kwargs) @property diff --git a/awscli/customizations/s3/filegenerator.py b/awscli/customizations/s3/filegenerator.py index 9bb8063c3314..47ae41fc278e 100644 --- a/awscli/customizations/s3/filegenerator.py +++ b/awscli/customizations/s3/filegenerator.py @@ -116,7 +116,7 @@ class FileGenerator(object): ``FileInfo`` objects to send to a ``Comparator`` or ``S3Handler``. """ def __init__(self, client, operation_name, follow_symlinks=True, - page_size=None, result_queue=None): + page_size=None, result_queue=None, request_parameters=None): self._client = client self.operation_name = operation_name self.follow_symlinks = follow_symlinks @@ -124,6 +124,9 @@ def __init__(self, client, operation_name, follow_symlinks=True, self.result_queue = result_queue if not result_queue: self.result_queue = queue.Queue() + self.request_parameters = {} + if request_parameters is not None: + self.request_parameters = request_parameters def call(self, files): """ @@ -320,7 +323,9 @@ def _list_single_object(self, s3_path): # instead use a HeadObject request. bucket, key = find_bucket_key(s3_path) try: - response = self._client.head_object(Bucket=bucket, Key=key) + params = {'Bucket': bucket, 'Key': key} + params.update(self.request_parameters.get('HeadObject', {})) + response = self._client.head_object(**params) except ClientError as e: # We want to try to give a more helpful error message. 
# This is what the customer is going to see so we want to diff --git a/awscli/customizations/s3/fileinfo.py b/awscli/customizations/s3/fileinfo.py index cb4d27fd2a86..6fdd24bf2663 100644 --- a/awscli/customizations/s3/fileinfo.py +++ b/awscli/customizations/s3/fileinfo.py @@ -11,7 +11,8 @@ from botocore.compat import quote from awscli.customizations.s3.utils import find_bucket_key, \ - uni_print, guess_content_type, MD5Error, bytes_print, set_file_utime + uni_print, guess_content_type, MD5Error, bytes_print, set_file_utime, \ + RequestParamsMapper LOGGER = logging.getLogger(__name__) @@ -38,7 +39,6 @@ def save_file(filename, response_data, last_update, is_stream=False): """ body = response_data['Body'] etag = response_data['ETag'][1:-1] - sse = response_data.get('ServerSideEncryption', None) if not is_stream: d = os.path.dirname(filename) try: @@ -59,7 +59,7 @@ def save_file(filename, response_data, last_update, is_stream=False): with open(filename, 'wb') as out_file: write_to_file(out_file, etag, md5, file_chunks) - if not _is_multipart_etag(etag) and sse != 'aws:kms': + if _can_validate_md5_with_etag(etag, response_data): if etag != md5.hexdigest(): if not is_stream: os.remove(filename) @@ -75,6 +75,15 @@ def save_file(filename, response_data, last_update, is_stream=False): sys.stdout.flush() +def _can_validate_md5_with_etag(etag, response_data): + sse = response_data.get('ServerSideEncryption', None) + sse_customer_algorithm = response_data.get('SSECustomerAlgorithm', None) + if not _is_multipart_etag(etag) and sse != 'aws:kms' and \ + sse_customer_algorithm is None: + return True + return False + + def write_to_file(out_file, etag, md5, file_chunks, is_stream=False): """ Updates the etag for each file chunk. It will write to the file if it a @@ -188,11 +197,9 @@ def __init__(self, src, dest=None, compare_key=None, size=None, self.size = size self.last_update = last_update # Usually inject ``parameters`` from ``BasicTask`` class. 
+ self.parameters = {} if parameters is not None: self.parameters = parameters - else: - self.parameters = {'acl': None, - 'sse': None} self.source_client = source_client self.is_stream = is_stream self.associated_response_data = associated_response_data @@ -204,60 +211,10 @@ def set_size_from_s3(self): bucket, key = find_bucket_key(self.src) params = {'Bucket': bucket, 'Key': key} + RequestParamsMapper.map_head_object_params(params, self.parameters) response_data = self.client.head_object(**params) self.size = int(response_data['ContentLength']) - def _permission_to_param(self, permission): - if permission == 'read': - return 'GrantRead' - if permission == 'full': - return 'GrantFullControl' - if permission == 'readacl': - return 'GrantReadACP' - if permission == 'writeacl': - return 'GrantWriteACP' - raise ValueError('permission must be one of: ' - 'read|readacl|writeacl|full') - - def _handle_object_params(self, params): - if self.parameters['acl']: - params['ACL'] = self.parameters['acl'][0] - if self.parameters['grants']: - for grant in self.parameters['grants']: - try: - permission, grantee = grant.split('=', 1) - except ValueError: - raise ValueError('grants should be of the form ' - 'permission=principal') - params[self._permission_to_param(permission)] = grantee - if self.parameters['sse']: - params['ServerSideEncryption'] = 'AES256' - if self.parameters['storage_class']: - params['StorageClass'] = self.parameters['storage_class'][0] - if self.parameters['website_redirect']: - params['WebsiteRedirectLocation'] = \ - self.parameters['website_redirect'][0] - if self.parameters['guess_mime_type']: - self._inject_content_type(params, self.src) - if self.parameters['content_type']: - params['ContentType'] = self.parameters['content_type'][0] - if self.parameters['cache_control']: - params['CacheControl'] = self.parameters['cache_control'][0] - if self.parameters['content_disposition']: - params['ContentDisposition'] = \ - self.parameters['content_disposition'][0] - if self.parameters['content_encoding']: - params['ContentEncoding'] = self.parameters['content_encoding'][0] - if self.parameters['content_language']: - params['ContentLanguage'] = self.parameters['content_language'][0] - if self.parameters['expires']: - params['Expires'] = self.parameters['expires'][0] - - def _handle_metadata_directive(self, params): - if self.parameters['metadata_directive']: - params['MetadataDirective'] = \ - self.parameters['metadata_directive'][0] - def is_glacier_compatible(self): """Determines if a file info object is glacier compatible @@ -301,10 +258,14 @@ def _handle_upload(self, body): 'Key': key, 'Body': body, } - self._handle_object_params(params) + self._inject_content_type(params) + RequestParamsMapper.map_put_object_params(params, self.parameters) response_data = self.client.put_object(**params) - def _inject_content_type(self, params, filename): + def _inject_content_type(self, params): + if not self.parameters['guess_mime_type']: + return + filename = self.src # Add a content type param if we can guess the type. 
try: guessed_type = guess_content_type(filename) @@ -331,6 +292,7 @@ def download(self): """ bucket, key = find_bucket_key(self.src) params = {'Bucket': bucket, 'Key': key} + RequestParamsMapper.map_get_object_params(params, self.parameters) response_data = self.client.get_object(**params) save_file(self.dest, response_data, self.last_update, self.is_stream) @@ -343,9 +305,9 @@ def copy(self): bucket, key = find_bucket_key(self.dest) params = {'Bucket': bucket, 'CopySource': copy_source, 'Key': key} - self._handle_object_params(params) - self._handle_metadata_directive(params) - self.client.copy_object(**params) + self._inject_content_type(params) + RequestParamsMapper.map_copy_object_params(params, self.parameters) + response_data = self.client.copy_object(**params) def delete(self): """ @@ -378,7 +340,9 @@ def move(self): def create_multipart_upload(self): bucket, key = find_bucket_key(self.dest) params = {'Bucket': bucket, 'Key': key} - self._handle_object_params(params) + self._inject_content_type(params) + RequestParamsMapper.map_create_multipart_upload_params( + params, self.parameters) response_data = self.client.create_multipart_upload(**params) upload_id = response_data['UploadId'] return upload_id diff --git a/awscli/customizations/s3/s3handler.py b/awscli/customizations/s3/s3handler.py index 8c44487342c7..13b5be4242aa 100644 --- a/awscli/customizations/s3/s3handler.py +++ b/awscli/customizations/s3/s3handler.py @@ -55,16 +55,19 @@ def __init__(self, session, params, result_queue=None, self.result_queue = result_queue if not self.result_queue: self.result_queue = queue.Queue() - self.params = {'dryrun': False, 'quiet': False, 'acl': None, - 'guess_mime_type': True, 'sse': False, - 'storage_class': None, 'website_redirect': None, - 'content_type': None, 'cache_control': None, - 'content_disposition': None, 'content_encoding': None, - 'content_language': None, 'expires': None, - 'grants': None, 'only_show_errors': False, - 'is_stream': False, 'paths_type': None, - 'expected_size': None, 'metadata_directive': None, - 'ignore_glacier_warnings': False} + self.params = { + 'dryrun': False, 'quiet': False, 'acl': None, + 'guess_mime_type': True, 'sse_c_copy_source': None, + 'sse_c_copy_source_key': None, 'sse': None, + 'sse_c': None, 'sse_c_key': None, 'sse_kms_key_id': None, + 'storage_class': None, 'website_redirect': None, + 'content_type': None, 'cache_control': None, + 'content_disposition': None, 'content_encoding': None, + 'content_language': None, 'expires': None, 'grants': None, + 'only_show_errors': False, 'is_stream': False, + 'paths_type': None, 'expected_size': None, + 'metadata_directive': None, 'ignore_glacier_warnings': False + } self.params['region'] = params['region'] for key in self.params.keys(): if key in params: @@ -287,7 +290,8 @@ def _do_enqueue_range_download_tasks(self, filename, chunksize, task = tasks.DownloadPartTask( part_number=i, chunk_size=chunksize, result_queue=self.result_queue, filename=filename, - context=context, io_queue=self.write_queue) + context=context, io_queue=self.write_queue, + params=self.params) self.executor.submit(task) def _enqueue_multipart_upload_tasks(self, filename, @@ -350,7 +354,8 @@ def _enqueue_upload_single_part_task(self, part_number, chunk_size, payload=None): kwargs = {'part_number': part_number, 'chunk_size': chunk_size, 'result_queue': self.result_queue, - 'upload_context': upload_context, 'filename': filename} + 'upload_context': upload_context, 'filename': filename, + 'params': self.params} if payload: 
kwargs['payload'] = payload task = task_class(**kwargs) diff --git a/awscli/customizations/s3/subcommands.py b/awscli/customizations/s3/subcommands.py index 195dbc11613e..cdbde98e6963 100644 --- a/awscli/customizations/s3/subcommands.py +++ b/awscli/customizations/s3/subcommands.py @@ -13,6 +13,7 @@ import os import sys +from botocore.client import Config from dateutil.parser import parse from dateutil.tz import tzlocal @@ -27,7 +28,8 @@ from awscli.customizations.s3.filters import create_filter from awscli.customizations.s3.s3handler import S3Handler, S3StreamHandler from awscli.customizations.s3.utils import find_bucket_key, uni_print, \ - AppendFilter, find_dest_path_comp_key, human_readable_size + AppendFilter, find_dest_path_comp_key, human_readable_size, \ + RequestParamsMapper from awscli.customizations.s3.syncstrategy.base import MissingFileSync, \ SizeAndLastModifiedSync, NeverSync from awscli.customizations.s3 import transferconfig @@ -96,7 +98,7 @@ "file is guessed when it is uploaded.")} -CONTENT_TYPE = {'name': 'content-type', 'nargs': 1, +CONTENT_TYPE = {'name': 'content-type', 'help_text': ( "Specify an explicit content type for this operation. " "This value overrides any guessed mime types.")} @@ -116,7 +118,7 @@ "in the command that match the specified pattern")} -ACL = {'name': 'acl', 'nargs': 1, +ACL = {'name': 'acl', 'choices': ['private', 'public-read', 'public-read-write', 'authenticated-read', 'bucket-owner-read', 'bucket-owner-full-control', 'log-delivery-write'], @@ -163,12 +165,80 @@ 'UsingAuthAccess.html">Access Control')} -SSE = {'name': 'sse', 'action': 'store_true', - 'help_text': ( - "Enable Server Side Encryption of the object in S3")} +SSE = { + 'name': 'sse', 'nargs': '?', 'const': 'AES256', + 'choices': ['AES256', 'aws:kms'], + 'help_text': ( + 'Specifies server-side encryption of the object in S3. ' + 'Valid values are ``AES256`` and ``aws:kms``. If the parameter is ' + 'specified but no value is provided, ``AES256`` is used.' + ) +} + + +SSE_C = { + 'name': 'sse-c', 'nargs': '?', 'const': 'AES256', 'choices': ['AES256'], + 'help_text': ( + 'Specifies server-side encryption using customer provided keys ' + 'of the object in S3. ``AES256`` is the only valid value. ' + 'If the parameter is specified but no value is provided, ' + '``AES256`` is used. If you provide this value, ``--sse-c-key`` ' + 'must be specified as well.' + ) +} + + +SSE_C_KEY = { + 'name': 'sse-c-key', + 'help_text': ( + 'The customer-provided encryption key to use to server-side ' + 'encrypt the object in S3. If you provide this value, ' + '``--sse-c`` must be specified as well.' + ) +} + + +SSE_KMS_KEY_ID = { + 'name': 'sse-kms-key-id', + 'help_text': ( + 'The AWS KMS key ID that should be used to server-side ' + 'encrypt the object in S3. Note that you should only ' + 'provide this parameter if the KMS key ID is different from the ' + 'default S3 master KMS key.' + ) +} -STORAGE_CLASS = {'name': 'storage-class', 'nargs': 1, +SSE_C_COPY_SOURCE = { + 'name': 'sse-c-copy-source', 'nargs': '?', + 'const': 'AES256', 'choices': ['AES256'], + 'help_text': ( + 'This parameter should only be specified when copying an S3 object ' + 'that was encrypted server-side with a customer-provided ' + 'key. It specifies the algorithm to use when decrypting the source ' + 'object. ``AES256`` is the only valid ' + 'value. If the parameter is specified but no value is provided, ' + '``AES256`` is used. If you provide this value, ' + '``--sse-c-copy-source-key`` must be specified as well. 
' + ) +} + + +SSE_C_COPY_SOURCE_KEY = { + 'name': 'sse-c-copy-source-key', + 'help_text': ( + 'This parameter should only be specified when copying an S3 object ' + 'that was encrypted server-side with a customer-provided ' + 'key. Specifies the customer-provided encryption key for Amazon S3 ' + 'to use to decrypt the source object. The encryption key provided ' + 'must be one that was used when the source object was created. ' + 'If you provide this value, ``--sse-c-copy-source`` must be ' + 'specified as well.' + ) +} + + +STORAGE_CLASS = {'name': 'storage-class', 'choices': ['STANDARD', 'REDUCED_REDUNDANCY', 'STANDARD_IA'], 'help_text': ( "The type of storage to use for the object. " @@ -177,7 +247,7 @@ "Defaults to 'STANDARD'")} -WEBSITE_REDIRECT = {'name': 'website-redirect', 'nargs': 1, +WEBSITE_REDIRECT = {'name': 'website-redirect', 'help_text': ( "If the bucket is configured as a website, " "redirects requests for this object to another object " @@ -186,19 +256,19 @@ "metadata.")} -CACHE_CONTROL = {'name': 'cache-control', 'nargs': 1, +CACHE_CONTROL = {'name': 'cache-control', 'help_text': ( "Specifies caching behavior along the " "request/reply chain.")} -CONTENT_DISPOSITION = {'name': 'content-disposition', 'nargs': 1, +CONTENT_DISPOSITION = {'name': 'content-disposition', 'help_text': ( "Specifies presentational information " "for the object.")} -CONTENT_ENCODING = {'name': 'content-encoding', 'nargs': 1, +CONTENT_ENCODING = {'name': 'content-encoding', 'help_text': ( "Specifies what content encodings have been " "applied to the object and thus what decoding " @@ -206,11 +276,11 @@ "referenced by the Content-Type header field.")} -CONTENT_LANGUAGE = {'name': 'content-language', 'nargs': 1, +CONTENT_LANGUAGE = {'name': 'content-language', 'help_text': ("The language the content is in.")} -SOURCE_REGION = {'name': 'source-region', 'nargs': 1, +SOURCE_REGION = {'name': 'source-region', 'help_text': ( "When transferring objects from an s3 bucket to an s3 " "bucket, this specifies the region of the source bucket." @@ -222,14 +292,14 @@ EXPIRES = { - 'name': 'expires', 'nargs': 1, + 'name': 'expires', 'help_text': ( "The date and time at which the object is no longer cacheable.") } METADATA_DIRECTIVE = { - 'name': 'metadata-directive', 'nargs': 1, 'choices': ['COPY', 'REPLACE'], + 'name': 'metadata-directive', 'choices': ['COPY', 'REPLACE'], 'help_text': ( 'Specifies whether the metadata is copied from the source object ' 'or replaced with metadata provided when copying S3 objects. 
' @@ -303,16 +373,18 @@ TRANSFER_ARGS = [DRYRUN, QUIET, INCLUDE, EXCLUDE, ACL, FOLLOW_SYMLINKS, NO_FOLLOW_SYMLINKS, NO_GUESS_MIME_TYPE, - SSE, STORAGE_CLASS, GRANTS, WEBSITE_REDIRECT, CONTENT_TYPE, - CACHE_CONTROL, CONTENT_DISPOSITION, CONTENT_ENCODING, - CONTENT_LANGUAGE, EXPIRES, SOURCE_REGION, ONLY_SHOW_ERRORS, + SSE, SSE_C, SSE_C_KEY, SSE_KMS_KEY_ID, SSE_C_COPY_SOURCE, + SSE_C_COPY_SOURCE_KEY, STORAGE_CLASS, GRANTS, + WEBSITE_REDIRECT, CONTENT_TYPE, CACHE_CONTROL, + CONTENT_DISPOSITION, CONTENT_ENCODING, CONTENT_LANGUAGE, + EXPIRES, SOURCE_REGION, ONLY_SHOW_ERRORS, PAGE_SIZE, IGNORE_GLACIER_WARNINGS] -def get_client(session, region, endpoint_url, verify): +def get_client(session, region, endpoint_url, verify, config=None): return session.create_client('s3', region_name=region, - endpoint_url=endpoint_url, verify=verify) - + endpoint_url=endpoint_url, verify=verify, + config=config) class S3Command(BasicCommand): def _run_main(self, parsed_args, parsed_globals): @@ -514,6 +586,7 @@ def _run_main(self, parsed_args, parsed_globals): cmd_params.add_page_size(parsed_args) cmd_params.add_paths(parsed_args.paths) self._handle_rm_force(parsed_globals, cmd_params.parameters) + runtime_config = transferconfig.RuntimeConfig().build_config( **self._session.get_scoped_config().get('s3', {})) cmd = CommandArchitecture(self._session, self.NAME, @@ -654,25 +727,31 @@ def __init__(self, session, cmd, parameters, runtime_config=None): self._source_client = None def set_clients(self): + client_config = None + if self.parameters.get('sse') == 'aws:kms': + client_config = Config(signature_version='s3v4') self._client = get_client( self.session, region=self.parameters['region'], endpoint_url=self.parameters['endpoint_url'], - verify=self.parameters['verify_ssl'] + verify=self.parameters['verify_ssl'], + config=client_config ) self._source_client = get_client( self.session, region=self.parameters['region'], endpoint_url=self.parameters['endpoint_url'], - verify=self.parameters['verify_ssl'] + verify=self.parameters['verify_ssl'], + config=client_config ) if self.parameters['source_region']: if self.parameters['paths_type'] == 's3s3': self._source_client = get_client( self.session, - region=self.parameters['source_region'][0], + region=self.parameters['source_region'], endpoint_url=None, - verify=self.parameters['verify_ssl'] + verify=self.parameters['verify_ssl'], + config=client_config ) def create_instructions(self): @@ -766,15 +845,43 @@ def run(self): } result_queue = queue.Queue() operation_name = cmd_translation[paths_type][self.cmd] - file_generator = FileGenerator(self._source_client, - operation_name, - self.parameters['follow_symlinks'], - self.parameters['page_size'], - result_queue=result_queue) - rev_generator = FileGenerator(self._client, '', - self.parameters['follow_symlinks'], - self.parameters['page_size'], - result_queue=result_queue) + + fgen_kwargs = { + 'client': self._source_client, 'operation_name': operation_name, + 'follow_symlinks': self.parameters['follow_symlinks'], + 'page_size': self.parameters['page_size'], + 'result_queue': result_queue + } + rgen_kwargs = { + 'client': self._client, 'operation_name': '', + 'follow_symlinks': self.parameters['follow_symlinks'], + 'page_size': self.parameters['page_size'], + 'result_queue': result_queue + } + + fgen_request_parameters = {} + fgen_head_object_params = {} + fgen_request_parameters['HeadObject'] = fgen_head_object_params + fgen_kwargs['request_parameters'] = fgen_request_parameters + + # SSE-C may be needed for HeadObject for 
copies/downloads/deletes + # If the operation is s3 to s3, the FileGenerator should use the + # copy source key and algorithm. Otherwise, use the regular + # SSE-C key and algorithm. Note the reverse FileGenerator does + # not need any of these because it is used only for sync operations + # which only use ListObjects which does not require HeadObject. + RequestParamsMapper.map_head_object_params( + fgen_head_object_params, self.parameters) + if paths_type == 's3s3': + RequestParamsMapper.map_head_object_params( + fgen_head_object_params, { + 'sse_c': self.parameters.get('sse_c_copy_source'), + 'sse_c_key': self.parameters.get('sse_c_copy_source_key') + } + ) + + file_generator = FileGenerator(**fgen_kwargs) + rev_generator = FileGenerator(**rgen_kwargs) taskinfo = [TaskInfo(src=files['src']['path'], src_type='s3', operation_name=operation_name, @@ -909,6 +1016,7 @@ def add_paths(self, paths): self.parameters['dest'] = paths[0] self._validate_streaming_paths() self._validate_path_args() + self._validate_sse_c_args() def _validate_streaming_paths(self): self.parameters['is_stream'] = False @@ -1002,3 +1110,33 @@ def add_verify_ssl(self, parsed_globals): def add_page_size(self, parsed_args): self.parameters['page_size'] = getattr(parsed_args, 'page_size', None) + + def _validate_sse_c_args(self): + self._validate_sse_c_arg() + self._validate_sse_c_arg('sse_c_copy_source') + self._validate_sse_c_copy_source_for_paths() + + def _validate_sse_c_arg(self, sse_c_type='sse_c'): + sse_c_key_type = sse_c_type + '_key' + sse_c_type_param = '--' + sse_c_type.replace('_', '-') + sse_c_key_type_param = '--' + sse_c_key_type.replace('_', '-') + if self.parameters.get(sse_c_type): + if not self.parameters.get(sse_c_key_type): + raise ValueError( + 'If %s is specified, %s must be specified ' + 'as well.' % (sse_c_type_param, sse_c_key_type_param) + ) + if self.parameters.get(sse_c_key_type): + if not self.parameters.get(sse_c_type): + raise ValueError( + 'If %s is specified, %s must be specified ' + 'as well.' % (sse_c_key_type_param, sse_c_type_param) + ) + + def _validate_sse_c_copy_source_for_paths(self): + if self.parameters.get('sse_c_copy_source'): + if self.parameters['paths_type'] != 's3s3': + raise ValueError( + '--sse-c-copy-source is only supported for ' + 'copy operations.' 
+ ) diff --git a/awscli/customizations/s3/tasks.py b/awscli/customizations/s3/tasks.py index c2c66a4a322a..2c6987fb39c3 100644 --- a/awscli/customizations/s3/tasks.py +++ b/awscli/customizations/s3/tasks.py @@ -11,7 +11,8 @@ ReadTimeoutError from awscli.customizations.s3.utils import find_bucket_key, MD5Error, \ - ReadFileChunk, relative_path, IORequest, IOCloseRequest, PrintTask + ReadFileChunk, relative_path, IORequest, IOCloseRequest, PrintTask, \ + RequestParamsMapper LOGGER = logging.getLogger(__name__) @@ -126,12 +127,13 @@ def _queue_print_message(self, filename, failed, dryrun, class CopyPartTask(OrderableTask): def __init__(self, part_number, chunk_size, - result_queue, upload_context, filename): + result_queue, upload_context, filename, params): self._result_queue = result_queue self._upload_context = upload_context self._part_number = part_number self._chunk_size = chunk_size self._filename = filename + self._params = params def _is_last_part(self, part_number): return self._part_number == int( @@ -161,6 +163,8 @@ def __call__(self): 'UploadId': upload_id, 'CopySource': '%s/%s' % (src_bucket, src_key), 'CopySourceRange': range_param} + RequestParamsMapper.map_upload_part_copy_params( + params, self._params) response_data = self._filename.client.upload_part_copy(**params) etag = response_data['CopyPartResult']['ETag'][1:-1] self._upload_context.announce_finished_part( @@ -199,12 +203,13 @@ class UploadPartTask(OrderableTask): object. """ def __init__(self, part_number, chunk_size, result_queue, upload_context, - filename, payload=None): + filename, params, payload=None): self._result_queue = result_queue self._upload_context = upload_context self._part_number = part_number self._chunk_size = chunk_size self._filename = filename + self._params = params self._payload = payload def _read_part(self): @@ -231,6 +236,7 @@ def __call__(self): 'PartNumber': self._part_number, 'UploadId': upload_id, 'Body': body} + RequestParamsMapper.map_upload_part_params(params, self._params) try: response_data = self._filename.client.upload_part(**params) finally: @@ -335,7 +341,7 @@ class DownloadPartTask(OrderableTask): TOTAL_ATTEMPTS = 5 def __init__(self, part_number, chunk_size, result_queue, - filename, context, io_queue): + filename, context, io_queue, params): self._part_number = part_number self._chunk_size = chunk_size self._result_queue = result_queue @@ -343,6 +349,7 @@ def __init__(self, part_number, chunk_size, result_queue, self._client = filename.client self._context = context self._io_queue = io_queue + self._params = params def __call__(self): try: @@ -365,7 +372,10 @@ def _download_part(self): LOGGER.debug("Downloading bytes range of %s for file %s", range_param, self._filename.dest) bucket, key = find_bucket_key(self._filename.src) - params = {'Bucket': bucket, 'Key': key, 'Range': range_param} + params = {'Bucket': bucket, + 'Key': key, + 'Range': range_param} + RequestParamsMapper.map_get_object_params(params, self._params) for i in range(self.TOTAL_ATTEMPTS): try: LOGGER.debug("Making GetObject requests with byte range: %s", @@ -500,7 +510,8 @@ def __call__(self): 'MultipartUpload': {'Parts': parts}, } try: - self.filename.client.complete_multipart_upload(**params) + response_data = self.filename.client.complete_multipart_upload( + **params) except Exception as e: LOGGER.debug("Error trying to complete multipart upload: %s", e, exc_info=True) diff --git a/awscli/customizations/s3/utils.py b/awscli/customizations/s3/utils.py index 596cff6d9981..a26b25351384 100644 --- 
a/awscli/customizations/s3/utils.py +++ b/awscli/customizations/s3/utils.py @@ -517,3 +517,155 @@ def __new__(cls, message, error=False, total_parts=None, warning=None): class IOCloseRequest(_IOCloseRequest): def __new__(cls, filename, desired_mtime=None): return super(IOCloseRequest, cls).__new__(cls, filename, desired_mtime) + + +class RequestParamsMapper(object): + """A utility class that maps CLI params to request params + + Each method in the class maps to a particular operation and will set + the request parameters depending on the operation and CLI parameters + provided. For each of the class's methods the parameters are as follows: + + :type request_params: dict + :param request_params: A dictionary to be filled out with the appropriate + parameters for the specified client operation using the current CLI + parameters + + :type cli_params: dict + :param cli_params: A dictionary of the current CLI params that will be + used to generate the request parameters for the specified operation + + For example, take the mapping of request parameters for PutObject:: + + >>> cli_request_params = {'sse': 'AES256', 'storage_class': 'GLACIER'} + >>> request_params = {} + >>> RequestParamsMapper.map_put_object_params( request_params, cli_request_params) + >>> print(request_params) + {'StorageClass': 'GLACIER', 'ServerSideEncryption': 'AES256'} + + Note that existing parameters in ``request_params`` will be overridden if + a parameter in ``cli_params`` maps to the existing parameter. + """ + @classmethod + def map_put_object_params(cls, request_params, cli_params): + """Map CLI params to PutObject request params""" + cls._set_general_object_params(request_params, cli_params) + cls._set_sse_request_params(request_params, cli_params) + cls._set_sse_c_request_params(request_params, cli_params) + + @classmethod + def map_get_object_params(cls, request_params, cli_params): + """Map CLI params to GetObject request params""" + cls._set_sse_c_request_params(request_params, cli_params) + + @classmethod + def map_copy_object_params(cls, request_params, cli_params): + """Map CLI params to CopyObject request params""" + cls._set_general_object_params(request_params, cli_params) + cls._set_metadata_directive_param(request_params, cli_params) + cls._set_sse_request_params(request_params, cli_params) + cls._set_sse_c_and_copy_source_request_params( + request_params, cli_params) + + @classmethod + def map_head_object_params(cls, request_params, cli_params): + """Map CLI params to HeadObject request params""" + cls._set_sse_c_request_params(request_params, cli_params) + + @classmethod + def map_create_multipart_upload_params(cls, request_params, cli_params): + """Map CLI params to CreateMultipartUpload request params""" + cls._set_general_object_params(request_params, cli_params) + cls._set_sse_request_params(request_params, cli_params) + cls._set_sse_c_request_params(request_params, cli_params) + + @classmethod + def map_upload_part_params(cls, request_params, cli_params): + """Map CLI params to UploadPart request params""" + cls._set_sse_c_request_params(request_params, cli_params) + + @classmethod + def map_upload_part_copy_params(cls, request_params, cli_params): + """Map CLI params to UploadPartCopy request params""" + cls._set_sse_c_and_copy_source_request_params( + request_params, cli_params) + + @classmethod + def _set_general_object_params(cls, request_params, cli_params): + # Parameters set in this method should be applicable to the following + # operations involving objects: PutObject, CopyObject, and + 
# CreateMultipartUpload. + general_param_translation = { + 'acl': 'ACL', + 'storage_class': 'StorageClass', + 'website_redirect': 'WebsiteRedirectLocation', + 'content_type': 'ContentType', + 'cache_control': 'CacheControl', + 'content_disposition': 'ContentDisposition', + 'content_encoding': 'ContentEncoding', + 'content_language': 'ContentLanguage', + 'expires': 'Expires' + } + for cli_param_name in general_param_translation: + if cli_params.get(cli_param_name): + request_param_name = general_param_translation[cli_param_name] + request_params[request_param_name] = cli_params[cli_param_name] + cls._set_grant_params(request_params, cli_params) + + @classmethod + def _set_grant_params(cls, request_params, cli_params): + if cli_params.get('grants'): + for grant in cli_params['grants']: + try: + permission, grantee = grant.split('=', 1) + except ValueError: + raise ValueError('grants should be of the form ' + 'permission=principal') + request_params[cls._permission_to_param(permission)] = grantee + + @classmethod + def _permission_to_param(cls, permission): + if permission == 'read': + return 'GrantRead' + if permission == 'full': + return 'GrantFullControl' + if permission == 'readacl': + return 'GrantReadACP' + if permission == 'writeacl': + return 'GrantWriteACP' + raise ValueError('permission must be one of: ' + 'read|readacl|writeacl|full') + + @classmethod + def _set_metadata_directive_param(cls, request_params, cli_params): + if cli_params.get('metadata_directive'): + request_params['MetadataDirective'] = cli_params[ + 'metadata_directive'] + + @classmethod + def _set_sse_request_params(cls, request_params, cli_params): + if cli_params.get('sse'): + request_params['ServerSideEncryption'] = cli_params['sse'] + if cli_params.get('sse_kms_key_id'): + request_params['SSEKMSKeyId'] = cli_params['sse_kms_key_id'] + + @classmethod + def _set_sse_c_request_params(cls, request_params, cli_params): + if cli_params.get('sse_c'): + request_params['SSECustomerAlgorithm'] = cli_params['sse_c'] + request_params['SSECustomerKey'] = cli_params['sse_c_key'] + + @classmethod + def _set_sse_c_copy_source_request_params(cls, request_params, cli_params): + if cli_params.get('sse_c_copy_source'): + request_params['CopySourceSSECustomerAlgorithm'] = cli_params[ + 'sse_c_copy_source'] + request_params['CopySourceSSECustomerKey'] = cli_params[ + 'sse_c_copy_source_key'] + + @classmethod + def _set_sse_c_and_copy_source_request_params(cls, request_params, + cli_params): + cls._set_sse_c_request_params(request_params, cli_params) + cls._set_sse_c_copy_source_request_params(request_params, cli_params) diff --git a/tests/functional/s3/test_cp_command.py b/tests/functional/s3/test_cp_command.py index b6694c867a2a..96479e3aeaef 100644 --- a/tests/functional/s3/test_cp_command.py +++ b/tests/functional/s3/test_cp_command.py @@ -229,3 +229,121 @@ def test_turn_off_glacier_warnings(self): self.assertEqual(len(self.operations_called), 1) self.assertEqual(self.operations_called[0][0].name, 'HeadObject') self.assertEqual('', stderr) + + def test_cp_with_sse_flag(self): + full_path = self.files.create_file('foo.txt', 'contents') + cmdline = ( + '%s %s s3://bucket/key.txt --sse' % ( + self.prefix, full_path)) + self.run_cmd(cmdline, expected_rc=0) + self.assertEqual(len(self.operations_called), 1) + self.assertEqual(self.operations_called[0][0].name, 'PutObject') + self.assertDictEqual( + self.operations_called[0][1], + {'Key': 'key.txt', 'Bucket': 'bucket', + 'ContentType': 'text/plain', 'Body': mock.ANY, + 
'ServerSideEncryption': 'AES256'} + ) + + def test_cp_with_sse_c_flag(self): + full_path = self.files.create_file('foo.txt', 'contents') + cmdline = ( + '%s %s s3://bucket/key.txt --sse-c --sse-c-key foo' % ( + self.prefix, full_path)) + self.run_cmd(cmdline, expected_rc=0) + self.assertEqual(len(self.operations_called), 1) + self.assertEqual(self.operations_called[0][0].name, 'PutObject') + self.assertDictEqual( + self.operations_called[0][1], + {'Key': 'key.txt', 'Bucket': 'bucket', + 'ContentType': 'text/plain', 'Body': mock.ANY, + 'SSECustomerAlgorithm': 'AES256', 'SSECustomerKey': 'Zm9v', + 'SSECustomerKeyMD5': 'rL0Y20zC+Fzt72VPzMSk2A=='} + ) + + # Note: ideally the kms sse with a key id tests would be integration + # tests. However, you cannot delete kms keys, so there would be no way + # to clean up after the tests. + def test_cp_upload_with_sse_kms_and_key_id(self): + full_path = self.files.create_file('foo.txt', 'contents') + cmdline = ( + '%s %s s3://bucket/key.txt --sse aws:kms --sse-kms-key-id foo' % ( + self.prefix, full_path)) + self.run_cmd(cmdline, expected_rc=0) + self.assertEqual(len(self.operations_called), 1) + self.assertEqual(self.operations_called[0][0].name, 'PutObject') + self.assertDictEqual( + self.operations_called[0][1], + {'Key': 'key.txt', 'Bucket': 'bucket', + 'ContentType': 'text/plain', 'Body': mock.ANY, + 'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'} + ) + + def test_cp_upload_large_file_with_sse_kms_and_key_id(self): + self.parsed_responses = [ + {'UploadId': 'foo'}, # CreateMultipartUpload + {'ETag': '"foo"'}, # UploadPart + {'ETag': '"foo"'}, # UploadPart + {} # CompleteMultipartUpload + ] + full_path = self.files.create_file('foo.txt', 'a' * 10 * (1024 ** 2)) + cmdline = ( + '%s %s s3://bucket/key.txt --sse aws:kms --sse-kms-key-id foo' % ( + self.prefix, full_path)) + self.run_cmd(cmdline, expected_rc=0) + self.assertEqual(len(self.operations_called), 4) + + # We are only really concerned that the CreateMultipartUpload + # used the KMS key id. 
+ self.assertEqual( + self.operations_called[0][0].name, 'CreateMultipartUpload') + self.assertDictEqual( + self.operations_called[0][1], + {'Key': 'key.txt', 'Bucket': 'bucket', + 'ContentType': 'text/plain', + 'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'} + ) + + def test_cp_copy_with_sse_kms_and_key_id(self): + self.parsed_responses = [ + {'ContentLength': 5, 'LastModified': '00:00:00Z'}, # HeadObject + {} # CopyObject + ] + cmdline = ( + '%s s3://bucket/key1.txt s3://bucket/key2.txt ' + '--sse aws:kms --sse-kms-key-id foo' % self.prefix) + self.run_cmd(cmdline, expected_rc=0) + self.assertEqual(len(self.operations_called), 2) + self.assertEqual(self.operations_called[1][0].name, 'CopyObject') + self.assertDictEqual( + self.operations_called[1][1], + {'Key': 'key2.txt', 'Bucket': 'bucket', + 'ContentType': 'text/plain', 'CopySource': 'bucket/key1.txt', + 'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'} + ) + + def test_cp_copy_large_file_with_sse_kms_and_key_id(self): + self.parsed_responses = [ + {'ContentLength': 10 * (1024 ** 2), + 'LastModified': '00:00:00Z'}, # HeadObject + {'UploadId': 'foo'}, # CreateMultipartUpload + {'CopyPartResult': {'ETag': '"foo"'}}, # UploadPartCopy + {'CopyPartResult': {'ETag': '"foo"'}}, # UploadPartCopy + {} # CompleteMultipartUpload + ] + cmdline = ( + '%s s3://bucket/key1.txt s3://bucket/key2.txt ' + '--sse aws:kms --sse-kms-key-id foo' % self.prefix) + self.run_cmd(cmdline, expected_rc=0) + self.assertEqual(len(self.operations_called), 5) + + # We are only really concerned that the CreateMultipartUpload + # used the KMS key id. + self.assertEqual( + self.operations_called[1][0].name, 'CreateMultipartUpload') + self.assertDictEqual( + self.operations_called[1][1], + {'Key': 'key2.txt', 'Bucket': 'bucket', + 'ContentType': 'text/plain', + 'SSEKMSKeyId': 'foo', 'ServerSideEncryption': 'aws:kms'} + ) diff --git a/tests/integration/customizations/s3/test_plugin.py b/tests/integration/customizations/s3/test_plugin.py index 83cce978bd9f..0ef0ab98005e 100644 --- a/tests/integration/customizations/s3/test_plugin.py +++ b/tests/integration/customizations/s3/test_plugin.py @@ -1699,5 +1699,305 @@ def test_verify_endpoint_url_is_used(self): self.assertIn(expected, debug_logs) -if __name__ == "__main__": - unittest.main() +class TestSSERelatedParams(BaseS3CLICommand): + def download_and_assert_kms_object_integrity(self, bucket, key, contents): + # Ensure the kms object can be downloaded by downloading it + # with --sse aws:kms enabled to ensure sigv4 is used on the + # download, as it is required for kms. 
+ download_filename = os.path.join(self.files.rootdir, 'tmp', key) + p = aws('s3 cp s3://%s/%s %s --sse aws:kms' % ( + bucket, key, download_filename)) + self.assert_no_errors(p) + + self.assertTrue(os.path.isfile(download_filename)) + with open(download_filename, 'r') as f: + self.assertEqual(f.read(), contents) + + def test_sse_upload(self): + bucket = self.create_bucket() + key = 'foo.txt' + contents = 'contents' + file_name = self.files.create_file(key, contents) + + # Upload the file using AES256 + p = aws('s3 cp %s s3://%s/%s --sse AES256' % (file_name, bucket, key)) + self.assert_no_errors(p) + + # Ensure the file was uploaded correctly + self.assert_key_contents_equal(bucket, key, contents) + + def test_large_file_sse_upload(self): + bucket = self.create_bucket() + key = 'foo.txt' + contents = 'a' * (10 * (1024 * 1024)) + file_name = self.files.create_file(key, contents) + + # Upload the file using AES256 + p = aws('s3 cp %s s3://%s/%s --sse AES256' % (file_name, bucket, key)) + self.assert_no_errors(p) + + # Ensure the file was uploaded correctly + self.assert_key_contents_equal(bucket, key, contents) + + def test_sse_with_kms_upload(self): + bucket = self.create_bucket() + key = 'foo.txt' + contents = 'contents' + file_name = self.files.create_file(key, contents) + + # Upload the file using KMS + p = aws('s3 cp %s s3://%s/%s --sse aws:kms' % (file_name, bucket, key)) + self.assert_no_errors(p) + + self.download_and_assert_kms_object_integrity(bucket, key, contents) + + def test_large_file_sse_kms_upload(self): + bucket = self.create_bucket() + key = 'foo.txt' + contents = 'a' * (10 * (1024 * 1024)) + file_name = self.files.create_file(key, contents) + + # Upload the file using KMS + p = aws('s3 cp %s s3://%s/%s --sse aws:kms' % (file_name, bucket, key)) + self.assert_no_errors(p) + + self.download_and_assert_kms_object_integrity(bucket, key, contents) + + def test_sse_copy(self): + bucket = self.create_bucket() + key = 'foo.txt' + new_key = 'bar.txt' + contents = 'contents' + self.put_object(bucket, key, contents) + + # Copy the file using AES256 + p = aws('s3 cp s3://%s/%s s3://%s/%s --sse AES256' % ( + bucket, key, bucket, new_key)) + self.assert_no_errors(p) + + # Ensure the file was copied correctly + self.assert_key_contents_equal(bucket, new_key, contents) + + def test_large_file_sse_copy(self): + bucket = self.create_bucket() + key = 'foo.txt' + new_key = 'bar.txt' + contents = 'a' * (10 * (1024 * 1024)) + + # This is a little faster and more efficient than + # calling self.put_object() + file_name = self.files.create_file(key, contents) + p = aws('s3 cp %s s3://%s/%s' % (file_name, bucket, key)) + self.assert_no_errors(p) + + # Copy the file using AES256 + p = aws('s3 cp s3://%s/%s s3://%s/%s --sse AES256' % ( + bucket, key, bucket, new_key)) + self.assert_no_errors(p) + + # Ensure the file was copied correctly + self.assert_key_contents_equal(bucket, new_key, contents) + + def test_sse_kms_copy(self): + bucket = self.create_bucket() + key = 'foo.txt' + new_key = 'bar.txt' + contents = 'contents' + self.put_object(bucket, key, contents) + + # Copy the file using KMS + p = aws('s3 cp s3://%s/%s s3://%s/%s --sse aws:kms' % ( + bucket, key, bucket, new_key)) + self.assert_no_errors(p) + self.download_and_assert_kms_object_integrity(bucket, key, contents) + + def test_large_file_sse_kms_copy(self): + bucket = self.create_bucket() + key = 'foo.txt' + new_key = 'bar.txt' + contents = 'a' * (10 * (1024 * 1024)) + + # This is a little faster and more efficient than + # calling 
self.put_object() + file_name = self.files.create_file(key, contents) + p = aws('s3 cp %s s3://%s/%s' % (file_name, bucket, key)) + self.assert_no_errors(p) + + # Copy the file using KMS + p = aws('s3 cp s3://%s/%s s3://%s/%s --sse aws:kms' % ( + bucket, key, bucket, new_key)) + self.assert_no_errors(p) + self.download_and_assert_kms_object_integrity(bucket, key, contents) + + def test_smoke_sync_sse(self): + bucket = self.create_bucket() + key = 'foo.txt' + contents = 'contents' + file_name = self.files.create_file(key, contents) + + # Upload sync + p = aws('s3 sync %s s3://%s/foo/ --sse AES256' % ( + self.files.rootdir, bucket)) + self.assert_no_errors(p) + + # Copy sync + p = aws('s3 sync s3://%s/foo/ s3://%s/bar/ --sse AES256' % ( + bucket, bucket)) + self.assert_no_errors(p) + + # Remove the original file + os.remove(file_name) + + # Download sync + p = aws('s3 sync s3://%s/bar/ %s --sse AES256' % ( + bucket, self.files.rootdir)) + self.assert_no_errors(p) + + self.assertTrue(os.path.isfile(file_name)) + with open(file_name, 'r') as f: + self.assertEqual(f.read(), contents) + + def test_smoke_sync_sse_kms(self): + bucket = self.create_bucket() + key = 'foo.txt' + contents = 'contents' + file_name = self.files.create_file(key, contents) + + # Upload sync + p = aws('s3 sync %s s3://%s/foo/ --sse aws:kms' % ( + self.files.rootdir, bucket)) + self.assert_no_errors(p) + + # Copy sync + p = aws('s3 sync s3://%s/foo/ s3://%s/bar/ --sse aws:kms' % ( + bucket, bucket)) + self.assert_no_errors(p) + + # Remove the original file + os.remove(file_name) + + # Download sync + p = aws('s3 sync s3://%s/bar/ %s --sse aws:kms' % ( + bucket, self.files.rootdir)) + self.assert_no_errors(p) + + self.assertTrue(os.path.isfile(file_name)) + with open(file_name, 'r') as f: + self.assertEqual(f.read(), contents) + + +class TestSSECRelatedParams(BaseS3CLICommand): + def setUp(self): + super(TestSSECRelatedParams, self).setUp() + self.encrypt_key = 'a' * 32 + self.other_encrypt_key = 'b' * 32 + self.bucket = self.create_bucket() + + def download_and_assert_sse_c_object_integrity( + self, bucket, key, encrypt_key, contents): + download_filename = os.path.join(self.files.rootdir, 'tmp', key) + p = aws('s3 cp s3://%s/%s %s --sse-c AES256 --sse-c-key %s' % ( + bucket, key, download_filename, encrypt_key)) + self.assert_no_errors(p) + + self.assertTrue(os.path.isfile(download_filename)) + with open(download_filename, 'r') as f: + self.assertEqual(f.read(), contents) + + def test_sse_c_upload_and_download(self): + key = 'foo.txt' + contents = 'contents' + file_name = self.files.create_file(key, contents) + + # Upload the file using SSE-C + p = aws('s3 cp %s s3://%s --sse-c AES256 --sse-c-key %s' % ( + file_name, self.bucket, self.encrypt_key)) + self.assert_no_errors(p) + + self.download_and_assert_sse_c_object_integrity( + self.bucket, key, self.encrypt_key, contents) + + def test_sse_c_upload_and_download_large_file(self): + key = 'foo.txt' + contents = 'a' * (10 * (1024 * 1024)) + file_name = self.files.create_file(key, contents) + + # Upload the file using SSE-C + p = aws('s3 cp %s s3://%s --sse-c AES256 --sse-c-key %s' % ( + file_name, self.bucket, self.encrypt_key)) + self.assert_no_errors(p) + + self.download_and_assert_sse_c_object_integrity( + self.bucket, key, self.encrypt_key, contents) + + def test_sse_c_copy(self): + key = 'foo.txt' + new_key = 'bar.txt' + contents = 'contents' + file_name = self.files.create_file(key, contents) + + # Upload the file using SSE-C + p = aws('s3 cp %s s3://%s --sse-c AES256 
--sse-c-key %s' % ( + file_name, self.bucket, self.encrypt_key)) + self.assert_no_errors(p) + + # Copy the file using SSE-C and a new encryption key + p = aws( + 's3 cp s3://%s/%s s3://%s/%s --sse-c AES256 --sse-c-key %s ' + '--sse-c-copy-source AES256 --sse-c-copy-source-key %s' % ( + self.bucket, key, self.bucket, new_key, self.other_encrypt_key, + self.encrypt_key)) + self.assert_no_errors(p) + self.download_and_assert_sse_c_object_integrity( + self.bucket, new_key, self.other_encrypt_key, contents) + + def test_sse_c_copy_large_file(self): + key = 'foo.txt' + new_key = 'bar.txt' + contents = 'a' * (10 * (1024 * 1024)) + file_name = self.files.create_file(key, contents) + + # Upload the file using SSE-C + p = aws('s3 cp %s s3://%s --sse-c AES256 --sse-c-key %s' % ( + file_name, self.bucket, self.encrypt_key)) + self.assert_no_errors(p) + + # Copy the file using SSE-C and a new encryption key + p = aws( + 's3 cp s3://%s/%s s3://%s/%s --sse-c AES256 --sse-c-key %s ' + '--sse-c-copy-source AES256 --sse-c-copy-source-key %s' % ( + self.bucket, key, self.bucket, new_key, self.other_encrypt_key, + self.encrypt_key)) + self.assert_no_errors(p) + self.download_and_assert_sse_c_object_integrity( + self.bucket, new_key, self.other_encrypt_key, contents) + + def test_smoke_sync_sse_c(self): + key = 'foo.txt' + contents = 'contents' + file_name = self.files.create_file(key, contents) + + # Upload sync + p = aws('s3 sync %s s3://%s/foo/ --sse-c AES256 --sse-c-key %s' % ( + self.files.rootdir, self.bucket, self.encrypt_key)) + self.assert_no_errors(p) + + # Copy sync + p = aws('s3 sync s3://%s/foo/ s3://%s/bar/ --sse-c AES256 ' + '--sse-c-key %s --sse-c-copy-source AES256 ' + '--sse-c-copy-source-key %s' % ( + self.bucket, self.bucket, self.other_encrypt_key, + self.encrypt_key)) + self.assert_no_errors(p) + + # Remove the original file + os.remove(file_name) + + # Download sync + p = aws('s3 sync s3://%s/bar/ %s --sse-c AES256 --sse-c-key %s' % ( + self.bucket, self.files.rootdir, self.other_encrypt_key)) + self.assert_no_errors(p) + + self.assertTrue(os.path.isfile(file_name)) + with open(file_name, 'r') as f: + self.assertEqual(f.read(), contents) diff --git a/tests/integration/customizations/s3/test_s3handler.py b/tests/integration/customizations/s3/test_s3handler.py index b44846fef105..b4f72110218e 100644 --- a/tests/integration/customizations/s3/test_s3handler.py +++ b/tests/integration/customizations/s3/test_s3handler.py @@ -120,7 +120,7 @@ class S3HandlerTestUpload(unittest.TestCase): def setUp(self): self.session = botocore.session.get_session(EnvironmentVariables) self.client = self.session.create_client('s3', 'us-west-2') - params = {'region': 'us-west-2', 'acl': ['private']} + params = {'region': 'us-west-2', 'acl': 'private'} self.s3_handler = S3Handler(self.session, params) self.s3_handler_multi = S3Handler( self.session, params=params, @@ -184,7 +184,7 @@ def setUp(self): self.session = botocore.session.get_session(EnvironmentVariables) self.client = self.session.create_client('s3', 'us-west-2') self.source_client = self.session.create_client('s3', 'us-west-2') - params = {'region': 'us-west-2', 'acl': ['private'], 'quiet': True} + params = {'region': 'us-west-2', 'acl': 'private', 'quiet': True} self.s3_handler = S3Handler(self.session, params) self.bucket = make_s3_files(self.session, key1=u'\u2713') self.bucket2 = create_bucket(self.session) @@ -223,7 +223,7 @@ def setUp(self): self.session = botocore.session.get_session(EnvironmentVariables) self.client = 
self.session.create_client('s3', 'us-west-2') self.source_client = self.session.create_client('s3', 'us-west-2') - params = {'region': 'us-west-2', 'acl': ['private']} + params = {'region': 'us-west-2', 'acl': 'private'} self.s3_handler = S3Handler(self.session, params) self.bucket = make_s3_files(self.session) self.bucket2 = create_bucket(self.session) diff --git a/tests/unit/customizations/s3/test_s3handler.py b/tests/unit/customizations/s3/test_s3handler.py index 475c7aded972..eb2d74457cb4 100644 --- a/tests/unit/customizations/s3/test_s3handler.py +++ b/tests/unit/customizations/s3/test_s3handler.py @@ -125,7 +125,7 @@ class S3HandlerTestUpload(S3HandlerBaseTest): """ def setUp(self): super(S3HandlerTestUpload, self).setUp() - params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True} + params = {'region': 'us-east-1', 'acl': 'private', 'quiet': True} self.s3_handler = S3Handler( self.session, params, runtime_config=runtime_config( max_concurrent_requests=1)) @@ -294,7 +294,7 @@ class S3HandlerTestMvLocalS3(S3HandlerBaseTest): """ def setUp(self): super(S3HandlerTestMvLocalS3, self).setUp() - params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True} + params = {'region': 'us-east-1', 'acl': 'private', 'quiet': True} self.s3_handler = S3Handler(self.session, params, runtime_config=runtime_config( max_concurrent_requests=1)) @@ -341,7 +341,7 @@ class S3HandlerTestMvS3S3(S3HandlerBaseTest): """ def setUp(self): super(S3HandlerTestMvS3S3, self).setUp() - params = {'region': 'us-east-1', 'acl': ['private']} + params = {'region': 'us-east-1', 'acl': 'private'} self.s3_handler = S3Handler(self.session, params, runtime_config=runtime_config( max_concurrent_requests=1)) diff --git a/tests/unit/customizations/s3/test_subcommands.py b/tests/unit/customizations/s3/test_subcommands.py index 168834472071..14ebfbea56fe 100644 --- a/tests/unit/customizations/s3/test_subcommands.py +++ b/tests/unit/customizations/s3/test_subcommands.py @@ -43,9 +43,9 @@ def test_client(self): session = Mock() endpoint = get_client(session, region='us-west-1', endpoint_url='URL', verify=True) - session.create_client.assert_called_with('s3', region_name='us-west-1', - endpoint_url='URL', - verify=True) + session.create_client.assert_called_with( + 's3', region_name='us-west-1', endpoint_url='URL', verify=True, + config=None) class TestRbCommand(unittest.TestCase): @@ -121,8 +121,9 @@ def test_ls_command_with_no_args(self): # Verify get_client get_client = self.session.create_client args = get_client.call_args - self.assertEqual(args, mock.call('s3', region_name=None, - endpoint_url=None, verify=None)) + self.assertEqual(args, mock.call( + 's3', region_name=None, endpoint_url=None, verify=None, + config=None)) def test_ls_with_verify_argument(self): options = {'default': 's3://', 'nargs': '?'} @@ -135,8 +136,9 @@ def test_ls_with_verify_argument(self): # Verify get_client get_client = self.session.create_client args = get_client.call_args - self.assertEqual(args, mock.call('s3', region_name='us-west-2', - endpoint_url=None, verify=False)) + self.assertEqual(args, mock.call( + 's3', region_name='us-west-2', endpoint_url=None, verify=False, + config=None)) class CommandArchitectureTest(BaseAWSCommandParamsTest): @@ -174,14 +176,16 @@ def test_set_client_no_source(self): self.assertEqual( session.create_client.call_args_list[0], mock.call( - 's3', region_name='us-west-1', endpoint_url=None, verify=None) + 's3', region_name='us-west-1', endpoint_url=None, verify=None, + config=None) ) # A client created 
with the same arguments as the first should be used # for the source client since no source region was provided. self.assertEqual( session.create_client.call_args_list[1], mock.call( - 's3', region_name='us-west-1', endpoint_url=None, verify=None) + 's3', region_name='us-west-1', endpoint_url=None, verify=None, + config=None) ) def test_set_client_with_source(self): @@ -191,26 +195,46 @@ def test_set_client_with_source(self): 'endpoint_url': None, 'verify_ssl': None, 'paths_type': 's3s3', - 'source_region': ['us-west-2']}) + 'source_region': 'us-west-2'}) cmd_arc.set_clients() create_client_args = session.create_client.call_args_list # Assert that two clients were created self.assertEqual(len(create_client_args), 3) self.assertEqual( create_client_args[0][1], - {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None} + {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None, + 'config': None} ) self.assertEqual( create_client_args[1][1], - {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None} + {'region_name': 'us-west-1', 'verify': None, 'endpoint_url': None, + 'config': None} ) # Assert override the second client created with the one needed for the # source region. self.assertEqual( create_client_args[2][1], - {'region_name': 'us-west-2', 'verify': None, 'endpoint_url': None} + {'region_name': 'us-west-2', 'verify': None, 'endpoint_url': None, + 'config': None} ) + def test_set_sigv4_clients_with_sse_kms(self): + session = Mock() + cmd_arc = CommandArchitecture( + session, 'sync', + {'region': 'us-west-1', 'endpoint_url': None, 'verify_ssl': None, + 'source_region': None, 'sse': 'aws:kms'}) + cmd_arc.set_clients() + self.assertEqual( session.create_client.call_count, 2) + create_client_call = session.create_client.call_args_list[0] + create_source_client_call = session.create_client.call_args_list[1] + + # Make sure that both clients are using sigv4 if kms is enabled. 
+ self.assertEqual( + create_client_call[1]['config'].signature_version, 's3v4') + self.assertEqual( + create_source_client_call[1]['config'].signature_version, 's3v4') + def test_create_instructions(self): """ This tests to make sure the instructions for any command is generated @@ -638,6 +662,46 @@ def test_add_path_for_non_existsent_local_path_download(self): cmd_param.add_paths(paths) self.assertTrue(os.path.exists(non_existent_path)) + def test_validate_sse_c_args_missing_sse(self): + paths = ['s3://bucket/foo', 's3://bucket/bar'] + params = {'dir_op': False, 'sse_c_key': 'foo'} + cmd_param = CommandParameters('cp', params, '') + with self.assertRaisesRegexp(ValueError, '--sse-c must be specified'): + cmd_param.add_paths(paths) + + def test_validate_sse_c_args_missing_sse_c_key(self): + paths = ['s3://bucket/foo', 's3://bucket/bar'] + params = {'dir_op': False, 'sse_c': 'AES256'} + cmd_param = CommandParameters('cp', params, '') + with self.assertRaisesRegexp(ValueError, + '--sse-c-key must be specified'): + cmd_param.add_paths(paths) + + def test_validate_sse_c_args_missing_sse_c_copy_source(self): + paths = ['s3://bucket/foo', 's3://bucket/bar'] + params = {'dir_op': False, 'sse_c_copy_source_key': 'foo'} + cmd_param = CommandParameters('cp', params, '') + with self.assertRaisesRegexp(ValueError, + '--sse-c-copy-source must be specified'): + cmd_param.add_paths(paths) + + def test_validate_sse_c_args_missing_sse_c_copy_source_key(self): + paths = ['s3://bucket/foo', 's3://bucket/bar'] + params = {'dir_op': False, 'sse_c_copy_source': 'AES256'} + cmd_param = CommandParameters('cp', params, '') + with self.assertRaisesRegexp(ValueError, + '--sse-c-copy-source-key must be specified'): + cmd_param.add_paths(paths) + + def test_validate_sse_c_args_wrong_path_type(self): + paths = ['s3://bucket/foo', self.file_creator.rootdir] + params = {'dir_op': False, 'sse_c_copy_source': 'AES256', + 'sse_c_copy_source_key': 'foo'} + cmd_param = CommandParameters('cp', params, '') + with self.assertRaisesRegexp(ValueError, + 'only supported for copy operations'): + cmd_param.add_paths(paths) + class HelpDocTest(BaseAWSHelpOutputTest): def setUp(self): diff --git a/tests/unit/customizations/s3/test_tasks.py b/tests/unit/customizations/s3/test_tasks.py index dacf6774be02..2b1e8ed90485 100644 --- a/tests/unit/customizations/s3/test_tasks.py +++ b/tests/unit/customizations/s3/test_tasks.py @@ -379,12 +379,13 @@ def setUp(self): self.filename.operation_name = 'download' self.context = mock.Mock() self.open = mock.MagicMock() + self.params = {} def test_socket_timeout_is_retried(self): self.client.get_object.side_effect = socket.error task = DownloadPartTask(0, 1024 * 1024, self.result_queue, self.filename, self.context, - self.io_queue) + self.io_queue, self.params) # The mock is configured to keep raising a socket.error # so we should cancel the download. 
with self.assertRaises(RetriesExeededError): @@ -401,7 +402,7 @@ def test_download_succeeds(self): socket.error, {'Body': body}] task = DownloadPartTask(0, 1024 * 1024, self.result_queue, self.filename, self.context, - self.io_queue) + self.io_queue, self.params) task() self.assertEqual(self.result_queue.put.call_count, 1) # And we tried twice, the first one failed, the second one @@ -414,7 +415,7 @@ def test_download_queues_io_properly(self): self.client.get_object.side_effect = [{'Body': body}] task = DownloadPartTask(0, 1024 * 1024, self.result_queue, self.filename, self.context, - self.io_queue) + self.io_queue, self.params) task() call_args_list = self.io_queue.put.call_args_list self.assertEqual(len(call_args_list), 2) @@ -428,7 +429,7 @@ def test_incomplete_read_is_retried(self): IncompleteReadError(actual_bytes=1, expected_bytes=2) task = DownloadPartTask(0, 1024 * 1024, self.result_queue, self.filename, - self.context, self.io_queue) + self.context, self.io_queue, self.params) with self.assertRaises(RetriesExeededError): task() self.context.cancel.assert_called_with() @@ -440,7 +441,8 @@ def test_readtimeout_is_retried(self): ReadTimeoutError(None, None, None) task = DownloadPartTask(0, 1024 * 1024, self.result_queue, self.filename, - self.context, self.io_queue) + self.context, self.io_queue, + self.params) with self.assertRaises(RetriesExeededError): task() self.context.cancel.assert_called_with() @@ -463,7 +465,8 @@ def test_retried_requests_dont_enqueue_writes_twice(self): self.filename.is_stream = True task = DownloadPartTask( 0, transferconfig.DEFAULTS['multipart_chunksize'], - self.result_queue, self.filename, self.context, self.io_queue) + self.result_queue, self.filename, self.context, self.io_queue, + self.params) task() call_args_list = self.io_queue.put.call_args_list self.assertEqual(len(call_args_list), 1) @@ -540,7 +543,7 @@ def complete_task(self): return CompleteDownloadTask(None, None, None, None, None) def download_task(self): - return DownloadPartTask(None, None, None, mock.Mock(), None, None) + return DownloadPartTask(None, None, None, mock.Mock(), None, None, {}) def shutdown_task(self, priority=None): return ShutdownThreadRequest(priority) diff --git a/tests/unit/customizations/s3/test_utils.py b/tests/unit/customizations/s3/test_utils.py index 6ec3dff402ad..10535108d6c8 100644 --- a/tests/unit/customizations/s3/test_utils.py +++ b/tests/unit/customizations/s3/test_utils.py @@ -25,6 +25,7 @@ from awscli.customizations.s3.utils import human_readable_to_bytes from awscli.customizations.s3.utils import MAX_SINGLE_UPLOAD_SIZE from awscli.customizations.s3.utils import set_file_utime, SetFileUtimeError +from awscli.customizations.s3.utils import RequestParamsMapper def test_human_readable_size(): @@ -420,3 +421,89 @@ def test_passes_through_other_os_errors(self): utime_mock.side_effect = OSError(2, '') with self.assertRaises(OSError): set_file_utime('not_real_file', epoch_now) + + +class TestRequestParamsMapperSSE(unittest.TestCase): + def setUp(self): + self.cli_params = { + 'sse': 'AES256', + 'sse_kms_key_id': 'my-kms-key', + 'sse_c': 'AES256', + 'sse_c_key': 'my-sse-c-key', + 'sse_c_copy_source': 'AES256', + 'sse_c_copy_source_key': 'my-sse-c-copy-source-key' + } + + def test_head_object(self): + params = {} + RequestParamsMapper.map_head_object_params(params, self.cli_params) + self.assertEqual( + params, + {'SSECustomerAlgorithm': 'AES256', + 'SSECustomerKey': 'my-sse-c-key'} + ) + + def test_put_object(self): + params = {} + 
RequestParamsMapper.map_put_object_params(params, self.cli_params) + self.assertEqual( + params, + {'SSECustomerAlgorithm': 'AES256', + 'SSECustomerKey': 'my-sse-c-key', + 'SSEKMSKeyId': 'my-kms-key', + 'ServerSideEncryption': 'AES256'} + ) + + def test_get_object(self): + params = {} + RequestParamsMapper.map_get_object_params(params, self.cli_params) + self.assertEqual( + params, + {'SSECustomerAlgorithm': 'AES256', + 'SSECustomerKey': 'my-sse-c-key'} + ) + + def test_copy_object(self): + params = {} + RequestParamsMapper.map_copy_object_params(params, self.cli_params) + self.assertEqual( + params, + {'CopySourceSSECustomerAlgorithm': 'AES256', + 'CopySourceSSECustomerKey': 'my-sse-c-copy-source-key', + 'SSECustomerAlgorithm': 'AES256', + 'SSECustomerKey': 'my-sse-c-key', + 'SSEKMSKeyId': 'my-kms-key', + 'ServerSideEncryption': 'AES256'} + ) + + def test_create_multipart_upload(self): + params = {} + RequestParamsMapper.map_create_multipart_upload_params( + params, self.cli_params) + self.assertEqual( + params, + {'SSECustomerAlgorithm': 'AES256', + 'SSECustomerKey': 'my-sse-c-key', + 'SSEKMSKeyId': 'my-kms-key', + 'ServerSideEncryption': 'AES256'} + ) + + def test_upload_part(self): + params = {} + RequestParamsMapper.map_upload_part_params(params, self.cli_params) + self.assertEqual( + params, + {'SSECustomerAlgorithm': 'AES256', + 'SSECustomerKey': 'my-sse-c-key'} + ) + + def test_upload_part_copy(self): + params = {} + RequestParamsMapper.map_upload_part_copy_params( + params, self.cli_params) + self.assertEqual( + params, + {'CopySourceSSECustomerAlgorithm': 'AES256', + 'CopySourceSSECustomerKey': 'my-sse-c-copy-source-key', + 'SSECustomerAlgorithm': 'AES256', + 'SSECustomerKey': 'my-sse-c-key'}) diff --git a/tests/unit/test_completer.py b/tests/unit/test_completer.py index 4f9750779c93..0f28b0503bad 100644 --- a/tests/unit/test_completer.py +++ b/tests/unit/test_completer.py @@ -79,7 +79,11 @@ ('aws s3 cp -', -1, set(['--no-guess-mime-type', '--dryrun', '--recursive', '--website-redirect', '--quiet', '--acl', '--storage-class', - '--sse', '--exclude', '--include', + '--sse', '--sse-c', '--sse-c-copy-source', + '--sse-c-copy-source-key', + '--sse-c-key', + '--sse-kms-key-id', + '--exclude', '--include', '--follow-symlinks', '--no-follow-symlinks', '--cache-control', '--content-type', '--content-disposition', '--source-region', @@ -95,7 +99,11 @@ '--content-disposition', '--cache-control', '--content-encoding', '--content-language', '--expires', '--website-redirect', '--acl', - '--storage-class', '--sse', + '--storage-class', '--sse', '--sse-c', + '--sse-c-copy-source', + '--sse-c-copy-source-key', + '--sse-c-key', + '--sse-kms-key-id', '--exclude', '--include', '--source-region', '--metadata-directive', '--grants', '--only-show-errors',