diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 049615ebaeae..03b8d8abe735 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,6 +2,30 @@
 CHANGELOG
 =========
 
+Next Release (TBD)
+==================
+
+* Allow tcp, udp, icmp, all for ``--protocol`` param of
+  the ``ec2 create-network-acl-entry`` command
+  (`issue 508 `__)
+* Fix bug when filtering ``s3://`` locations with the
+  ``--include/--exclude`` params
+  (`issue 531 `__)
+* Fix an issue with map type parameters raising uncaught
+  exceptions in commands such as ``sns create-platform-application``
+  (`issue 407 `__)
+* Fix an issue when both ``--private-ip-address`` and
+  ``--associate-public-ip-address`` are specified in the
+  ``ec2 run-instances`` command
+  (`issue 520 `__)
+* Fix an issue where ``--output text`` was not providing
+  a starting identifier for certain rows
+  (`issue 516 `__)
+* Update the ``support`` command to the latest version
+* Update the ``--query`` syntax to support flattening sublists
+  (`boto/jmespath#20 `__)
+
+
 1.2.6
 =====
 
diff --git a/awscli/__init__.py b/awscli/__init__.py
index 84312ca1287e..4b2c5de2c12e 100644
--- a/awscli/__init__.py
+++ b/awscli/__init__.py
@@ -17,7 +17,7 @@
 """
 import os
 
-__version__ = '1.2.6'
+__version__ = '1.2.7'
 
 #
 # Get our data path to be added to botocore's search path
diff --git a/awscli/argprocess.py b/awscli/argprocess.py
index 87b942b3132a..26bef03c1509 100644
--- a/awscli/argprocess.py
+++ b/awscli/argprocess.py
@@ -12,10 +12,11 @@
 # language governing permissions and limitations under the License.
 """Module for processing CLI args."""
 import os
-import json
 import logging
 
 import six
 
+from botocore.compat import OrderedDict, json
+
 from awscli import utils
 from awscli import SCALAR_TYPES, COMPLEX_TYPES
 
@@ -243,7 +244,7 @@ def _key_value_parse(self, param, value):
         # that is, csv key value pairs, where the key and values
         # are separated by '='.  All of this should be whitespace
         # insensitive.
-        parsed = {}
+        parsed = OrderedDict()
         parts = self._split_on_commas(value)
         valid_names = self._create_name_to_params(param)
         for part in parts:
@@ -253,18 +254,19 @@ def _key_value_parse(self, param, value):
                 raise ParamSyntaxError(part)
             key = key.strip()
             value = value.strip()
-            if key not in valid_names:
+            if valid_names and key not in valid_names:
                 raise ParamUnknownKeyError(param, key, valid_names)
-            sub_param = valid_names[key]
-            if sub_param is not None:
-                value = unpack_scalar_cli_arg(sub_param, value)
+            if valid_names:
+                sub_param = valid_names[key]
+                if sub_param is not None:
+                    value = unpack_scalar_cli_arg(sub_param, value)
             parsed[key] = value
         return parsed
 
     def _create_name_to_params(self, param):
         if param.type == 'structure':
             return dict([(p.name, p) for p in param.members])
-        elif param.type == 'map':
+        elif param.type == 'map' and hasattr(param.keys, 'enum'):
             return dict([(v, None) for v in param.keys.enum])
 
     def _docs_list_scalar_list_parse(self, param):
@@ -351,7 +353,7 @@ def unpack_cli_arg(parameter, value):
 def unpack_complex_cli_arg(parameter, value):
     if parameter.type == 'structure' or parameter.type == 'map':
         if value.lstrip()[0] == '{':
-            d = json.loads(value)
+            d = json.loads(value, object_pairs_hook=OrderedDict)
         else:
             msg = 'The value for parameter "%s" must be JSON or path to file.' % (
                 parameter.cli_name)
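Why the switch to ``OrderedDict`` and ``object_pairs_hook`` matters here: some services serialize map parameters into numbered query-string entries, so key order should follow what the user typed. A minimal sketch of the difference (not part of the patch):

```python
import json
from collections import OrderedDict  # botocore.compat exposes the same name

raw = '{"PlatformCredential":"foo","PlatformPrincipal":"bar"}'
plain = json.loads(raw)  # plain dict: key order not guaranteed on older Pythons
ordered = json.loads(raw, object_pairs_hook=OrderedDict)  # keys keep input order
assert list(ordered) == ['PlatformCredential', 'PlatformPrincipal']
```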
@@ -360,11 +362,11 @@ def unpack_complex_cli_arg(parameter, value):
     elif parameter.type == 'list':
         if isinstance(value, six.string_types):
             if value.lstrip()[0] == '[':
-                return json.loads(value)
+                return json.loads(value, object_pairs_hook=OrderedDict)
         elif isinstance(value, list) and len(value) == 1:
             single_value = value[0].strip()
             if single_value and single_value[0] == '[':
-                return json.loads(value[0])
+                return json.loads(value[0], object_pairs_hook=OrderedDict)
         return [unpack_cli_arg(parameter.members, v) for v in value]
diff --git a/awscli/arguments.py b/awscli/arguments.py
index 601d1e7f66de..1a4002f5b9a1 100644
--- a/awscli/arguments.py
+++ b/awscli/arguments.py
@@ -143,7 +143,7 @@ def group_name(self):
 
         This base class has no default behavior for groups, code
         that consumes argument objects can use them for whatever
-        purposes they like (documentation, mututally exclusive group
+        purposes they like (documentation, mutually exclusive group
         validation, etc.).
 
         """
diff --git a/awscli/clidriver.py b/awscli/clidriver.py
index 92810b9af0f3..1d90144b5cba 100644
--- a/awscli/clidriver.py
+++ b/awscli/clidriver.py
@@ -14,6 +14,7 @@
 import logging
 
 import botocore.session
+from botocore import __version__ as botocore_version
 from botocore.hooks import HierarchicalEmitter
 from botocore import xform_name
 from botocore.compat import copy_kwargs, OrderedDict
@@ -219,6 +220,9 @@ def _handle_top_level_args(self, args):
             # loading of plugins, etc.
             self.session.set_debug_logger(logger_name='botocore')
             self.session.set_debug_logger(logger_name='awscli')
+            LOG.debug("CLI version: %s, botocore version: %s",
+                      self.session.user_agent(),
+                      botocore_version)
         else:
             self.session.set_stream_logger(logger_name='awscli',
                                            log_level=logging.ERROR)
diff --git a/awscli/customizations/ec2protocolarg.py b/awscli/customizations/ec2protocolarg.py
new file mode 100644
index 000000000000..f1fb4d46d418
--- /dev/null
+++ b/awscli/customizations/ec2protocolarg.py
@@ -0,0 +1,35 @@
+# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""
+This customization allows the user to specify the values "tcp", "udp",
+"icmp", or "all" as values for the --protocol parameter. The actual
+Protocol parameter of the operation accepts only integer protocol numbers.
+"""
+
+def _fix_args(operation, endpoint, params, **kwargs):
+    if 'protocol' in params:
+        if params['protocol'] == 'tcp':
+            params['protocol'] = '6'
+        elif params['protocol'] == 'udp':
+            params['protocol'] = '17'
+        elif params['protocol'] == 'icmp':
+            params['protocol'] = '1'
+        elif params['protocol'] == 'all':
+            params['protocol'] = '-1'
+
+
+def register_protocol_args(cli):
+    cli.register('before-parameter-build.ec2.CreateNetworkAclEntry',
+                 _fix_args)
+
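The handler above is effectively a name-to-number table keyed on the IANA protocol numbers. A standalone sketch of the same mapping, for readers skimming the patch (names here are illustrative, not part of the change):

```python
# Mirrors _fix_args' behavior in table-driven form (illustration only).
PROTOCOL_NUMBERS = {'tcp': '6', 'udp': '17', 'icmp': '1', 'all': '-1'}

def fix_protocol(params):
    value = params.get('protocol')
    if value in PROTOCOL_NUMBERS:
        params['protocol'] = PROTOCOL_NUMBERS[value]  # e.g. 'tcp' -> '6'

params = {'protocol': 'tcp'}
fix_protocol(params)
assert params['protocol'] == '6'  # integer protocol numbers pass through untouched
```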
+""" + +def _fix_args(operation, endpoint, params, **kwargs): + if 'protocol' in params: + if params['protocol'] == 'tcp': + params['protocol'] = '6' + elif params['protocol'] == 'udp': + params['protocol'] = '17' + elif params['protocol'] == 'icmp': + params['protocol'] = '1' + elif params['protocol'] == 'all': + params['protocol'] = '-1' + + +def register_protocol_args(cli): + ('before-parameter-build.ec2.RunInstances', _fix_args), + cli.register('before-parameter-build.ec2.CreateNetworkAclEntry', + _fix_args) + diff --git a/awscli/customizations/ec2runinstances.py b/awscli/customizations/ec2runinstances.py index afebeac9a3f1..1a0e5da23016 100644 --- a/awscli/customizations/ec2runinstances.py +++ b/awscli/customizations/ec2runinstances.py @@ -100,6 +100,11 @@ def _fix_args(operation, endpoint, params, **kwargs): if 'security_group_ids' in params: ni[0]['Groups'] = params['security_group_ids'] del params['security_group_ids'] + if 'private_ip_address' in params: + ip_addr = {'PrivateIpAddress': params['private_ip_address'], + 'Primary': True} + ni[0]['PrivateIpAddresses'] = [ip_addr] + del params['private_ip_address'] EVENTS = [ diff --git a/awscli/customizations/s3/filegenerator.py b/awscli/customizations/s3/filegenerator.py index b1082129700c..b98ba7ef00bb 100644 --- a/awscli/customizations/s3/filegenerator.py +++ b/awscli/customizations/s3/filegenerator.py @@ -12,6 +12,7 @@ # language governing permissions and limitations under the License. import os import sys +import datetime from six import text_type from dateutil.parser import parse @@ -152,27 +153,39 @@ def list_objects(self, s3_path, dir_op): common prefix. It yields the file's source path, size, and last update. """ - operation = self._service.get_operation('ListObjects') + # Short circuit path: if we are not recursing into the s3 + # bucket and a specific path was given, we can just yield + # that path and not have to call any operation in s3. bucket, prefix = find_bucket_key(s3_path) - iterator = operation.paginate(self._endpoint, bucket=bucket, - prefix=prefix) - for html_response, response_data in iterator: - contents = response_data['Contents'] - for content in contents: - src_path = bucket + '/' + content['Key'] - size = content['Size'] - last_update = parse(content['LastModified']) - last_update = last_update.astimezone(tzlocal()) - if size == 0 and src_path.endswith('/'): - if self.operation_name == 'delete': - # This is to filter out manually created folders - # in S3. They have a size zero and would be - # undesirably downloaded. Local directories - # are automatically created when they do not - # exist locally. But user should be able to - # delete them. + if not dir_op and prefix: + # Then a specific path was specified so we yield that + # exact path. The size doesn't matter, but the last_update + # is normally set to the last_modified time we get back + # from s3 for the specific object. We lose that here, but + # on the plus side, we don't need to require ListObjects + # permission to download a single file. 
diff --git a/awscli/customizations/s3/filters.py b/awscli/customizations/s3/filters.py
index f91c66ee0b40..4bdd3620a886 100644
--- a/awscli/customizations/s3/filters.py
+++ b/awscli/customizations/s3/filters.py
@@ -10,10 +10,14 @@
 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
 # ANY KIND, either express or implied. See the License for the specific
 # language governing permissions and limitations under the License.
+import logging
 import fnmatch
 import os
 
 
+LOG = logging.getLogger(__name__)
+
+
 class Filter(object):
     """
     This is a universal exclude/include filter.
@@ -55,13 +59,21 @@ def call(self, file_infos):
                 else:
                     path_pattern = pattern[1].replace(os.sep, '/')
 
-                full_path_pattern = path_pattern
-
+                full_path_pattern = os.path.join(file_path.split('/')[0],
+                                                 path_pattern)
                 is_match = fnmatch.fnmatch(file_path, full_path_pattern)
                 if is_match and pattern_type == '--include':
                     file_status = (file_info, True)
+                    LOG.debug("%s matched include filter: %s",
+                              file_path, full_path_pattern)
                 elif is_match and pattern_type == '--exclude':
                     file_status = (file_info, False)
-
+                    LOG.debug("%s matched exclude filter: %s",
+                              file_path, full_path_pattern)
+                else:
+                    LOG.debug("%s did not match %s filter: %s",
+                              file_path, pattern_type[2:], full_path_pattern)
+            LOG.debug("%s final filtered status, should_include: %s",
+                      file_path, file_status[1])
             if file_status[1]:
                 yield file_info
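Joining the leading path component onto the pattern is the actual fix for issue 531: s3 ``FileInfo`` paths carry a ``bucket/`` prefix that local-style patterns lack, so relative patterns never matched remote paths. A small demonstration (not part of the patch; POSIX path separators assumed):

```python
import fnmatch
import os

file_path = 'bucket/dir1/key1.txt'   # how s3 FileInfo source paths look
path_pattern = 'dir1/*'              # what a user passes to --exclude
full_path_pattern = os.path.join(file_path.split('/')[0], path_pattern)

assert not fnmatch.fnmatch(file_path, path_pattern)    # old code: never matched
assert fnmatch.fnmatch(file_path, full_path_pattern)   # new code: matches
```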
diff --git a/awscli/customizations/s3/s3.py b/awscli/customizations/s3/s3.py
index 905c83049f13..e30024ce2077 100644
--- a/awscli/customizations/s3/s3.py
+++ b/awscli/customizations/s3/s3.py
@@ -639,24 +639,12 @@ def add_paths(self, paths):
         the destination always have some value.
         """
         self.check_path_type(paths)
-        self.check_src_path(paths)
         src_path = paths[0]
         self.parameters['src'] = src_path
         if len(paths) == 2:
             self.parameters['dest'] = paths[1]
         elif len(paths) == 1:
             self.parameters['dest'] = paths[0]
-        self.check_dest_path(self.parameters['dest'])
-
-    def check_dest_path(self, destination):
-        if destination.startswith('s3://') and \
-                self.cmd in ['cp', 'sync', 'mv']:
-            bucket, key = find_bucket_key(destination[5:])
-            # A bucket is not always provided (like 'aws s3 ls')
-            # so only verify the bucket exists if we actually have
-            # a bucket.
-            if bucket:
-                self._verify_bucket_exists(bucket)
 
     def _verify_bucket_exists(self, bucket_name):
         session = self.session
@@ -706,36 +694,7 @@ def check_src_path(self, paths):
         """
         src_path = paths[0]
         dir_op = self.parameters['dir_op']
-        if src_path.startswith('s3://'):
-            if self.cmd in ['mb', 'rb']:
-                return
-            session = self.session
-            service = session.get_service('s3')
-            endpoint = service.get_endpoint(self.parameters['region'])
-            src_path = src_path[5:]
-            if dir_op:
-                if not src_path.endswith('/'):
-                    src_path += '/'  # all prefixes must end with a /
-            bucket, key = find_bucket_key(src_path)
-            operation = service.get_operation('ListObjects')
-            response_data = operation.call(endpoint, bucket=bucket, prefix=key,
-                                           delimiter='/')[1]
-            check_error(response_data)
-            contents = response_data['Contents']
-            common_prefixes = response_data['CommonPrefixes']
-            if not dir_op:
-                if contents:
-                    if contents[0]['Key'] == key:
-                        pass
-                    else:
-                        raise Exception("Error: S3 Object does not exist")
-                else:
-                    raise Exception('Error: S3 Object does not exist')
-            else:
-                if not contents and not common_prefixes:
-                    raise Exception('Error: S3 Prefix does not exist')
-
-        else:
+        if not src_path.startswith('s3://'):
             src_path = os.path.abspath(src_path)
             if os.path.exists(src_path):
                 if os.path.isdir(src_path) and not dir_op:
diff --git a/awscli/customizations/s3/tasks.py b/awscli/customizations/s3/tasks.py
index cea0a6a59d8e..5c0494b2448e 100644
--- a/awscli/customizations/s3/tasks.py
+++ b/awscli/customizations/s3/tasks.py
@@ -70,6 +70,7 @@ def _execute_task(self, attempts, last_error=''):
             self._queue_print_message(self.filename, failed=True,
                                       dryrun=self.parameters['dryrun'],
                                       error_message=last_error)
+            return
         filename = self.filename
         try:
             if not self.parameters['dryrun']:
@@ -99,7 +100,7 @@ def _queue_print_message(self, filename, failed, dryrun,
             message = print_operation(filename, failed,
                                       self.parameters['dryrun'])
             if error_message is not None:
-                message += '\n' + error_message
+                message += ' ' + error_message
             result = {'message': message, 'error': failed}
             self.result_queue.put(result)
         except Exception as e:
diff --git a/awscli/errorhandler.py b/awscli/errorhandler.py
index 3e575aa5982a..b620adf4c51b 100644
--- a/awscli/errorhandler.py
+++ b/awscli/errorhandler.py
@@ -17,11 +17,27 @@
 LOG = logging.getLogger(__name__)
 
 
-class ClientError(Exception):
+class BaseOperationError(Exception):
+    MSG_TEMPLATE = ("A {error_type} error ({error_code}) occurred "
+                    "when calling the {operation_name} operation: "
+                    "{error_message}")
+
+    def __init__(self, error_code, error_message, error_type, operation_name):
+        msg = self.MSG_TEMPLATE.format(
+            error_code=error_code, error_message=error_message,
+            error_type=error_type, operation_name=operation_name)
+        super(BaseOperationError, self).__init__(msg)
+        self.error_code = error_code
+        self.error_message = error_message
+        self.error_type = error_type
+        self.operation_name = operation_name
+
+
+class ClientError(BaseOperationError):
     pass
 
 
-class ServerError(Exception):
+class ServerError(BaseOperationError):
     pass
 
 
@@ -38,17 +54,22 @@ class ErrorHandler(object):
 
     def __call__(self, http_response, parsed, operation, **kwargs):
         LOG.debug('HTTP Response Code: %d', http_response.status_code)
+        error_type = None
+        error_class = None
         if http_response.status_code >= 500:
-            code, message = self._get_error_code_and_message(parsed)
-            msg = "A server error ({error_code}) occurred: {error_message}"
-            msg = msg.format(error_code=code, error_message=message)
-            raise ServerError(msg)
-        if http_response.status_code >= 400 or http_response.status_code == 301:
-            code, message = self._get_error_code_and_message(parsed)
-            msg = "A client error ({error_code}) occurred: {error_message}"
-            msg = msg.format(error_code=code, error_message=message)
-            raise ClientError(msg)
-        return 0
+            error_type = 'server'
+            error_class = ServerError
+        elif http_response.status_code >= 400 or http_response.status_code == 301:
+            error_type = 'client'
+            error_class = ClientError
+        if error_class is not None:
+            code, message = self._get_error_code_and_message(parsed)
+            raise error_class(
+                error_code=code, error_message=message,
+                error_type=error_type, operation_name=operation.name)
 
     def _get_error_code_and_message(self, response):
         code = 'Unknown'
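The new exception surface keeps the failing operation's name in the message and exposes the error fields as attributes. A quick illustration of how callers can now consume these exceptions (a sketch using the classes defined above):

```python
from awscli.errorhandler import ClientError

try:
    raise ClientError(error_code='NoSuchBucket',
                      error_message='The specified bucket does not exist',
                      error_type='client',
                      operation_name='ListObjects')
except ClientError as e:
    # Fields are programmatically accessible, and the formatted message
    # names the operation that failed.
    assert e.error_code == 'NoSuchBucket'
    assert 'ListObjects operation' in str(e)
```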
diff --git a/awscli/handlers.py b/awscli/handlers.py
index 0059181b7170..435e2e129628 100644
--- a/awscli/handlers.py
+++ b/awscli/handlers.py
@@ -39,6 +39,7 @@
 from awscli.customizations.configure import register_configure_cmd
 from awscli.customizations.cloudtrail import initialize as cloudtrail_init
 from awscli.customizations.toplevelbool import register_bool_params
+from awscli.customizations.ec2protocolarg import register_protocol_args
 
 
 def awscli_initialize(event_handlers):
@@ -81,3 +82,4 @@ def awscli_initialize(event_handlers):
     register_configure_cmd(event_handlers)
     cloudtrail_init(event_handlers)
     register_bool_params(event_handlers)
+    register_protocol_args(event_handlers)
diff --git a/awscli/text.py b/awscli/text.py
index fc9d80250052..135195483c2d 100644
--- a/awscli/text.py
+++ b/awscli/text.py
@@ -41,6 +41,10 @@ def _format_text(item, stream, identifier=None, scalar_keys=None):
             for list_element in item:
                 _format_text(list_element, stream=stream,
                              identifier=identifier)
+        elif identifier is not None:
+            for list_element in item:
+                stream.write('%s\t%s\n' % (identifier.upper(),
+                                           list_element))
         else:
             # For a bare list, just print the contents.
             stream.write('\t'.join([six.text_type(el) for el in item]))
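This is the fix for issue 516: a list of scalars under a dict key now gets the upper-cased key as a row identifier instead of printing bare values. A short demonstration, assuming ``format_text`` is the module's public wrapper around the ``_format_text`` shown above (the unit test added below exercises the same case):

```python
import six
from awscli.text import format_text  # assumed public entry point

stream = six.StringIO()
format_text({'KeyName': ['a', 'b', 'c']}, stream)
# Each row now starts with an identifier derived from the key name.
assert stream.getvalue() == 'KEYNAME\ta\nKEYNAME\tb\nKEYNAME\tc\n'
```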
diff --git a/doc/source/conf.py b/doc/source/conf.py
index e5d292972ce5..0c9a53ccea85 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -52,7 +52,7 @@
 # The short X.Y version.
 version = '1.2'
 # The full version, including alpha/beta/rc tags.
-release = '1.2.6'
+release = '1.2.7'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/doc/source/index.rst b/doc/source/index.rst
index 84d661e68270..071ef1c7a05c 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -9,5 +9,4 @@ interface for interacting with all parts of AWS.
    :maxdepth: 2
    :titlesonly:
 
-   Getting Started
    Command Reference
diff --git a/doc/source/tutorial/getting_started.rst b/doc/source/tutorial/getting_started.rst
deleted file mode 100644
index 0a13c4266e65..000000000000
--- a/doc/source/tutorial/getting_started.rst
+++ /dev/null
@@ -1,208 +0,0 @@
-****************************
-Getting Started With AWS CLI
-****************************
-
-This package provides a unified command line interface to many
-Amazon Web Services.
-
-The aws-cli package should work on Python versions 2.6.x - 3.3.x.
-
-Installation
-============
-
-The easiest way to install aws-cli is to use ``easy_install`` or ``pip``::
-
-    $ easy_install awscli
-
-or, if you are not installing in a ``virtualenv``::
-
-    $ sudo easy_install awscli
-
-Using ``pip``, it would simply be::
-
-    $ pip install awscli
-
-or::
-
-    $ sudo pip install awscli
-
-This will install the aws-cli package as well as all dependencies. You can
-also just clone the git repo or download the tarball. Once you have the
-awscli directory structure on your workstation, you can just::
-
-    $ cd
-    $ python setup.py install
-
-Command Completion
-==================
-
-The aws-cli package includes a very useful command completion feature.
-This feature is not automatically installed so you need to configure
-it manually. To enable tab completion for bash use the built-in
-command ``complete``::
-
-    $ complete -C aws_completer aws
-
-For tcsh::
-
-    $ complete aws 'p/*/`aws_completer`/'
-
-You should add this to your startup scripts to enable it for future sessions.
-
-For zsh please refer to bin/aws_zsh_completer.sh. Source that file::
-
-    $ source bin/aws_zsh_completer.sh
-
-For now the bash compatibility auto completion (bashcompinit) is used.
-For further details please refer to the top of bin/aws_zsh_completer.sh.
-
-
-Getting Started
-===============
-
-Before using aws-cli, you need to tell it about your AWS credentials. You
-can do this in several ways:
-
-* Environment variables
-* Config file
-* IAM Role
-
-To use environment variables, do the following::
-
-    $ export AWS_ACCESS_KEY_ID=
-    $ export AWS_SECRET_ACCESS_KEY=
-
-To use a config file, create a configuration file like this::
-
-    [default]
-    aws_access_key_id=
-    aws_secret_access_key=
-    region=us-west-1  # optional, to define default region for this profile
-
-    [profile testing]
-    aws_access_key_id=
-    aws_secret_access_key=
-    region=us-west-2
-
-As you can see, you can have multiple ``profiles`` defined in this
-configuration file and specify which profile to use by using the
-``--profile`` option. If no profile is specified the ``default``
-profile is used. Once you have created the config file, you need to
-tell aws-cli where to find it. Do this by setting the appropriate
-environment variable::
-
-    $ export AWS_CONFIG_FILE=/path/to/config_file
-
-The final option for credentials is highly recommended if you are
-using aws-cli on an EC2 instance. IAM Roles are
-a great way to have credentials installed automatically on your
-instance. If you are using IAM Roles, aws-cli will find them and use
-them automatically.
-
-Other Important Environment Variables
-=====================================
-
-The following additional environment variables can be useful in
-configuring and customizing your environment.
-
-AWS_DEFAULT_REGION can be used to specify a default region to use
-if one is not provided explicitly on the command line with the
-``--region`` option or in a config file::
-
-    $ export AWS_DEFAULT_REGION=us-west-2
-
-AWS_DEFAULT_PROFILE can be used to specify which profile to use
-if one is not explicitly specified on the command line via the
-``--profile`` option::
-
-    $ export AWS_DEFAULT_PROFILE=testing
-
-Accessing Services With Global Endpoints
-========================================
-
-Some services, such as AWS Identity and Access Management (IAM),
-AWS Security Token Service (STS), and Amazon Simple Email Service (SES)
-have a single, global endpoint rather than different endpoints for
-each region.
-
-To make access to these services simpler, aws-cli will automatically
-use the global endpoint unless you explicitly supply a region (using
-the ``--region`` option) or a profile (using the ``--profile`` option).
-Therefore, the following::
-
-    $ aws iam list-users
-
-Will automatically use the global endpoint for the IAM service
-regardless of the value of the ``AWS_DEFAULT_REGION`` environment
-variable or the ``region`` variable specified in your profile.
-
-
-JSON Parameter Input
-====================
-
-Many options that need to be provided are simple string or numeric
-values. However, some operations require JSON data structures
-as input parameters, either on the command line or in files.
-
-For example, consider the command to authorize access to an EC2
-security group. In this case, we will add ingress access to port 22
-for all IP addresses::
-
-    $ aws ec2 authorize-security-group-ingress --group-name MySecurityGroup \
-        --ip-permissions '{"from_port":22,"to_port":22,"ip_protocol":"tcp","ip_ranges":["0.0.0.0/0"]}'
-
-You could also place the JSON in a file, called port22.json for example,
-and use this::
-
-    $ aws ec2 authorize-security-group-ingress --group-name MySecurityGroup \
-        --ip-permissions /path/to/port22.json
-
-File-based Parameter Input
-==========================
-
-Some parameter values are so large or so complex that it would be easier
-to place the parameter value in a file and refer to that file rather than
-entering the value directly on the command line.
-
-Let's use the ``authorize-security-group-ingress`` command shown above.
-Rather than provide the value of the ``--ip-permissions`` parameter directly
-in the command, you could first store the values in a file. Let's call
-the file ip_perms.json::
-
-    {"from_port":22,
-     "to_port":22,
-     "ip_protocol":"tcp",
-     "ip_ranges":["0.0.0.0/0"]}
-
-Then, we could make the same call as above like this::
-
-    $ aws ec2 authorize-security-group-ingress --group-name MySecurityGroup \
-        --ip-permissions file://ip_perms.json
-
-The ``file://`` prefix on the parameter value signals that the parameter value
-is actually a reference to a file that contains the actual parameter value.
-aws-cli will open the file, read the value, and use that value as the
-parameter value.
-
-This is also useful when the parameter is really referring to file-based
-data. For example, the ``--user-data`` option of the ``aws ec2 run-instances``
-command or the ``--public-key-material`` parameter of the
-``aws ec2 import-key-pair`` command.
-
-
-URI-based Parameter Input
-=========================
-
-Similar to the file-based input described above, aws-cli also includes a
-way to use data from a URI as the value of a parameter. The idea is exactly
-the same except the prefix used is ``https:`` or ``http:``::
-
-    $ aws ec2 authorize-security-group-ingress --group-name MySecurityGroup \
-        --ip-permissions http://mybucket.s3.amazonaws.com/ip_perms.json
-
-Command Output
-==============
-
-The default output for commands is currently JSON. This may change in the
-future but for now it provides the most complete output. You may find the
-`jq <http://stedolan.github.com/jq/>`__ tool useful in processing the JSON
-output for other uses.
diff --git a/setup.py b/setup.py
index 278e58d19b78..938676aa58e7 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@
 
 import awscli
 
-requires = ['botocore>=0.26.0,<0.27.0',
+requires = ['botocore>=0.27.0,<0.28.0',
             'bcdoc>=0.12.0,<0.13.0',
             'six>=1.1.0',
             'colorama==0.2.5',
diff --git a/tests/integration/customizations/s3/test_filegenerator.py b/tests/integration/customizations/s3/test_filegenerator.py
index e55a653c0458..181b9e1c7652 100644
--- a/tests/integration/customizations/s3/test_filegenerator.py
+++ b/tests/integration/customizations/s3/test_filegenerator.py
@@ -40,19 +40,6 @@ def setUp(self):
     def tearDown(self):
         s3_cleanup(self.bucket, self.session)
 
-    def test_nonexist_s3_file(self):
-        """
-        This tests to make sure that files are not misproperly yielded by
-        ensuring the file prefix is the exact same as what was inputted.
-        """
-        input_s3_file = {'src': {'path': self.file1[:-1], 'type': 's3'},
-                         'dest': {'path': 'text1.txt', 'type': 'local'},
-                         'dir_op': False, 'use_src_name': False}
-        params = {'region': 'us-east-1'}
-        files = FileGenerator(self.service, self.endpoint, '', params).call(
-            input_s3_file)
-        self.assertEqual(len(list(files)), 0)
-
     def test_s3_file(self):
         """
         Generate a single s3 file
diff --git a/tests/integration/customizations/s3/test_plugin.py b/tests/integration/customizations/s3/test_plugin.py
index 7fa3b96cdb89..8ccfd1eab1b4 100644
--- a/tests/integration/customizations/s3/test_plugin.py
+++ b/tests/integration/customizations/s3/test_plugin.py
@@ -27,39 +27,10 @@
 from tests.integration import aws
 from tests.unit.customizations.s3 import create_bucket as _create_bucket
+from tests.unit import FileCreator
 from awscli.customizations.s3 import constants
 
 
-class FileCreator(object):
-    def __init__(self):
-        self.rootdir = tempfile.mkdtemp()
-
-    def remove_all(self):
-        shutil.rmtree(self.rootdir)
-
-    def create_file(self, filename, contents):
-        """Creates a file in a tmpdir
-
-        ``filename`` should be a relative path, e.g. "foo/bar/baz.txt"
-        It will be translated into a full path in a tmp dir.
-
-        Returns the full path to the file.
-        """
-        full_path = os.path.join(self.rootdir, filename)
-        if not os.path.isdir(os.path.dirname(full_path)):
-            os.makedirs(os.path.dirname(full_path))
-        with open(full_path, 'w') as f:
-            f.write(contents)
-        return full_path
-
-    def full_path(self, filename):
-        """Translate relative path to full path in temp dir.
-
-        f.full_path('foo/bar.txt') -> /tmp/asdfasd/foo/bar.txt
-        """
-        return os.path.join(self.rootdir, filename)
-
-
 class BaseS3CLICommand(unittest.TestCase):
 
     """Base class for aws s3 command.
@@ -212,7 +183,7 @@ def test_mv_with_large_file(self):
     def test_mv_to_nonexistent_bucket(self):
         full_path = self.files.create_file('foo.txt', 'this is foo.txt')
         p = aws('s3 mv %s s3://bad-noexist-13143242/foo.txt' % (full_path,))
-        self.assertEqual(p.rc, 255)
+        self.assertEqual(p.rc, 1)
 
 
 class TestCp(BaseS3CLICommand):
@@ -271,7 +242,7 @@ def test_download_large_file(self):
     def test_cp_to_nonexistent_bucket(self):
         foo_txt = self.files.create_file('foo.txt', 'this is foo.txt')
         p = aws('s3 cp %s s3://noexist-bucket-foo-bar123/foo.txt' % (foo_txt,))
-        self.assertEqual(p.rc, 255)
+        self.assertEqual(p.rc, 1)
 
     def test_cp_empty_file(self):
         bucket_name = self.create_bucket()
@@ -314,7 +285,7 @@ def test_sync_to_nonexistent_bucket(self):
         # Sync the directory and the bucket.
         p = aws('s3 sync %s s3://noexist-bkt-nme-1412' % (self.files.rootdir,))
-        self.assertEqual(p.rc, 255)
+        self.assertEqual(p.rc, 1)
 
     def test_sync_with_empty_files(self):
         foo_txt = self.files.create_file('foo.txt', 'foo contents')
@@ -436,7 +407,8 @@ def test_ls_non_existent_bucket(self):
         p = aws('s3 ls s3://foobara99842u4wbts829381')
         self.assertEqual(p.rc, 255)
         self.assertIn(
-            'A client error (NoSuchBucket) occurred: The specified bucket does not exist',
+            ('A client error (NoSuchBucket) occurred when calling the '
+             'ListObjects operation: The specified bucket does not exist'),
             p.stderr)
         # There should be no stdout if we can't find the bucket.
         self.assertEqual(p.stdout, '')
diff --git a/tests/integration/customizations/s3/test_s3.py b/tests/integration/customizations/s3/test_s3.py
deleted file mode 100644
index d1bcc7dbb097..000000000000
--- a/tests/integration/customizations/s3/test_s3.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"). You
-# may not use this file except in compliance with the License. A copy of
-# the License is located at
-#
-#     http://aws.amazon.com/apache2.0/
-#
-# or in the "license" file accompanying this file. This file is
-# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
-# ANY KIND, either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-# Note that all of these functions can be found in the unit tests.
-# The only difference is that these tests use botocore's actual session
-# variables to communicate with s3 as these are integration tests. Therefore,
-# only tests that use sessions are included as integration tests.
-import argparse
-import os
-from tests import unittest
-
-from awscli import EnvironmentVariables
-from awscli.customizations.s3.s3 import CommandParameters
-import botocore.session
-from tests.unit.customizations.s3 import make_loc_files, clean_loc_files, \
-    make_s3_files, s3_cleanup
-
-
-class CommandParametersTest(unittest.TestCase):
-
-    def setUp(self):
-        self.session = botocore.session.get_session(EnvironmentVariables)
-        self.loc_files = make_loc_files()
-        self.bucket = make_s3_files(self.session)
-        parser = argparse.ArgumentParser()
-        parser.add_argument('--region')
-        self.parsed_globals = parser.parse_args(['--region', 'us-east-1'])
-
-    def tearDown(self):
-        clean_loc_files(self.loc_files)
-        s3_cleanup(self.bucket, self.session)
-
-    def test_check_src_path_pass(self):
-        """
-        This tests to see if all of the checks on the source path works. It
-        does so by testing if s3 objects and and prefixes exist as well as
-        local files and directories. All of these should not throw an
-        exception
-        """
-        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
-        local_file = self.loc_files[0]
-        s3_prefix = 's3://' + self.bucket
-        local_dir = self.loc_files[3]
-
-        # :var files: a list of tuples where the first element is a single
-        #     element list of file paths. The second element is a boolean
-        #     representing if the operation is a directory operation.
-        files = [([s3_file], False), ([local_file], False),
-                 ([s3_prefix], True), ([local_dir], True)]
-
-        parameters = {}
-        for filename in files:
-            parameters['dir_op'] = filename[1]
-            cmd_parameter = CommandParameters(self.session, 'put', parameters)
-            cmd_parameter.check_region(self.parsed_globals)
-            cmd_parameter.check_src_path(filename[0])
-
-    def test_check_src_path_fail(self):
-        """
-        This tests to see if all of the checks on the source path works. It
-        does so by testing if s3 objects and and prefixes do not exist as well
-        as local files and directories. All of these should throw an
-        exception
-        """
-        local_file = self.loc_files[0]
-        local_dir = self.loc_files[3]
-        fake_s3_file = 's3://' + self.bucket + '/' + 'text1.tx'
-        fake_local_file = local_file[:-1]
-        fake_s3_prefix = 's3://' + self.bucket + '/' + 'fake/'
-        fake_local_dir = local_dir + os.sep + 'fake' + os.sep
-
-        # :var files: a list of tuples where the first element is a single
-        #     element list of file paths. The second element is a boolean
-        #     representing if the operation is a directory operation.
-        files = [([fake_s3_file], False), ([fake_local_file], False),
-                 ([fake_s3_prefix], True), ([local_file], True),
-                 ([local_dir], False), ([fake_s3_file+'dag'], False)]
-
-        parameters = {}
-        for filename in files:
-            parameters['dir_op'] = filename[1]
-            cmd_parameter = CommandParameters(self.session, 'put', parameters)
-            cmd_parameter.check_region(self.parsed_globals)
-            with self.assertRaises(Exception):
-                cmd_parameter.check_src_path(filename[0])
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
index 232b8a4b85b7..058498574c0d 100644
--- a/tests/unit/__init__.py
+++ b/tests/unit/__init__.py
@@ -14,6 +14,8 @@
 import os
 import copy
 import logging
+import tempfile
+import shutil
 
 import mock
 import six
@@ -45,6 +47,7 @@ def setUp(self):
         self.parsed_response = {}
         self.make_request_patch = mock.patch('botocore.endpoint.Endpoint.make_request')
         self.make_request_is_patched = False
+        self.operations_called = []
 
     def tearDown(self):
         # This clears all the previous registrations.
@@ -79,8 +82,9 @@ def assert_params_for_cmd(self, cmd, params=None, expected_rc=0,
             self.assertDictEqual(params, last_params)
         return stdout, stderr, rc
 
-    def before_parameter_build(self, params, **kwargs):
+    def before_parameter_build(self, params, operation, **kwargs):
         self.last_kwargs = params
+        self.operations_called.append((operation, params))
 
     def run_cmd(self, cmd, expected_rc=0):
         logging.debug("Calling cmd: %s", cmd)
@@ -106,3 +110,33 @@ def run_cmd(self, cmd, expected_rc=0):
             "Unexpected rc (expected: %s, actual: %s) for command: %s" % (
                 expected_rc, rc, cmd))
         return stdout, stderr, rc
+
+
+class FileCreator(object):
+    def __init__(self):
+        self.rootdir = tempfile.mkdtemp()
+
+    def remove_all(self):
+        shutil.rmtree(self.rootdir)
+
+    def create_file(self, filename, contents):
+        """Creates a file in a tmpdir
+
+        ``filename`` should be a relative path, e.g. "foo/bar/baz.txt"
+        It will be translated into a full path in a tmp dir.
+
+        Returns the full path to the file.
+        """
+        full_path = os.path.join(self.rootdir, filename)
+        if not os.path.isdir(os.path.dirname(full_path)):
+            os.makedirs(os.path.dirname(full_path))
+        with open(full_path, 'w') as f:
+            f.write(contents)
+        return full_path
+
+    def full_path(self, filename):
+        """Translate relative path to full path in temp dir.
+
+        f.full_path('foo/bar.txt') -> /tmp/asdfasd/foo/bar.txt
+        """
+        return os.path.join(self.rootdir, filename)
diff --git a/tests/unit/customizations/s3/test_cp_command.py b/tests/unit/customizations/s3/test_cp_command.py
new file mode 100644
index 000000000000..42fe771888f2
--- /dev/null
+++ b/tests/unit/customizations/s3/test_cp_command.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from tests import unittest
+from tests.unit import BaseAWSCommandParamsTest, FileCreator
+
+import six
+
+
+class TestCPCommand(BaseAWSCommandParamsTest):
+
+    prefix = 's3 cp '
+
+    def setUp(self):
+        super(TestCPCommand, self).setUp()
+        self.files = FileCreator()
+
+    def tearDown(self):
+        super(TestCPCommand, self).tearDown()
+        self.files.remove_all()
+
+    def test_operations_used_in_upload(self):
+        full_path = self.files.create_file('foo.txt', 'mycontent')
+        cmdline = '%s %s s3://bucket/key.txt' % (self.prefix, full_path)
+        self.parsed_response = {'ETag': '"c8afdb36c52cf4727836669019e69222"'}
+        self.run_cmd(cmdline, expected_rc=0)
+        # The only operation we should have called is PutObject.
+        self.assertEqual(len(self.operations_called), 1, self.operations_called)
+        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
+
+    def test_operations_used_in_download_file(self):
+        self.parsed_response = {'ETag': '"foo-1"',
+                                'Body': six.BytesIO(b'foo')}
+        cmdline = '%s s3://bucket/key.txt %s' % (self.prefix,
+                                                 self.files.rootdir)
+        self.run_cmd(cmdline, expected_rc=0)
+        # The only operation we should have called is GetObject.
+        self.assertEqual(len(self.operations_called), 1, self.operations_called)
+        self.assertEqual(self.operations_called[0][0].name, 'GetObject')
+
+    def test_operations_used_in_recursive_download(self):
+        self.parsed_response = {'ETag': '"foo-1"',
+                                'Contents': [],
+                                'CommonPrefixes': []}
+        cmdline = '%s s3://bucket/key.txt %s --recursive' % (
+            self.prefix, self.files.rootdir)
+        self.run_cmd(cmdline, expected_rc=0)
+        # We called ListObjects but had no objects to download, so
+        # we only have a single ListObjects operation being called.
+        self.assertEqual(len(self.operations_called), 1, self.operations_called)
+        self.assertEqual(self.operations_called[0][0].name, 'ListObjects')
+
+
+if __name__ == "__main__":
+    unittest.main()
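A note for readers of this new test file: ``operations_called`` is populated by the ``before_parameter_build`` hook added to ``BaseAWSCommandParamsTest`` earlier in this patch, so a test can assert on exactly which wire calls a command issued. A compact way to phrase such assertions (illustrative helper, not part of the change):

```python
def called_operation_names(operations_called):
    # operations_called holds (operation, params) tuples recorded by the
    # before-parameter-build hook in BaseAWSCommandParamsTest.
    return [operation.name for operation, params in operations_called]

# e.g. inside a test body:
#   self.assertEqual(called_operation_names(self.operations_called),
#                    ['ListObjects'])
```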
- """ + def test_path_exact_yield(self): input_s3_file = {'src': {'path': self.file1[:-1], 'type': 's3'}, 'dest': {'path': 'text1.txt', 'type': 'local'}, 'dir_op': False, 'use_src_name': False} params = {'region': 'us-east-1'} - files = FileGenerator(self.service, self.endpoint, '', params).call(input_s3_file) - self.assertEqual(len(list(files)), 0) + files = FileGenerator(self.service, self.endpoint, '', params).call( + input_s3_file) + # The path is yielded exactly. + self.assertEqual(list(files)[0].src, self.file1[:-1]) def test_s3_file(self): """ diff --git a/tests/unit/customizations/s3/test_filters.py b/tests/unit/customizations/s3/test_filters.py index c8c9fa865cf6..f5df834dec77 100644 --- a/tests/unit/customizations/s3/test_filters.py +++ b/tests/unit/customizations/s3/test_filters.py @@ -19,136 +19,115 @@ class FiltersTest(unittest.TestCase): def setUp(self): - self.local_files = [] - self.loc_file1 = FileInfo(src=os.path.abspath('test.txt'), dest='', - compare_key='', size=10, - last_update=0, src_type='local', - dest_type='s3', operation_name='', - service=None, endpoint=None) - self.loc_file2 = FileInfo(src=os.path.abspath('test.jpg'), dest='', - compare_key='', size=10, - last_update=0, src_type='local', - dest_type='s3', operation_name='', - service=None, endpoint=None) - path = 'directory' + os.sep + 'test.jpg' - self.loc_file3 = FileInfo(src=os.path.abspath(path), dest='', - compare_key='', size=10, - last_update=0, src_type='local', - dest_type='s3', operation_name='', - service=None, endpoint=None) - self.local_files.append(self.loc_file1) - self.local_files.append(self.loc_file2) - self.local_files.append(self.loc_file3) - - self.s3_files = [] - self.s3_file1 = FileInfo('bucket/test.txt', dest='', - compare_key='', size=10, - last_update=0, src_type='s3', - dest_type='s3', operation_name='', - service=None, endpoint=None) - self.s3_file2 = FileInfo('bucket/test.jpg', dest='', - compare_key='', size=10, - last_update=0, src_type='s3', - dest_type='s3', operation_name='', - service=None, endpoint=None) - self.s3_file3 = FileInfo('bucket/key/test.jpg', dest='', - compare_key='', size=10, - last_update=0, src_type='s3', - dest_type='s3', operation_name='', - service=None, endpoint=None) - self.s3_files.append(self.s3_file1) - self.s3_files.append(self.s3_file2) - self.s3_files.append(self.s3_file3) + self.local_files = [ + self.file_info('test.txt'), + self.file_info('test.jpg'), + self.file_info(os.path.join('directory', 'test.jpg')), + ] + self.s3_files = [ + self.file_info('bucket/test.txt'), + self.file_info('bucket/test.jpg'), + self.file_info('bucket/key/test.jpg'), + ] + + def file_info(self, filename, src_type='local'): + if src_type == 'local': + filename = os.path.abspath(filename) + dest_type = 's3' + else: + dest_type = 'local' + return FileInfo(src=filename, dest='', + compare_key='', size=10, + last_update=0, src_type=src_type, + dest_type=dest_type, operation_name='', + service=None, endpoint=None) def test_no_filter(self): - """ - No filters - """ - patterns = [] exc_inc_filter = Filter({}) - files = exc_inc_filter.call(iter(self.local_files)) - result_list = [] - for filename in files: - result_list.append(filename) - self.assertEqual(result_list, self.local_files) - - files = exc_inc_filter.call(iter(self.s3_files)) - result_list = [] - for filename in files: - result_list.append(filename) - self.assertEqual(result_list, self.s3_files) + matched_files = list(exc_inc_filter.call(self.local_files)) + self.assertEqual(matched_files, self.local_files) + + 
+        matched_files2 = list(exc_inc_filter.call(self.s3_files))
+        self.assertEqual(matched_files2, self.s3_files)
 
     def test_include(self):
-        """
-        Only an include file
-        """
         patterns = [['--include', '*.txt']]
-        exc_inc_filter = Filter({'filters': patterns})
-        files = exc_inc_filter.call(iter(self.local_files))
-        result_list = []
-        for filename in files:
-            result_list.append(filename)
-        self.assertEqual(result_list, self.local_files)
-
-        files = exc_inc_filter.call(iter(self.s3_files))
-        result_list = []
-        for filename in files:
-            result_list.append(filename)
-        self.assertEqual(result_list, self.s3_files)
+        include_filter = Filter({'filters': patterns})
+        matched_files = list(include_filter.call(self.local_files))
+        self.assertEqual(matched_files, self.local_files)
+
+        matched_files2 = list(include_filter.call(self.s3_files))
+        self.assertEqual(matched_files2, self.s3_files)
 
     def test_exclude(self):
-        """
-        Only an exclude filter
-        """
-        patterns = [['--exclude', '*']]
-        exc_inc_filter = Filter({'filters': patterns})
-        files = exc_inc_filter.call(iter(self.local_files))
-        result_list = []
-        for filename in files:
-            result_list.append(filename)
-        self.assertEqual(result_list, [])
-
-        files = exc_inc_filter.call(iter(self.s3_files))
-        result_list = []
-        for filename in files:
-            result_list.append(filename)
-        self.assertEqual(result_list, [])
+        exclude_filter = Filter({'filters': [['--exclude', '*']]})
+        matched_files = list(exclude_filter.call(self.local_files))
+        self.assertEqual(matched_files, [])
+
+        matched_files = list(exclude_filter.call(self.s3_files))
+        self.assertEqual(matched_files, [])
 
     def test_exclude_include(self):
-        """
-        Exclude everything and then include all .txt files
-        """
         patterns = [['--exclude', '*'], ['--include', '*.txt']]
-        exc_inc_filter = Filter({'filters': patterns})
-        files = exc_inc_filter.call(iter(self.local_files))
-        result_list = []
-        for filename in files:
-            result_list.append(filename)
-        self.assertEqual(result_list, [self.loc_file1])
-
-        files = exc_inc_filter.call(iter(self.s3_files))
-        result_list = []
-        for filename in files:
-            result_list.append(filename)
-        self.assertEqual(result_list, [self.s3_file1])
+        exclude_include_filter = Filter({'filters': patterns})
+        matched_files = list(exclude_include_filter.call(self.local_files))
+        self.assertEqual(matched_files, [self.local_files[0]])
+
+        matched_files = list(exclude_include_filter.call(self.s3_files))
+        self.assertEqual(matched_files, [self.s3_files[0]])
 
     def test_include_exclude(self):
-        """
-        Include all .txt files then exclude everything
-        """
         patterns = [['--include', '*.txt'], ['--exclude', '*']]
-        exc_inc_filter = Filter({'filters': patterns})
-        files = exc_inc_filter.call(iter(self.local_files))
-        result_list = []
-        for filename in files:
-            result_list.append(filename)
-        self.assertEqual(result_list, [])
-
-        files = exc_inc_filter.call(iter(self.s3_files))
-        result_list = []
-        for filename in files:
-            result_list.append(filename)
-        self.assertEqual(result_list, [])
+        exclude_all_filter = Filter({'filters': patterns})
+        matched_files = list(exclude_all_filter.call(self.local_files))
+        self.assertEqual(matched_files, [])
+
+        matched_files = list(exclude_all_filter.call(self.s3_files))
+        self.assertEqual(matched_files, [])
+
+    def test_prefix_filtering_consistent(self):
+        # The same filter should work for both local and remote files.
+        # So if I have a directory with 2 files:
+        local_files = [
+            self.file_info('test1.txt'),
+            self.file_info('nottest1.txt'),
+        ]
+        # And the same 2 files remote (note that the way FileInfo objects
+        # are constructed, we'll have the bucket name but no leading '/'
+        # character):
+        remote_files = [
+            self.file_info('bucket/test1.txt', src_type='s3'),
+            self.file_info('bucket/nottest1.txt', src_type='s3'),
+        ]
+        # If I apply the filter to the local files,
+        exclude_filter = Filter({'filters': [['--exclude', 't*']]})
+        filtered_files = list(exclude_filter.call(local_files))
+        self.assertEqual(len(filtered_files), 1)
+        self.assertEqual(os.path.basename(filtered_files[0].src),
+                         'nottest1.txt')
+
+        # I should get the same result if I apply the same filter to s3
+        # objects.
+        same_filtered_files = list(exclude_filter.call(remote_files))
+        self.assertEqual(len(same_filtered_files), 1)
+        self.assertEqual(os.path.basename(same_filtered_files[0].src),
+                         'nottest1.txt')
+
+    def test_bucket_exclude_with_prefix(self):
+        s3_files = [
+            self.file_info('bucket/dir1/key1.txt', src_type='s3'),
+            self.file_info('bucket/dir1/key2.txt', src_type='s3'),
+            self.file_info('bucket/dir1/notkey3.txt', src_type='s3'),
+        ]
+        filtered_files = list(
+            Filter({'filters': [['--exclude', 'dir1/*']]}).call(s3_files))
+        self.assertEqual(filtered_files, [])
+
+        key_files = list(
+            Filter({'filters': [['--exclude', 'dir1/key*']]}).call(s3_files))
+        self.assertEqual(len(key_files), 1)
+        self.assertEqual(key_files[0].src, 'bucket/dir1/notkey3.txt')
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/unit/customizations/s3/test_s3.py b/tests/unit/customizations/s3/test_s3.py
index 5f29fcbe034a..7f96433eda7c 100644
--- a/tests/unit/customizations/s3/test_s3.py
+++ b/tests/unit/customizations/s3/test_s3.py
@@ -201,6 +201,25 @@ def test_run_cp_put(self):
         output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
         self.assertIn(output_str, self.output.getvalue())
 
+    def test_error_on_same_line_as_status(self):
+        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
+        local_file = self.loc_files[0]
+        rel_local_file = os.path.relpath(local_file)
+        filters = [['--include', '*']]
+        params = {'dir_op': False, 'dryrun': False, 'quiet': False,
+                  'src': local_file, 'dest': s3_file, 'filters': filters,
+                  'paths_type': 'locals3', 'region': 'us-east-1',
+                  'endpoint_url': None}
+        cmd_arc = CommandArchitecture(self.session, 'cp', params)
+        cmd_arc.create_instructions()
+        cmd_arc.run()
+        # We need to verify that the error message is on the *same* line
+        # as the "upload failed" line, to make it easier to track.
+        output_str = (
+            "upload failed: %s to %s Error: Bucket does not exist\n" % (
+                rel_local_file, s3_file))
+        self.assertIn(output_str, self.output.getvalue())
+
     def test_run_cp_get(self):
         # This ensures that the architecture sets up correctly for a ``cp`` get
         # command.  It is just a dry run, but all of the components need
@@ -425,32 +444,6 @@ def test_check_src_path_pass(self):
             cmd_parameter.check_region([])
             cmd_parameter.check_src_path(filename[0])
 
-    def test_check_src_path_fail(self):
-        # This tests to see if all of the checks on the source path works. It
-        # does so by testing if s3 objects and and prefixes do not exist as well
-        # as local files and directories. All of these should throw an
-        # exception.
-        local_file = self.loc_files[0]
-        local_dir = self.loc_files[3]
-        fake_s3_file = 's3://' + self.bucket + '/' + 'text1.tx'
-        fake_local_file = local_file[:-1]
-        fake_s3_prefix = 's3://' + self.bucket + '/' + 'fake/'
-
-        # :var files: a list of tuples where the first element is a single
-        #     element list of file paths. The second element is a boolean
-        #     representing if the operation is a directory operation.
-        files = [([fake_s3_file], False), ([fake_local_file], False),
-                 ([fake_s3_prefix], True), ([local_file], True),
-                 ([local_dir], False), ([fake_s3_file+'dag'], False)]
-
-        parameters = {}
-        for filename in files:
-            parameters['dir_op'] = filename[1]
-            cmd_parameter = CommandParameters(self.session, 'put', parameters)
-            cmd_parameter.check_region([])
-            with self.assertRaises(Exception):
-                cmd_parameter.check_src_path(filename[0])
-
     def test_check_force(self):
         # This checks to make sure that the force parameter is run. If
         # successful. The delete command will fail as the bucket is empty
diff --git a/tests/unit/ec2/test_create_network_acl_entry.py b/tests/unit/ec2/test_create_network_acl_entry.py
new file mode 100644
index 000000000000..506c65120030
--- /dev/null
+++ b/tests/unit/ec2/test_create_network_acl_entry.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from tests.unit import BaseAWSCommandParamsTest
+
+
+class TestCreateNetworkACLEntry(BaseAWSCommandParamsTest):
+
+    prefix = 'ec2 create-network-acl-entry'
+
+    def test_tcp(self):
+        cmdline = self.prefix
+        cmdline += ' --network-acl-id acl-12345678'
+        cmdline += ' --rule-number 100'
+        cmdline += ' --protocol tcp'
+        cmdline += ' --rule-action allow'
+        cmdline += ' --ingress'
+        cmdline += ' --port-range From=22,To=22'
+        cmdline += ' --cidr-block 0.0.0.0/0'
+        result = {'NetworkAclId': 'acl-12345678',
+                  'RuleNumber': '100',
+                  'Protocol': '6',
+                  'RuleAction': 'allow',
+                  'Egress': 'false',
+                  'CidrBlock': '0.0.0.0/0',
+                  'PortRange.From': '22',
+                  'PortRange.To': '22'}
+        self.assert_params_for_cmd(cmdline, result)
+
+    def test_udp(self):
+        cmdline = self.prefix
+        cmdline += ' --network-acl-id acl-12345678'
+        cmdline += ' --rule-number 100'
+        cmdline += ' --protocol udp'
+        cmdline += ' --rule-action allow'
+        cmdline += ' --ingress'
+        cmdline += ' --port-range From=22,To=22'
+        cmdline += ' --cidr-block 0.0.0.0/0'
+        result = {'NetworkAclId': 'acl-12345678',
+                  'RuleNumber': '100',
+                  'Protocol': '17',
+                  'RuleAction': 'allow',
+                  'Egress': 'false',
+                  'CidrBlock': '0.0.0.0/0',
+                  'PortRange.From': '22',
+                  'PortRange.To': '22'}
+        self.assert_params_for_cmd(cmdline, result)
+
+    def test_icmp(self):
+        cmdline = self.prefix
+        cmdline += ' --network-acl-id acl-12345678'
+        cmdline += ' --rule-number 100'
+        cmdline += ' --protocol icmp'
+        cmdline += ' --rule-action allow'
+        cmdline += ' --ingress'
+        cmdline += ' --port-range From=22,To=22'
+        cmdline += ' --cidr-block 0.0.0.0/0'
+        result = {'NetworkAclId': 'acl-12345678',
+                  'RuleNumber': '100',
+                  'Protocol': '1',
+                  'RuleAction': 'allow',
+                  'Egress': 'false',
+                  'CidrBlock': '0.0.0.0/0',
+                  'PortRange.From': '22',
+                  'PortRange.To': '22'}
+        self.assert_params_for_cmd(cmdline, result)
+
+    def test_all(self):
+        cmdline = self.prefix
+        cmdline += ' --network-acl-id acl-12345678'
+        cmdline += ' --rule-number 100'
+        cmdline += ' --protocol all'
+        cmdline += ' --rule-action allow'
+        cmdline += ' --ingress'
+        cmdline += ' --port-range From=22,To=22'
+        cmdline += ' --cidr-block 0.0.0.0/0'
+        result = {'NetworkAclId': 'acl-12345678',
+                  'RuleNumber': '100',
+                  'Protocol': '-1',
+                  'RuleAction': 'allow',
+                  'Egress': 'false',
+                  'CidrBlock': '0.0.0.0/0',
+                  'PortRange.From': '22',
+                  'PortRange.To': '22'}
+        self.assert_params_for_cmd(cmdline, result)
+
+    def test_number(self):
+        cmdline = self.prefix
+        cmdline += ' --network-acl-id acl-12345678'
+        cmdline += ' --rule-number 100'
+        cmdline += ' --protocol 99'
+        cmdline += ' --rule-action allow'
+        cmdline += ' --ingress'
+        cmdline += ' --port-range From=22,To=22'
+        cmdline += ' --cidr-block 0.0.0.0/0'
+        result = {'NetworkAclId': 'acl-12345678',
+                  'RuleNumber': '100',
+                  'Protocol': '99',
+                  'RuleAction': 'allow',
+                  'Egress': 'false',
+                  'CidrBlock': '0.0.0.0/0',
+                  'PortRange.From': '22',
+                  'PortRange.To': '22'}
+        self.assert_params_for_cmd(cmdline, result)
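The five tests above differ only in the ``--protocol`` value and the expected wire value; if more protocol names are ever supported, a table-driven helper keeps the coverage identical with less repetition. A hypothetical refactor sketch (names are illustrative, not part of the change):

```python
# Hypothetical helper for the tests above; each pair is (cli value, wire value).
PROTOCOL_CASES = [('tcp', '6'), ('udp', '17'), ('icmp', '1'),
                  ('all', '-1'), ('99', '99')]

def build_cmdline(prefix, protocol):
    return (prefix +
            ' --network-acl-id acl-12345678 --rule-number 100'
            ' --protocol %s --rule-action allow --ingress'
            ' --port-range From=22,To=22 --cidr-block 0.0.0.0/0' % protocol)

# Each case would then call self.assert_params_for_cmd(cmdline, result)
# with result['Protocol'] set to the expected wire value.
```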
diff --git a/tests/unit/ec2/test_run_instances.py b/tests/unit/ec2/test_run_instances.py
index 084346fabb42..c12bd023175f 100644
--- a/tests/unit/ec2/test_run_instances.py
+++ b/tests/unit/ec2/test_run_instances.py
@@ -191,3 +191,32 @@ def test_group_id_alone(self):
         }
         self.assert_params_for_cmd(args_list, result)
 
+    def test_associate_public_ip_address_and_private_ip_address(self):
+        args = ' --image-id ami-foobar --count 1 '
+        args += '--private-ip-address 10.0.0.200 '
+        args += '--associate-public-ip-address --subnet-id subnet-12345678'
+        args_list = (self.prefix + args).split()
+        result = {
+            'NetworkInterface.1.DeviceIndex': '0',
+            'NetworkInterface.1.AssociatePublicIpAddress': 'true',
+            'NetworkInterface.1.SubnetId': 'subnet-12345678',
+            'NetworkInterface.1.PrivateIpAddresses.1.PrivateIpAddress':
+                '10.0.0.200',
+            'NetworkInterface.1.PrivateIpAddresses.1.Primary': 'true',
+            'ImageId': 'ami-foobar',
+            'MaxCount': '1',
+            'MinCount': '1'
+        }
+        self.assert_params_for_cmd(args_list, result)
+
+    def test_private_ip_address_alone(self):
+        args = ' --image-id ami-foobar --count 1 '
+        args += '--private-ip-address 10.0.0.200'
+        args_list = (self.prefix + args).split()
+        result = {
+            'PrivateIpAddress': '10.0.0.200',
+            'ImageId': 'ami-foobar',
+            'MaxCount': '1',
+            'MinCount': '1'
+        }
+        self.assert_params_for_cmd(args_list, result)
diff --git a/tests/unit/s3/test_put_object.py b/tests/unit/s3/test_put_object.py
index 74c1d58def64..83b8c6998b61 100644
--- a/tests/unit/s3/test_put_object.py
+++ b/tests/unit/s3/test_put_object.py
@@ -66,6 +66,20 @@ def test_headers(self):
         payload = self.last_params['payload'].getvalue()
         self.assertEqual(payload.name, self.file_path)
 
+    def test_website_redirect(self):
+        cmdline = self.prefix
+        cmdline += ' --bucket mybucket'
+        cmdline += ' --key mykey'
+        cmdline += ' --acl public-read'
+        cmdline += ' --website-redirect-location http://www.example.com/'
+        result = {
+            'uri_params': {'Bucket': 'mybucket', 'Key': 'mykey'},
+            'headers': {
+                'x-amz-acl': 'public-read',
+                'x-amz-website-redirect-location': 'http://www.example.com/',
+            }}
+        self.assert_params_for_cmd(cmdline, result, ignore_params=['payload'])
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/unit/sns/__init__.py b/tests/unit/sns/__init__.py
new file mode 100644
index 000000000000..a4018327e46f
--- /dev/null
+++ b/tests/unit/sns/__init__.py
@@ -0,0 +1,12 @@
+# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
diff --git a/tests/unit/sns/test_create_platform_application.py b/tests/unit/sns/test_create_platform_application.py
new file mode 100644
index 000000000000..28b23ed8b72a
--- /dev/null
+++ b/tests/unit/sns/test_create_platform_application.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from tests import unittest
+from tests.unit import BaseAWSCommandParamsTest
+
+
+class TestCreatePlatformApplication(BaseAWSCommandParamsTest):
+
+    prefix = 'sns create-platform-application'
+
+    def test_gcm_shorthand(self):
+        cmdline = self.prefix
+        cmdline += ' --name gcmpushapp'
+        cmdline += ' --platform GCM'
+        cmdline += ' --attributes '
+        cmdline += 'PlatformCredential=foo,'
+        cmdline += 'PlatformPrincipal=bar'
+        result = {'Name': 'gcmpushapp',
+                  'Platform': 'GCM',
+                  'Attributes.entry.1.key': 'PlatformCredential',
+                  'Attributes.entry.1.value': 'foo',
+                  'Attributes.entry.2.key': 'PlatformPrincipal',
+                  'Attributes.entry.2.value': 'bar'}
+        self.assert_params_for_cmd(cmdline, result)
+
+    def test_gcm_json(self):
+        cmdline = self.prefix
+        cmdline += ' --name gcmpushapp'
+        cmdline += ' --platform GCM'
+        cmdline += ' --attributes '
+        cmdline += ('{"PlatformCredential":"AIzaSyClE2lcV2zEKTLYYo645zfk2jhQPFeyxDo",'
+                    '"PlatformPrincipal":"There+is+no+principal+for+GCM"}')
+        result = {'Name': 'gcmpushapp',
+                  'Platform': 'GCM',
+                  'Attributes.entry.1.key': 'PlatformCredential',
+                  'Attributes.entry.1.value': 'AIzaSyClE2lcV2zEKTLYYo645zfk2jhQPFeyxDo',
+                  'Attributes.entry.2.key': 'PlatformPrincipal',
+                  'Attributes.entry.2.value': 'There+is+no+principal+for+GCM'}
+        self.assert_params_for_cmd(cmdline, result)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/unit/test_completer.py b/tests/unit/test_completer.py
index 3d49b53ed918..7067a134430e 100644
--- a/tests/unit/test_completer.py
+++ b/tests/unit/test_completer.py
@@ -59,8 +59,9 @@
       set([])),
     ('aws ec2 --debug describe-instances --instance-ids i-12345678 - ', -1,
      set(['--filters', '--dry-run', '--no-dry-run', '--endpoint-url',
-          '--no-verify-ssl', '--no-paginate', '--output', '--profile',
-          '--region', '--version', '--color', '--query'])),
+          '--no-verify-ssl', '--no-paginate', '--output', '--profile',
+          '--starting-token', '--max-items',
+          '--region', '--version', '--color', '--query'])),
     ('aws s3', -1, set(['cp', 'mv', 'rm', 'mb', 'rb', 'ls', 'sync', 'website'])),
     ('aws s3 m', -1, set(['mv', 'mb'])),
     ('aws s3 cp -', -1, set(['--no-guess-mime-type', '--dryrun',
diff --git a/tests/unit/test_errorhandler.py b/tests/unit/test_errorhandler.py
new file mode 100644
index 000000000000..1804e180bd10
--- /dev/null
+++ b/tests/unit/test_errorhandler.py
@@ -0,0 +1,80 @@
+# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from tests import unittest
+
+import mock
+
+from awscli import errorhandler
+
+
+class TestErrorHandler(unittest.TestCase):
+
+    def create_http_response(self, **kwargs):
+        response = mock.Mock()
+        for key, value in kwargs.items():
+            setattr(response, key, value)
+        return response
+
+    def test_error_handler_client_side(self):
+        response = {
+            'CommonPrefixes': [],
+            'Contents': [],
+            'Errors': [{'Code': 'AccessDenied',
+                        'HostId': 'foohost',
+                        'Message': 'Access Denied',
+                        'RequestId': 'requestid'}],
+            'ResponseMetadata': {}}
+        handler = errorhandler.ErrorHandler()
+        http_response = self.create_http_response(status_code=403)
+        # We're manually using the try/except form because
+        # we want to catch the exception and assert that it has specific
+        # attributes on it.
+        operation = mock.Mock()
+        operation.name = 'OperationName'
+        try:
+            handler(http_response, response, operation)
+        except errorhandler.ClientError as e:
+            # First, the operation name should be in the error message.
+            self.assertIn('OperationName', str(e))
+            # We should state that this is a ClientError.
+            self.assertIn('client error', str(e))
+            # And these values should be available on the exception
+            # so clients can access this information programmatically.
+            self.assertEqual(e.error_code, 'AccessDenied')
+            self.assertEqual(e.error_message, 'Access Denied')
+            self.assertEqual(e.operation_name, 'OperationName')
+        except Exception as e:
+            self.fail("Unexpected error raised: %s" % e)
+        else:
+            self.fail("Expected errorhandler.ClientError to be raised "
+                      "but no exception was raised.")
+
+    def test_no_exception_raised_on_200(self):
+        response = {
+            'CommonPrefixes': [],
+            'Contents': [],
+        }
+        handler = errorhandler.ErrorHandler()
+        http_response = self.create_http_response(status_code=200)
+        operation = mock.Mock()
+        operation.name = 'OperationName'
+        try:
+            self.assertIsNone(handler(http_response, response, operation))
+        except errorhandler.BaseOperationError as e:
+            self.fail("Unexpected error raised: %s" % e)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/unit/test_text.py b/tests/unit/test_text.py
index f86e4e571413..c6fd8be7be18 100644
--- a/tests/unit/test_text.py
+++ b/tests/unit/test_text.py
@@ -131,6 +131,13 @@ def test_empty_list_mock_calls(self):
         # We should not call .write() at all for an empty list.
         self.assertFalse(fake_stream.write.called)
 
+    def test_list_of_strings_in_dict(self):
+        self.assert_text_renders_to(
+            {"KeyName": ['a', 'b', 'c']},
+            'KEYNAME\ta\n'
+            'KEYNAME\tb\n'
+            'KEYNAME\tc\n')
+
 
 if __name__ == '__main__':
     unittest.main()