From d51d6f73fe03712bcfc6a248be7274549195a453 Mon Sep 17 00:00:00 2001 From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com> Date: Wed, 13 Nov 2019 11:06:58 -0800 Subject: [PATCH] Deploy off aws cli (#1455) * feat: sam deploy without aws cli pre-installed - Not breaking parameter overrides formats - still requires refactoring and error handling * feat: new click types for deploy parameters * feat: show changeset and stack events - needs refactoring * refactor: move deploy classes to lib - wire up command.py for `sam deploy` - move deploy specific exceptions to inherit from UserException * rebase: latest from `sam package` port * feat: decorator for printing tables - `sam deploy` now has tables while showcasing the changeset and showcasing events happening during deploy. * fix: wrap text on resource status column on `sam deploy` - fixed unit tests - linting fixes - doc strings - further unit tests and integration tests need to be added. * fix: cleaner text formatting for tables * tests: add unit tests for full suite of `sam deploy` * tests: add integration tests for `sam deploy` * tests: regression test suite for `sam deploy` - exercise all command line parameters for `aws` and `sam` * fix: deploy command now showcases stack outputs * fix: address comments * fix: return stack outputs from `get_stack_outputs` * fix: width margins on table prints * fix: address comments - add retries - more regression testing - remove types for capabilities * tests: tests for pprint of tables * usability: add table headers - show cases Add, Modify, Delete with +, * and - --- .pylintrc | 2 +- samcli/cli/types.py | 95 +++- .../_utils/custom_options/__init__.py | 0 .../_utils/custom_options/option_nargs.py | 50 ++ samcli/commands/_utils/options.py | 60 +- samcli/commands/_utils/table_print.py | 110 ++++ samcli/commands/deploy/__init__.py | 54 +- samcli/commands/deploy/command.py | 185 ++++++ samcli/commands/deploy/deploy_context.py | 185 ++++++ 
samcli/commands/deploy/exceptions.py | 54 ++ samcli/lib/deploy/__init__.py | 0 samcli/lib/deploy/deployer.py | 418 ++++++++++++++ samcli/lib/samlib/cloudformation_command.py | 55 -- samcli/lib/utils/time.py | 10 + tests/integration/deploy/__init__.py | 0 tests/integration/deploy/deploy_integ_base.py | 83 +++ .../integration/deploy/test_deploy_command.py | 84 +++ .../integration/package/package_integ_base.py | 7 +- .../package/aws-serverless-function.yaml | 6 + tests/regression/deploy/__init__.py | 0 .../deploy/regression_deploy_base.py | 106 ++++ .../deploy/test_deploy_regression.py | 154 +++++ tests/unit/cli/test_types.py | 101 +++- .../_utils/custom_options/__init__.py | 0 .../custom_options/test_option_nargs.py | 40 ++ .../unit/commands/_utils/test_table_print.py | 83 +++ tests/unit/commands/deploy/__init__.py | 0 tests/unit/commands/deploy/test_command.py | 71 +++ .../commands/deploy/test_deploy_context.py | 141 +++++ tests/unit/commands/test_deploy.py | 20 - tests/unit/lib/deploy/__init__.py | 0 tests/unit/lib/deploy/test_deployer.py | 535 ++++++++++++++++++ .../lib/samlib/test_cloudformation_command.py | 166 ------ tests/unit/lib/utils/test_time.py | 10 +- 34 files changed, 2554 insertions(+), 331 deletions(-) create mode 100644 samcli/commands/_utils/custom_options/__init__.py create mode 100644 samcli/commands/_utils/custom_options/option_nargs.py create mode 100644 samcli/commands/_utils/table_print.py create mode 100644 samcli/commands/deploy/command.py create mode 100644 samcli/commands/deploy/deploy_context.py create mode 100644 samcli/commands/deploy/exceptions.py create mode 100644 samcli/lib/deploy/__init__.py create mode 100644 samcli/lib/deploy/deployer.py delete mode 100644 samcli/lib/samlib/cloudformation_command.py create mode 100644 tests/integration/deploy/__init__.py create mode 100644 tests/integration/deploy/deploy_integ_base.py create mode 100644 tests/integration/deploy/test_deploy_command.py create mode 100644 
tests/regression/deploy/__init__.py create mode 100644 tests/regression/deploy/regression_deploy_base.py create mode 100644 tests/regression/deploy/test_deploy_regression.py create mode 100644 tests/unit/commands/_utils/custom_options/__init__.py create mode 100644 tests/unit/commands/_utils/custom_options/test_option_nargs.py create mode 100644 tests/unit/commands/_utils/test_table_print.py create mode 100644 tests/unit/commands/deploy/__init__.py create mode 100644 tests/unit/commands/deploy/test_command.py create mode 100644 tests/unit/commands/deploy/test_deploy_context.py delete mode 100644 tests/unit/commands/test_deploy.py create mode 100644 tests/unit/lib/deploy/__init__.py create mode 100644 tests/unit/lib/deploy/test_deployer.py delete mode 100644 tests/unit/lib/samlib/test_cloudformation_command.py diff --git a/.pylintrc b/.pylintrc index 767f748fba..d450445bc8 100644 --- a/.pylintrc +++ b/.pylintrc @@ -59,7 +59,7 @@ confidence= # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=R0201,W0613,I0021,I0020,W1618,W1619,R0902,R0903,W0231,W0611,R0913,W0703,C0330,R0204,I0011,R0904,C0301 +disable=R0201,W0613,W0640,I0021,I0020,W1618,W1619,R0902,R0903,W0231,W0611,R0913,W0703,C0330,R0204,I0011,R0904,C0301 [REPORTS] diff --git a/samcli/cli/types.py b/samcli/cli/types.py index faacbacf70..e22b2fec26 100644 --- a/samcli/cli/types.py +++ b/samcli/cli/types.py @@ -15,27 +15,51 @@ class CfnParameterOverridesType(click.ParamType): parameters as "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" """ - __EXAMPLE = "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" + __EXAMPLE_1 = "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" + __EXAMPLE_2 = "KeyPairName=MyKey InstanceType=t1.micro" # Regex that parses 
CloudFormation parameter key-value pairs: https://regex101.com/r/xqfSjW/2 - _pattern = r"(?:ParameterKey=([A-Za-z0-9\"]+),ParameterValue=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))" + _pattern_1 = r"(?:ParameterKey=([A-Za-z0-9\"]+),ParameterValue=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))" + _pattern_2 = r"(?:([A-Za-z0-9\"]+)=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))" + ordered_pattern_match = [_pattern_1, _pattern_2] + + # NOTE(TheSriram): name needs to be added to click.ParamType requires it. name = "" def convert(self, value, param, ctx): result = {} - if not value: - return result - groups = re.findall(self._pattern, value) - if not groups: - return self.fail( - "{} is not in valid format. It must look something like '{}'".format(value, self.__EXAMPLE), param, ctx - ) + # Empty tuple + if value == ("",): + return result - # 'groups' variable is a list of tuples ex: [(key1, value1), (key2, value2)] - for key, param_value in groups: - result[self._unquote(key)] = self._unquote(param_value) + for val in value: + + try: + # NOTE(TheSriram): find the first regex that matched. + # pylint is concerned that we are checking at the same `val` within the loop, + # but that is the point, so disabling it. + pattern = next( + i + for i in filter( + lambda item: re.findall(item, val), self.ordered_pattern_match + ) # pylint: disable=cell-var-from-loop + ) + except StopIteration: + return self.fail( + "{} is not in valid format. It must look something like '{}' or '{}'".format( + val, self.__EXAMPLE_1, self.__EXAMPLE_2 + ), + param, + ctx, + ) + + groups = re.findall(pattern, val) + + # 'groups' variable is a list of tuples ex: [(key1, value1), (key2, value2)] + for key, param_value in groups: + result[self._unquote(key)] = self._unquote(param_value) return result @@ -80,7 +104,7 @@ class CfnMetadataType(click.ParamType): _pattern = r"([A-Za-z0-9\"]+)=([A-Za-z0-9\"]+)" # NOTE(TheSriram): name needs to be added to click.ParamType requires it. 
- name = "CfnMetadata" + name = "" def convert(self, value, param, ctx): result = {} @@ -103,9 +127,9 @@ def convert(self, value, param, ctx): if not groups: fail = True for group in groups: - key, value = group + key, v = group # assign to result['KeyName1'] = string and so on. - result[key] = value + result[key] = v if fail: return self.fail( @@ -113,3 +137,44 @@ def convert(self, value, param, ctx): ) return result + + +class CfnTags(click.ParamType): + """ + Custom Click options type to accept values for tag parameters. + tag parameters can be of the type KeyName1=string KeyName2=string + """ + + _EXAMPLE = "KeyName1=string KeyName2=string" + + _pattern = r"([A-Za-z0-9\"]+)=([A-Za-z0-9\"]+)" + + # NOTE(TheSriram): name needs to be added to click.ParamType requires it. + name = "" + + def convert(self, value, param, ctx): + result = {} + fail = False + # Empty tuple + if value == ("",): + return result + + for val in value: + + groups = re.findall(self._pattern, val) + + if not groups: + fail = True + for group in groups: + key, v = group + # assign to result['KeyName1'] = string and so on. + result[key] = v + + if fail: + return self.fail( + "{} is not in valid format. It must look something like '{}'".format(value, self._EXAMPLE), + param, + ctx, + ) + + return result diff --git a/samcli/commands/_utils/custom_options/__init__.py b/samcli/commands/_utils/custom_options/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/_utils/custom_options/option_nargs.py b/samcli/commands/_utils/custom_options/option_nargs.py new file mode 100644 index 0000000000..1c310103a1 --- /dev/null +++ b/samcli/commands/_utils/custom_options/option_nargs.py @@ -0,0 +1,50 @@ +""" +Custom Click options for multiple arguments +""" + +import click + + +class OptionNargs(click.Option): + """ + A custom option class that allows parsing for multiple arguments + for an option, when the number of arguments for an option are unknown. 
+ """ + + def __init__(self, *args, **kwargs): + self.nargs = kwargs.pop("nargs", -1) + super(OptionNargs, self).__init__(*args, **kwargs) + self._previous_parser_process = None + self._nargs_parser = None + + def add_to_parser(self, parser, ctx): + def parser_process(value, state): + # look ahead into arguments till we reach the next option. + # the next option starts with a prefix which is either '-' or '--' + next_option = False + value = [value] + + while state.rargs and not next_option: + for prefix in self._nargs_parser.prefixes: + if state.rargs[0].startswith(prefix): + next_option = True + if not next_option: + value.append(state.rargs.pop(0)) + + value = tuple(value) + + # call the actual process + self._previous_parser_process(value, state) + + # Add current option to Parser by calling add_to_parser on the super class. + super(OptionNargs, self).add_to_parser(parser, ctx) + for name in self.opts: + # Get OptionParser object for current option + option_parser = getattr(parser, "_long_opt").get(name) or getattr(parser, "_short_opt").get(name) + if option_parser: + # Monkey patch `process` method for click.parser.Option class. 
+ # This allows for setting multiple parsed values into current option arguments + self._nargs_parser = option_parser + self._previous_parser_process = option_parser.process + option_parser.process = parser_process + break diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 6537f6cabe..71152834d4 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -7,7 +7,9 @@ from functools import partial import click -from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType +from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType, CfnTags +from samcli.commands._utils.custom_options.option_nargs import OptionNargs + _TEMPLATE_OPTION_DEFAULT_VALUE = "template.[yaml|yml]" @@ -113,6 +115,7 @@ def docker_click_options(): def parameter_override_click_option(): return click.option( "--parameter-overrides", + cls=OptionNargs, type=CfnParameterOverridesType(), help="Optional. A string that contains CloudFormation parameter overrides encoded as key=value " "pairs. Use the same format as the AWS CLI, e.g. 'ParameterKey=KeyPairName," @@ -134,3 +137,58 @@ def metadata_click_option(): def metadata_override_option(f): return metadata_click_option()(f) + + +def capabilities_click_option(): + return click.option( + "--capabilities", + cls=OptionNargs, + type=click.STRING, + required=True, + help="A list of capabilities that you must specify" + "before AWS Cloudformation can create certain stacks. Some stack tem-" + "plates might include resources that can affect permissions in your AWS" + "account, for example, by creating new AWS Identity and Access Manage-" + "ment (IAM) users. For those stacks, you must explicitly acknowledge" + "their capabilities by specifying this parameter. The only valid values" + "are CAPABILITY_IAM and CAPABILITY_NAMED_IAM. If you have IAM resources," + "you can specify either capability. 
If you have IAM resources with cus-" + "tom names, you must specify CAPABILITY_NAMED_IAM. If you don't specify" + "this parameter, this action returns an InsufficientCapabilities error.", + ) + + +def capabilities_override_option(f): + return capabilities_click_option()(f) + + +def tags_click_option(): + return click.option( + "--tags", + cls=OptionNargs, + type=CfnTags(), + required=False, + help="A list of tags to associate with the stack that is created or updated." + "AWS CloudFormation also propagates these tags to resources " + "in the stack if the resource supports it.", + ) + + +def tags_override_option(f): + return tags_click_option()(f) + + +def notification_arns_click_option(): + return click.option( + "--notification-arns", + cls=OptionNargs, + type=click.STRING, + required=False, + help="Amazon Simple Notification Service topic" + "Amazon Resource Names (ARNs) that AWS CloudFormation associates with" + "the stack.", + ) + + +def notification_arns_override_option(f): + return notification_arns_click_option()(f) diff --git a/samcli/commands/_utils/table_print.py b/samcli/commands/_utils/table_print.py new file mode 100644 index 0000000000..76672632ff --- /dev/null +++ b/samcli/commands/_utils/table_print.py @@ -0,0 +1,110 @@ +""" +Utilities for table pretty printing using click +""" +from itertools import count, zip_longest +import textwrap +from functools import wraps + +import click + + +def pprint_column_names(format_string, format_kwargs, margin=None, table_header=None): + """ + + :param format_string: format string to be used that has the strings, minimum width to be replaced + :param format_kwargs: dictionary that is supplied to the format_string to format the string + :param margin: margin that is to be reduced from column width for columnar text. 
+ :param table_header: Supplied table header + :return: boilerplate table string + """ + + min_width = 100 + min_margin = 2 + + def pprint_wrap(func): + # Calculate terminal width, number of columns in the table + width, _ = click.get_terminal_size() + # For UX purposes, set a minimum width for the table to be usable + # and usable_width keeps margins in mind. + width = max(width, min_width) + + total_args = len(format_kwargs) + if not total_args: + raise ValueError("Number of arguments supplied should be > 0 , format_kwargs: {}".format(format_kwargs)) + + # Get width to be a usable number so that we can equally divide the space for all the columns. + # Can be refactored, to allow for modularity in the shaping of the columns. + width = width - (width % total_args) + usable_width_no_margin = int(width) - 1 + usable_width = int((usable_width_no_margin - (margin if margin else min_margin))) + if total_args > int(usable_width / 2): + raise ValueError("Total number of columns exceed available width") + width_per_column = int(usable_width / total_args) + + # The final column should not roll over into the next line + final_arg_width = width_per_column - 1 + + # the format string contains minimumwidth that need to be set. + # eg: "{a:{0}}} {b:<{1}}} {c:{2}}}" + format_args = [width_per_column for _ in range(total_args - 1)] + format_args.extend([final_arg_width]) + + # format arguments are now ready for setting minimumwidth + + @wraps(func) + def wrap(*args, **kwargs): + # The table is setup with the column names, format_string contains the column names. 
+ if table_header: + click.secho("\n" + table_header) + click.secho("-" * usable_width) + click.secho(format_string.format(*format_args, **format_kwargs)) + click.secho("-" * usable_width) + # format_args which have the minimumwidth set per {} in the format_string is passed to the function + # which this decorator wraps, so that the function has access to the correct format_args + kwargs["format_args"] = format_args + kwargs["width"] = width_per_column + kwargs["margin"] = margin if margin else min_margin + result = func(*args, **kwargs) + # Complete the table + click.secho("-" * usable_width) + return result + + return wrap + + return pprint_wrap + + +def wrapped_text_generator(texts, width, margin): + """ + + Return a generator where the contents are wrapped text to a specified width. + + :param texts: list of text that needs to be wrapped at specified width + :param width: width of the text to be wrapped + :param margin: margin to be reduced from width for cleaner UX + :return: generator of wrapped text + """ + for text in texts: + yield textwrap.wrap(text, width=width - margin) + + +def pprint_columns(columns, width, margin, format_string, format_args, columns_dict): + """ + + Print columns based on list of columnar text, associated formatting string and associated format arguments. + + :param columns: list of columnnar text that go into columns as specified by the format_string + :param width: width of the text to be wrapped + :param margin: margin to be reduced from width for cleaner UX + :param format_string: A format string that has both width and text specifiers set. + :param format_args: list of offset specifiers + :param columns_dict: arguments dictionary that have dummy values per column + :return: + """ + for columns_text in zip_longest(*wrapped_text_generator(columns, width, margin), fillvalue=""): + counter = count() + # Generate columnar data that correspond to the column names and update them. 
+ for k, _ in columns_dict.items(): + columns_dict[k] = columns_text[next(counter)] + + click.secho(format_string.format(*format_args, **columns_dict)) diff --git a/samcli/commands/deploy/__init__.py b/samcli/commands/deploy/__init__.py index 7e3bd984ab..ff2b95977d 100644 --- a/samcli/commands/deploy/__init__.py +++ b/samcli/commands/deploy/__init__.py @@ -1,54 +1,6 @@ """ -CLI command for "deploy" command +`sam deploy` command """ -import click - -from samcli.cli.main import pass_context, common_options -from samcli.lib.samlib.cloudformation_command import execute_command -from samcli.commands.exceptions import UserException -from samcli.lib.telemetry.metrics import track_command - - -SHORT_HELP = "Deploy an AWS SAM application. This is an alias for 'aws cloudformation deploy'." - - -HELP_TEXT = """The sam deploy command creates a Cloudformation Stack and deploys your resources. - -\b -e.g. sam deploy --template-file packaged.yaml --stack-name sam-app --capabilities CAPABILITY_IAM - -\b -This is an alias for aws cloudformation deploy. To learn about other parameters you can use, -run aws cloudformation deploy help. -""" - - -@click.command("deploy", short_help=SHORT_HELP, context_settings={"ignore_unknown_options": True}, help=HELP_TEXT) -@click.argument("args", nargs=-1, type=click.UNPROCESSED) -@click.option( - "--template-file", required=True, type=click.Path(), help="The path where your AWS SAM template is located" -) -@click.option( - "--stack-name", - required=True, - help="The name of the AWS CloudFormation stack you're deploying to. " - "If you specify an existing stack, the command updates the stack. " - "If you specify a new stack, the command creates it.", -) -@common_options -@pass_context -@track_command -def cli(ctx, args, template_file, stack_name): - - # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(args, template_file, stack_name) # pragma: no cover - - -def do_cli(args, template_file, stack_name): - args = args + ("--stack-name", stack_name) - - try: - execute_command("deploy", args, template_file=template_file) - except OSError as ex: - raise UserException(str(ex)) +# Expose the cli object here +from .command import cli # noqa diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py new file mode 100644 index 0000000000..68314c48b3 --- /dev/null +++ b/samcli/commands/deploy/command.py @@ -0,0 +1,185 @@ +""" +CLI command for "deploy" command +""" + +import click + + +from samcli.commands._utils.options import ( + parameter_override_option, + capabilities_override_option, + tags_override_option, + notification_arns_override_option, +) +from samcli.cli.main import pass_context, common_options, aws_creds_options +from samcli.lib.telemetry.metrics import track_command + + +SHORT_HELP = "Deploy an AWS SAM application." + + +HELP_TEXT = """The sam deploy command creates a Cloudformation Stack and deploys your resources. + +\b +e.g. sam deploy --template-file packaged.yaml --stack-name sam-app --capabilities CAPABILITY_IAM + +\b +""" + + +@click.command( + "deploy", + short_help=SHORT_HELP, + context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, + help=HELP_TEXT, +) +@click.option( + "--template-file", + "--template", + "-t", + required=True, + type=click.Path(), + help="The path where your AWS SAM template is located", +) +@click.option( + "--stack-name", + required=True, + help="The name of the AWS CloudFormation stack you're deploying to. " + "If you specify an existing stack, the command updates the stack. " + "If you specify a new stack, the command creates it.", +) +@click.option( + "--s3-bucket", + required=False, + help="The name of the S3 bucket where this command uploads your " + "CloudFormation template. 
This is required the deployments of " + "templates sized greater than 51,200 bytes", +) +@click.option( + "--force-upload", + required=False, + is_flag=True, + help="Indicates whether to override existing files in the S3 bucket. " + "Specify this flag to upload artifacts even if they" + "match existing artifacts in the S3 bucket.", +) +@click.option( + "--s3-prefix", + required=False, + help="A prefix name that the command adds to the " + "artifacts' name when it uploads them to the S3 bucket." + "The prefix name is a path name (folder name) for the S3 bucket.", +) +@click.option( + "--kms-key-id", + required=False, + help="The ID of an AWS KMS key that the command uses" " to encrypt artifacts that are at rest in the S3 bucket.", +) +@click.option( + "--no-execute-changeset", + required=False, + is_flag=True, + help="Indicates whether to execute the" + "change set. Specify this flag if you want to view your stack changes" + "before executing the change set. The command creates an AWS CloudForma-" + "tion change set and then exits without executing the change set. if " + "the changeset looks satisfactory, the stack changes can be made by " + "running the same command without specifying `--no-execute-changeset`", +) +@click.option( + "--role-arn", + required=False, + help="The Amazon Resource Name (ARN) of an AWS Identity" + "and Access Management (IAM) role that AWS CloudFormation assumes when" + "executing the change set.", +) +@click.option( + "--fail-on-empty-changeset", + required=False, + is_flag=True, + help="Specify if the CLI should return a non-zero exit code if there are no" + "changes to be made to the stack. 
The default behavior is to return a" + "non-zero exit code.", +) +@notification_arns_override_option +@tags_override_option +@parameter_override_option +@capabilities_override_option +@aws_creds_options +@common_options +@pass_context +@track_command +def cli( + ctx, + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + tags, +): + + # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing + do_cli( + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + tags, + ctx.region, + ctx.profile, + ) # pragma: no cover + + +def do_cli( + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + tags, + region, + profile, +): + from samcli.commands.deploy.deploy_context import DeployContext + + with DeployContext( + template_file=template_file, + stack_name=stack_name, + s3_bucket=s3_bucket, + force_upload=force_upload, + s3_prefix=s3_prefix, + kms_key_id=kms_key_id, + parameter_overrides=parameter_overrides, + capabilities=capabilities, + no_execute_changeset=no_execute_changeset, + role_arn=role_arn, + notification_arns=notification_arns, + fail_on_empty_changeset=fail_on_empty_changeset, + tags=tags, + region=region, + profile=profile, + ) as deploy_context: + deploy_context.run() diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py new file mode 100644 index 0000000000..4eaf60b2cc --- /dev/null +++ b/samcli/commands/deploy/deploy_context.py @@ -0,0 +1,185 @@ +""" +Deploy a SAM stack +""" + +# Copyright 2015 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import os +import logging +import boto3 +import click + +from samcli.commands.deploy import exceptions as deploy_exceptions +from samcli.lib.deploy.deployer import Deployer +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.yamlhelper import yaml_parse + +LOG = logging.getLogger(__name__) + + +class DeployContext: + + MSG_NO_EXECUTE_CHANGESET = "\nChangeset created successfully. \n" + + MSG_EXECUTE_SUCCESS = "\nSuccessfully created/updated stack - {stack_name}\n" + + def __init__( + self, + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + tags, + region, + profile, + ): + self.template_file = template_file + self.stack_name = stack_name + self.s3_bucket = s3_bucket + self.force_upload = force_upload + self.s3_prefix = s3_prefix + self.kms_key_id = kms_key_id + self.parameter_overrides = parameter_overrides + self.capabilities = capabilities + self.no_execute_changeset = no_execute_changeset + self.role_arn = role_arn + self.notification_arns = notification_arns + self.fail_on_empty_changeset = fail_on_empty_changeset + self.tags = tags + self.region = region + self.profile = profile + self.s3_uploader = None + self.deployer = None + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def run(self): + + # Parse parameters + with 
open(self.template_file, "r") as handle: + template_str = handle.read() + + template_dict = yaml_parse(template_str) + + if not isinstance(template_dict, dict): + raise deploy_exceptions.DeployFailedError( + stack_name=self.stack_name, msg="{} not in required format".format(self.template_file) + ) + + parameters = self.merge_parameters(template_dict, self.parameter_overrides) + + template_size = os.path.getsize(self.template_file) + if template_size > 51200 and not self.s3_bucket: + raise deploy_exceptions.DeployBucketRequiredError() + + session = boto3.Session(profile_name=self.profile if self.profile else None) + cloudformation_client = session.client("cloudformation", region_name=self.region if self.region else None) + + if self.s3_bucket: + s3_client = session.client("s3", region_name=self.region if self.region else None) + + self.s3_uploader = S3Uploader(s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload) + + self.deployer = Deployer(cloudformation_client) + + return self.deploy( + self.stack_name, + template_str, + parameters, + self.capabilities, + self.no_execute_changeset, + self.role_arn, + self.notification_arns, + self.s3_uploader, + [{"Key": key, "Value": value} for key, value in self.tags.items()] if self.tags else [], + self.fail_on_empty_changeset, + ) + + def deploy( + self, + stack_name, + template_str, + parameters, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + s3_uploader, + tags, + fail_on_empty_changeset=True, + ): + try: + result, changeset_type = self.deployer.create_and_wait_for_changeset( + stack_name=stack_name, + cfn_template=template_str, + parameter_values=parameters, + capabilities=capabilities, + role_arn=role_arn, + notification_arns=notification_arns, + s3_uploader=s3_uploader, + tags=tags, + ) + + if not no_execute_changeset: + self.deployer.execute_changeset(result["Id"], stack_name) + self.deployer.wait_for_execute(stack_name, changeset_type) + 
click.echo(self.MSG_EXECUTE_SUCCESS.format(stack_name=stack_name)) + else: + click.echo(self.MSG_NO_EXECUTE_CHANGESET.format(changeset_id=result["Id"])) + + except deploy_exceptions.ChangeEmptyError as ex: + if fail_on_empty_changeset: + raise + click.echo(str(ex)) + + def merge_parameters(self, template_dict, parameter_overrides): + """ + CloudFormation CreateChangeset requires a value for every parameter + from the template, either specifying a new value or use previous value. + For convenience, this method will accept new parameter values and + generates a dict of all parameters in a format that ChangeSet API + will accept + + :param parameter_overrides: + :return: + """ + parameter_values = [] + + if not isinstance(template_dict.get("Parameters", None), dict): + return parameter_values + + for key, _ in template_dict["Parameters"].items(): + + obj = {"ParameterKey": key} + + if key in parameter_overrides: + obj["ParameterValue"] = parameter_overrides[key] + else: + obj["UsePreviousValue"] = True + + parameter_values.append(obj) + + return parameter_values diff --git a/samcli/commands/deploy/exceptions.py b/samcli/commands/deploy/exceptions.py new file mode 100644 index 0000000000..851ba9a999 --- /dev/null +++ b/samcli/commands/deploy/exceptions.py @@ -0,0 +1,54 @@ +""" +Exceptions that are raised by sam deploy +""" +from samcli.commands.exceptions import UserException + + +class ChangeEmptyError(UserException): + def __init__(self, stack_name): + self.stack_name = stack_name + message_fmt = "No changes to deploy.Stack {stack_name} is up to date" + super(ChangeEmptyError, self).__init__(message=message_fmt.format(stack_name=self.stack_name)) + + +class ChangeSetError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + message_fmt = "Failed to create changeset for the stack: {stack_name}, {msg}" + super(ChangeSetError, self).__init__(message=message_fmt.format(stack_name=self.stack_name, msg=self.msg)) + + 
+class DeployFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to create/update the stack: {stack_name}, {msg}" + + super(DeployFailedError, self).__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + +class DeployStackOutPutFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to get outputs from stack: {stack_name}, {msg}" + + super(DeployStackOutPutFailedError, self).__init__( + message=message_fmt.format(stack_name=self.stack_name, msg=msg) + ) + + +class DeployBucketRequiredError(UserException): + def __init__(self): + + message_fmt = ( + "Templates with a size greater than 51,200 bytes must be deployed " + "via an S3 Bucket. Please add the --s3-bucket parameter to your " + "command. The local template will be copied to that S3 bucket and " + "then deployed." + ) + + super(DeployBucketRequiredError, self).__init__(message=message_fmt) diff --git a/samcli/lib/deploy/__init__.py b/samcli/lib/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py new file mode 100644 index 0000000000..b7e3a77969 --- /dev/null +++ b/samcli/lib/deploy/deployer.py @@ -0,0 +1,418 @@ +""" +Cloudformation deploy class which also streams events and changeset information +""" + +# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. + +import sys +import math +from collections import OrderedDict +import logging +import time +from datetime import datetime + +import botocore + +from samcli.commands.deploy.exceptions import DeployFailedError, ChangeSetError, DeployStackOutPutFailedError +from samcli.commands._utils.table_print import pprint_column_names, pprint_columns +from samcli.commands.deploy import exceptions as deploy_exceptions +from samcli.lib.package.artifact_exporter import mktempfile, parse_s3_url +from samcli.lib.utils.time import utc_to_timestamp + +LOG = logging.getLogger(__name__) + +DESCRIBE_STACK_EVENTS_FORMAT_STRING = ( + "{ResourceStatus:<{0}} {ResourceType:<{1}} {LogicalResourceId:<{2}} {ResourceStatusReason:<{3}}" +) +DESCRIBE_STACK_EVENTS_DEFAULT_ARGS = OrderedDict( + { + "ResourceStatus": "ResourceStatus", + "ResourceType": "ResourceType", + "LogicalResourceId": "LogicalResourceId", + "ResourceStatusReason": "ResourceStatusReason", + } +) + +DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME = "**CloudFormation events from changeset**" + +DESCRIBE_CHANGESET_FORMAT_STRING = "{Operation:<{0}} {LogicalResourceId:<{1}} {ResourceType:<{2}}" +DESCRIBE_CHANGESET_DEFAULT_ARGS = OrderedDict( + {"Operation": "Operation", "LogicalResourceId": "LogicalResourceId", "ResourceType": "ResourceType"} +) + +DESCRIBE_CHANGESET_TABLE_HEADER_NAME = "**CloudFormation stack changeset**" + +OUTPUTS_FORMAT_STRING = "{OutputKey:<{0}} {OutputValue:<{1}} {Description:<{2}}" +OUTPUTS_DEFAULTS_ARGS = OrderedDict( + {"OutputKey": "OutputKey", "OutputValue": "OutputValue", "Description": "Description"} +) + + +class Deployer: + def __init__(self, cloudformation_client, changeset_prefix="samcli-deploy"): + self._client = cloudformation_client + self.changeset_prefix = changeset_prefix + # 500ms of sleep time between stack checks and describe stack events. 
+        self.client_sleep = 0.5
+        # 2000ms of backoff time, used exponentially when describe stack events raises exceptions
+        self.backoff = 2
+        # Maximum number of attempts before raising exception back up the chain.
+        self.max_attempts = 3
+
+    def has_stack(self, stack_name):
+        """
+        Checks if a CloudFormation stack with given name exists
+
+        :param stack_name: Name or ID of the stack
+        :return: True if stack exists. False otherwise
+        """
+        try:
+            resp = self._client.describe_stacks(StackName=stack_name)
+            if not resp["Stacks"]:
+                return False
+
+            # When you run CreateChangeSet on a stack that does not exist,
+            # CloudFormation will create a stack and set its status to
+            # REVIEW_IN_PROGRESS. However, such a stack cannot be manipulated
+            # by "update" commands. Under these circumstances, we treat the
+            # stack as if it does not exist and call CreateChangeSet with
+            # ChangeSetType set to CREATE and not UPDATE.
+            stack = resp["Stacks"][0]
+            return stack["StackStatus"] != "REVIEW_IN_PROGRESS"
+
+        except botocore.exceptions.ClientError as e:
+            # If a stack does not exist, describe_stacks will throw an
+            # exception. Unfortunately we don't have a better way than parsing
+            # the exception msg to understand the nature of this exception.
+
+            if "Stack with id {0} does not exist".format(stack_name) in str(e):
+                LOG.debug("Stack with id %s does not exist", stack_name)
+                return False
+
+            # We don't know anything about this exception.
Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + + def create_changeset( + self, stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags + ): + """ + Call Cloudformation to create a changeset and wait for it to complete + + :param stack_name: Name or ID of stack + :param cfn_template: CloudFormation template string + :param parameter_values: Template parameters object + :param capabilities: Array of capabilities passed to CloudFormation + :param tags: Array of tags passed to CloudFormation + :return: + """ + + if not self.has_stack(stack_name): + changeset_type = "CREATE" + # When creating a new stack, UsePreviousValue=True is invalid. + # For such parameters, users should either override with new value, + # or set a Default value in template to successfully create a stack. + parameter_values = [x for x in parameter_values if not x.get("UsePreviousValue", False)] + else: + changeset_type = "UPDATE" + # UsePreviousValue not valid if parameter is new + summary = self._client.get_template_summary(StackName=stack_name) + existing_parameters = [parameter["ParameterKey"] for parameter in summary["Parameters"]] + parameter_values = [ + x + for x in parameter_values + if not (x.get("UsePreviousValue", False) and x["ParameterKey"] not in existing_parameters) + ] + + # Each changeset will get a unique name based on time. + # Description is also setup based on current date and that SAM CLI is used. + kwargs = { + "ChangeSetName": self.changeset_prefix + str(int(time.time())), + "StackName": stack_name, + "TemplateBody": cfn_template, + "ChangeSetType": changeset_type, + "Parameters": parameter_values, + "Capabilities": capabilities, + "Description": "Created by SAM CLI at {0} UTC".format(datetime.utcnow().isoformat()), + "Tags": tags, + } + + # If an S3 uploader is available, use TemplateURL to deploy rather than + # TemplateBody. This is required for large templates. 
+ if s3_uploader: + with mktempfile() as temporary_file: + temporary_file.write(kwargs.pop("TemplateBody")) + temporary_file.flush() + + # TemplateUrl property requires S3 URL to be in path-style format + parts = parse_s3_url( + s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" + ) + kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) + + # don't set these arguments if not specified to use existing values + if role_arn is not None: + kwargs["RoleARN"] = role_arn + if notification_arns is not None: + kwargs["NotificationARNs"] = notification_arns + try: + resp = self._client.create_change_set(**kwargs) + return resp, changeset_type + except Exception as ex: + LOG.debug("Unable to create changeset", exc_info=ex) + raise ChangeSetError(stack_name=stack_name, msg=str(ex)) + + @pprint_column_names( + format_string=DESCRIBE_CHANGESET_FORMAT_STRING, + format_kwargs=DESCRIBE_CHANGESET_DEFAULT_ARGS, + table_header=DESCRIBE_CHANGESET_TABLE_HEADER_NAME, + ) + def describe_changeset(self, change_set_id, stack_name, **kwargs): + """ + Call Cloudformation to describe a changeset + + :param change_set_id: ID of the changeset + :param stack_name: Name of the CloudFormation stack + :return: dictionary of changes described in the changeset. 
+ """ + paginator = self._client.get_paginator("describe_change_set") + response_iterator = paginator.paginate(ChangeSetName=change_set_id, StackName=stack_name) + changes = {"Add": [], "Modify": [], "Remove": []} + changes_showcase = {"Add": "+ Add", "Modify": "* Modify", "Remove": "- Delete"} + changeset = False + for item in response_iterator: + cf_changes = item.get("Changes") + for change in cf_changes: + changeset = True + resource_props = change.get("ResourceChange") + action = resource_props.get("Action") + changes[action].append( + { + "LogicalResourceId": resource_props.get("LogicalResourceId"), + "ResourceType": resource_props.get("ResourceType"), + } + ) + + for k, v in changes.items(): + for value in v: + pprint_columns( + columns=[changes_showcase.get(k, k), value["LogicalResourceId"], value["ResourceType"]], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=DESCRIBE_CHANGESET_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=DESCRIBE_CHANGESET_DEFAULT_ARGS.copy(), + ) + + if not changeset: + # There can be cases where there are no changes, + # but could be an an addition of a SNS notification topic. + pprint_columns( + columns=["-", "-", "-"], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=DESCRIBE_CHANGESET_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=DESCRIBE_CHANGESET_DEFAULT_ARGS.copy(), + ) + + return changes + + def wait_for_changeset(self, changeset_id, stack_name): + """ + Waits until the changeset creation completes + + :param changeset_id: ID or name of the changeset + :param stack_name: Stack name + :return: Latest status of the create-change-set operation + """ + sys.stdout.write("\nWaiting for changeset to be created..\n") + sys.stdout.flush() + + # Wait for changeset to be created + waiter = self._client.get_waiter("change_set_create_complete") + # Poll every 5 seconds. 
Changeset creation should be fast + waiter_config = {"Delay": 5} + try: + waiter.wait(ChangeSetName=changeset_id, StackName=stack_name, WaiterConfig=waiter_config) + except botocore.exceptions.WaiterError as ex: + LOG.debug("Create changeset waiter exception", exc_info=ex) + + resp = ex.last_response + status = resp["Status"] + reason = resp["StatusReason"] + + if ( + status == "FAILED" + and "The submitted information didn't contain changes." in reason + or "No updates are to be performed" in reason + ): + raise deploy_exceptions.ChangeEmptyError(stack_name=stack_name) + + raise ChangeSetError( + stack_name=stack_name, msg="ex: {0} Status: {1}. Reason: {2}".format(ex, status, reason) + ) + + def execute_changeset(self, changeset_id, stack_name): + """ + Calls CloudFormation to execute changeset + + :param changeset_id: ID of the changeset + :param stack_name: Name or ID of the stack + :return: Response from execute-change-set call + """ + try: + return self._client.execute_change_set(ChangeSetName=changeset_id, StackName=stack_name) + except botocore.exceptions.ClientError as ex: + raise DeployFailedError(stack_name=stack_name, msg=str(ex)) + + def get_last_event_time(self, stack_name): + """ + Finds the last event time stamp thats present for the stack, if not get the current time + :param stack_name: Name or ID of the stack + :return: unix epoch + """ + try: + return utc_to_timestamp( + self._client.describe_stack_events(StackName=stack_name)["StackEvents"][0]["Timestamp"] + ) + except KeyError: + return time.time() + + @pprint_column_names( + format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING, + format_kwargs=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS, + table_header=DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME, + ) + def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs): + """ + Calls CloudFormation to get current stack events + :param stack_name: Name or ID of the stack + :param time_stamp_marker: last event time on the stack to start streaming events 
from. + :return: + """ + + stack_change_in_progress = True + events = set() + retry_attempts = 0 + + while stack_change_in_progress and retry_attempts <= self.max_attempts: + try: + + # Only sleep if there have been no retry_attempts + time.sleep(self.client_sleep if retry_attempts == 0 else 0) + describe_stacks_resp = self._client.describe_stacks(StackName=stack_name) + paginator = self._client.get_paginator("describe_stack_events") + response_iterator = paginator.paginate(StackName=stack_name) + stack_status = describe_stacks_resp["Stacks"][0]["StackStatus"] + for event_items in response_iterator: + for event in event_items["StackEvents"]: + if event["EventId"] not in events and utc_to_timestamp(event["Timestamp"]) > time_stamp_marker: + events.add(event["EventId"]) + + pprint_columns( + columns=[ + event["ResourceStatus"], + event["ResourceType"], + event["LogicalResourceId"], + event.get("ResourceStatusReason", "-"), + ], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS.copy(), + ) + + if self._check_stack_complete(stack_status): + stack_change_in_progress = False + break + except botocore.exceptions.ClientError: + retry_attempts = retry_attempts + 1 + if retry_attempts > self.max_attempts: + raise + # Sleep in exponential backoff mode + time.sleep(math.pow(self.backoff, retry_attempts)) + + def _check_stack_complete(self, status): + return "COMPLETE" in status and "CLEANUP" not in status + + def wait_for_execute(self, stack_name, changeset_type): + + sys.stdout.write("\nWaiting for stack create/update to complete\n") + sys.stdout.flush() + + self.describe_stack_events(stack_name, self.get_last_event_time(stack_name)) + + # Pick the right waiter + if changeset_type == "CREATE": + waiter = self._client.get_waiter("stack_create_complete") + elif changeset_type == "UPDATE": + waiter = 
self._client.get_waiter("stack_update_complete") + else: + raise RuntimeError("Invalid changeset type {0}".format(changeset_type)) + + # Poll every 5 seconds. Optimizing for the case when the stack has only + # minimal changes, such the Code for Lambda Function + waiter_config = {"Delay": 5, "MaxAttempts": 720} + + try: + waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + except botocore.exceptions.WaiterError as ex: + LOG.debug("Execute changeset waiter exception", exc_info=ex) + + raise deploy_exceptions.DeployFailedError(stack_name=stack_name, msg=str(ex)) + + self.get_stack_outputs(stack_name=stack_name) + + def create_and_wait_for_changeset( + self, stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags + ): + try: + result, changeset_type = self.create_changeset( + stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags + ) + self.wait_for_changeset(result["Id"], stack_name) + self.describe_changeset(result["Id"], stack_name) + return result, changeset_type + except botocore.exceptions.ClientError as ex: + raise DeployFailedError(stack_name=stack_name, msg=str(ex)) + + @pprint_column_names(format_string=OUTPUTS_FORMAT_STRING, format_kwargs=OUTPUTS_DEFAULTS_ARGS) + def _stack_outputs(self, stack_outputs, **kwargs): + for output in stack_outputs: + pprint_columns( + columns=[output["OutputKey"], output["OutputValue"], output.get("Description", "-")], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=OUTPUTS_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=OUTPUTS_DEFAULTS_ARGS.copy(), + ) + + def get_stack_outputs(self, stack_name, echo=True): + try: + stacks_description = self._client.describe_stacks(StackName=stack_name) + try: + outputs = stacks_description["Stacks"][0]["Outputs"] + if echo: + sys.stdout.write("\nStack {stack_name} outputs:\n".format(stack_name=stack_name)) + sys.stdout.flush() + 
self._stack_outputs(stack_outputs=outputs) + return outputs + except KeyError: + return None + + except botocore.exceptions.ClientError as ex: + raise DeployStackOutPutFailedError(stack_name=stack_name, msg=str(ex)) diff --git a/samcli/lib/samlib/cloudformation_command.py b/samcli/lib/samlib/cloudformation_command.py deleted file mode 100644 index e9bdbb4304..0000000000 --- a/samcli/lib/samlib/cloudformation_command.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -Utility to call cloudformation command with args -""" - -import os -import logging -import platform -import subprocess -import sys - -from samcli.cli.global_config import GlobalConfig - -LOG = logging.getLogger(__name__) - - -def execute_command(command, args, template_file): - LOG.debug("%s command is called", command) - try: - aws_cmd = find_executable("aws") - - # Add SAM CLI information for AWS CLI to know about the caller. - gc = GlobalConfig() - env = os.environ.copy() - if gc.telemetry_enabled: - env["AWS_EXECUTION_ENV"] = "SAM-" + gc.installation_id - - args = list(args) - if template_file: - # Since --template-file was parsed separately, add it here manually - args.extend(["--template-file", template_file]) - - subprocess.check_call([aws_cmd, "cloudformation", command] + args, env=env) - LOG.debug("%s command successful", command) - except subprocess.CalledProcessError as e: - # Underlying aws command will print the exception to the user - LOG.debug("Exception: %s", e) - sys.exit(e.returncode) - - -def find_executable(execname): - - if platform.system().lower() == "windows": - options = ["{}.cmd".format(execname), "{}.exe".format(execname), execname] - else: - options = [execname] - - for name in options: - try: - subprocess.Popen([name], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - # No exception. 
Let's pick this - return name - except OSError as ex: - LOG.debug("Unable to find executable %s", name, exc_info=ex) - - raise OSError("Cannot find AWS CLI installation, was looking at executables with names: {}".format(options)) diff --git a/samcli/lib/utils/time.py b/samcli/lib/utils/time.py index 3989cc03ad..02b078337f 100644 --- a/samcli/lib/utils/time.py +++ b/samcli/lib/utils/time.py @@ -65,6 +65,16 @@ def to_timestamp(some_time): return int((some_time - datetime.datetime(1970, 1, 1)).total_seconds() * 1000.0) +def utc_to_timestamp(utc): + """ + Converts utc timestamp with tz_info set to utc to Unix timestamp + :param utc: datetime.datetime + :return: UNIX timestamp + """ + + return to_timestamp(utc.replace(tzinfo=None)) + + def to_utc(some_time): """ Convert the given date to UTC, if the date contains a timezone. diff --git a/tests/integration/deploy/__init__.py b/tests/integration/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/deploy/deploy_integ_base.py b/tests/integration/deploy/deploy_integ_base.py new file mode 100644 index 0000000000..1e68f8878f --- /dev/null +++ b/tests/integration/deploy/deploy_integ_base.py @@ -0,0 +1,83 @@ +import os +import uuid +import json +import time +from pathlib import Path +from unittest import TestCase + +import boto3 + + +class DeployIntegBase(TestCase): + @classmethod + def setUpClass(cls): + pass + + def setUp(self): + super(DeployIntegBase, self).setUp() + + def tearDown(self): + super(DeployIntegBase, self).tearDown() + + def base_command(self): + command = "sam" + if os.getenv("SAM_CLI_DEV"): + command = "samdev" + + return command + + def get_deploy_command_list( + self, + s3_bucket=None, + stack_name=None, + template=None, + template_file=None, + s3_prefix=None, + capabilities=None, + force_upload=False, + notification_arns=None, + fail_on_empty_changeset=False, + no_execute_changeset=False, + parameter_overrides=None, + role_arn=None, + kms_key_id=None, + 
tags=None, + profile=None, + region=None, + ): + command_list = [self.base_command(), "deploy"] + + if s3_bucket: + command_list = command_list + ["--s3-bucket", str(s3_bucket)] + if capabilities: + command_list = command_list + ["--capabilities", str(capabilities)] + if parameter_overrides: + command_list = command_list + ["--parameter-overrides", str(parameter_overrides)] + if role_arn: + command_list = command_list + ["--role-arn", str(role_arn)] + if notification_arns: + command_list = command_list + ["--notification-arns", str(notification_arns)] + if stack_name: + command_list = command_list + ["--stack-name", str(stack_name)] + if template: + command_list = command_list + ["--template", str(template)] + if template_file: + command_list = command_list + ["--template-file", str(template_file)] + if s3_prefix: + command_list = command_list + ["--s3-prefix", str(s3_prefix)] + if kms_key_id: + command_list = command_list + ["--kms-key-id", str(kms_key_id)] + if no_execute_changeset: + command_list = command_list + ["--no-execute-changeset"] + if force_upload: + command_list = command_list + ["--force-upload"] + if fail_on_empty_changeset: + command_list = command_list + ["--fail-on-empty-changeset"] + if tags: + command_list = command_list + ["--tags", str(tags)] + if region: + command_list = command_list + ["--region", str(region)] + if profile: + command_list = command_list + ["--profile", str(profile)] + + return command_list diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py new file mode 100644 index 0000000000..a2bfb79f68 --- /dev/null +++ b/tests/integration/deploy/test_deploy_command.py @@ -0,0 +1,84 @@ +import os +import tempfile +import uuid +from subprocess import Popen, PIPE +from unittest import skipIf + +import boto3 +from parameterized import parameterized + +from tests.integration.deploy.deploy_integ_base import DeployIntegBase +from tests.integration.package.package_integ_base import 
PackageIntegBase +from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI + +# Deploy tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. +# This is to restrict package tests to run outside of CI/CD and when the branch is not master. +SKIP_DEPLOY_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI + + +@skipIf(SKIP_DEPLOY_TESTS, "Skip deploy tests in CI/CD only") +class TestDeploy(PackageIntegBase, DeployIntegBase): + def setUp(self): + self.cf_client = boto3.client("cloudformation") + self.sns_arn = os.environ.get("AWS_SNS") + self.stack_names = [] + super(TestDeploy, self).setUp() + + def tearDown(self): + for stack_name in self.stack_names: + self.cf_client.delete_stack(StackName=stack_name) + super(TestDeploy, self).tearDown() + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_all_args(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + with tempfile.NamedTemporaryFile(delete=False) as output_template_file: + # Package necessary artifacts. + package_command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, template=template_path, output_template_file=output_template_file.name + ) + + package_process = Popen(package_command_list, stdout=PIPE) + package_process.wait() + + self.assertEqual(package_process.returncode, 0) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Deploy and only show changeset. 
+ deploy_command_list_no_execute = self.get_deploy_command_list( + template_file=output_template_file.name, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=True, + tags="integ=true clarity=yes", + ) + + deploy_process_no_execute = Popen(deploy_command_list_no_execute, stdout=PIPE) + deploy_process_no_execute.wait() + self.assertEqual(deploy_process_no_execute.returncode, 0) + + # Deploy the given stack with the changeset. + deploy_command_list_execute = self.get_deploy_command_list( + template_file=output_template_file.name, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + tags="integ=true clarity=yes", + ) + + deploy_process = Popen(deploy_command_list_execute, stdout=PIPE) + deploy_process.wait() + self.assertEqual(deploy_process.returncode, 0) diff --git a/tests/integration/package/package_integ_base.py b/tests/integration/package/package_integ_base.py index 35d8ff47c0..b39115eb30 100644 --- a/tests/integration/package/package_integ_base.py +++ b/tests/integration/package/package_integ_base.py @@ -1,7 +1,6 @@ import os import uuid import json -import tempfile import time from pathlib import Path from unittest import TestCase @@ -26,6 +25,12 @@ def setUpClass(cls): # Given 3 seconds for all the bucket creation to complete time.sleep(3) + def setUp(self): + super(PackageIntegBase, self).setUp() + + def tearDown(self): + super(PackageIntegBase, self).tearDown() + @classmethod def tearDownClass(cls): cls.s3_bucket.objects.all().delete() diff --git a/tests/integration/testdata/package/aws-serverless-function.yaml 
b/tests/integration/testdata/package/aws-serverless-function.yaml index 1691cffe8e..ef8b30c245 100644 --- a/tests/integration/testdata/package/aws-serverless-function.yaml +++ b/tests/integration/testdata/package/aws-serverless-function.yaml @@ -2,6 +2,12 @@ AWSTemplateFormatVersion : '2010-09-09' Transform: AWS::Serverless-2016-10-31 Description: A hello world application. +Parameters: + Parameter: + Type: String + Default: Sample + Description: A custom parameter + Resources: HelloWorldFunction: Type: AWS::Serverless::Function diff --git a/tests/regression/deploy/__init__.py b/tests/regression/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/regression/deploy/regression_deploy_base.py b/tests/regression/deploy/regression_deploy_base.py new file mode 100644 index 0000000000..c564128243 --- /dev/null +++ b/tests/regression/deploy/regression_deploy_base.py @@ -0,0 +1,106 @@ +import os +import uuid +import json +import tempfile +import time +from pathlib import Path +from subprocess import Popen, PIPE +from unittest import TestCase + +import boto3 + + +class DeployRegressionBase(TestCase): + @classmethod + def setUpClass(cls): + pass + + @classmethod + def tearDownClass(cls): + pass + + def base_command(self, base): + command = [base] + if os.getenv("SAM_CLI_DEV") and base == "sam": + command = ["samdev"] + elif base == "aws": + command = [base, "cloudformation"] + + return command + + def get_deploy_command_list( + self, + base="sam", + s3_bucket=None, + stack_name=None, + template=None, + template_file=None, + s3_prefix=None, + capabilities=None, + force_upload=False, + notification_arns=None, + fail_on_empty_changeset=False, + no_execute_changeset=False, + parameter_overrides=None, + role_arn=None, + kms_key_id=None, + tags=None, + profile=None, + region=None, + ): + command_list = self.base_command(base=base) + + command_list = command_list + ["deploy"] + + if s3_bucket: + command_list = command_list + ["--s3-bucket", 
str(s3_bucket)] + if capabilities: + command_list = command_list + ["--capabilities", str(capabilities)] + if parameter_overrides: + command_list = command_list + ["--parameter-overrides", str(parameter_overrides)] + if role_arn: + command_list = command_list + ["--role-arn", str(role_arn)] + if notification_arns: + command_list = command_list + ["--notification-arns", str(notification_arns)] + if stack_name: + command_list = command_list + ["--stack-name", str(stack_name)] + if template: + command_list = command_list + ["--template", str(template)] + if template_file: + command_list = command_list + ["--template-file", str(template_file)] + if s3_prefix: + command_list = command_list + ["--s3-prefix", str(s3_prefix)] + if kms_key_id: + command_list = command_list + ["--kms-key-id", str(kms_key_id)] + if no_execute_changeset: + command_list = command_list + ["--no-execute-changeset"] + if force_upload: + command_list = command_list + ["--force-upload"] + if fail_on_empty_changeset: + command_list = command_list + ["--fail-on-empty-changeset"] + if tags: + command_list = command_list + ["--tags", str(tags)] + if region: + command_list = command_list + ["--region", str(region)] + if profile: + command_list = command_list + ["--profile", str(profile)] + + return command_list + + def deploy_regression_check(self, args, sam_return_code=0, aws_return_code=0, commands=[]): + sam_stack_name = args.get("sam_stack_name", None) + aws_stack_name = args.get("aws_stack_name", None) + if sam_stack_name: + del args["sam_stack_name"] + if aws_stack_name: + del args["aws_stack_name"] + + aws_command_list = self.get_deploy_command_list(base="aws", stack_name=aws_stack_name, **args) + process = Popen(aws_command_list, stdout=PIPE) + process.wait() + self.assertEqual(process.returncode, aws_return_code) + + sam_command_list = self.get_deploy_command_list(stack_name=sam_stack_name, **args) + process = Popen(sam_command_list, stdout=PIPE) + process.wait() + 
self.assertEqual(process.returncode, sam_return_code) diff --git a/tests/regression/deploy/test_deploy_regression.py b/tests/regression/deploy/test_deploy_regression.py new file mode 100644 index 0000000000..8280189b01 --- /dev/null +++ b/tests/regression/deploy/test_deploy_regression.py @@ -0,0 +1,154 @@ +import os +import tempfile +import uuid +from subprocess import Popen, PIPE +from unittest import skipIf + +import boto3 +from parameterized import parameterized + +from tests.regression.deploy.regression_deploy_base import DeployRegressionBase +from tests.regression.package.regression_package_base import PackageRegressionBase +from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI + +# Package Regression tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. +# This is to restrict package tests to run outside of CI/CD and when the branch is not master. +SKIP_DEPLOY_REGRESSION_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI + +# Only testing return codes to be equivalent + + +@skipIf(SKIP_DEPLOY_REGRESSION_TESTS, "Skip deploy regression tests in CI/CD only") +class TestDeployRegression(PackageRegressionBase, DeployRegressionBase): + def setUp(self): + self.sns_arn = os.environ.get("AWS_SNS") + self.kms_key = os.environ.get("AWS_KMS_KEY") + self.stack_names = [] + self.cf_client = boto3.client("cloudformation") + super(TestDeployRegression, self).setUp() + + def tearDown(self): + for stack_name in self.stack_names: + self.cf_client.delete_stack(StackName=stack_name) + super(TestDeployRegression, self).tearDown() + + def prepare_package(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + output_template_file = tempfile.NamedTemporaryFile(delete=False) + package_command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, template_file=template_path, output_template_file=output_template_file.name + ) + + package_process = 
Popen(package_command_list, stdout=PIPE) + package_process.wait() + self.assertEqual(package_process.returncode, 0) + return output_template_file.name + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_all_args(self, template_file): + + output_template_file = self.prepare_package(template_file=template_file) + + sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(sam_stack_name) + + aws_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(aws_stack_name) + + arguments = { + "template_file": output_template_file, + "aws_stack_name": aws_stack_name, + "sam_stack_name": sam_stack_name, + "capabilities": "CAPABILITY_IAM", + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_no_stack_name(self, template_file): + output_template_file = self.prepare_package(template_file=template_file) + + arguments = { + "template_file": output_template_file, + "capabilities": "CAPABILITY_IAM", + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=2, aws_return_code=2) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_no_capabilities(self, template_file): + output_template_file = self.prepare_package(template_file=template_file) + + sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(sam_stack_name) + + aws_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(aws_stack_name) + + arguments = { + 
"template_file": output_template_file, + "aws_stack_name": aws_stack_name, + "sam_stack_name": sam_stack_name, + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=2, aws_return_code=255) + + def test_deploy_with_no_template_file(self): + sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(sam_stack_name) + + aws_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(aws_stack_name) + + arguments = { + "aws_stack_name": aws_stack_name, + "sam_stack_name": sam_stack_name, + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=2, aws_return_code=2) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_no_changes(self, template_file): + output_template_file = self.prepare_package(template_file=template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + arguments = { + "template_file": output_template_file, + "capabilities": "CAPABILITY_IAM", + "sam_stack_name": stack_name, + "aws_stack_name": stack_name, + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=0, aws_return_code=0) diff --git a/tests/unit/cli/test_types.py b/tests/unit/cli/test_types.py index b0cd383db9..985c3e66c1 100644 --- a/tests/unit/cli/test_types.py +++ b/tests/unit/cli/test_types.py @@ -2,7 +2,7 @@ from 
unittest.mock import Mock, ANY from nose_parameterized import parameterized -from samcli.cli.types import CfnParameterOverridesType +from samcli.cli.types import CfnParameterOverridesType, CfnTags from samcli.cli.types import CfnMetadataType @@ -12,19 +12,19 @@ def setUp(self): @parameterized.expand( [ - ("some string"), + (("some string"),), # Key must not contain spaces - ('ParameterKey="Ke y",ParameterValue=Value'), + (('ParameterKey="Ke y",ParameterValue=Value'),), # No value - ("ParameterKey=Key,ParameterValue="), + (("ParameterKey=Key,ParameterValue="),), # No key - ("ParameterKey=,ParameterValue=Value"), + (("ParameterKey=,ParameterValue=Value"),), # Case sensitive - ("parameterkey=Key,ParameterValue=Value"), + (("parameterkey=Key,ParameterValue=Value"),), # No space after comma - ("ParameterKey=Key, ParameterValue=Value"), + (("ParameterKey=Key, ParameterValue=Value"),), # Bad separator - ("ParameterKey:Key,ParameterValue:Value"), + (("ParameterKey:Key,ParameterValue:Value"),), ] ) def test_must_fail_on_invalid_format(self, input): @@ -36,44 +36,44 @@ def test_must_fail_on_invalid_format(self, input): @parameterized.expand( [ ( - "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro", + ("ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro",), {"KeyPairName": "MyKey", "InstanceType": "t1.micro"}, ), - ('ParameterKey="Key",ParameterValue=Val\\ ue', {"Key": "Val ue"}), - ('ParameterKey="Key",ParameterValue="Val\\"ue"', {"Key": 'Val"ue'}), - ("ParameterKey=Key,ParameterValue=Value", {"Key": "Value"}), - ('ParameterKey=Key,ParameterValue=""', {"Key": ""}), + (('ParameterKey="Key",ParameterValue=Val\\ ue',), {"Key": "Val ue"}), + (('ParameterKey="Key",ParameterValue="Val\\"ue"',), {"Key": 'Val"ue'}), + (("ParameterKey=Key,ParameterValue=Value",), {"Key": "Value"}), + (('ParameterKey=Key,ParameterValue=""',), {"Key": ""}), ( # Trailing and leading whitespaces - " 
ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2 ", + (" ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2 ",), {"Key": "Value", "Key2": "Value2"}, ), ( # Quotes at the end - 'ParameterKey=Key,ParameterValue=Value\\"', + ('ParameterKey=Key,ParameterValue=Value\\"',), {"Key": 'Value"'}, ), ( # Quotes at the start - 'ParameterKey=Key,ParameterValue=\\"Value', + ('ParameterKey=Key,ParameterValue=\\"Value',), {"Key": '"Value'}, ), ( # Value is spacial characters - "ParameterKey=Key,ParameterValue==-_)(*&^%$#@!`~:;,. ParameterKey=Key2,ParameterValue=Value2", + ("ParameterKey=Key,ParameterValue==-_)(*&^%$#@!`~:;,. ParameterKey=Key2,ParameterValue=Value2",), {"Key": "=-_)(*&^%$#@!`~:;,.", "Key2": "Value2"}, ), - ('ParameterKey=Key1230,ParameterValue="{\\"a\\":\\"b\\"}"', {"Key1230": '{"a":"b"}'}), + (('ParameterKey=Key1230,ParameterValue="{\\"a\\":\\"b\\"}"',), {"Key1230": '{"a":"b"}'}), ( # Must ignore empty inputs - "", + ("",), {}, ), ] ) def test_successful_parsing(self, input, expected): result = self.param_type.convert(input, None, None) - self.assertEqual(result, expected, msg="Failed with Input = " + input) + self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) class TestCfnMetadataType(TestCase): @@ -120,3 +120,64 @@ def test_must_fail_on_invalid_format(self, input): def test_successful_parsing(self, input, expected): result = self.param_type.convert(input, None, None) self.assertEqual(result, expected, msg="Failed with Input = " + input) + + +class TestCfnTags(TestCase): + def setUp(self): + self.param_type = CfnTags() + + @parameterized.expand( + [ + # Just a string + ("some string"), + # Wrong notation + ("a==b"), + # Wrong multi-key notation + ("a==b,c==d"), + ] + ) + def test_must_fail_on_invalid_format(self, input): + self.param_type.fail = Mock() + self.param_type.convert(input, "param", "ctx") + + self.param_type.fail.assert_called_with(ANY, "param", "ctx") + + 
@parameterized.expand([(("a=b",), {"a": "b"}), (("a=b", "c=d"), {"a": "b", "c": "d"}), (("",), {})]) + def test_successful_parsing(self, input, expected): + result = self.param_type.convert(input, None, None) + self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) + + +# class TestCfnCapabilitiesType(TestCase): +# def setUp(self): +# self.param_type = CfnCapabilitiesType() +# +# @parameterized.expand( +# [ +# # Just a string +# ("some string"), +# # tuple of string +# ("some string",), +# # non-tuple valid string +# "CAPABILITY_NAMED_IAM", +# ] +# ) +# def test_must_fail_on_invalid_format(self, input): +# self.param_type.fail = Mock() +# self.param_type.convert(input, "param", "ctx") +# +# self.param_type.fail.assert_called_with(ANY, "param", "ctx") +# +# @parameterized.expand( +# [ +# (("CAPABILITY_AUTO_EXPAND",), ("CAPABILITY_AUTO_EXPAND",)), +# (("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM"), ("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM")), +# ( +# ("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM", "CAPABILITY_IAM"), +# ("CAPABILITY_AUTO_EXPAND", "CAPABILITY_NAMED_IAM", "CAPABILITY_IAM"), +# ), +# ] +# ) +# def test_successful_parsing(self, input, expected): +# result = self.param_type.convert(input, None, None) +# self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) diff --git a/tests/unit/commands/_utils/custom_options/__init__.py b/tests/unit/commands/_utils/custom_options/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/_utils/custom_options/test_option_nargs.py b/tests/unit/commands/_utils/custom_options/test_option_nargs.py new file mode 100644 index 0000000000..18472b3e97 --- /dev/null +++ b/tests/unit/commands/_utils/custom_options/test_option_nargs.py @@ -0,0 +1,40 @@ +from unittest import TestCase +from unittest.mock import MagicMock + +from samcli.commands._utils.custom_options.option_nargs import OptionNargs + + +class MockRArgs: + def __init__(self, 
rargs): + self.rargs = rargs + + +class TestOptionNargs(TestCase): + def setUp(self): + self.name = "test" + self.opt = "--use" + self.prefixes = ["--", "-"] + self.arg = "first" + self.rargs_list = ["second", "third", "--nextopt"] + self.expected_args = tuple([self.arg] + self.rargs_list[:-1]) + self.option_nargs = OptionNargs(param_decls=(self.name, self.opt)) + + def test_option(self): + parser = MagicMock() + ctx = MagicMock() + self.option_nargs.add_to_parser(parser=parser, ctx=ctx) + # Get option parser + + parser._long_opt.get.assert_called_with(self.opt) + self.assertEqual(self.option_nargs._nargs_parser, parser._long_opt.get()) + + # set prefixes + self.option_nargs._nargs_parser.prefixes = self.prefixes + + # create new state with remaining args + state = MockRArgs(self.rargs_list) + # call process with the monkey patched `parser_process` within `add_to_process` + parser._long_opt.get().process(self.arg, state) + + # finally call parser.process with ("first", "second", "third") + self.option_nargs._previous_parser_process.assert_called_with(self.expected_args, state) diff --git a/tests/unit/commands/_utils/test_table_print.py b/tests/unit/commands/_utils/test_table_print.py new file mode 100644 index 0000000000..518a30e43a --- /dev/null +++ b/tests/unit/commands/_utils/test_table_print.py @@ -0,0 +1,83 @@ +import io +from contextlib import redirect_stdout +from collections import OrderedDict +from unittest import TestCase + +from samcli.commands._utils.table_print import pprint_column_names, pprint_columns + +TABLE_FORMAT_STRING = "{Alpha:<{0}} {Beta:<{1}} {Gamma:<{2}}" +TABLE_FORMAT_ARGS = OrderedDict({"Alpha": "Alpha", "Beta": "Beta", "Gamma": "Gamma"}) + + +class TestTablePrint(TestCase): + def setUp(self): + self.redirect_out = io.StringIO() + + def test_pprint_column_names(self): + @pprint_column_names(TABLE_FORMAT_STRING, TABLE_FORMAT_ARGS) + def to_be_decorated(*args, **kwargs): + pass + + with redirect_stdout(self.redirect_out): + 
to_be_decorated() + output = ( + "------------------------------------------------------------------------------------------------\n" + "Alpha Beta Gamma \n" + "------------------------------------------------------------------------------------------------\n" + "------------------------------------------------------------------------------------------------\n" + ) + + self.assertEqual(output, self.redirect_out.getvalue()) + + def test_pprint_column_names_and_text(self): + @pprint_column_names(TABLE_FORMAT_STRING, TABLE_FORMAT_ARGS) + def to_be_decorated(*args, **kwargs): + pprint_columns( + columns=["A", "B", "C"], + width=kwargs["width"], + margin=kwargs["margin"], + format_args=kwargs["format_args"], + format_string=TABLE_FORMAT_STRING, + columns_dict=TABLE_FORMAT_ARGS.copy(), + ) + + with redirect_stdout(self.redirect_out): + to_be_decorated() + + output = ( + "------------------------------------------------------------------------------------------------\n" + "Alpha Beta Gamma \n" + "------------------------------------------------------------------------------------------------\n" + "A B C \n" + "------------------------------------------------------------------------------------------------\n" + ) + self.assertEqual(output, self.redirect_out.getvalue()) + + def test_pprint_exceptions_with_no_column_names(self): + with self.assertRaises(ValueError): + + @pprint_column_names(TABLE_FORMAT_STRING, {}) + def to_be_decorated(*args, **kwargs): + pprint_columns( + columns=["A", "B", "C"], + width=kwargs["width"], + margin=kwargs["margin"], + format_args=kwargs["format_args"], + format_string=TABLE_FORMAT_STRING, + columns_dict=TABLE_FORMAT_ARGS.copy(), + ) + + def test_pprint_exceptions_with_too_many_column_names(self): + massive_dictionary = {str(i): str(i) for i in range(100)} + with self.assertRaises(ValueError): + + @pprint_column_names(TABLE_FORMAT_STRING, massive_dictionary) + def to_be_decorated(*args, **kwargs): + pprint_columns( + columns=["A", "B", "C"], 
+ width=kwargs["width"], + margin=kwargs["margin"], + format_args=kwargs["format_args"], + format_string=TABLE_FORMAT_STRING, + columns_dict=TABLE_FORMAT_ARGS.copy(), + ) diff --git a/tests/unit/commands/deploy/__init__.py b/tests/unit/commands/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py new file mode 100644 index 0000000000..4a39696eb2 --- /dev/null +++ b/tests/unit/commands/deploy/test_command.py @@ -0,0 +1,71 @@ +from unittest import TestCase +from unittest.mock import patch, Mock + +from samcli.commands.deploy.command import do_cli + + +class TestDeployliCommand(TestCase): + def setUp(self): + + self.template_file = "input-template-file" + self.stack_name = "stack-name" + self.s3_bucket = "s3-bucket" + self.s3_prefix = "s3-prefix" + self.kms_key_id = "kms-key-id" + self.no_execute_changeset = False + self.notification_arns = [] + self.parameter_overrides = {"a": "b"} + self.capabilities = "CAPABILITY_IAM" + self.tags = {"c": "d"} + self.fail_on_empty_changset = True + self.role_arn = "role_arn" + self.force_upload = False + self.metadata = {"abc": "def"} + self.region = None + self.profile = None + + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + def test_all_args(self, deploy_command_context, click_mock): + + context_mock = Mock() + deploy_command_context.return_value.__enter__.return_value = context_mock + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=self.s3_bucket, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + 
region=self.region, + profile=self.profile, + ) + + deploy_command_context.assert_called_with( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=self.s3_bucket, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + ) + + context_mock.run.assert_called_with() + self.assertEqual(context_mock.run.call_count, 1) diff --git a/tests/unit/commands/deploy/test_deploy_context.py b/tests/unit/commands/deploy/test_deploy_context.py new file mode 100644 index 0000000000..0813553904 --- /dev/null +++ b/tests/unit/commands/deploy/test_deploy_context.py @@ -0,0 +1,141 @@ +"""Test sam deploy command""" +from unittest import TestCase +from unittest.mock import patch, MagicMock +import tempfile + +from samcli.lib.deploy.deployer import Deployer +from samcli.commands.deploy.deploy_context import DeployContext +from samcli.commands.deploy.exceptions import DeployBucketRequiredError, DeployFailedError, ChangeEmptyError + + +class TestPackageCommand(TestCase): + def setUp(self): + self.deploy_command_context = DeployContext( + template_file="template-file", + stack_name="stack-name", + s3_bucket="s3-bucket", + force_upload=True, + s3_prefix="s3-prefix", + kms_key_id="kms-key-id", + parameter_overrides={"a": "b"}, + capabilities="CAPABILITY_IAM", + no_execute_changeset=False, + role_arn="role-arn", + notification_arns=[], + fail_on_empty_changeset=False, + tags={"a": "b"}, + region=None, + profile=None, + ) + + def test_template_improper(self): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + with self.assertRaises(DeployFailedError): + self.deploy_command_context.template_file = 
template_file.name + self.deploy_command_context.run() + + def test_template_size_large_no_s3_bucket(self): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b" " * 51200) + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.s3_bucket = None + with self.assertRaises(DeployBucketRequiredError): + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_size_large_and_s3_bucket(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b" " * 51200) + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.run() + + @patch("boto3.Session") + def test_template_valid(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.deploy = MagicMock() + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object( + Deployer, "create_and_wait_for_changeset", MagicMock(side_effect=ChangeEmptyError(stack_name="stack-name")) + ) + def test_template_valid_change_empty(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.fail_on_empty_changeset = True + self.deploy_command_context.template_file = template_file.name + + with self.assertRaises(ChangeEmptyError): + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object( + Deployer, 
"create_and_wait_for_changeset", MagicMock(side_effect=ChangeEmptyError(stack_name="stack-name")) + ) + def test_template_valid_change_empty_no_fail_on_empty_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_valid_execute_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.run() + self.assertEqual(self.deploy_command_context.deployer.create_and_wait_for_changeset.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.execute_changeset.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.wait_for_execute.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.get_stack_outputs.call_count, 1) + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_valid_no_execute_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.no_execute_changeset = True + + self.deploy_command_context.run() + 
self.assertEqual(self.deploy_command_context.deployer.create_and_wait_for_changeset.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.execute_changeset.call_count, 0) + self.assertEqual(self.deploy_command_context.deployer.wait_for_execute.call_count, 0) + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_valid_execute_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b'{"Parameters": {"a":"b","c":"d"}}') + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.run() + self.assertEqual(self.deploy_command_context.deployer.create_and_wait_for_changeset.call_count, 1) + self.assertEqual( + self.deploy_command_context.deployer.create_and_wait_for_changeset.call_args[1]["parameter_values"], + [{"ParameterKey": "a", "ParameterValue": "b"}, {"ParameterKey": "c", "UsePreviousValue": True}], + ) diff --git a/tests/unit/commands/test_deploy.py b/tests/unit/commands/test_deploy.py deleted file mode 100644 index 90387d5095..0000000000 --- a/tests/unit/commands/test_deploy.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -Tests Deploy CLI command -""" - -from unittest import TestCase -from unittest.mock import patch - -from samcli.commands.deploy import do_cli as deploy_cli - - -class TestCli(TestCase): - def setUp(self): - self.args = ("--force-upload",) - self.expected_args = self.args + ("--stack-name", "stackName") - - @patch("samcli.commands.deploy.execute_command") - def test_deploy_must_pass_args(self, execute_command_mock): - execute_command_mock.return_value = True - deploy_cli(self.args, "file.yaml", "stackName") - execute_command_mock.assert_called_with("deploy", self.expected_args, 
template_file="file.yaml") diff --git a/tests/unit/lib/deploy/__init__.py b/tests/unit/lib/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/deploy/test_deployer.py b/tests/unit/lib/deploy/test_deployer.py new file mode 100644 index 0000000000..0d5b2d3f32 --- /dev/null +++ b/tests/unit/lib/deploy/test_deployer.py @@ -0,0 +1,535 @@ +import uuid +import time +from datetime import datetime, timedelta +from unittest import TestCase +from unittest.mock import patch, MagicMock, ANY + +from botocore.exceptions import ClientError, WaiterError + +from samcli.commands.deploy.exceptions import DeployFailedError, ChangeSetError, DeployStackOutPutFailedError +from samcli.lib.deploy.deployer import Deployer +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.utils.time import utc_to_timestamp, to_datetime + + +class MockPaginator: + def __init__(self, resp): + self.resp = resp + + def paginate(self, ChangeSetName=None, StackName=None): + return self.resp + + +class MockChangesetWaiter: + def __init__(self, ex=None): + self.ex = ex + + def wait(self, ChangeSetName, StackName, WaiterConfig): + if self.ex: + raise self.ex + return + + +class MockCreateUpdateWaiter: + def __init__(self, ex=None): + self.ex = ex + + def wait(self, StackName, WaiterConfig): + if self.ex: + raise self.ex + return + + +class TestDeployer(TestCase): + def setUp(self): + self.session = MagicMock() + self.cloudformation_client = self.session.client("cloudformation") + self.s3_client = self.session.client("s3") + self.deployer = Deployer(self.cloudformation_client) + + def test_deployer_init(self): + self.assertEqual(self.deployer._client, self.cloudformation_client) + self.assertEqual(self.deployer.changeset_prefix, "samcli-deploy") + + def test_deployer_has_no_stack(self): + self.deployer._client.describe_stacks = MagicMock(return_value={"Stacks": []}) + self.assertEqual(self.deployer.has_stack("test"), False) + + def 
test_deployer_has_stack_in_review(self): + self.deployer._client.describe_stacks = MagicMock( + return_value={"Stacks": [{"StackStatus": "REVIEW_IN_PROGRESS"}]} + ) + self.assertEqual(self.deployer.has_stack("test"), False) + + def test_deployer_has_stack_exception_non_exsistent(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) + self.assertEqual(self.deployer.has_stack("test"), False) + + def test_deployer_has_stack_exception(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="stack_status") + ) + with self.assertRaises(ClientError): + self.deployer.has_stack("test") + + def test_create_changeset(self): + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + self.assertEqual(self.deployer._client.create_change_set.call_count, 1) + self.deployer._client.create_change_set.assert_called_with( + Capabilities=["CAPABILITY_IAM"], + ChangeSetName=ANY, + ChangeSetType="CREATE", + Description=ANY, + NotificationARNs=[], + Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}], + RoleARN="role-arn", + StackName="test", + Tags={"unit": "true"}, + TemplateURL=ANY, + ) + + def test_update_changeset(self): + self.deployer.has_stack = MagicMock(return_value=True) + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", 
"UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + self.assertEqual(self.deployer._client.create_change_set.call_count, 1) + self.deployer._client.create_change_set.assert_called_with( + Capabilities=["CAPABILITY_IAM"], + ChangeSetName=ANY, + ChangeSetType="UPDATE", + Description=ANY, + NotificationARNs=[], + Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}], + RoleARN="role-arn", + StackName="test", + Tags={"unit": "true"}, + TemplateURL=ANY, + ) + + def test_create_changeset_exception(self): + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer._client.create_change_set = MagicMock(side_effect=Exception) + with self.assertRaises(ChangeSetError): + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + def test_describe_changeset_with_changes(self): + response = [ + { + "Changes": [ + {"ResourceChange": {"LogicalResourceId": "resource_id1", "ResourceType": "s3", "Action": "Add"}} + ] + }, + { + "Changes": [ + {"ResourceChange": {"LogicalResourceId": "resource_id2", "ResourceType": "kms", "Action": "Add"}} + ] + }, + { + "Changes": [ + {"ResourceChange": {"LogicalResourceId": "resource_id3", "ResourceType": "lambda", "Action": "Add"}} + ] + }, + ] + self.deployer._client.get_paginator = MagicMock(return_value=MockPaginator(resp=response)) + changes = self.deployer.describe_changeset("change_id", "test") + self.assertEqual( + changes, + { + "Add": [ + {"LogicalResourceId": "resource_id1", "ResourceType": "s3"}, + {"LogicalResourceId": 
"resource_id2", "ResourceType": "kms"}, + {"LogicalResourceId": "resource_id3", "ResourceType": "lambda"}, + ], + "Modify": [], + "Remove": [], + }, + ) + + def test_describe_changeset_with_no_changes(self): + response = [{"Changes": []}] + self.deployer._client.get_paginator = MagicMock(return_value=MockPaginator(resp=response)) + changes = self.deployer.describe_changeset("change_id", "test") + self.assertEqual(changes, {"Add": [], "Modify": [], "Remove": []}) + + def test_wait_for_changeset(self): + self.deployer._client.get_waiter = MagicMock(return_value=MockChangesetWaiter()) + self.deployer.wait_for_changeset("test-id", "test-stack") + + def test_wait_for_changeset_exception_ChangeEmpty(self): + self.deployer._client.get_waiter = MagicMock( + return_value=MockChangesetWaiter( + ex=WaiterError( + name="wait_for_changeset", + reason="unit-test", + last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, + ) + ) + ) + with self.assertRaises(ChangeSetError): + self.deployer.wait_for_changeset("test-id", "test-stack") + + def test_execute_changeset(self): + self.deployer.execute_changeset("id", "test") + self.deployer._client.execute_change_set.assert_called_with(ChangeSetName="id", StackName="test") + + def test_execute_changeset_exception(self): + self.deployer._client.execute_change_set = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="execute_changeset") + ) + with self.assertRaises(DeployFailedError): + self.deployer.execute_changeset("id", "test") + + def test_get_last_event_time(self): + timestamp = datetime.utcnow() + self.deployer._client.describe_stack_events = MagicMock( + return_value={"StackEvents": [{"Timestamp": timestamp}]} + ) + self.assertEqual(self.deployer.get_last_event_time("test"), utc_to_timestamp(timestamp)) + + def test_get_last_event_time_unknown_last_time(self): + current_timestamp = datetime.utcnow() + self.deployer._client.describe_stack_events = 
MagicMock(side_effect=KeyError) + # Convert to milliseconds from seconds + last_stack_event_timestamp = to_datetime(self.deployer.get_last_event_time("test") * 1000) + self.assertEqual(last_stack_event_timestamp.year, current_timestamp.year) + self.assertEqual(last_stack_event_timestamp.month, current_timestamp.month) + self.assertEqual(last_stack_event_timestamp.day, current_timestamp.day) + self.assertEqual(last_stack_event_timestamp.hour, current_timestamp.hour) + self.assertEqual(last_stack_event_timestamp.minute, current_timestamp.minute) + self.assertEqual(last_stack_event_timestamp.second, current_timestamp.second) + + @patch("time.sleep") + def test_describe_stack_events(self, patched_time): + current_timestamp = datetime.utcnow() + + self.deployer._client.describe_stacks = MagicMock( + side_effect=[ + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE_CLEANUP_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE"}]}, + ] + ) + self.deployer._client.get_paginator = MagicMock( + return_value=MockPaginator( + [ + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + ] + ) + ) + + 
self.deployer.describe_stack_events("test", time.time() - 1) + + @patch("time.sleep") + def test_describe_stack_events_exceptions(self, patched_time): + + self.deployer._client.describe_stacks = MagicMock( + side_effect=[ + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ] + ) + with self.assertRaises(ClientError): + self.deployer.describe_stack_events("test", time.time()) + + @patch("time.sleep") + def test_describe_stack_events_resume_after_exceptions(self, patched_time): + current_timestamp = datetime.utcnow() + + self.deployer._client.describe_stacks = MagicMock( + side_effect=[ + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE_CLEANUP_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE"}]}, + ] + ) + + self.deployer._client.get_paginator = MagicMock( + return_value=MockPaginator( + [ + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + 
"ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + ] + ) + ) + + self.deployer.describe_stack_events("test", time.time()) + + def test_check_stack_status(self): + self.assertEqual(self.deployer._check_stack_complete("CREATE_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("CREATE_FAILED"), False) + self.assertEqual(self.deployer._check_stack_complete("CREATE_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("DELETE_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("DELETE_FAILED"), False) + self.assertEqual(self.deployer._check_stack_complete("DELETE_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("REVIEW_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("ROLLBACK_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("ROLLBACK_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_COMPLETE_CLEANUP_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_ROLLBACK_FAILED"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_ROLLBACK_IN_PROGRESS"), False) + + @patch("time.sleep") + def test_wait_for_execute(self, 
patched_time): + self.deployer.describe_stack_events = MagicMock() + self.deployer._client.get_waiter = MagicMock(return_value=MockCreateUpdateWaiter()) + self.deployer.wait_for_execute("test", "CREATE") + self.deployer.wait_for_execute("test", "UPDATE") + with self.assertRaises(RuntimeError): + self.deployer.wait_for_execute("test", "DESTRUCT") + + self.deployer._client.get_waiter = MagicMock( + return_value=MockCreateUpdateWaiter( + ex=WaiterError( + name="create_changeset", + reason="unit-test", + last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, + ) + ) + ) + with self.assertRaises(DeployFailedError): + self.deployer.wait_for_execute("test", "CREATE") + + def test_create_and_wait_for_changeset(self): + self.deployer.create_changeset = MagicMock(return_value=({"Id": "test"}, "create")) + self.deployer.wait_for_changeset = MagicMock() + self.deployer.describe_changeset = MagicMock() + + result = self.deployer.create_and_wait_for_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + self.assertEqual(result, ({"Id": "test"}, "create")) + + def test_create_and_wait_for_changeset_exception(self): + self.deployer.create_changeset = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Something Wrong"}}, operation_name="create_changeset" + ) + ) + with self.assertRaises(DeployFailedError): + self.deployer.create_and_wait_for_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + 
s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + def test_get_stack_outputs(self): + outputs = { + "Stacks": [ + { + "Outputs": [ + {"OutputKey": "Key1", "OutputValue": "Value1", "Description": "output for s3"}, + {"OutputKey": "Key2", "OutputValue": "Value2", "Description": "output for kms"}, + ] + } + ] + } + self.deployer._client.describe_stacks = MagicMock(return_value=outputs) + + self.assertEqual(outputs["Stacks"][0]["Outputs"], self.deployer.get_stack_outputs(stack_name="test")) + self.deployer._client.describe_stacks.assert_called_with(StackName="test") + + @patch("samcli.lib.deploy.deployer.pprint_columns") + def test_get_stack_outputs_no_echo(self, mock_pprint_columns): + outputs = { + "Stacks": [ + { + "Outputs": [ + {"OutputKey": "Key1", "OutputValue": "Value1", "Description": "output for s3"}, + {"OutputKey": "Key2", "OutputValue": "Value2", "Description": "output for kms"}, + ] + } + ] + } + self.deployer._client.describe_stacks = MagicMock(return_value=outputs) + + self.assertEqual( + outputs["Stacks"][0]["Outputs"], self.deployer.get_stack_outputs(stack_name="test", echo=False) + ) + self.deployer._client.describe_stacks.assert_called_with(StackName="test") + self.assertEqual(mock_pprint_columns.call_count, 0) + + def test_get_stack_outputs_no_outputs_no_exception(self): + outputs = {"Stacks": [{"SomeOtherKey": "Value"}]} + self.deployer._client.describe_stacks = MagicMock(return_value=outputs) + + self.assertEqual(None, self.deployer.get_stack_outputs(stack_name="test")) + self.deployer._client.describe_stacks.assert_called_with(StackName="test") + + def test_get_stack_outputs_exception(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="describe_stacks") + ) + + with self.assertRaises(DeployStackOutPutFailedError): + self.deployer.get_stack_outputs(stack_name="test") diff --git 
a/tests/unit/lib/samlib/test_cloudformation_command.py b/tests/unit/lib/samlib/test_cloudformation_command.py deleted file mode 100644 index e846570c96..0000000000 --- a/tests/unit/lib/samlib/test_cloudformation_command.py +++ /dev/null @@ -1,166 +0,0 @@ -""" -Tests Deploy CLI -""" - -import os -from subprocess import CalledProcessError, PIPE - -from unittest import TestCase -from unittest.mock import patch, call, ANY - -from samcli.lib.samlib.cloudformation_command import execute_command, find_executable - - -class TestExecuteCommand(TestCase): - def setUp(self): - self.args = ("--arg1", "value1", "different args", "more") - - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - def test_must_add_template_file(self, find_executable_mock, check_call_mock): - find_executable_mock.return_value = "mycmd" - check_call_mock.return_value = True - execute_command("command", self.args, "/path/to/template") - - check_call_mock.assert_called_with( - ["mycmd", "cloudformation", "command"] - + ["--arg1", "value1", "different args", "more", "--template-file", "/path/to/template"], - env=ANY, - ) - - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - @patch("samcli.lib.samlib.cloudformation_command.GlobalConfig") - def test_must_add_sam_cli_info_to_execution_env_var_if_telemetry_is_on( - self, global_config_mock, find_executable_mock, check_call_mock - ): - installation_id = "testtest" - global_config_mock.return_value.installation_id = installation_id - global_config_mock.return_value.telemetry_enabled = True - - expected_env = os.environ.copy() - expected_env["AWS_EXECUTION_ENV"] = "SAM-" + installation_id - - find_executable_mock.return_value = "mycmd" - check_call_mock.return_value = True - execute_command("command", self.args, "/path/to/template") - - check_call_mock.assert_called() - kwargs = check_call_mock.call_args[1] - self.assertIn("env", kwargs) - 
self.assertEqual(kwargs["env"], expected_env) - - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - @patch("samcli.lib.samlib.cloudformation_command.GlobalConfig") - def test_must_not_set_exec_env(self, global_config_mock, find_executable_mock, check_call_mock): - global_config_mock.return_value.telemetry_enabled = False - - # Expected to pass just a copy of the environment variables without modification - expected_env = os.environ.copy() - - find_executable_mock.return_value = "mycmd" - check_call_mock.return_value = True - execute_command("command", self.args, "/path/to/template") - - check_call_mock.assert_called() - kwargs = check_call_mock.call_args[1] - self.assertIn("env", kwargs) - self.assertEqual(kwargs["env"], expected_env) - - @patch("sys.exit") - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - def test_command_must_exit_with_status_code(self, find_executable_mock, check_call_mock, exit_mock): - find_executable_mock.return_value = "mycmd" - check_call_mock.side_effect = CalledProcessError(2, "Error") - exit_mock.return_value = True - execute_command("command", self.args, None) - exit_mock.assert_called_with(2) - - -class TestFindExecutable(TestCase): - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_raw_name(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "Linux" - execname = "foo" - - find_executable(execname) - - self.assertEqual(popen_mock.mock_calls, [call([execname], stdout=PIPE, stderr=PIPE)]) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_name_with_cmd_extension_on_windows(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - expected = "foo.cmd" - - result = find_executable(execname) - self.assertEqual(result, expected) - - self.assertEqual(popen_mock.mock_calls, [call(["foo.cmd"], stdout=PIPE, 
stderr=PIPE)]) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_name_with_exe_extension_on_windows(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - expected = "foo.exe" - - popen_mock.side_effect = [OSError, "success"] # fail on .cmd extension - - result = find_executable(execname) - self.assertEqual(result, expected) - - self.assertEqual( - popen_mock.mock_calls, - [call(["foo.cmd"], stdout=PIPE, stderr=PIPE), call(["foo.exe"], stdout=PIPE, stderr=PIPE)], - ) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_name_with_no_extension_on_windows(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - expected = "foo" - - popen_mock.side_effect = [OSError, OSError, "success"] # fail on .cmd and .exe extension - - result = find_executable(execname) - self.assertEqual(result, expected) - - self.assertEqual( - popen_mock.mock_calls, - [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE), - call(["foo.exe"], stdout=PIPE, stderr=PIPE), - call(["foo"], stdout=PIPE, stderr=PIPE), - ], - ) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_raise_error_if_executable_not_found(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - - popen_mock.side_effect = [OSError, OSError, OSError, "success"] # fail on all executable names - - with self.assertRaises(OSError) as ctx: - find_executable(execname) - - expected = "Cannot find AWS CLI installation, was looking at executables with names: {}".format( - ["foo.cmd", "foo.exe", "foo"] - ) - self.assertEqual(expected, str(ctx.exception)) - - self.assertEqual( - popen_mock.mock_calls, - [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE), - call(["foo.exe"], stdout=PIPE, stderr=PIPE), - call(["foo"], stdout=PIPE, stderr=PIPE), - ], - ) diff --git a/tests/unit/lib/utils/test_time.py 
b/tests/unit/lib/utils/test_time.py index 14b879b0e7..df63d2cc81 100644 --- a/tests/unit/lib/utils/test_time.py +++ b/tests/unit/lib/utils/test_time.py @@ -1,8 +1,10 @@ +import time import datetime from unittest import TestCase -from samcli.lib.utils.time import to_timestamp, timestamp_to_iso, parse_date, to_utc +from samcli.lib.utils.time import to_timestamp, timestamp_to_iso, parse_date, to_utc, utc_to_timestamp +from dateutil.tz import tzutc class TestTimestampToIso(TestCase): @@ -26,6 +28,12 @@ def test_must_convert_to_timestamp(self): self.assertEqual(expected, to_timestamp(date)) + def test_convert_utc_to_timestamp(self): + timestamp = time.time() + utc = datetime.datetime.utcfromtimestamp(timestamp) + # compare in milliseconds + self.assertEqual(int(timestamp * 1000), utc_to_timestamp(utc)) + class TestToUtc(TestCase): def test_with_timezone(self):