diff --git a/.pylintrc b/.pylintrc index 767f748fba..70776ecaa2 100644 --- a/.pylintrc +++ b/.pylintrc @@ -59,7 +59,7 @@ confidence= # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=R0201,W0613,I0021,I0020,W1618,W1619,R0902,R0903,W0231,W0611,R0913,W0703,C0330,R0204,I0011,R0904,C0301 +disable=R0201,W0613,W0640,I0021,I0020,W1618,W1619,R0902,R0903,W0231,W0611,R0913,W0703,C0330,R0204,I0011,R0904,R0914,C0301 [REPORTS] diff --git a/appveyor.yml b/appveyor.yml index 5bd7850166..3d77f8d7ee 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -61,18 +61,8 @@ for: - "pylint --rcfile .pylintrc samcli" # There are some functional tests that are currently broken due to not being updated with changed code or still running with node4.3 runtimes # We need to update those but this allows us to at least runs the ones we currently have working - - "pytest tests/functional/commands/validate tests/functional/commands/cli/test_global_config.py" + - "pytest -n 4 tests/functional/commands/validate tests/functional/commands/cli/test_global_config.py" - # Runs only in Linux - - sh: "pytest -vv tests/integration" - - sh: "/tmp/black --check setup.py tests samcli scripts" - - sh: "python scripts/check-isolated-needs-update.py" - - # Smoke tests run in parallel - it runs on both Linux & Windows - # Presence of the RUN_SMOKE envvar will run the smoke tests - # Note: temporarily removing as with current dependencies we require syslog on windows - # which is not present on stdlib. 
- # - ps: "If ($env:RUN_SMOKE) {pytest -n 4 -vv tests/smoke}" - matrix: only: @@ -128,11 +118,11 @@ for: - "pylint --rcfile .pylintrc samcli" # There are some functional tests that are currently broken due to not being updated with changed code or still running with node4.3 runtimes # We need to update those but this allows us to at least runs the ones we currently have working - - "pytest tests/functional/commands/validate tests/functional/commands/cli/test_global_config.py" + - "pytest -n 4 tests/functional/commands/validate tests/functional/commands/cli/test_global_config.py" # Runs only in Linux - sh: "pytest -vv tests/integration" - - sh: "pytest -vv -n 4 tests/regression" + - sh: "pytest -vv tests/regression" - sh: "/tmp/black --check setup.py tests samcli scripts" # Set JAVA_HOME to java11 @@ -142,13 +132,3 @@ for: # Smoke tests run in parallel - it runs on both Linux & Windows # Presence of the RUN_SMOKE envvar will run the smoke tests - ps: "If ($env:RUN_SMOKE) {pytest -n 4 -vv tests/smoke}" - - - - matrix: - only: - - ONLY_SMOKE: 1 - - test_script: - # Smoke tests run in parallel - - sh: "venv/Scripts/activate" - - sh: "pytest -n 4 -vv tests/smoke" diff --git a/designs/package_during_deploy.md b/designs/package_during_deploy.md new file mode 100644 index 0000000000..e67a3bff04 --- /dev/null +++ b/designs/package_during_deploy.md @@ -0,0 +1,178 @@ +Package during `sam deploy` +==================================== + + +What is the problem? +-------------------- + +Today while using `sam deploy` the specified `--template-file` or `--template` is expected to have packaged artifacts references in the given template file. + +This is driven by the following workflow. + +`sam build` -> `sam package --s3-bucket ... --s3-prefix ... --output-template-file packaged.yaml`. + +This workflow builds the requisite code and packages those built artifacts into an s3 bucket, optionally under a given s3 prefix. 
+ +If a developer can optionally cut through this process without requiring an explicit `package` command, but rather have `sam deploy` package to a given s3 bucket, it cuts the number of steps before needing to deploy and test in the cloud. + +This also reduces friction in the `author` and `test` loop. + +Ideal end state. + +`sam build` -> `sam deploy ..... --s3-bucket ....` + + +What will be changed? +--------------------- + +Addition of extra parameters that are currently supported by `sam package` over to `sam deploy` with all of them being optional. + +Additional parameters that need to be on `sam deploy` that are not on `sam package`. + +* `--metadata` +* `--use-json` + +Parameters that dont need to be added. + +* `--output-template-file` + * An explicit `output-template-file` is created on the fly during packaging in the deploy phase. + +If the expectation is to package and deploy in one command, One can now do. + +`sam deploy --stack-name sam-package-on-deploy --capabilities CAPABILITY_IAM --s3-bucket sam-package-bucket` + +There is no explicit need to pass in a `--template-file` or `--template` parameter, if one is not passed in it to defaults to trying to find the template.yaml that was generated by `sam build` which is located at `.aws-sam/build/template.yaml` + +The old method of deploying pre-packaged artifacts will continue to work as before. + +* `sam deploy --template-file packaged.yaml --stack-name sam-package-on-deploy --capabilities CAPABILITY_IAM` + +If a deployment is done without running `sam build` prior we still go ahead and deploy with the given `template.yaml` in the project root. This might still result in a successful deploy, but not a deploy with the correct build artifacts. + + +Future of `sam package`? +--------------------- + +* `sam package` will continue to exist in the state it is today and will continue to be improved upon separately. 
+ +Success criteria for the change +------------------------------- + +User do not require to run `sam package` as part of their author test loop, except for CI/CD purposes, where `sam package` can be run and the packaged template file can be passed to cloudformation deploy actions. + + +Out-of-Scope +------------ + +The s3 bucket where the packaged artifacts go is not abstracted in this design. In the future, the s3 bucket could be specified via a configuration file. + +This is currently in design in : https://github.com/awslabs/aws-sam-cli/pull/1503 + +User Experience Walkthrough +--------------------------- + +`sam build` -> `sam deploy` + +`sam build` -> `sam package` -> `sam deploy` + +Provide identical experiences in terms of a deploying the same stack, with exactly same artifacts. + + +Implementation +============== + +CLI Changes +----------- + +* Add new arguments `--metadata`, `--use-json` and modify existing `--template-file` or `--template` to look for a default `template.yaml` that exists under `.aws-sam/build/` + +### Breaking Change + +* Not a breaking change , but there are optional behavorial changes that a user can subscribe into by supplying a non-packaged template file and an s3 bucket. + +Design +------ + +* Changes to Deploy command's click options +* Attempt to package on every deploy if an appropriate s3 bucket is specified and deploy using the output template file during package. +* If a pre-packaged template is specified, an attempt to package does not change the template and the same template is used for deploy. +* The parameters that share the same name across package and deploy are collapsed together. eg: `--kms-key-id` , if a kms-key-id is specified that same key is used across both packaging and deploy purposes. 
+ +`.samrc` Changes +---------------- + +None + +Security +-------- + +**What new dependencies (libraries/cli) does this change require?** +N/A + +**What other Docker container images are you using?** +N/A + +**Are you creating a new HTTP endpoint? If so explain how it will be +created & used** +N/A + +**Are you connecting to a remote API? If so explain how is this +connection secured** +N/A + +**Are you reading/writing to a temporary folder? If so, what is this +used for and when do you clean up?** + +Possibly reading from a configuration file in the future. + +**How do you validate new .samrc configuration?** + +N/A + +What is your Testing Plan (QA)? +=============================== + +Goal +---- +* Regression tests on previous functionality of `sam deploy` +* Integration tests on automatic packaging on `sam deploy` + +Pre-requisites +-------------- +N/A + +Test Scenarios/Cases +-------------------- +* Re-deploy a stack that was deployed with a packaged template beforehand using the new sam deploy method. + +`sam deploy --template-file packaged.yaml --stack-name sam-stack --capabilities CAPABILITY_IAM` + +`sam deploy --stack-name sam-stack --capabilities CAPABILITY_IAM` + +The new stack should not have any changes. + + +Expected Results +---------------- + +* Regression and Integration tests pass. + +Documentation Changes +===================== +* Required nature of `--template-file`, `--template` parameter has a series of defaults that are looked at during `sam deploy` similar to `sam package`. +* If `--template-file` or `--template` points to a non-packaged template-file, `--s3-bucket` becomes required to be able to effectively package and deploy in one command using `sam deploy`. 
+ +Open Issues +============ + +Task Breakdown +============== + +- \[x\] Send a Pull Request with this design document +- \[ \] Build the command line interface +- \[ \] Build the underlying library +- \[ \] Unit tests +- \[ \] Functional Tests +- \[ \] Integration tests +- \[ \] Run all tests on Windows +- \[ \] Update documentation diff --git a/designs/sam-config.md b/designs/sam-config.md new file mode 100644 index 0000000000..39b2bdb91a --- /dev/null +++ b/designs/sam-config.md @@ -0,0 +1,393 @@ +SAM Config +==================================== + + +What is the problem? +-------------------- + +Today users of SAM CLI need to invoke the CLI directly with all parameters supplied to its commands. + +for e.g: `sam build --use-container --debug` + +But often, during the lifecycle of building and deploying a serverless application. the same commands get run repeatedly to build, package and deploy, before solidifying into the final application. + +These CLI commands are often long and have many changing parts. + +Have a look at the following series of workflows + + +* `sam build --use-container --template ... --parameter-overrides=... --skip-pull-image --manifest ...` + +* `sam package --s3-bucket ... --template-file ... --output-template-file ... --s3-prefix ... --kms-key-id ...` + +* `sam deploy --template-file ... --stack-name ... --capabilities ... --tags ... --parameter-overrides ... --kms-key-id ...` + +If this could be condensed into a series of workflows that look like + +* `sam build` +* `sam package` +* `sam deploy` + +That would be a huge user experience win. + +Tenets +------------------------------- + +* Resolution of command line parameters should always favor explicit versus implicit. A native command line parameter specified directly on the command line should override a parameter specified in the configuration file. + +What will be changed? 
+--------------------- + +The suite of commands supported by SAM CLI would be aided by looking for a configuration file thats locally located under the `.aws-sam/` at the project root where template.yaml is located by default. + +`.aws-sam/samconfig.toml` + + +This configuration would be used for specifiying the parameters that each of SAM CLI commands use and would be in TOML format. + +Running a SAM CLI command now automatically looks for `.aws-sam/samconfig.toml` file and if its finds it goes ahead with parameter passthroughs to the CLI. + +``` +sam build +Default Config file location: .aws-sam/samconfig.toml +.. +.. +.. +``` + +Why samconfig under `.aws-sam` +--------------------------------- + +The `.aws-sam` directory within the project directory is created with normal 755 permissions as default without any special permisions. `sam build` only creates a build directory within `.aws-sam` as `.aws-sam/build`. This directory is erased and re-built on every build. but top level directory is left unaffected. + +The `.gitignore` specified in the init apps also only have `.aws-sam/build` ignored and not anything else. + + +Config file versioning +----------------------- + +The configuration file: `samconfig.toml` will come with a top level version key that specifies the version of the configuration file. This version can then be used to determine if a given configuration file works with a version of SAM CLI. + +It also paves the forward when major changes need to be made to the configuration file and add a version bump to the config file version + +``` +version = 0.1 +``` + + +Overrides +---------- + +The default location of a .aws-sam/samconfig can be replaced by overriding an environment variable called `SAM_CLI_CONFIG` + +` +export SAM_CLI_CONFIG=~/Users/username/mysamconfig.toml +` + +Users can pass an environment `--env` for the section that will be scanned within the configuration file to pass parameters through. 
+ +By default the `default` section of the configuration is chosen. + +``` +version = 0.1 + +[default] + +[default.build] +[default.build.parameters] +profile="srirammv" +debug=true +skip_pull_image=true +use_container=true + +[default.package] +[default.package.parameters] +profile="srirammv" +region="us-east-1" +s3_bucket="sam-bucket" +output_template_file="packaged.yaml" + +[default.deploy] +[default.deploy.parameters] +stack_name="using_config_file" +capabilities="CAPABILITY_IAM" +region="us-east-1" +profile="srirammv" + +``` + +If a custom environment is specified, the environment is looked up in `samconfig.toml` file instead. + +`sam build --env dev` + +Sample configuration file + +``` +version = 0.1 + +[default.build.paramaters] +profile="srirammv" +debug=true +skip_pull_image=true +use_container=true + +[default.package.parameters] +profile="srirammv" +region="us-east-1" +s3_bucket="sam-bucket" +output_template_file="packaged.yaml" + +[default.deploy.parameters] +stack_name="using_config_file" +capabilities="CAPABILITY_IAM" +region="us-east-1" +profile="srirammv" + + +[dev.build.paramaters] +profile="srirammv" +debug=true +skip_pull_image=true +use_container=true + +[dev.package.parameters] +profile="srirammv" +region="us-east-1" +s3_bucket="sam-bucket" +output_template_file="packaged.yaml" + +[dev.deploy.parameters] +stack_name="using_config_file" +capabilities="CAPABILITY_IAM" +region="us-east-1" +profile="srirammv" +``` + + +The configuration file can then be potentially intialized + +* all sam init projects could come with a sample samconfig file + +Showcase configuration values +----------------------------- + +On running SAM CLI commands with `--debug`, SAM CLI can output the values read from the configuration file. This way the user is always informed of the total set of parameters are being used by SAM CLI, when the customers need to debug what parameters are actually being passed to the `sam` commands. 
+ + +Config file in Git Repos +------------------------ + +`samconfig.toml` file can be checked into a git repo, so that it's ready to use on cloning the repo. If the configuration file does not present all the necessary parameters, the command fails just as if one had specified the same arguments on the command line directly. + +Optionally, if multiple configuration files are checked in, one can change the `SAM_CLI_CONFIG` environment variable to point to a different configuration file. + +`--env` can also be passed in to deal with custom environments defined in the configuration file. + +Error Messages +--------------- + +When a custom env is passed in, and such an environment is not found, the error message can highlight all the environments that were found in the given configuration file. + +` +sam build --env devo +Error: Environment 'devo' was not found in .aws-sam/samconfig.toml , Possible environments are : ['dev', 'prod'] +` + +Future +---------- + +In the future, based on the file names of the configuration files, the environment could also be inferred. + +``` +.aws-sam/samconfig-dev.toml +.aws-sam/samconfig-beta.toml +.aws-sam/samconfig-prod.toml +``` + +`--env` dev will refer to `.aws-sam/samconfig-dev.toml` and so on. + +If multiple default file locations are added in the look up order for `samconfig.toml`, this means that multiple config files can be merged together. + +For example, if the hierarchy of lookup for configuration files are: $SAM_CLI_CONFIG -> `.aws-sam/samconfig.toml` -> `~/.aws-sam/samconfig.toml` + +The resulting configuration would be a merge of all the sections that are relevant for the command that was run. + +This way, configuration that might be global can be placed in `~/.aws-sam/samconfig.toml`. 
+ +``` +version = 0.1 +[default.build.parameters] +use_container = True +skip_pull_image = True +``` + +Project specific configuration placed in `~/.aws-sam/samconfig.toml` + +``` +version = 0.1 +[default.build.parameters] +parameter_overrides="ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" +``` + +Eventual merged configuration read during `sam build` in-memory. + +``` +version = 0.1 +[default.build.parameters] +use_container = True +skip_pull_image = True +parameter_overrides="ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" +``` + +Open Questions +------------------------------- + +* Potentially every sam command could have functionality to have a series of command line parameters exported into a configuraion file. + + +Out-of-Scope +------------ + +* Not focusing on a global configuration. SAM CLI already has a notion of a global config at `~/.aws-sam/metadata.json` + +User Experience Walkthrough +--------------------------- + +Once a configuration file is appropriately populated, day to day workflows per application developed with SAM CLI become much simpler. + +* `sam build` -> `sam package` -> `sam deploy` +* `sam build` -> `sam local invoke` +* `sam build` -> `sam package` -> `sam publish` + +Implementation +============== + +CLI Changes +----------- + +New command line argument is added per command called `--env` to be able to specify non default environment section within a config file. + + +### Breaking Change + +* No breaking changes to the CLI, in absence of the configuration file, CLI continues to work as normal. + +Design +------ + +*Explain how this feature will be implemented. 
Highlight the components +of your implementation, relationships* *between components, constraints, +etc.* + +A custom decorator to `click.option` is developed which reads from a configuration file the sections that are pertinent to that particular command and populates click's `default_map` context. + +The configuration file parser is a custom provider that can be made to understand any configuration file format in a pluggable manner. + +This decorator benefits from the same type checking that some SAM CLI parameters already use. + +A custom callback function (`configuration_callback`) (for the click option) that takes in a custom configuration parser (`provider`) will have rules in place, on how the corresponding configuration can be retrieved and what are the parts that the configuration parser has access to read from. + +``` +provider = attrs.pop("provider", TomlProvider(rules=DefaultRules(), command="build", section="parameters")) +attrs["type"] = click.STRING +saved_callback = attrs.pop("callback", None) +partial_callback = functools.partial(configuration_callback, cmd_name, option_name, env_name, saved_callback, provider) +attrs["callback"] = partial_callback +click.option(*param_decls, **attrs)(f) + +``` + +Phases +------ + +The design can be built in phases. + +* No option to specify configuration file or env name +* Specify configuration file with an environment variable +* Read `--env` to make sure we can select an appropriate portion in configuration file. + + +`.samrc` Changes +---------------- + +This design emphasizes parameter pass throughs with a configuration file and does not change the core working of the SAM CLI itself. The SAM CLI continues to be working just as it was with efficiency gains in usability. + +Security +-------- + +*Tip: How does this change impact security? 
Answer the following +questions to help answer this question better:* + +**What new dependencies (libraries/cli) does this change require?** + +toml + +**What other Docker container images are you using?** + +N/A + +**Are you creating a new HTTP endpoint? If so explain how it will be +created & used** + +N/A + +**Are you connecting to a remote API? If so explain how is this +connection secured** + +N/A + +**Are you reading/writing to a temporary folder? If so, what is this +used for and when do you clean up?** + +N/A. But we do read from a confiuration file thats either at a default location or specified by the user via an environment variable. + +**How do you validate new .samrc configuration?** + + + +What is your Testing Plan (QA)? +=============================== + +Goal +---- + +Configuration files are tested alongside SAM CLI and are expected to work seamlessly with meaningful error messages to steer users towards using configuration file to manage their app workflows. + +Pre-requesites +-------------- + +N/A + +Test Scenarios/Cases +-------------------- + +* Integration tests for every command with `env` based overrides, and command line overrides on existing sam configuration file and custom configuration file through environment variables. +* Tested to work on all platforms + +Expected Results +---------------- +* Works on all platforms +* Resolution of parameters follows. 
+ * CLI parameters -> Config file parameters + +Documentation Changes +===================== + +* Addition of a new `--env` parameter per command + +Related Open Issues +============ +* https://github.com/awslabs/aws-sam-cli/issues/975 +* https://github.com/awslabs/aws-sam-cli/issues/748 + +Task Breakdown +============== + +- \[x\] Send a Pull Request with this design document +- \[ \] Build the command line interface +- \[ \] Build the underlying library +- \[ \] Unit tests +- \[ \] Functional Tests +- \[ \] Integration tests +- \[ \] Run all tests on Windows +- \[ \] Update documentation diff --git a/designs/sam_setup_cmd.md b/designs/sam_setup_cmd.md new file mode 100644 index 0000000000..1362edff8d --- /dev/null +++ b/designs/sam_setup_cmd.md @@ -0,0 +1,25 @@ +# `sam setup` command + +As a part of packaging Lambda functions for deployment to AWS, users of the AWS SAM CLI currently need to provide an S3 bucket to store their code artifacts in. This creates a number of extra setup steps today, from users needing to go and set up an S3 bucket, to needing to track which bucket is appropriate for a given region (S3 bucket region must match CloudFormation deployment region). This project aims to simplify this experience. + +## Goals + +1. AWS SAM CLI users should be able to set up an S3 bucket for their SAM project entirely through the AWS SAM CLI. +2. The AWS SAM CLI, in setting up such a bucket, should choose an appropriate region and populate the users’s SAM CLI config file in their project. +3. A user doing the interactive deploy experience should be able to be completely separated from the S3 bucket used for source code storage, if the user does not wish to directly configure their source bucket. + +## Design + +We propose creating a new SAM CLI command, sam setup for this process. The underlying functionality would also be accessible to other commands, such as package itself. 
+ +The `sam setup` command would have the following parameters: + +* `--region` This parameter is **CONDITIONALLY REQUIRED**, because the primary goal of this command is to ensure that the user’s region has an S3 bucket set up. We will also accept the `AWS_REGION` environment variable, or the default region in a user’s profile. In short, a region must be provided in some way, or we will fail. +* `--profile` This is associated with a user’s AWS profile, and defaults to `"default"` if not provided. It will be used for sourcing credentials for CloudFormation commands used when setting up the bucket, and for doing S3 ListBucket calls to see if a suitable bucket already exists. + +## Challenges + +Both S3 buckets and CloudFormation stacks do not have sufficiently efficient ways to search by tags. Simply put, there’s likely to be some computational inefficiency as up to hundreds of API calls might be required to identify an existing bucket that was created to be a source bucket. This means that to avoid severe performance issues, we need to make compromises. Proposed: + +* The default managed bucket uses a fixed stack name per region, such as “aws-sam-cli-managed-source-bucket”. If the user for some reason has a stack with that name, then we cannot support a managed bucket for them. +* Alternatively, when doing sam setup, the user providing a bucket name would mean that we just check for it to exist and if it does and is in the correct region, populate the config file. 
diff --git a/docs/sam-config-docs.md b/docs/sam-config-docs.md new file mode 100644 index 0000000000..944a78c288 --- /dev/null +++ b/docs/sam-config-docs.md @@ -0,0 +1,97 @@ +`samconfig.toml` +-------------------------- + +This doc goes through the different sections of the configuration file and explains them + +``` +version = 0.1 + +[default.build.paramaters] +profile="srirammv" +debug=true +skip_pull_image=true +use_container=true + +[default.local_start_api.paramaters] +port=5400 + +[default.package.parameters] +profile="srirammv" +region="us-east-1" +s3_bucket="sam-bucket" +output_template_file="packaged.yaml" + +[default.deploy.parameters] +stack_name="using_config_file" +capabilities="CAPABILITY_IAM" +region="us-east-1" +profile="srirammv" +``` + +Version +------- + +`version` denotes the version of the `samconfig.toml` configuration file + +Env +---------- + +The default chosen env (environment) is denoted as `default` + +Command +----------- +The nested sections under `default` are reflected as `default.[SAM COMMAND]` + +these commands should not have spaces or hyphens, both " " and "-" will be converted to underscores "_" + +Therefore the sections for commands would like + +``` +[default.init] +[default.validate] +[default.build] +[default.local_generate_event_s3_delete] +[default.local_invoke] +[default.local_start_api] +[default.local_start_lambda] +[default.package] +[default.deploy] +[default.logs] +[default.publish] +``` + +Note: +sam local generate-event has a ton of options within it, but the above rules apply. + +Some examples: + +``` +[default.local.generate_event_alexa_skills_kit_intent_answer] +[default.local.generate_event_codepipeline_job] +``` + +Parameters +---------- +Since this configuration file is TOML, the parameters have types built-in. 
+ +### Specifying a number + +``` +[default.local_start_api.paramaters] +port=5400 +``` + +### Specifying a string + +``` +[default.deploy.parameters] +stack_name="using_config_file" +``` + +### Specifying a flag + +``` +[default.build.parameters] +debug=true +``` + diff --git a/requirements/base.txt b/requirements/base.txt index 8cd89436f2..fd6e4eef24 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -13,4 +13,5 @@ requests==2.22.0 serverlessrepo==0.1.9 aws_lambda_builders==0.6.0 # https://github.com/mhammond/pywin32/issues/1439 -pywin32 < 226; sys_platform == 'win32' \ No newline at end of file +pywin32 < 226; sys_platform == 'win32' +tomlkit==0.5.8 \ No newline at end of file diff --git a/requirements/isolated.txt b/requirements/isolated.txt index 0dbc04bb8e..82353dcb93 100644 --- a/requirements/isolated.txt +++ b/requirements/isolated.txt @@ -32,6 +32,7 @@ requests==2.22.0 s3transfer==0.2.1 serverlessrepo==0.1.9 six==1.11.0 +tomlkit==0.5.8 tzlocal==2.0.0 urllib3==1.25.3 websocket-client==0.56.0 diff --git a/samcli/__init__.py b/samcli/__init__.py index a74a1acfe7..718184ec46 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "0.32.0" +__version__ = "0.33.0" diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py new file mode 100644 index 0000000000..db346472ee --- /dev/null +++ b/samcli/cli/cli_config_file.py @@ -0,0 +1,187 @@ +""" +CLI configuration decorator to use TOML configuration files for click commands. 
+""" + +## This section contains code copied and modified from [click_config_file][https://github.com/phha/click_config_file/blob/master/click_config_file.py] +## SPDX-License-Identifier: MIT + +import os +import functools +import logging + +import click + +from samcli.commands.exceptions import ConfigException +from samcli.lib.config.exceptions import SamConfigVersionException +from samcli.cli.context import get_cmd_names +from samcli.lib.config.samconfig import SamConfig, DEFAULT_ENV + +__all__ = ("TomlProvider", "configuration_option", "get_ctx_defaults") + +LOG = logging.getLogger(__name__) + + +class TomlProvider: + """ + A parser for toml configuration files + :param cmd: sam command name as defined by click + :param section: section defined in the configuration file nested within `cmd` + """ + + def __init__(self, section=None): + self.section = section + + def __call__(self, config_dir, config_env, cmd_names): + """ + Get resolved config based on the `file_path` for the configuration file, + `config_env` targeted inside the config file and corresponding `cmd_name` + as denoted by `click`. + + :param config_env: The name of the sectional config_env within configuration file. + :param cmd_names list(str): sam command name as defined by click + :returns dictionary containing the configuration parameters under specified config_env + """ + + resolved_config = {} + + samconfig = SamConfig(config_dir) + LOG.debug("Config file location: %s", samconfig.path()) + + if not samconfig.exists(): + LOG.debug("Config file does not exist") + return resolved_config + + try: + LOG.debug("Getting configuration value for %s %s %s", cmd_names, self.section, config_env) + + # NOTE(TheSriram): change from tomlkit table type to normal dictionary, + # so that click defaults work out of the box. 
+ samconfig.sanity_check() + resolved_config = {k: v for k, v in samconfig.get_all(cmd_names, self.section, env=config_env).items()} + LOG.debug("Configuration values read from the file: %s", resolved_config) + + except KeyError as ex: + LOG.debug( + "Error reading configuration file at %s with config_env=%s, command=%s, section=%s %s", + samconfig.path(), + config_env, + cmd_names, + self.section, + str(ex), + ) + + except SamConfigVersionException as ex: + LOG.debug("%s %s", samconfig.path(), str(ex)) + raise ConfigException(f"Syntax invalid in samconfig.toml: {str(ex)}") + + except Exception as ex: + LOG.debug("Error reading configuration file: %s %s", samconfig.path(), str(ex)) + + return resolved_config + + +def configuration_callback(cmd_name, option_name, config_env_name, saved_callback, provider, ctx, param, value): + """ + Callback for reading the config file. + + Also takes care of calling user specified custom callback afterwards. + + :param cmd_name: `sam` command name derived from click. + :param option_name: The name of the option. This is used for error messages. + :param config_env_name: `top` level section within configuration file + :param saved_callback: User-specified callback to be called later. + :param provider: A callable that parses the configuration file and returns a dictionary + of the configuration parameters. Will be called as + `provider(file_path, config_env, cmd_name)`. + :param ctx: Click context + :param param: Click parameter + :param value: Specified value for config_env + :returns specified callback or the specified value for config_env. + """ + + # ctx, param and value are default arguments for click specified callbacks. 
+ ctx.default_map = ctx.default_map or {} + cmd_name = cmd_name or ctx.info_name + param.default = None + config_env_name = value or config_env_name + config = get_ctx_defaults(cmd_name, provider, ctx, config_env_name=config_env_name) + ctx.default_map.update(config) + + return saved_callback(ctx, param, value) if saved_callback else value + + +def get_ctx_defaults(cmd_name, provider, ctx, config_env_name): + """ + Get the set of the parameters that are needed to be set into the click command. + This function also figures out the command name by looking up current click context's parent + and constructing the parsed command name that is used in default configuration file. + If a given cmd_name is start-api, the parsed name is "local_start_api". + provider is called with `config_file`, `config_env_name` and `parsed_cmd_name`. + + :param cmd_name: `sam` command name + :param provider: provider to be called for reading configuration file + :param ctx: Click context + :param config_env_name: config-env within configuration file + :return: dictionary of defaults for parameters + """ + + # `config_dir` will be a directory relative to SAM template, if it is available. If not it's relative to cwd + config_dir = getattr(ctx, "samconfig_dir", None) or os.getcwd() + return provider(config_dir, config_env_name, get_cmd_names(cmd_name, ctx)) + + +def configuration_option(*param_decls, **attrs): + """ + Adds configuration file support to a click application. + + NOTE: This decorator should be added to the top of parameter chain, right below click.command, before + any options are declared. + + Example: + >>> @click.command("hello") + @configuration_option(provider=TomlProvider(section="parameters")) + @click.option('--name', type=click.String) + def hello(name): + print("Hello " + name) + + This will create an option of type `STRING` expecting the config_env in the + configuration file, by default this config_env is `default`. 
When specified, + the requisite portion of the configuration file is considered as the + source of truth. + + The default name of the option is `--config-env`. + + This decorator accepts the same arguments as `click.option`. + In addition, the following keyword arguments are available: + :param cmd_name: The command name. Default: `ctx.info_name` + :param config_env_name: The config_env name. This is used to determine which part of the configuration + needs to be read. + :param provider: A callable that parses the configuration file and returns a dictionary + of the configuration parameters. Will be called as + `provider(file_path, config_env, cmd_name) + """ + param_decls = param_decls or ("--config-env",) + option_name = param_decls[0] + + def decorator(f): + + attrs.setdefault("is_eager", True) + attrs.setdefault("help", "Read config-env from Configuration File.") + attrs.setdefault("expose_value", False) + # --config-env is hidden and can potentially be opened up in the future. + attrs.setdefault("hidden", True) + # explicitly ignore values passed to --config-env, can be opened up in the future. + config_env_name = DEFAULT_ENV + provider = attrs.pop("provider") + attrs["type"] = click.STRING + saved_callback = attrs.pop("callback", None) + partial_callback = functools.partial( + configuration_callback, None, option_name, config_env_name, saved_callback, provider + ) + attrs["callback"] = partial_callback + return click.option(*param_decls, **attrs)(f) + + return decorator + + +# End section copied from [[click_config_file][https://github.com/phha/click_config_file/blob/master/click_config_file.py] diff --git a/samcli/cli/command.py b/samcli/cli/command.py index f56e93fb80..4e2187de1d 100644 --- a/samcli/cli/command.py +++ b/samcli/cli/command.py @@ -21,6 +21,8 @@ "samcli.commands.deploy", "samcli.commands.logs", "samcli.commands.publish", + # We intentionally do not expose the `bootstrap` command for now. 
We might open it up later + # "samcli.commands.bootstrap", ] diff --git a/samcli/cli/context.py b/samcli/cli/context.py index 488c6a1ee0..a72f9ed923 100644 --- a/samcli/cli/context.py +++ b/samcli/cli/context.py @@ -146,3 +146,42 @@ def _refresh_session(self): boto3.setup_default_session(region_name=self._aws_region, profile_name=self._aws_profile) except botocore.exceptions.ProfileNotFound as ex: raise CredentialsError(str(ex)) + + +def get_cmd_names(cmd_name, ctx): + """ + Given the click core context, return a list representing all the subcommands passed to the CLI + + Parameters + ---------- + cmd_name : name of current command + + ctx : click.Context + + Returns + ------- + list(str) + List containing subcommand names. Ex: ["local", "start-api"] + + """ + if not ctx: + return [] + + if ctx and not getattr(ctx, "parent", None): + return [ctx.info_name] + # Find parent of current context + _parent = ctx.parent + _cmd_names = [] + # Need to find the total set of commands that current command is part of. + if cmd_name != ctx.info_name: + _cmd_names = [cmd_name] + _cmd_names.append(ctx.info_name) + # Go through all parents till a parent of a context exists. + while _parent.parent: + info_name = _parent.info_name + _cmd_names.append(info_name) + _parent = _parent.parent + + # Make sure the output reads natural. 
Ex: ["local", "start-api"] + _cmd_names.reverse() + return _cmd_names diff --git a/samcli/cli/main.py b/samcli/cli/main.py index c4e0279d6d..ede907bdbf 100644 --- a/samcli/cli/main.py +++ b/samcli/cli/main.py @@ -97,6 +97,8 @@ def cli(ctx): sam_cli_logger = logging.getLogger("samcli") sam_cli_formatter = logging.Formatter("%(message)s") lambda_builders_logger = logging.getLogger("aws_lambda_builders") + botocore_logger = logging.getLogger("botocore") SamCliLogger.configure_logger(sam_cli_logger, sam_cli_formatter, logging.INFO) SamCliLogger.configure_logger(lambda_builders_logger, sam_cli_formatter, logging.INFO) + SamCliLogger.configure_null_logger(botocore_logger) diff --git a/samcli/cli/types.py b/samcli/cli/types.py index faacbacf70..7d43e353e7 100644 --- a/samcli/cli/types.py +++ b/samcli/cli/types.py @@ -9,33 +9,78 @@ import click +def _value_regex(delim): + return f'(\\"(?:\\\\.|[^\\"\\\\]+)*\\"|(?:\\\\.|[^{delim}\\"\\\\]+)+)' + + +KEY_REGEX = '([A-Za-z0-9\\"]+)' +# Use this regex when you have space as delimiter Ex: "KeyName1=string KeyName2=string" +VALUE_REGEX_SPACE_DELIM = _value_regex(" ") +# Use this regex when you have comma as delimiter Ex: "KeyName1=string,KeyName2=string" +VALUE_REGEX_COMMA_DELIM = _value_regex(",") + + class CfnParameterOverridesType(click.ParamType): """ Custom Click options type to accept values for CloudFormation template parameters. 
You can pass values for parameters as
     "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro"
     """
 
-    __EXAMPLE = "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro"
+    __EXAMPLE_1 = "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro"
+    __EXAMPLE_2 = "KeyPairName=MyKey InstanceType=t1.micro"
+
+    # Regex that parses CloudFormation parameter key-value pairs:
+    # https://regex101.com/r/xqfSjW/2
+    # https://regex101.com/r/xqfSjW/5
 
-    # Regex that parses CloudFormation parameter key-value pairs: https://regex101.com/r/xqfSjW/2
-    _pattern = r"(?:ParameterKey=([A-Za-z0-9\"]+),ParameterValue=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))"
+    # The ParameterKey=...,ParameterValue=... pattern and the KeyName=Value pattern should not both be present
+    # while adding parameter overrides; if they are, it
+    # can result in unpredictable behavior.
+    _pattern_1 = r"(?:ParameterKey={key},ParameterValue={value})".format(key=KEY_REGEX, value=VALUE_REGEX_SPACE_DELIM)
+    _pattern_2 = r"(?:(?: ){key}={value})".format(key=KEY_REGEX, value=VALUE_REGEX_SPACE_DELIM)
+    ordered_pattern_match = [_pattern_1, _pattern_2]
+
+    # NOTE(TheSriram): name needs to be added to click.ParamType requires it.
     name = ""
 
     def convert(self, value, param, ctx):
         result = {}
-        if not value:
-            return result
 
-        groups = re.findall(self._pattern, value)
-        if not groups:
-            return self.fail(
-                "{} is not in valid format. 
It must look something like '{}'".format(value, self.__EXAMPLE), param, ctx - ) + # Empty tuple + if value == ("",): + return result - # 'groups' variable is a list of tuples ex: [(key1, value1), (key2, value2)] - for key, param_value in groups: - result[self._unquote(key)] = self._unquote(param_value) + value = (value,) if isinstance(value, str) else value + for val in value: + val.strip() + # Add empty string to start of the string to help match `_pattern2` + val = " " + val + + try: + # NOTE(TheSriram): find the first regex that matched. + # pylint is concerned that we are checking at the same `val` within the loop, + # but that is the point, so disabling it. + pattern = next( + i + for i in filter( + lambda item: re.findall(item, val), self.ordered_pattern_match + ) # pylint: disable=cell-var-from-loop + ) + except StopIteration: + return self.fail( + "{} is not in valid format. It must look something like '{}' or '{}'".format( + val, self.__EXAMPLE_1, self.__EXAMPLE_2 + ), + param, + ctx, + ) + + groups = re.findall(pattern, val) + + # 'groups' variable is a list of tuples ex: [(key1, value1), (key2, value2)] + for key, param_value in groups: + result[self._unquote(key)] = self._unquote(param_value) return result @@ -77,10 +122,10 @@ class CfnMetadataType(click.ParamType): _EXAMPLE = 'KeyName1=string,KeyName2=string or {"string":"string"}' - _pattern = r"([A-Za-z0-9\"]+)=([A-Za-z0-9\"]+)" + _pattern = r"(?:{key}={value})".format(key=KEY_REGEX, value=VALUE_REGEX_COMMA_DELIM) # NOTE(TheSriram): name needs to be added to click.ParamType requires it. - name = "CfnMetadata" + name = "" def convert(self, value, param, ctx): result = {} @@ -103,9 +148,9 @@ def convert(self, value, param, ctx): if not groups: fail = True for group in groups: - key, value = group + key, v = group # assign to result['KeyName1'] = string and so on. 
- result[key] = value + result[key] = v if fail: return self.fail( @@ -113,3 +158,46 @@ def convert(self, value, param, ctx): ) return result + + +class CfnTags(click.ParamType): + """ + Custom Click options type to accept values for tag parameters. + tag parameters can be of the type KeyName1=string KeyName2=string + """ + + _EXAMPLE = "KeyName1=string KeyName2=string" + + _pattern = r"{key}={value}".format(key=KEY_REGEX, value=VALUE_REGEX_SPACE_DELIM) + + # NOTE(TheSriram): name needs to be added to click.ParamType requires it. + name = "" + + def convert(self, value, param, ctx): + result = {} + fail = False + # Empty tuple + if value == ("",): + return result + + # if value comes in a via configuration file, it will be a string. So we should still convert it. + value = (value,) if not isinstance(value, tuple) else value + + for val in value: + groups = re.findall(self._pattern, val) + + if not groups: + fail = True + for group in groups: + key, v = group + # assign to result['KeyName1'] = string and so on. + result[key] = v + + if fail: + return self.fail( + "{} is not in valid format. It must look something like '{}'".format(value, self._EXAMPLE), + param, + ctx, + ) + + return result diff --git a/samcli/commands/_utils/custom_options/__init__.py b/samcli/commands/_utils/custom_options/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/commands/_utils/custom_options/option_nargs.py b/samcli/commands/_utils/custom_options/option_nargs.py new file mode 100644 index 0000000000..1c310103a1 --- /dev/null +++ b/samcli/commands/_utils/custom_options/option_nargs.py @@ -0,0 +1,50 @@ +""" +Custom Click options for multiple arguments +""" + +import click + + +class OptionNargs(click.Option): + """ + A custom option class that allows parsing for multiple arguments + for an option, when the number of arguments for an option are unknown. 
+ """ + + def __init__(self, *args, **kwargs): + self.nargs = kwargs.pop("nargs", -1) + super(OptionNargs, self).__init__(*args, **kwargs) + self._previous_parser_process = None + self._nargs_parser = None + + def add_to_parser(self, parser, ctx): + def parser_process(value, state): + # look ahead into arguments till we reach the next option. + # the next option starts with a prefix which is either '-' or '--' + next_option = False + value = [value] + + while state.rargs and not next_option: + for prefix in self._nargs_parser.prefixes: + if state.rargs[0].startswith(prefix): + next_option = True + if not next_option: + value.append(state.rargs.pop(0)) + + value = tuple(value) + + # call the actual process + self._previous_parser_process(value, state) + + # Add current option to Parser by calling add_to_parser on the super class. + super(OptionNargs, self).add_to_parser(parser, ctx) + for name in self.opts: + # Get OptionParser object for current option + option_parser = getattr(parser, "_long_opt").get(name) or getattr(parser, "_short_opt").get(name) + if option_parser: + # Monkey patch `process` method for click.parser.Option class. 
+ # This allows for setting multiple parsed values into current option arguments + self._nargs_parser = option_parser + self._previous_parser_process = option_parser.process + option_parser.process = parser_process + break diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 6537f6cabe..72973ad43e 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -7,9 +7,13 @@ from functools import partial import click -from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType +from click.types import FuncParamType +from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType, CfnTags +from samcli.commands._utils.custom_options.option_nargs import OptionNargs + _TEMPLATE_OPTION_DEFAULT_VALUE = "template.[yaml|yml]" +DEFAULT_STACK_NAME = "sam-app" LOG = logging.getLogger(__name__) @@ -27,6 +31,8 @@ def get_or_default_template_file_name(ctx, param, provided_value, include_build) :return: Actual value to be used in the CLI """ + original_template_path = os.path.abspath(provided_value) + search_paths = ["template.yaml", "template.yml"] if include_build: @@ -41,12 +47,38 @@ def get_or_default_template_file_name(ctx, param, provided_value, include_build) if os.path.exists(option): provided_value = option break - result = os.path.abspath(provided_value) + + if ctx: + # sam configuration file should always be relative to the supplied original template and should not to be set + # to be .aws-sam/build/ + setattr(ctx, "samconfig_dir", os.path.dirname(original_template_path)) LOG.debug("Using SAM Template at %s", result) return result +def guided_deploy_stack_name(ctx, param, provided_value): + """ + Provide a default value for stack name if invoked with a guided deploy. + :param ctx: Click Context + :param param: Param name + :param provided_value: Value provided by Click, it would be the value provided by the user. 
+    :return: Actual value to be used in the CLI
+    """
+
+    guided = ctx.params.get("guided", False) or ctx.params.get("g", False)
+
+    if not guided and not provided_value:
+        raise click.BadOptionUsage(
+            option_name=param.name,
+            ctx=ctx,
+            message="Missing option '--stack-name', 'sam deploy --guided' can "
+            "be used to provide and save needed parameters for future deploys.",
+        )
+
+    return provided_value if provided_value else DEFAULT_STACK_NAME
+
+
 def template_common_option(f):
     """
     Common ClI option for template
@@ -72,6 +104,7 @@ def template_click_option(include_build=True):
     Click Option for template option
     """
     return click.option(
+        "--template-file",
         "--template",
         "-t",
         default=_TEMPLATE_OPTION_DEFAULT_VALUE,
         envvar="SAM_TEMPLATE_FILE",
         callback=partial(get_or_default_template_file_name, include_build=include_build),
         show_default=True,
+        is_eager=True,
         help="AWS SAM template file",
     )
@@ -113,10 +147,12 @@ def docker_click_options():
 def parameter_override_click_option():
     return click.option(
         "--parameter-overrides",
+        cls=OptionNargs,
         type=CfnParameterOverridesType(),
-        help="Optional. A string that contains CloudFormation parameter overrides encoded as key=value "
-        "pairs. Use the same format as the AWS CLI, e.g. 'ParameterKey=KeyPairName,"
-        "ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro'",
+        default={},
+        help="Optional. A string that contains AWS CloudFormation parameter overrides encoded as key=value pairs."
+ "For example, 'ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType," + "ParameterValue=t1.micro' or KeyPairName=MyKey InstanceType=t1.micro", ) @@ -134,3 +170,65 @@ def metadata_click_option(): def metadata_override_option(f): return metadata_click_option()(f) + + +def capabilities_click_option(): + return click.option( + "--capabilities", + cls=OptionNargs, + required=False, + type=FuncParamType(func=_space_separated_list_func_type), + help="A list of capabilities that you must specify" + "before AWS Cloudformation can create certain stacks. Some stack tem-" + "plates might include resources that can affect permissions in your AWS" + "account, for example, by creating new AWS Identity and Access Manage-" + "ment (IAM) users. For those stacks, you must explicitly acknowledge" + "their capabilities by specifying this parameter. The only valid values" + "are CAPABILITY_IAM and CAPABILITY_NAMED_IAM. If you have IAM resources," + "you can specify either capability. If you have IAM resources with cus-" + "tom names, you must specify CAPABILITY_NAMED_IAM. If you don't specify" + "this parameter, this action returns an InsufficientCapabilities error.", + ) + + +def capabilities_override_option(f): + return capabilities_click_option()(f) + + +def tags_click_option(): + return click.option( + "--tags", + cls=OptionNargs, + type=CfnTags(), + required=False, + help="A list of tags to associate with the stack that is created or updated." 
+ "AWS CloudFormation also propagates these tags to resources " + "in the stack if the resource supports it.", + ) + + +def tags_override_option(f): + return tags_click_option()(f) + + +def notification_arns_click_option(): + return click.option( + "--notification-arns", + cls=OptionNargs, + type=FuncParamType(func=_space_separated_list_func_type), + required=False, + help="Amazon Simple Notification Service topic" + "Amazon Resource Names (ARNs) that AWS CloudFormation associates with" + "the stack.", + ) + + +def notification_arns_override_option(f): + return notification_arns_click_option()(f) + + +def _space_separated_list_func_type(value): + return value.split(" ") if not isinstance(value, tuple) else value + + +_space_separated_list_func_type.__name__ = "LIST" diff --git a/samcli/commands/_utils/table_print.py b/samcli/commands/_utils/table_print.py new file mode 100644 index 0000000000..d22f4a0c43 --- /dev/null +++ b/samcli/commands/_utils/table_print.py @@ -0,0 +1,112 @@ +""" +Utilities for table pretty printing using click +""" +from itertools import count, zip_longest +import textwrap +from functools import wraps + +import click + + +def pprint_column_names(format_string, format_kwargs, margin=None, table_header=None, color="yellow"): + """ + + :param format_string: format string to be used that has the strings, minimum width to be replaced + :param format_kwargs: dictionary that is supplied to the format_string to format the string + :param margin: margin that is to be reduced from column width for columnar text. + :param table_header: Supplied table header + :param color: color supplied for table headers and column names. + :return: boilerplate table string + """ + + min_width = 100 + min_margin = 2 + + def pprint_wrap(func): + # Calculate terminal width, number of columns in the table + width, _ = click.get_terminal_size() + # For UX purposes, set a minimum width for the table to be usable + # and usable_width keeps margins in mind. 
+ width = max(width, min_width) + + total_args = len(format_kwargs) + if not total_args: + raise ValueError("Number of arguments supplied should be > 0 , format_kwargs: {}".format(format_kwargs)) + + # Get width to be a usable number so that we can equally divide the space for all the columns. + # Can be refactored, to allow for modularity in the shaping of the columns. + width = width - (width % total_args) + usable_width_no_margin = int(width) - 1 + usable_width = int((usable_width_no_margin - (margin if margin else min_margin))) + if total_args > int(usable_width / 2): + raise ValueError("Total number of columns exceed available width") + width_per_column = int(usable_width / total_args) + + # The final column should not roll over into the next line + final_arg_width = width_per_column - 1 + + # the format string contains minimumwidth that need to be set. + # eg: "{a:{0}}} {b:<{1}}} {c:{2}}}" + format_args = [width_per_column for _ in range(total_args - 1)] + format_args.extend([final_arg_width]) + + # format arguments are now ready for setting minimumwidth + + @wraps(func) + def wrap(*args, **kwargs): + # The table is setup with the column names, format_string contains the column names. 
+            if table_header:
+                click.secho("\n" + table_header)
+            click.secho("-" * usable_width, fg=color)
+            click.secho(format_string.format(*format_args, **format_kwargs), fg=color)
+            click.secho("-" * usable_width, fg=color)
+            # format_args which have the minimumwidth set per {} in the format_string is passed to the function
+            # which this decorator wraps, so that the function has access to the correct format_args
+            kwargs["format_args"] = format_args
+            kwargs["width"] = width_per_column
+            kwargs["margin"] = margin if margin else min_margin
+            result = func(*args, **kwargs)
+            # Complete the table
+            click.secho("-" * usable_width, fg=color)
+            return result
+
+        return wrap
+
+    return pprint_wrap
+
+
+def wrapped_text_generator(texts, width, margin):
+    """
+
+    Return a generator where the contents are wrapped text to a specified width.
+
+    :param texts: list of text that needs to be wrapped at specified width
+    :param width: width of the text to be wrapped
+    :param margin: margin to be reduced from width for cleaner UX
+    :return: generator of wrapped text
+    """
+    for text in texts:
+        yield textwrap.wrap(text, width=width - margin)
+
+
+def pprint_columns(columns, width, margin, format_string, format_args, columns_dict, color="yellow"):
+    """
+
+    Print columns based on list of columnar text, associated formatting string and associated format arguments.
+
+    :param columns: list of columnar text that go into columns as specified by the format_string
+    :param width: width of the text to be wrapped
+    :param margin: margin to be reduced from width for cleaner UX
+    :param format_string: A format string that has both width and text specifiers set.
+    :param format_args: list of offset specifiers
+    :param columns_dict: arguments dictionary that have dummy values per column
+    :param color: color supplied for rows within the table. 
+    :return:
+    """
+    for columns_text in zip_longest(*wrapped_text_generator(columns, width, margin), fillvalue=""):
+        counter = count()
+        # Generate columnar data that correspond to the column names and update them.
+        for k, _ in columns_dict.items():
+            columns_dict[k] = columns_text[next(counter)]
+
+        click.secho(format_string.format(*format_args, **columns_dict), fg=color)
diff --git a/samcli/commands/_utils/template.py b/samcli/commands/_utils/template.py
index 164a90cbf2..0fb7a5783e 100644
--- a/samcli/commands/_utils/template.py
+++ b/samcli/commands/_utils/template.py
@@ -204,3 +204,20 @@ def _resolve_relative_to(path, original_root, new_root):
     return os.path.relpath(
         os.path.normpath(os.path.join(original_root, path)), new_root  # Absolute original path w.r.t ``original_root``
     )  # Resolve the original path with respect to ``new_root``
+
+
+def get_template_parameters(template_file):
+    """
+    Get Parameters from a template file.
+
+    Parameters
+    ----------
+    template_file : string
+        Path to the template to read
+
+    Returns
+    -------
+    Template Parameters as a dictionary
+    """
+    template_dict = get_template_data(template_file=template_file)
+    return template_dict.get("Parameters", dict())
diff --git a/samcli/commands/bootstrap/__init__.py b/samcli/commands/bootstrap/__init__.py
new file mode 100644
index 0000000000..e432ed5341
--- /dev/null
+++ b/samcli/commands/bootstrap/__init__.py
@@ -0,0 +1,6 @@
+"""
+`sam bootstrap` command
+"""
+
+# Expose the cli object here
+from .command import cli  # noqa
diff --git a/samcli/commands/bootstrap/command.py b/samcli/commands/bootstrap/command.py
new file mode 100644
index 0000000000..6da1afb4ba
--- /dev/null
+++ b/samcli/commands/bootstrap/command.py
@@ -0,0 +1,30 @@
+"""
+CLI command for "bootstrap", which sets up a SAM development environment
+"""
+import click
+
+from samcli.cli.main import pass_context, common_options, aws_creds_options
+from samcli.lib.telemetry.metrics import track_command
+from samcli.lib.bootstrap 
import bootstrap + +SHORT_HELP = "Set up development environment for AWS SAM applications." + +HELP_TEXT = """ +Sets up a development environment for AWS SAM applications. + +Currently this creates, if one does not exist, a managed S3 bucket for your account in your working AWS region. +""" + + +@click.command("bootstrap", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) +@common_options +@aws_creds_options +@pass_context +@track_command +def cli(ctx): + do_cli(ctx.region, ctx.profile) # pragma: no cover + + +def do_cli(region, profile): + bucket_name = bootstrap.manage_stack(profile=profile, region=region) + click.echo("Source Bucket: " + bucket_name) diff --git a/samcli/commands/bootstrap/exceptions.py b/samcli/commands/bootstrap/exceptions.py new file mode 100644 index 0000000000..d3d7fa88bc --- /dev/null +++ b/samcli/commands/bootstrap/exceptions.py @@ -0,0 +1,11 @@ +""" +Exceptions that are raised by sam bootstrap +""" +from samcli.commands.exceptions import UserException + + +class ManagedStackError(UserException): + def __init__(self, ex): + self.ex = ex + message_fmt = f"Failed to create managed resources: {ex}" + super(ManagedStackError, self).__init__(message=message_fmt.format(ex=self.ex)) diff --git a/samcli/commands/build/command.py b/samcli/commands/build/command.py index b6f9c67f9f..0c12273479 100644 --- a/samcli/commands/build/command.py +++ b/samcli/commands/build/command.py @@ -6,10 +6,14 @@ import logging import click -from samcli.commands._utils.options import template_option_without_build, docker_common_options, \ - parameter_override_option +from samcli.commands._utils.options import ( + template_option_without_build, + docker_common_options, + parameter_override_option, +) from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = 
logging.getLogger(__name__) @@ -54,62 +58,89 @@ @click.command("build", help=HELP_TEXT, short_help="Build your Lambda function code") -@click.option('--build-dir', '-b', - default=DEFAULT_BUILD_DIR, - type=click.Path(file_okay=False, dir_okay=True, writable=True), # Must be a directory - help="Path to a folder where the built artifacts will be stored. This directory will be first removed before starting a build.") -@click.option("--base-dir", "-s", - default=None, - type=click.Path(dir_okay=True, file_okay=False), # Must be a directory - help="Resolve relative paths to function's source code with respect to this folder. Use this if " - "SAM template and your source code are not in same enclosing folder. By default, relative paths " - "are resolved with respect to the SAM template's location") -@click.option("--use-container", "-u", - is_flag=True, - help="If your functions depend on packages that have natively compiled dependencies, use this flag " - "to build your function inside an AWS Lambda-like Docker container") -@click.option("--manifest", "-m", - default=None, - type=click.Path(), - help="Path to a custom dependency manifest (ex: package.json) to use instead of the default one") +@configuration_option(provider=TomlProvider(section="parameters")) +@click.option( + "--build-dir", + "-b", + default=DEFAULT_BUILD_DIR, + type=click.Path(file_okay=False, dir_okay=True, writable=True), # Must be a directory + help="Path to a folder where the built artifacts will be stored. This directory will be first removed before starting a build.", +) +@click.option( + "--base-dir", + "-s", + default=None, + type=click.Path(dir_okay=True, file_okay=False), # Must be a directory + help="Resolve relative paths to function's source code with respect to this folder. Use this if " + "SAM template and your source code are not in same enclosing folder. 
By default, relative paths " + "are resolved with respect to the SAM template's location", +) +@click.option( + "--use-container", + "-u", + is_flag=True, + help="If your functions depend on packages that have natively compiled dependencies, use this flag " + "to build your function inside an AWS Lambda-like Docker container", +) +@click.option( + "--manifest", + "-m", + default=None, + type=click.Path(), + help="Path to a custom dependency manifest (ex: package.json) to use instead of the default one", +) @template_option_without_build @parameter_override_option @docker_common_options @cli_framework_options @aws_creds_options -@click.argument('function_identifier', required=False) +@click.argument("function_identifier", required=False) @pass_context @track_command -def cli(ctx, +def cli( + ctx, + function_identifier, + template_file, + base_dir, + build_dir, + use_container, + manifest, + docker_network, + skip_pull_image, + parameter_overrides, +): + # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing + + mode = _get_mode_value_from_envvar("SAM_BUILD_MODE", choices=["debug"]) + + do_cli( function_identifier, - template, + template_file, base_dir, build_dir, + True, use_container, manifest, docker_network, skip_pull_image, parameter_overrides, - ): - # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - - mode = _get_mode_value_from_envvar("SAM_BUILD_MODE", choices=["debug"]) - - do_cli(function_identifier, template, base_dir, build_dir, True, use_container, manifest, docker_network, - skip_pull_image, parameter_overrides, mode) # pragma: no cover - - -def do_cli(function_identifier, # pylint: disable=too-many-locals, too-many-statements - template, - base_dir, - build_dir, - clean, - use_container, - manifest_path, - docker_network, - skip_pull_image, - parameter_overrides, - mode): + mode, + ) # pragma: no cover + + +def do_cli( # pylint: disable=too-many-locals, too-many-statements + function_identifier, + template, + base_dir, + build_dir, + clean, + use_container, + manifest_path, + docker_network, + skip_pull_image, + parameter_overrides, + mode, +): """ Implementation of the ``cli`` method """ @@ -117,8 +148,12 @@ def do_cli(function_identifier, # pylint: disable=too-many-locals, too-many-sta from samcli.commands.exceptions import UserException from samcli.commands.build.build_context import BuildContext - from samcli.lib.build.app_builder import ApplicationBuilder, BuildError, UnsupportedBuilderLibraryVersionError, \ - ContainerBuildNotSupported + from samcli.lib.build.app_builder import ( + ApplicationBuilder, + BuildError, + UnsupportedBuilderLibraryVersionError, + ContainerBuildNotSupported, + ) from samcli.lib.build.workflow_config import UnsupportedRuntimeException from samcli.local.lambdafn.exceptions import FunctionNotFound from samcli.commands._utils.template import move_template @@ -128,36 +163,36 @@ def do_cli(function_identifier, # pylint: disable=too-many-locals, too-many-sta if use_container: LOG.info("Starting Build inside a container") - with BuildContext(function_identifier, - template, - base_dir, - build_dir, - clean=clean, - manifest_path=manifest_path, - use_container=use_container, - parameter_overrides=parameter_overrides, - docker_network=docker_network, - skip_pull_image=skip_pull_image, - 
mode=mode) as ctx: + with BuildContext( + function_identifier, + template, + base_dir, + build_dir, + clean=clean, + manifest_path=manifest_path, + use_container=use_container, + parameter_overrides=parameter_overrides, + docker_network=docker_network, + skip_pull_image=skip_pull_image, + mode=mode, + ) as ctx: try: - builder = ApplicationBuilder(ctx.functions_to_build, - ctx.build_dir, - ctx.base_dir, - manifest_path_override=ctx.manifest_path_override, - container_manager=ctx.container_manager, - mode=ctx.mode) + builder = ApplicationBuilder( + ctx.functions_to_build, + ctx.build_dir, + ctx.base_dir, + manifest_path_override=ctx.manifest_path_override, + container_manager=ctx.container_manager, + mode=ctx.mode, + ) except FunctionNotFound as ex: raise UserException(str(ex)) try: artifacts = builder.build() - modified_template = builder.update_template(ctx.template_dict, - ctx.original_template_path, - artifacts) + modified_template = builder.update_template(ctx.template_dict, ctx.original_template_path, artifacts) - move_template(ctx.original_template_path, - ctx.output_template_path, - modified_template) + move_template(ctx.original_template_path, ctx.output_template_path, modified_template) click.secho("\nBuild Succeeded", fg="green") @@ -172,14 +207,20 @@ def do_cli(function_identifier, # pylint: disable=too-many-locals, too-many-sta build_dir_in_success_message = ctx.build_dir output_template_path_in_success_message = ctx.output_template_path - msg = gen_success_msg(build_dir_in_success_message, - output_template_path_in_success_message, - os.path.abspath(ctx.build_dir) == os.path.abspath(DEFAULT_BUILD_DIR)) + msg = gen_success_msg( + build_dir_in_success_message, + output_template_path_in_success_message, + os.path.abspath(ctx.build_dir) == os.path.abspath(DEFAULT_BUILD_DIR), + ) click.secho(msg, fg="yellow") - except (UnsupportedRuntimeException, BuildError, UnsupportedBuilderLibraryVersionError, - ContainerBuildNotSupported) as ex: + except ( + 
UnsupportedRuntimeException, + BuildError, + UnsupportedBuilderLibraryVersionError, + ContainerBuildNotSupported, + ) as ex: click.secho("\nBuild Failed", fg="red") raise UserException(str(ex)) @@ -190,9 +231,9 @@ def gen_success_msg(artifacts_dir, output_template_path, is_default_build_dir): if not is_default_build_dir: invoke_cmd += " -t {}".format(output_template_path) - package_cmd = "sam package --s3-bucket " + deploy_cmd = "sam deploy --guided" if not is_default_build_dir: - package_cmd += " --template-file {}".format(output_template_path) + deploy_cmd += " --template-file {}".format(output_template_path) msg = """\nBuilt Artifacts : {artifacts_dir} Built Template : {template} @@ -200,11 +241,10 @@ def gen_success_msg(artifacts_dir, output_template_path, is_default_build_dir): Commands you can use next ========================= [*] Invoke Function: {invokecmd} -[*] Package: {packagecmd} - """.format(invokecmd=invoke_cmd, - packagecmd=package_cmd, - artifacts_dir=artifacts_dir, - template=output_template_path) +[*] Deploy: {deploycmd} + """.format( + invokecmd=invoke_cmd, deploycmd=deploy_cmd, artifacts_dir=artifacts_dir, template=output_template_path + ) return msg @@ -216,7 +256,6 @@ def _get_mode_value_from_envvar(name, choices): return None if mode not in choices: - raise click.UsageError("Invalid value for 'mode': invalid choice: {}. (choose from {})" - .format(mode, choices)) + raise click.UsageError("Invalid value for 'mode': invalid choice: {}. 
(choose from {})".format(mode, choices)) return mode diff --git a/samcli/commands/deploy/__init__.py b/samcli/commands/deploy/__init__.py index 7e3bd984ab..ff2b95977d 100644 --- a/samcli/commands/deploy/__init__.py +++ b/samcli/commands/deploy/__init__.py @@ -1,54 +1,6 @@ """ -CLI command for "deploy" command +`sam deploy` command """ -import click - -from samcli.cli.main import pass_context, common_options -from samcli.lib.samlib.cloudformation_command import execute_command -from samcli.commands.exceptions import UserException -from samcli.lib.telemetry.metrics import track_command - - -SHORT_HELP = "Deploy an AWS SAM application. This is an alias for 'aws cloudformation deploy'." - - -HELP_TEXT = """The sam deploy command creates a Cloudformation Stack and deploys your resources. - -\b -e.g. sam deploy --template-file packaged.yaml --stack-name sam-app --capabilities CAPABILITY_IAM - -\b -This is an alias for aws cloudformation deploy. To learn about other parameters you can use, -run aws cloudformation deploy help. -""" - - -@click.command("deploy", short_help=SHORT_HELP, context_settings={"ignore_unknown_options": True}, help=HELP_TEXT) -@click.argument("args", nargs=-1, type=click.UNPROCESSED) -@click.option( - "--template-file", required=True, type=click.Path(), help="The path where your AWS SAM template is located" -) -@click.option( - "--stack-name", - required=True, - help="The name of the AWS CloudFormation stack you're deploying to. " - "If you specify an existing stack, the command updates the stack. " - "If you specify a new stack, the command creates it.", -) -@common_options -@pass_context -@track_command -def cli(ctx, args, template_file, stack_name): - - # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(args, template_file, stack_name) # pragma: no cover - - -def do_cli(args, template_file, stack_name): - args = args + ("--stack-name", stack_name) - - try: - execute_command("deploy", args, template_file=template_file) - except OSError as ex: - raise UserException(str(ex)) +# Expose the cli object here +from .command import cli # noqa diff --git a/samcli/commands/deploy/command.py b/samcli/commands/deploy/command.py new file mode 100644 index 0000000000..23b37b24a5 --- /dev/null +++ b/samcli/commands/deploy/command.py @@ -0,0 +1,457 @@ +""" +CLI command for "deploy" command +""" +import json +import logging + +import click +from click.types import FuncParamType + +from samcli.lib.utils import temp_file_utils +from samcli.cli.cli_config_file import configuration_option, TomlProvider +from samcli.cli.context import get_cmd_names +from samcli.cli.main import pass_context, common_options, aws_creds_options +from samcli.commands._utils.options import ( + parameter_override_option, + capabilities_override_option, + tags_override_option, + notification_arns_override_option, + template_click_option, + metadata_override_option, + _space_separated_list_func_type, + guided_deploy_stack_name, +) +from samcli.commands._utils.template import get_template_parameters +from samcli.commands.deploy.exceptions import GuidedDeployFailedError +from samcli.lib.bootstrap.bootstrap import manage_stack +from samcli.lib.config.samconfig import SamConfig +from samcli.lib.telemetry.metrics import track_command +from samcli.lib.utils.colors import Colored + +SHORT_HELP = "Deploy an AWS SAM application." + + +HELP_TEXT = """The sam deploy command creates a Cloudformation Stack and deploys your resources. + +\b +e.g. 
sam deploy --template-file packaged.yaml --stack-name sam-app --capabilities CAPABILITY_IAM + +\b +""" + +CONFIG_SECTION = "parameters" +LOG = logging.getLogger(__name__) + + +@click.command( + "deploy", + short_help=SHORT_HELP, + context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, + help=HELP_TEXT, +) +@configuration_option(provider=TomlProvider(section=CONFIG_SECTION)) +@click.option( + "--guided", + "-g", + required=False, + is_flag=True, + is_eager=True, + help="Specify this flag to allow SAM CLI to guide you through the deployment using guided prompts.", +) +@template_click_option(include_build=True) +@click.option( + "--stack-name", + required=False, + callback=guided_deploy_stack_name, + help="The name of the AWS CloudFormation stack you're deploying to. " + "If you specify an existing stack, the command updates the stack. " + "If you specify a new stack, the command creates it.", +) +@click.option( + "--s3-bucket", + required=False, + help="The name of the S3 bucket where this command uploads your " + "CloudFormation template. This is required the deployments of " + "templates sized greater than 51,200 bytes", +) +@click.option( + "--force-upload", + required=False, + is_flag=True, + help="Indicates whether to override existing files in the S3 bucket. " + "Specify this flag to upload artifacts even if they" + "match existing artifacts in the S3 bucket.", +) +@click.option( + "--s3-prefix", + required=False, + help="A prefix name that the command adds to the " + "artifacts' name when it uploads them to the S3 bucket." + "The prefix name is a path name (folder name) for the S3 bucket.", +) +@click.option( + "--kms-key-id", + required=False, + help="The ID of an AWS KMS key that the command uses to encrypt artifacts that are at rest in the S3 bucket.", +) +@click.option( + "--no-execute-changeset", + required=False, + is_flag=True, + help="Indicates whether to execute the" + "change set. 
Specify this flag if you want to view your stack changes" + "before executing the change set. The command creates an AWS CloudForma-" + "tion change set and then exits without executing the change set. if " + "the changeset looks satisfactory, the stack changes can be made by " + "running the same command without specifying `--no-execute-changeset`", +) +@click.option( + "--role-arn", + required=False, + help="The Amazon Resource Name (ARN) of an AWS Identity" + "and Access Management (IAM) role that AWS CloudFormation assumes when" + "executing the change set.", +) +@click.option( + "--fail-on-empty-changeset", + required=False, + is_flag=True, + help="Specify if the CLI should return a non-zero exit code if there are no" + "changes to be made to the stack. The default behavior is to return a" + "non-zero exit code.", +) +@click.option( + "--confirm-changeset", + required=False, + is_flag=True, + help="Prompt to confirm if the computed changeset is to be deployed by SAM CLI.", +) +@click.option( + "--use-json", + required=False, + is_flag=True, + help="Indicates whether to use JSON as the format for " + "the output AWS CloudFormation template. YAML is used by default.", +) +@metadata_override_option +@notification_arns_override_option +@tags_override_option +@parameter_override_option +@capabilities_override_option +@aws_creds_options +@common_options +@pass_context +@track_command +def cli( + ctx, + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + use_json, + tags, + metadata, + guided, + confirm_changeset, +): + + # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing + do_cli( + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + use_json, + tags, + metadata, + guided, + confirm_changeset, + ctx.region, + ctx.profile, + ) # pragma: no cover + + +def do_cli( + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + use_json, + tags, + metadata, + guided, + confirm_changeset, + region, + profile, +): + from samcli.commands.package.package_context import PackageContext + from samcli.commands.deploy.deploy_context import DeployContext + + # set capabilities and changeset decision to None, before guided gets input from the user + changeset_decision = None + _capabilities = None + _parameter_overrides = None + guided_stack_name = None + guided_s3_bucket = None + guided_s3_prefix = None + guided_region = None + + if guided: + + try: + _parameter_override_keys = get_template_parameters(template_file=template_file) + except ValueError as ex: + LOG.debug("Failed to parse SAM template", exc_info=ex) + raise GuidedDeployFailedError(str(ex)) + + read_config_showcase(template_file=template_file) + + guided_stack_name, guided_s3_bucket, guided_s3_prefix, guided_region, guided_profile, changeset_decision, _capabilities, _parameter_overrides, save_to_config = guided_deploy( + stack_name, s3_bucket, region, profile, confirm_changeset, _parameter_override_keys, parameter_overrides + ) + + if save_to_config: + save_config( + template_file, + stack_name=guided_stack_name, + s3_bucket=guided_s3_bucket, + s3_prefix=guided_s3_prefix, + region=guided_region, + profile=guided_profile, + confirm_changeset=changeset_decision, + capabilities=_capabilities, + parameter_overrides=_parameter_overrides, + ) + + 
print_deploy_args( + stack_name=guided_stack_name if guided else stack_name, + s3_bucket=guided_s3_bucket if guided else s3_bucket, + region=guided_region if guided else region, + capabilities=_capabilities if guided else capabilities, + parameter_overrides=_parameter_overrides if guided else parameter_overrides, + confirm_changeset=changeset_decision if guided else confirm_changeset, + ) + + with temp_file_utils.tempfile_platform_independent() as output_template_file: + + with PackageContext( + template_file=template_file, + s3_bucket=guided_s3_bucket if guided else s3_bucket, + s3_prefix=guided_s3_prefix if guided else s3_prefix, + output_template_file=output_template_file.name, + kms_key_id=kms_key_id, + use_json=use_json, + force_upload=force_upload, + metadata=metadata, + on_deploy=True, + region=guided_region if guided else region, + profile=profile, + ) as package_context: + package_context.run() + + with DeployContext( + template_file=output_template_file.name, + stack_name=guided_stack_name if guided else stack_name, + s3_bucket=guided_s3_bucket if guided else s3_bucket, + force_upload=force_upload, + s3_prefix=guided_s3_prefix if guided else s3_prefix, + kms_key_id=kms_key_id, + parameter_overrides=sanitize_parameter_overrides(_parameter_overrides) if guided else parameter_overrides, + capabilities=_capabilities if guided else capabilities, + no_execute_changeset=no_execute_changeset, + role_arn=role_arn, + notification_arns=notification_arns, + fail_on_empty_changeset=fail_on_empty_changeset, + tags=tags, + region=guided_region if guided else region, + profile=profile, + confirm_changeset=changeset_decision if guided else confirm_changeset, + ) as deploy_context: + deploy_context.run() + + +def guided_deploy( + stack_name, s3_bucket, region, profile, confirm_changeset, parameter_override_keys, parameter_overrides +): + default_stack_name = stack_name or "sam-app" + default_region = region or "us-east-1" + default_capabilities = ("CAPABILITY_IAM",) + 
input_capabilities = None + + color = Colored() + start_bold = "\033[1m" + end_bold = "\033[0m" + + click.echo( + color.yellow("\n\tSetting default arguments for 'sam deploy'\n\t=========================================") + ) + + stack_name = click.prompt(f"\t{start_bold}Stack Name{end_bold}", default=default_stack_name, type=click.STRING) + s3_prefix = stack_name + region = click.prompt(f"\t{start_bold}AWS Region{end_bold}", default=default_region, type=click.STRING) + input_parameter_overrides = prompt_parameters(parameter_override_keys, start_bold, end_bold) + + click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy") + confirm_changeset = click.confirm( + f"\t{start_bold}Confirm changes before deploy{end_bold}", default=confirm_changeset + ) + click.secho("\t#SAM needs permission to be able to create roles to connect to the resources in your template") + capabilities_confirm = click.confirm(f"\t{start_bold}Allow SAM CLI IAM role creation{end_bold}", default=True) + + if not capabilities_confirm: + input_capabilities = click.prompt( + f"\t{start_bold}Capabilities{end_bold}", + default=default_capabilities[0], + type=FuncParamType(func=_space_separated_list_func_type), + ) + + save_to_config = click.confirm(f"\t{start_bold}Save arguments to samconfig.toml{end_bold}", default=True) + + s3_bucket = manage_stack(profile=profile, region=region) + click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}") + click.echo("\t\tA different default S3 bucket can be set in samconfig.toml") + + return ( + stack_name, + s3_bucket, + s3_prefix, + region, + profile, + confirm_changeset, + input_capabilities if input_capabilities else default_capabilities, + input_parameter_overrides if input_parameter_overrides else parameter_overrides, + save_to_config, + ) + + +def prompt_parameters(parameter_override_keys, start_bold, end_bold): + _prompted_param_overrides = {} + if parameter_override_keys: + for parameter_key, parameter_properties in 
parameter_override_keys.items(): + no_echo = parameter_properties.get("NoEcho", False) + if no_echo: + parameter = click.prompt( + f"\t{start_bold}Parameter {parameter_key}{end_bold}", type=click.STRING, hide_input=True + ) + _prompted_param_overrides[parameter_key] = {"Value": parameter, "Hidden": True} + else: + # Make sure the default is casted to a string. + parameter = click.prompt( + f"\t{start_bold}Parameter {parameter_key}{end_bold}", + default=_prompted_param_overrides.get(parameter_key, str(parameter_properties.get("Default", ""))), + type=click.STRING, + ) + _prompted_param_overrides[parameter_key] = {"Value": parameter, "Hidden": False} + return _prompted_param_overrides + + +def print_deploy_args(stack_name, s3_bucket, region, capabilities, parameter_overrides, confirm_changeset): + + _parameters = parameter_overrides.copy() + for key, value in _parameters.items(): + if isinstance(value, dict): + _parameters[key] = value.get("Value", value) if not value.get("Hidden") else "*" * len(value.get("Value")) + + capabilities_string = json.dumps(capabilities) + + click.secho("\n\tDeploying with following values\n\t===============================", fg="yellow") + click.echo(f"\tStack name : {stack_name}") + click.echo(f"\tRegion : {region}") + click.echo(f"\tConfirm changeset : {confirm_changeset}") + click.echo(f"\tDeployment s3 bucket : {s3_bucket}") + click.echo(f"\tCapabilities : {capabilities_string}") + click.echo(f"\tParameter overrides : {_parameters}") + + click.secho("\nInitiating deployment\n=====================", fg="yellow") + + +def read_config_showcase(template_file): + _, samconfig = get_config_ctx(template_file) + + status = "Found" if samconfig.exists() else "Not found" + msg = ( + "Syntax invalid in samconfig.toml; save values " + "through sam deploy --guided to overwrite file with a valid set of values." 
+ ) + config_sanity = samconfig.sanity_check() + click.secho("\nConfiguring SAM deploy\n======================", fg="yellow") + click.echo(f"\n\tLooking for samconfig.toml : {status}") + if samconfig.exists(): + click.echo("\tReading default arguments : {}".format("Success" if config_sanity else "Failure")) + + if not config_sanity and samconfig.exists(): + raise GuidedDeployFailedError(msg) + + +def save_config(template_file, parameter_overrides, **kwargs): + + section = CONFIG_SECTION + ctx, samconfig = get_config_ctx(template_file) + + cmd_names = get_cmd_names(ctx.info_name, ctx) + + for key, value in kwargs.items(): + if isinstance(value, (list, tuple)): + value = " ".join(val for val in value) + if value: + samconfig.put(cmd_names, section, key, value) + + if parameter_overrides: + _params = [] + for key, value in parameter_overrides.items(): + if isinstance(value, dict): + if not value.get("Hidden"): + _params.append(f"{key}={value.get('Value')}") + else: + _params.append(f"{key}={value}") + if _params: + samconfig.put(cmd_names, section, "parameter_overrides", " ".join(_params)) + + samconfig.flush() + + click.echo(f"\n\tSaved arguments to config file") + click.echo("\tRunning 'sam deploy' for future deployments will use the parameters saved above.") + click.echo("\tThe above parameters can be changed by modifying samconfig.toml") + click.echo( + "\tLearn more about samconfig.toml syntax at " + "\n\thttps://docs.aws.amazon.com/serverless-application-model/latest/" + "developerguide/serverless-sam-cli-config.html" + ) + + +def get_config_ctx(template_file): + ctx = click.get_current_context() + + samconfig_dir = getattr(ctx, "samconfig_dir", None) + samconfig = SamConfig( + config_dir=samconfig_dir if samconfig_dir else SamConfig.config_dir(template_file_path=template_file) + ) + return ctx, samconfig + + +def sanitize_parameter_overrides(parameter_overrides): + return {key: value.get("Value") if isinstance(value, dict) else value for key, value in 
parameter_overrides.items()} diff --git a/samcli/commands/deploy/deploy_context.py b/samcli/commands/deploy/deploy_context.py new file mode 100644 index 0000000000..9c246f45cc --- /dev/null +++ b/samcli/commands/deploy/deploy_context.py @@ -0,0 +1,205 @@ +""" +Deploy a SAM stack +""" + +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import os +import logging +import boto3 +import click + +from samcli.commands.deploy import exceptions as deploy_exceptions +from samcli.lib.deploy.deployer import Deployer +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.yamlhelper import yaml_parse +from samcli.lib.utils.colors import Colored + +LOG = logging.getLogger(__name__) + + +class DeployContext: + + MSG_NO_EXECUTE_CHANGESET = "\nChangeset created successfully. \n" + + MSG_EXECUTE_SUCCESS = "\nSuccessfully created/updated stack - {stack_name} in {region}\n" + + MSG_CONFIRM_CHANGESET = "Deploy this changeset?" 
+ MSG_CONFIRM_CHANGESET_HEADER = "\nPreviewing CloudFormation changeset before deployment" + + def __init__( + self, + template_file, + stack_name, + s3_bucket, + force_upload, + s3_prefix, + kms_key_id, + parameter_overrides, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + fail_on_empty_changeset, + tags, + region, + profile, + confirm_changeset, + ): + self.template_file = template_file + self.stack_name = stack_name + self.s3_bucket = s3_bucket + self.force_upload = force_upload + self.s3_prefix = s3_prefix + self.kms_key_id = kms_key_id + self.parameter_overrides = parameter_overrides + self.capabilities = capabilities + self.no_execute_changeset = no_execute_changeset + self.role_arn = role_arn + self.notification_arns = notification_arns + self.fail_on_empty_changeset = fail_on_empty_changeset + self.tags = tags + self.region = region + self.profile = profile + self.s3_uploader = None + self.deployer = None + self.confirm_changeset = confirm_changeset + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def run(self): + + # Parse parameters + with open(self.template_file, "r") as handle: + template_str = handle.read() + + template_dict = yaml_parse(template_str) + + if not isinstance(template_dict, dict): + raise deploy_exceptions.DeployFailedError( + stack_name=self.stack_name, msg="{} not in required format".format(self.template_file) + ) + + parameters = self.merge_parameters(template_dict, self.parameter_overrides) + + template_size = os.path.getsize(self.template_file) + if template_size > 51200 and not self.s3_bucket: + raise deploy_exceptions.DeployBucketRequiredError() + + session = boto3.Session(profile_name=self.profile if self.profile else None) + cloudformation_client = session.client("cloudformation", region_name=self.region if self.region else None) + + s3_client = None + if self.s3_bucket: + s3_client = session.client("s3", region_name=self.region if self.region else None) + + 
self.s3_uploader = S3Uploader(s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload) + + self.deployer = Deployer(cloudformation_client) + + region = s3_client._client_config.region_name if s3_client else self.region # pylint: disable=W0212 + + return self.deploy( + self.stack_name, + template_str, + parameters, + self.capabilities, + self.no_execute_changeset, + self.role_arn, + self.notification_arns, + self.s3_uploader, + [{"Key": key, "Value": value} for key, value in self.tags.items()] if self.tags else [], + region, + self.fail_on_empty_changeset, + self.confirm_changeset, + ) + + def deploy( + self, + stack_name, + template_str, + parameters, + capabilities, + no_execute_changeset, + role_arn, + notification_arns, + s3_uploader, + tags, + region, + fail_on_empty_changeset=True, + confirm_changeset=False, + ): + try: + result, changeset_type = self.deployer.create_and_wait_for_changeset( + stack_name=stack_name, + cfn_template=template_str, + parameter_values=parameters, + capabilities=capabilities, + role_arn=role_arn, + notification_arns=notification_arns, + s3_uploader=s3_uploader, + tags=tags, + ) + + if no_execute_changeset: + click.echo(self.MSG_NO_EXECUTE_CHANGESET.format(changeset_id=result["Id"])) + return + + if confirm_changeset: + click.secho(self.MSG_CONFIRM_CHANGESET_HEADER, fg="yellow") + click.secho("=" * len(self.MSG_CONFIRM_CHANGESET_HEADER), fg="yellow") + if not click.confirm(f"{self.MSG_CONFIRM_CHANGESET}", default=False): + return + + self.deployer.execute_changeset(result["Id"], stack_name) + self.deployer.wait_for_execute(stack_name, changeset_type) + click.echo(self.MSG_EXECUTE_SUCCESS.format(stack_name=stack_name, region=region)) + + except deploy_exceptions.ChangeEmptyError as ex: + if fail_on_empty_changeset: + raise + click.echo(str(ex)) + + def merge_parameters(self, template_dict, parameter_overrides): + """ + CloudFormation CreateChangeset requires a value for every parameter + from the template, either 
specifying a new value or use previous value. + For convenience, this method will accept new parameter values and + generates a dict of all parameters in a format that ChangeSet API + will accept + + :param parameter_overrides: + :return: + """ + parameter_values = [] + + if not isinstance(template_dict.get("Parameters", None), dict): + return parameter_values + + for key, _ in template_dict["Parameters"].items(): + + obj = {"ParameterKey": key} + + if key in parameter_overrides: + obj["ParameterValue"] = parameter_overrides[key] + else: + obj["UsePreviousValue"] = True + + parameter_values.append(obj) + + return parameter_values diff --git a/samcli/commands/deploy/exceptions.py b/samcli/commands/deploy/exceptions.py new file mode 100644 index 0000000000..04d155e1bc --- /dev/null +++ b/samcli/commands/deploy/exceptions.py @@ -0,0 +1,69 @@ +""" +Exceptions that are raised by sam deploy +""" +from samcli.commands.exceptions import UserException + + +class ChangeEmptyError(UserException): + def __init__(self, stack_name): + self.stack_name = stack_name + message_fmt = "No changes to deploy. 
Stack {stack_name} is up to date" + super(ChangeEmptyError, self).__init__(message=message_fmt.format(stack_name=self.stack_name)) + + +class ChangeSetError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + message_fmt = "Failed to create changeset for the stack: {stack_name}, {msg}" + super(ChangeSetError, self).__init__(message=message_fmt.format(stack_name=self.stack_name, msg=self.msg)) + + +class DeployFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to create/update the stack: {stack_name}, {msg}" + + super(DeployFailedError, self).__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + +class GuidedDeployFailedError(UserException): + def __init__(self, msg): + self.msg = msg + super(GuidedDeployFailedError, self).__init__(message=msg) + + +class DeployStackOutPutFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to get outputs from stack: {stack_name}, {msg}" + + super(DeployStackOutPutFailedError, self).__init__( + message=message_fmt.format(stack_name=self.stack_name, msg=msg) + ) + + +class DeployBucketInDifferentRegionError(UserException): + def __init__(self, msg): + self.msg = msg + + message_fmt = "{msg} : deployment s3 bucket is in a different region, try sam deploy --guided" + + super(DeployBucketInDifferentRegionError, self).__init__(message=message_fmt.format(msg=self.msg)) + + +class DeployBucketRequiredError(UserException): + def __init__(self): + + message_fmt = ( + "Templates with a size greater than 51,200 bytes must be deployed " + "via an S3 Bucket. Please add the --s3-bucket parameter to your " + "command. The local template will be copied to that S3 bucket and " + "then deployed." 
+ ) + + super(DeployBucketRequiredError, self).__init__(message=message_fmt) diff --git a/samcli/commands/exceptions.py b/samcli/commands/exceptions.py index 3912ab9424..932bccf480 100644 --- a/samcli/commands/exceptions.py +++ b/samcli/commands/exceptions.py @@ -5,6 +5,12 @@ import click +class ConfigException(click.ClickException): + """ + Exception class when configuration file fails checks. + """ + + class UserException(click.ClickException): """ Base class for all exceptions that need to be surfaced to the user. Typically, we will display the exception @@ -18,3 +24,9 @@ class CredentialsError(UserException): """ Exception class when credentials that have been passed are invalid. """ + + +class RegionError(UserException): + """ + Exception class when no valid region is passed to a client. + """ diff --git a/samcli/commands/init/__init__.py b/samcli/commands/init/__init__.py index 798e341366..2cd8889281 100644 --- a/samcli/commands/init/__init__.py +++ b/samcli/commands/init/__init__.py @@ -8,6 +8,7 @@ import click +from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.commands.exceptions import UserException from samcli.cli.main import pass_context, common_options, global_cfg from samcli.local.common.runtime_template import RUNTIMES, SUPPORTED_DEP_MANAGERS @@ -61,6 +62,7 @@ short_help="Init an AWS SAM application.", context_settings=dict(help_option_names=["-h", "--help"]), ) +@configuration_option(provider=TomlProvider(section="parameters")) @click.option( "--no-interactive", is_flag=True, diff --git a/samcli/commands/local/generate_event/event_generation.py b/samcli/commands/local/generate_event/event_generation.py index 12379c5892..42f943273f 100644 --- a/samcli/commands/local/generate_event/event_generation.py +++ b/samcli/commands/local/generate_event/event_generation.py @@ -3,11 +3,14 @@ """ import functools + import click -from samcli.cli.options import debug_option import 
samcli.commands.local.lib.generated_sample_events.events as events +from samcli.cli.cli_config_file import TomlProvider, get_ctx_defaults +from samcli.cli.options import debug_option from samcli.lib.telemetry.metrics import track_command +import samcli.lib.config.samconfig as samconfig class ServiceCommand(click.MultiCommand): @@ -150,9 +153,18 @@ def get_command(self, ctx, cmd_name): command_callback = functools.partial( self.cmd_implementation, self.events_lib, self.top_level_cmd_name, cmd_name ) + + config = get_ctx_defaults( + cmd_name=cmd_name, + provider=TomlProvider(section="parameters"), + ctx=ctx, + config_env_name=samconfig.DEFAULT_ENV, + ) + cmd = click.Command( name=cmd_name, short_help=self.subcmd_definition[cmd_name]["help"], + context_settings={"default_map": config}, params=parameters, callback=command_callback, ) diff --git a/samcli/commands/local/invoke/cli.py b/samcli/commands/local/invoke/cli.py index 311420b9a7..15705935cb 100644 --- a/samcli/commands/local/invoke/cli.py +++ b/samcli/commands/local/invoke/cli.py @@ -8,6 +8,7 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.commands.local.cli_common.options import invoke_common_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -30,6 +31,7 @@ @click.command("invoke", help=HELP_TEXT, short_help="Invokes a local Lambda function once.") +@configuration_option(provider=TomlProvider(section="parameters")) @click.option( "--event", "-e", @@ -47,7 +49,7 @@ def cli( ctx, function_identifier, - template, + template_file, event, no_event, env_vars, @@ -68,7 +70,7 @@ def cli( do_cli( ctx, function_identifier, - template, + template_file, event, no_event, env_vars, diff --git a/samcli/commands/local/start_api/cli.py b/samcli/commands/local/start_api/cli.py index 83f699e33b..bfb7447fbe 100644 --- 
a/samcli/commands/local/start_api/cli.py +++ b/samcli/commands/local/start_api/cli.py @@ -8,6 +8,7 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.commands.local.cli_common.options import invoke_common_options, service_common_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -31,6 +32,7 @@ short_help="Sets up a local endpoint you can use to test your API. Supports hot-reloading " "so you don't need to restart this service when you make changes to your function.", ) +@configuration_option(provider=TomlProvider(section="parameters")) @service_common_options(3000) @click.option( "--static-dir", @@ -50,7 +52,7 @@ def cli( port, static_dir, # Common Options for Lambda Invoke - template, + template_file, env_vars, debug_port, debug_args, @@ -70,7 +72,7 @@ def cli( host, port, static_dir, - template, + template_file, env_vars, debug_port, debug_args, diff --git a/samcli/commands/local/start_lambda/cli.py b/samcli/commands/local/start_lambda/cli.py index b607febe2e..1e171fa5e7 100644 --- a/samcli/commands/local/start_lambda/cli.py +++ b/samcli/commands/local/start_lambda/cli.py @@ -8,6 +8,7 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.commands.local.cli_common.options import invoke_common_options, service_common_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -49,6 +50,7 @@ help=HELP_TEXT, short_help="Starts a local endpoint you can use to invoke your local Lambda functions.", ) +@configuration_option(provider=TomlProvider(section="parameters")) @service_common_options(3001) @invoke_common_options @cli_framework_options @@ -61,7 +63,7 @@ def cli( host, port, # Common Options for Lambda 
Invoke - template, + template_file, env_vars, debug_port, debug_args, @@ -80,7 +82,7 @@ def cli( ctx, host, port, - template, + template_file, env_vars, debug_port, debug_args, diff --git a/samcli/commands/logs/command.py b/samcli/commands/logs/command.py index f6b479cb46..9dd1f6620f 100644 --- a/samcli/commands/logs/command.py +++ b/samcli/commands/logs/command.py @@ -7,6 +7,7 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -32,6 +33,7 @@ @click.command("logs", help=HELP_TEXT, short_help="Fetch logs for a function") +@configuration_option(provider=TomlProvider(section="parameters")) @click.option( "--name", "-n", diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py index 3966bd2460..b6cd4c9b4c 100644 --- a/samcli/commands/package/command.py +++ b/samcli/commands/package/command.py @@ -1,16 +1,12 @@ """ CLI command for "package" command """ -from functools import partial - import click + +from samcli.cli.cli_config_file import configuration_option, TomlProvider from samcli.cli.main import pass_context, common_options, aws_creds_options -from samcli.commands._utils.options import ( - metadata_override_option, - _TEMPLATE_OPTION_DEFAULT_VALUE, - get_or_default_template_file_name, -) +from samcli.commands._utils.options import metadata_override_option, template_click_option from samcli.commands._utils.resources import resources_generator from samcli.lib.telemetry.metrics import track_command @@ -40,18 +36,8 @@ def resources_and_properties_help_string(): @click.command("package", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) -# TODO(TheSriram): Move to template_common_option across aws-sam-cli -@click.option( - "--template", - "--template-file", - "-t", - 
default=_TEMPLATE_OPTION_DEFAULT_VALUE, - type=click.Path(), - envvar="SAM_TEMPLATE_FILE", - callback=partial(get_or_default_template_file_name, include_build=True), - show_default=True, - help="AWS SAM template file", -) +@configuration_option(provider=TomlProvider(section="parameters")) +@template_click_option(include_build=True) @click.option( "--s3-bucket", required=True, @@ -97,12 +83,12 @@ def resources_and_properties_help_string(): @aws_creds_options @pass_context @track_command -def cli(ctx, template, s3_bucket, s3_prefix, kms_key_id, output_template_file, use_json, force_upload, metadata): +def cli(ctx, template_file, s3_bucket, s3_prefix, kms_key_id, output_template_file, use_json, force_upload, metadata): # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing do_cli( - template, + template_file, s3_bucket, s3_prefix, kms_key_id, diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py index c0aa9bd7f4..51d3811915 100644 --- a/samcli/commands/package/exceptions.py +++ b/samcli/commands/package/exceptions.py @@ -72,3 +72,21 @@ def __init__(self, template_file, ex): super(PackageFailedError, self).__init__( message=message_fmt.format(template_file=self.template_file, ex=self.ex) ) + + +class NoSuchBucketError(UserException): + def __init__(self, **kwargs): + self.kwargs = kwargs + + message_fmt = "\nS3 Bucket does not exist." 
+ + super(NoSuchBucketError, self).__init__(message=message_fmt.format(**self.kwargs)) + + +class BucketNotSpecifiedError(UserException): + def __init__(self, **kwargs): + self.kwargs = kwargs + + message_fmt = "\nS3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided" + + super(BucketNotSpecifiedError, self).__init__(message=message_fmt.format(**self.kwargs)) diff --git a/samcli/commands/package/package_context.py b/samcli/commands/package/package_context.py index 27c9332ab6..7e985ab715 100644 --- a/samcli/commands/package/package_context.py +++ b/samcli/commands/package/package_context.py @@ -34,7 +34,7 @@ class PackageContext: MSG_PACKAGED_TEMPLATE_WRITTEN = ( - "Successfully packaged artifacts and wrote output template " + "\nSuccessfully packaged artifacts and wrote output template " "to file {output_file_name}." "\n" "Execute the following command to deploy the packaged template" @@ -56,6 +56,7 @@ def __init__( metadata, region, profile, + on_deploy=False, ): self.template_file = template_file self.s3_bucket = s3_bucket @@ -67,6 +68,7 @@ def __init__( self.metadata = metadata self.region = region self.profile = profile + self.on_deploy = on_deploy self.s3_uploader = None def __enter__(self): @@ -91,7 +93,7 @@ def run(self): self.write_output(self.output_template_file, exported_str) - if self.output_template_file: + if self.output_template_file and not self.on_deploy: msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format( output_file_name=self.output_template_file, output_file_path=os.path.abspath(self.output_template_file), diff --git a/samcli/commands/publish/command.py b/samcli/commands/publish/command.py index 643895327e..54802aec5c 100644 --- a/samcli/commands/publish/command.py +++ b/samcli/commands/publish/command.py @@ -11,6 +11,7 @@ from samcli.commands._utils.options import template_common_option from samcli.commands._utils.template import get_template_data from samcli.lib.telemetry.metrics import track_command +from 
samcli.cli.cli_config_file import configuration_option, TomlProvider LOG = logging.getLogger(__name__) @@ -40,16 +41,17 @@ @click.command("publish", help=HELP_TEXT, short_help=SHORT_HELP) +@configuration_option(provider=TomlProvider(section="parameters")) @template_common_option @click.option("--semantic-version", help=SEMANTIC_VERSION_HELP) @aws_creds_options @cli_framework_options @pass_context @track_command -def cli(ctx, template, semantic_version): +def cli(ctx, template_file, semantic_version): # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - do_cli(ctx, template, semantic_version) # pragma: no cover + do_cli(ctx, template_file, semantic_version) # pragma: no cover def do_cli(ctx, template, semantic_version): diff --git a/samcli/commands/validate/validate.py b/samcli/commands/validate/validate.py index 4b554b0686..ab908ea2e9 100644 --- a/samcli/commands/validate/validate.py +++ b/samcli/commands/validate/validate.py @@ -10,19 +10,21 @@ from samcli.cli.main import pass_context, common_options as cli_framework_options, aws_creds_options from samcli.commands._utils.options import template_option_without_build from samcli.lib.telemetry.metrics import track_command +from samcli.cli.cli_config_file import configuration_option, TomlProvider @click.command("validate", short_help="Validate an AWS SAM template.") +@configuration_option(provider=TomlProvider(section="parameters")) @template_option_without_build @aws_creds_options @cli_framework_options @pass_context @track_command -def cli(ctx, template): +def cli(ctx, template_file): # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(ctx, template) # pragma: no cover + do_cli(ctx, template_file) # pragma: no cover def do_cli(ctx, template): diff --git a/samcli/lib/bootstrap/__init__.py b/samcli/lib/bootstrap/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/bootstrap/bootstrap.py b/samcli/lib/bootstrap/bootstrap.py new file mode 100644 index 0000000000..5457a564b3 --- /dev/null +++ b/samcli/lib/bootstrap/bootstrap.py @@ -0,0 +1,159 @@ +""" +Bootstrap's user's development environment by creating cloud resources required by SAM CLI +""" + +import json +import logging + +import boto3 + +import click + +from botocore.config import Config +from botocore.exceptions import ClientError, BotoCoreError, NoRegionError, NoCredentialsError + +from samcli.commands.bootstrap.exceptions import ManagedStackError +from samcli import __version__ +from samcli.cli.global_config import GlobalConfig +from samcli.commands.exceptions import UserException, CredentialsError, RegionError + + +SAM_CLI_STACK_NAME = "aws-sam-cli-managed-default" +LOG = logging.getLogger(__name__) + + +def manage_stack(profile, region): + try: + session = boto3.Session(profile_name=profile if profile else None) + cloudformation_client = session.client("cloudformation", config=Config(region_name=region if region else None)) + except NoCredentialsError: + raise CredentialsError( + "Error Setting Up Managed Stack Client: Unable to resolve credentials for the AWS SDK for Python client. Please see their documentation for options to pass in credentials: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html" + ) + except NoRegionError: + raise RegionError( + "Error Setting Up Managed Stack Client: Unable to resolve a region. Please provide a region via the --region parameter or by the AWS_REGION environment variable." 
+ ) + return _create_or_get_stack(cloudformation_client) + + +def _create_or_get_stack(cloudformation_client): + try: + stack = None + try: + ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) + stacks = ds_resp["Stacks"] + stack = stacks[0] + click.echo("\n\tLooking for resources needed for deployment: Found!") + except ClientError: + click.echo("\n\tLooking for resources needed for deployment: Not found.") + stack = _create_stack(cloudformation_client) # exceptions are not captured from subcommands + # Sanity check for non-none stack? Sanity check for tag? + tags = stack["Tags"] + try: + sam_cli_tag = next(t for t in tags if t["Key"] == "ManagedStackSource") + if not sam_cli_tag["Value"] == "AwsSamCli": + msg = ( + "Stack " + + SAM_CLI_STACK_NAME + + " ManagedStackSource tag shows " + + sam_cli_tag["Value"] + + " which does not match the AWS SAM CLI generated tag value of AwsSamCli. " + "Failing as the stack was likely not created by the AWS SAM CLI." + ) + raise UserException(msg) + except StopIteration: + msg = ( + "Stack " + SAM_CLI_STACK_NAME + " exists, but the ManagedStackSource tag is missing. " + "Failing as the stack was likely not created by the AWS SAM CLI." + ) + raise UserException(msg) + outputs = stack["Outputs"] + try: + bucket_name = next(o for o in outputs if o["OutputKey"] == "SourceBucket")["OutputValue"] + except StopIteration: + msg = ( + "Stack " + SAM_CLI_STACK_NAME + " exists, but is missing the managed source bucket key. " + "Failing as this stack was likely not created by the AWS SAM CLI." 
+ ) + raise UserException(msg) + # This bucket name is what we would write to a config file + return bucket_name + except (ClientError, BotoCoreError) as ex: + LOG.debug("Failed to create managed resources", exc_info=ex) + raise ManagedStackError(str(ex)) + + +def _create_stack(cloudformation_client): + click.echo("\tCreating the required resources...") + change_set_name = "InitialCreation" + change_set_resp = cloudformation_client.create_change_set( + StackName=SAM_CLI_STACK_NAME, + TemplateBody=_get_stack_template(), + Tags=[{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + ChangeSetType="CREATE", + ChangeSetName=change_set_name, # this must be unique for the stack, but we only create so that's fine + ) + stack_id = change_set_resp["StackId"] + change_waiter = cloudformation_client.get_waiter("change_set_create_complete") + change_waiter.wait( + ChangeSetName=change_set_name, StackName=SAM_CLI_STACK_NAME, WaiterConfig={"Delay": 15, "MaxAttempts": 60} + ) + cloudformation_client.execute_change_set(ChangeSetName=change_set_name, StackName=SAM_CLI_STACK_NAME) + stack_waiter = cloudformation_client.get_waiter("stack_create_complete") + stack_waiter.wait(StackName=stack_id, WaiterConfig={"Delay": 15, "MaxAttempts": 60}) + ds_resp = cloudformation_client.describe_stacks(StackName=SAM_CLI_STACK_NAME) + stacks = ds_resp["Stacks"] + click.echo("\tSuccessfully created!") + return stacks[0] + + +def _get_stack_template(): + gc = GlobalConfig() + info = {"version": __version__, "installationId": gc.installation_id if gc.installation_id else "unknown"} + + template = """ + AWSTemplateFormatVersion : '2010-09-09' + Transform: AWS::Serverless-2016-10-31 + Description: Managed Stack for AWS SAM CLI + + Metadata: + SamCliInfo: {info} + + Resources: + SamCliSourceBucket: + Type: AWS::S3::Bucket + Properties: + VersioningConfiguration: + Status: Enabled + Tags: + - Key: ManagedStackSource + Value: AwsSamCli + + SamCliSourceBucketBucketPolicy: + Type: AWS::S3::BucketPolicy + 
Properties: + Bucket: !Ref SamCliSourceBucket + PolicyDocument: + Statement: + - + Action: + - "s3:GetObject" + Effect: "Allow" + Resource: + Fn::Join: + - "" + - + - "arn:aws:s3:::" + - + !Ref SamCliSourceBucket + - "/*" + Principal: + Service: serverlessrepo.amazonaws.com + + Outputs: + SourceBucket: + Value: !Ref SamCliSourceBucket + """ + + return template.format(info=json.dumps(info)) diff --git a/samcli/lib/config/__init__.py b/samcli/lib/config/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/config/exceptions.py b/samcli/lib/config/exceptions.py new file mode 100644 index 0000000000..50297ce722 --- /dev/null +++ b/samcli/lib/config/exceptions.py @@ -0,0 +1,7 @@ +""" +Exceptions to be used by samconfig.py +""" + + +class SamConfigVersionException(Exception): + pass diff --git a/samcli/lib/config/samconfig.py b/samcli/lib/config/samconfig.py new file mode 100644 index 0000000000..e8f6eaff7c --- /dev/null +++ b/samcli/lib/config/samconfig.py @@ -0,0 +1,190 @@ +""" +Class representing the samconfig.toml +""" + +import os +import logging + +from pathlib import Path + +import tomlkit + +from samcli.lib.config.version import SAM_CONFIG_VERSION, VERSION_KEY +from samcli.lib.config.exceptions import SamConfigVersionException + +LOG = logging.getLogger(__name__) + +DEFAULT_CONFIG_FILE_NAME = "samconfig.toml" +DEFAULT_ENV = "default" + + +class SamConfig: + """ + Class to interface with `samconfig.toml` file. + """ + + document = None + + def __init__(self, config_dir, filename=None): + """ + Initialize the class + + Parameters + ---------- + config_dir : string + Directory where the configuration file needs to be stored + + filename : string + Optional. Name of the configuration file. It is recommended to stick with default so in the future we + could automatically support auto-resolving multiple config files within same directory. 
+ """ + self.filepath = Path(config_dir, filename or DEFAULT_CONFIG_FILE_NAME) + + def get_all(self, cmd_names, section, env=DEFAULT_ENV): + """ + Gets a value from the configuration file for the given environment, command and section + + Parameters + ---------- + cmd_names : list(str) + List of representing the entire command. Ex: ["local", "generate-event", "s3", "put"] + + section : str + Specific section within the command to look into Ex: `parameters` + + env : str + Optional, Name of the environment + + Returns + ------- + dict + Dictionary of configuration options in the file. None, if the config doesn't exist. + + Raises + ------ + KeyError + If the config file does *not* have the specific section + + tomlkit.exceptions.TOMLKitError + If the configuration file is invalid + """ + + env = env or DEFAULT_ENV + + self._read() + return self.document[env][self._to_key(cmd_names)][section] + + def put(self, cmd_names, section, key, value, env=DEFAULT_ENV): + """ + Writes the `key=value` under the given section. You have to call the `flush()` method after `put()` in + order to write the values back to the config file. Otherwise they will be just saved in-memory, available + for future access, but never saved back to the file. + + Parameters + ---------- + cmd_names : list(str) + List of representing the entire command. Ex: ["local", "generate-event", "s3", "put"] + + section : str + Specific section within the command to look into Ex: `parameters` + + key : str + Key to write the data under + + value + Value to write. Could be any of the supported TOML types. + + env : str + Optional, Name of the environment + + Raises + ------ + tomlkit.exceptions.TOMLKitError + If the data is invalid + """ + + if not self.document: + self._read() + # Empty document prepare the initial structure. 
+ self.document.update({env: {self._to_key(cmd_names): {section: {key: value}}}}) + # Only update appropriate key value pairs within a section + self.document[env][self._to_key(cmd_names)][section].update({key: value}) + + def flush(self): + """ + Write the data back to file + + Raises + ------ + tomlkit.exceptions.TOMLKitError + If the data is invalid + + """ + self._write() + + def sanity_check(self): + """ + Sanity check the contents of samconfig + """ + try: + self._read() + except tomlkit.exceptions.TOMLKitError: + return False + else: + return True + + def exists(self): + return self.filepath.exists() + + def path(self): + return str(self.filepath) + + @staticmethod + def config_dir(template_file_path=None): + """ + SAM Config file is always relative to the SAM Template. If it the template is not + given, then it is relative to cwd() + """ + if template_file_path: + return os.path.dirname(template_file_path) + + return os.getcwd() + + def _read(self): + if not self.document: + try: + txt = self.filepath.read_text() + self.document = tomlkit.loads(txt) + self._version_sanity_check(self._version()) + except OSError: + self.document = tomlkit.document() + + if self.document.body: + self._version_sanity_check(self._version()) + return self.document + + def _write(self): + if not self.document: + return + if not self.exists(): + open(self.filepath, "a+").close() + + current_version = self._version() if self._version() else SAM_CONFIG_VERSION + try: + self.document.add(VERSION_KEY, current_version) + except tomlkit.exceptions.KeyAlreadyPresent: + # NOTE(TheSriram): Do not attempt to re-write an existing version + pass + self.filepath.write_text(tomlkit.dumps(self.document)) + + def _version(self): + return self.document.get(VERSION_KEY, None) + + def _version_sanity_check(self, version): + if not isinstance(version, float): + raise SamConfigVersionException(f"'{VERSION_KEY}' key is not present or is in unrecognized format. 
") + + @staticmethod + def _to_key(cmd_names): + # construct a parsed name that is of the format: a_b_c_d + return "_".join([cmd.replace("-", "_").replace(" ", "_") for cmd in cmd_names]) diff --git a/samcli/lib/config/version.py b/samcli/lib/config/version.py new file mode 100644 index 0000000000..bd5a7f330f --- /dev/null +++ b/samcli/lib/config/version.py @@ -0,0 +1,6 @@ +""" +Constants and helper functions for samconfig.toml's versioning. +""" + +SAM_CONFIG_VERSION = 0.1 +VERSION_KEY = "version" diff --git a/samcli/lib/deploy/__init__.py b/samcli/lib/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py new file mode 100644 index 0000000000..170bcc8778 --- /dev/null +++ b/samcli/lib/deploy/deployer.py @@ -0,0 +1,441 @@ +""" +Cloudformation deploy class which also streams events and changeset information +""" + +# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+ +import sys +import math +from collections import OrderedDict +import logging +import time +from datetime import datetime + +import botocore + +from samcli.lib.deploy.utils import DeployColor +from samcli.commands.deploy.exceptions import ( + DeployFailedError, + ChangeSetError, + DeployStackOutPutFailedError, + DeployBucketInDifferentRegionError, +) +from samcli.commands._utils.table_print import pprint_column_names, pprint_columns +from samcli.commands.deploy import exceptions as deploy_exceptions +from samcli.lib.package.artifact_exporter import mktempfile, parse_s3_url +from samcli.lib.utils.time import utc_to_timestamp + +LOG = logging.getLogger(__name__) + +DESCRIBE_STACK_EVENTS_FORMAT_STRING = ( + "{ResourceStatus:<{0}} {ResourceType:<{1}} {LogicalResourceId:<{2}} {ResourceStatusReason:<{3}}" +) +DESCRIBE_STACK_EVENTS_DEFAULT_ARGS = OrderedDict( + { + "ResourceStatus": "ResourceStatus", + "ResourceType": "ResourceType", + "LogicalResourceId": "LogicalResourceId", + "ResourceStatusReason": "ResourceStatusReason", + } +) + +DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME = "CloudFormation events from changeset" + +DESCRIBE_CHANGESET_FORMAT_STRING = "{Operation:<{0}} {LogicalResourceId:<{1}} {ResourceType:<{2}}" +DESCRIBE_CHANGESET_DEFAULT_ARGS = OrderedDict( + {"Operation": "Operation", "LogicalResourceId": "LogicalResourceId", "ResourceType": "ResourceType"} +) + +DESCRIBE_CHANGESET_TABLE_HEADER_NAME = "CloudFormation stack changeset" + +OUTPUTS_FORMAT_STRING = "{OutputKey-Description:<{0}} {OutputValue:<{1}}" +OUTPUTS_DEFAULTS_ARGS = OrderedDict({"OutputKey-Description": "OutputKey-Description", "OutputValue": "OutputValue"}) + + +class Deployer: + def __init__(self, cloudformation_client, changeset_prefix="samcli-deploy"): + self._client = cloudformation_client + self.changeset_prefix = changeset_prefix + # 500ms of sleep time between stack checks and describe stack events. 
+ self.client_sleep = 0.5 + # 2000ms of backoff time which is exponentially used, when there are exceptions during describe stack events + self.backoff = 2 + # Maximum number of attempts before raising exception back up the chain. + self.max_attempts = 3 + self.deploy_color = DeployColor() + + def has_stack(self, stack_name): + """ + Checks if a CloudFormation stack with given name exists + + :param stack_name: Name or ID of the stack + :return: True if stack exists. False otherwise + """ + try: + resp = self._client.describe_stacks(StackName=stack_name) + if not resp["Stacks"]: + return False + + # When you run CreateChangeSet on a stack that does not exist, + # CloudFormation will create a stack and set its status to + # REVIEW_IN_PROGRESS. However this stack cannot be manipulated + # by "update" commands. Under these circumstances, we treat this + # stack as if it does not exist and call CreateChangeSet with + # ChangeSetType set to CREATE and not UPDATE. + stack = resp["Stacks"][0] + return stack["StackStatus"] != "REVIEW_IN_PROGRESS" + + except botocore.exceptions.ClientError as e: + # If a stack does not exist, describe_stacks will throw an + # exception. Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. + + if "Stack with id {0} does not exist".format(stack_name) in str(e): + LOG.debug("Stack with id %s does not exist", stack_name) + return False + except botocore.exceptions.BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a deploy failed error. + + LOG.debug("Botocore Exception : %s", str(e)) + raise DeployFailedError(stack_name=stack_name, msg=str(e)) + + except Exception as e: + # We don't know anything about this exception.
Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + + def create_changeset( + self, stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags + ): + """ + Call Cloudformation to create a changeset and wait for it to complete + + :param stack_name: Name or ID of stack + :param cfn_template: CloudFormation template string + :param parameter_values: Template parameters object + :param capabilities: Array of capabilities passed to CloudFormation + :param tags: Array of tags passed to CloudFormation + :return: + """ + if not self.has_stack(stack_name): + changeset_type = "CREATE" + # When creating a new stack, UsePreviousValue=True is invalid. + # For such parameters, users should either override with new value, + # or set a Default value in template to successfully create a stack. + parameter_values = [x for x in parameter_values if not x.get("UsePreviousValue", False)] + else: + changeset_type = "UPDATE" + # UsePreviousValue not valid if parameter is new + summary = self._client.get_template_summary(StackName=stack_name) + existing_parameters = [parameter["ParameterKey"] for parameter in summary["Parameters"]] + parameter_values = [ + x + for x in parameter_values + if not (x.get("UsePreviousValue", False) and x["ParameterKey"] not in existing_parameters) + ] + + # Each changeset will get a unique name based on time. + # Description is also setup based on current date and that SAM CLI is used. + kwargs = { + "ChangeSetName": self.changeset_prefix + str(int(time.time())), + "StackName": stack_name, + "TemplateBody": cfn_template, + "ChangeSetType": changeset_type, + "Parameters": parameter_values, + "Capabilities": capabilities, + "Description": "Created by SAM CLI at {0} UTC".format(datetime.utcnow().isoformat()), + "Tags": tags, + } + + # If an S3 uploader is available, use TemplateURL to deploy rather than + # TemplateBody. This is required for large templates. 
+ if s3_uploader: + with mktempfile() as temporary_file: + temporary_file.write(kwargs.pop("TemplateBody")) + temporary_file.flush() + + # TemplateUrl property requires S3 URL to be in path-style format + parts = parse_s3_url( + s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" + ) + kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) + + # don't set these arguments if not specified to use existing values + if role_arn is not None: + kwargs["RoleARN"] = role_arn + if notification_arns is not None: + kwargs["NotificationARNs"] = notification_arns + return self._create_change_set(stack_name=stack_name, changeset_type=changeset_type, **kwargs) + + def _create_change_set(self, stack_name, changeset_type, **kwargs): + try: + resp = self._client.create_change_set(**kwargs) + return resp, changeset_type + except botocore.exceptions.ClientError as ex: + if "The bucket you are attempting to access must be addressed using the specified endpoint" in str(ex): + raise DeployBucketInDifferentRegionError(f"Failed to create/update stack {stack_name}") + raise ChangeSetError(stack_name=stack_name, msg=str(ex)) + + except Exception as ex: + LOG.debug("Unable to create changeset", exc_info=ex) + raise ChangeSetError(stack_name=stack_name, msg=str(ex)) + + @pprint_column_names( + format_string=DESCRIBE_CHANGESET_FORMAT_STRING, + format_kwargs=DESCRIBE_CHANGESET_DEFAULT_ARGS, + table_header=DESCRIBE_CHANGESET_TABLE_HEADER_NAME, + ) + def describe_changeset(self, change_set_id, stack_name, **kwargs): + """ + Call Cloudformation to describe a changeset + + :param change_set_id: ID of the changeset + :param stack_name: Name of the CloudFormation stack + :return: dictionary of changes described in the changeset. 
+ """ + paginator = self._client.get_paginator("describe_change_set") + response_iterator = paginator.paginate(ChangeSetName=change_set_id, StackName=stack_name) + changes = {"Add": [], "Modify": [], "Remove": []} + changes_showcase = {"Add": "+ Add", "Modify": "* Modify", "Remove": "- Delete"} + changeset = False + for item in response_iterator: + cf_changes = item.get("Changes") + for change in cf_changes: + changeset = True + resource_props = change.get("ResourceChange") + action = resource_props.get("Action") + changes[action].append( + { + "LogicalResourceId": resource_props.get("LogicalResourceId"), + "ResourceType": resource_props.get("ResourceType"), + } + ) + + for k, v in changes.items(): + for value in v: + row_color = self.deploy_color.get_changeset_action_color(action=k) + pprint_columns( + columns=[changes_showcase.get(k, k), value["LogicalResourceId"], value["ResourceType"]], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=DESCRIBE_CHANGESET_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=DESCRIBE_CHANGESET_DEFAULT_ARGS.copy(), + color=row_color, + ) + + if not changeset: + # There can be cases where there are no changes, + # but could be an an addition of a SNS notification topic. + pprint_columns( + columns=["-", "-", "-"], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=DESCRIBE_CHANGESET_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=DESCRIBE_CHANGESET_DEFAULT_ARGS.copy(), + ) + + return changes + + def wait_for_changeset(self, changeset_id, stack_name): + """ + Waits until the changeset creation completes + + :param changeset_id: ID or name of the changeset + :param stack_name: Stack name + :return: Latest status of the create-change-set operation + """ + sys.stdout.write("\nWaiting for changeset to be created..\n") + sys.stdout.flush() + + # Wait for changeset to be created + waiter = self._client.get_waiter("change_set_create_complete") + # Poll every 5 seconds. 
Changeset creation should be fast + waiter_config = {"Delay": 5} + try: + waiter.wait(ChangeSetName=changeset_id, StackName=stack_name, WaiterConfig=waiter_config) + except botocore.exceptions.WaiterError as ex: + + resp = ex.last_response + status = resp["Status"] + reason = resp["StatusReason"] + + if ( + status == "FAILED" + and "The submitted information didn't contain changes." in reason + or "No updates are to be performed" in reason + ): + raise deploy_exceptions.ChangeEmptyError(stack_name=stack_name) + + raise ChangeSetError( + stack_name=stack_name, msg="ex: {0} Status: {1}. Reason: {2}".format(ex, status, reason) + ) + + def execute_changeset(self, changeset_id, stack_name): + """ + Calls CloudFormation to execute changeset + + :param changeset_id: ID of the changeset + :param stack_name: Name or ID of the stack + :return: Response from execute-change-set call + """ + try: + return self._client.execute_change_set(ChangeSetName=changeset_id, StackName=stack_name) + except botocore.exceptions.ClientError as ex: + raise DeployFailedError(stack_name=stack_name, msg=str(ex)) + + def get_last_event_time(self, stack_name): + """ + Finds the last event time stamp thats present for the stack, if not get the current time + :param stack_name: Name or ID of the stack + :return: unix epoch + """ + try: + return utc_to_timestamp( + self._client.describe_stack_events(StackName=stack_name)["StackEvents"][0]["Timestamp"] + ) + except KeyError: + return time.time() + + @pprint_column_names( + format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING, + format_kwargs=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS, + table_header=DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME, + ) + def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs): + """ + Calls CloudFormation to get current stack events + :param stack_name: Name or ID of the stack + :param time_stamp_marker: last event time on the stack to start streaming events from. 
+ :return: + """ + + stack_change_in_progress = True + events = set() + retry_attempts = 0 + + while stack_change_in_progress and retry_attempts <= self.max_attempts: + try: + + # Only sleep if there have been no retry_attempts + time.sleep(self.client_sleep if retry_attempts == 0 else 0) + describe_stacks_resp = self._client.describe_stacks(StackName=stack_name) + paginator = self._client.get_paginator("describe_stack_events") + response_iterator = paginator.paginate(StackName=stack_name) + stack_status = describe_stacks_resp["Stacks"][0]["StackStatus"] + for event_items in response_iterator: + for event in event_items["StackEvents"]: + if event["EventId"] not in events and utc_to_timestamp(event["Timestamp"]) > time_stamp_marker: + events.add(event["EventId"]) + row_color = self.deploy_color.get_stack_events_status_color(status=event["ResourceStatus"]) + pprint_columns( + columns=[ + event["ResourceStatus"], + event["ResourceType"], + event["LogicalResourceId"], + event.get("ResourceStatusReason", "-"), + ], + width=kwargs["width"], + margin=kwargs["margin"], + format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING, + format_args=kwargs["format_args"], + columns_dict=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS.copy(), + color=row_color, + ) + + if self._check_stack_complete(stack_status): + stack_change_in_progress = False + break + except botocore.exceptions.ClientError: + retry_attempts = retry_attempts + 1 + if retry_attempts > self.max_attempts: + raise + # Sleep in exponential backoff mode + time.sleep(math.pow(self.backoff, retry_attempts)) + + def _check_stack_complete(self, status): + return "COMPLETE" in status and "CLEANUP" not in status + + def wait_for_execute(self, stack_name, changeset_type): + sys.stdout.write( + "\n{} - Waiting for stack create/update " + "to complete\n".format(datetime.now().strftime("%Y-%m-%d %H:%M:%S")) + ) + sys.stdout.flush() + + self.describe_stack_events(stack_name, self.get_last_event_time(stack_name)) + + # Pick the right waiter + if 
changeset_type == "CREATE":
+            waiter = self._client.get_waiter("stack_create_complete")
+        elif changeset_type == "UPDATE":
+            waiter = self._client.get_waiter("stack_update_complete")
+        else:
+            raise RuntimeError("Invalid changeset type {0}".format(changeset_type))
+
+        # Poll every 5 seconds. Optimizing for the case when the stack has only
+        # minimal changes, such as the Code for a Lambda Function
+        waiter_config = {"Delay": 5, "MaxAttempts": 720}
+
+        try:
+            waiter.wait(StackName=stack_name, WaiterConfig=waiter_config)
+        except botocore.exceptions.WaiterError as ex:
+            LOG.debug("Execute changeset waiter exception", exc_info=ex)
+
+            raise deploy_exceptions.DeployFailedError(stack_name=stack_name, msg=str(ex))
+
+        self.get_stack_outputs(stack_name=stack_name)
+
+    def create_and_wait_for_changeset(
+        self, stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags
+    ):
+        try:
+            result, changeset_type = self.create_changeset(
+                stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags
+            )
+            self.wait_for_changeset(result["Id"], stack_name)
+            self.describe_changeset(result["Id"], stack_name)
+            return result, changeset_type
+        except botocore.exceptions.ClientError as ex:
+            raise DeployFailedError(stack_name=stack_name, msg=str(ex))
+
+    @pprint_column_names(format_string=OUTPUTS_FORMAT_STRING, format_kwargs=OUTPUTS_DEFAULTS_ARGS)
+    def _stack_outputs(self, stack_outputs, **kwargs):
+        for output in stack_outputs:
+            pprint_columns(
+                columns=[" - ".join([output["OutputKey"], output.get("Description", "")]), output["OutputValue"]],
+                width=kwargs["width"],
+                margin=kwargs["margin"],
+                format_string=OUTPUTS_FORMAT_STRING,
+                format_args=kwargs["format_args"],
+                columns_dict=OUTPUTS_DEFAULTS_ARGS.copy(),
+            )
+
+    def get_stack_outputs(self, stack_name, echo=True):
+        try:
+            stacks_description = self._client.describe_stacks(StackName=stack_name)
+            try:
+                outputs = 
stacks_description["Stacks"][0]["Outputs"] + if echo: + sys.stdout.write("\nStack {stack_name} outputs:\n".format(stack_name=stack_name)) + sys.stdout.flush() + self._stack_outputs(stack_outputs=outputs) + return outputs + except KeyError: + return None + + except botocore.exceptions.ClientError as ex: + raise DeployStackOutPutFailedError(stack_name=stack_name, msg=str(ex)) diff --git a/samcli/lib/deploy/utils.py b/samcli/lib/deploy/utils.py new file mode 100644 index 0000000000..6c556c7068 --- /dev/null +++ b/samcli/lib/deploy/utils.py @@ -0,0 +1,34 @@ +""" +Utilities for Deploy +""" + +from samcli.lib.utils.colors import Colored + + +class DeployColor: + def __init__(self): + self._color = Colored() + self.changeset_color_map = {"Add": "green", "Modify": "yellow", "Remove": "red"} + self.status_color_map = { + "CREATE_COMPLETE": "green", + "CREATE_FAILED": "red", + "CREATE_IN_PROGRESS": "yellow", + "DELETE_COMPLETE": "green", + "DELETE_FAILED": "red", + "DELETE_IN_PROGRESS": "red", + "REVIEW_IN_PROGRESS": "yellow", + "ROLLBACK_COMPLETE": "red", + "ROLLBACK_IN_PROGRESS": "red", + "UPDATE_COMPLETE": "green", + "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS": "yellow", + "UPDATE_IN_PROGRESS": "yellow", + "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS": "red", + "UPDATE_ROLLBACK_FAILED": "red", + "UPDATE_ROLLBACK_IN_PROGRESS": "red", + } + + def get_stack_events_status_color(self, status): + return self.status_color_map.get(status, "yellow") + + def get_changeset_action_color(self, action): + return self.changeset_color_map.get(action, "yellow") diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index 8b11ea8b02..3fdada7ff2 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -171,7 +171,6 @@ def zip_folder(folder_path): :param folder_path: :return: Name of the zipfile """ - filename = os.path.join(tempfile.gettempdir(), "data-" + uuid.uuid4().hex) zipfile_name = make_zip(filename, 
folder_path) @@ -539,7 +538,6 @@ def __init__( """ Reads the template and makes it ready for export """ - if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)): raise ValueError("parent_dir parameter must be " "an absolute path to a folder {0}".format(parent_dir)) diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 8b55e24a39..3e9e167ced 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -27,19 +27,11 @@ from boto3.s3 import transfer +from samcli.commands.package.exceptions import NoSuchBucketError, BucketNotSpecifiedError LOG = logging.getLogger(__name__) -class NoSuchBucketError(Exception): - def __init__(self, **kwargs): - msg = self.fmt.format(**kwargs) - Exception.__init__(self, msg) - self.kwargs = kwargs - - fmt = "S3 Bucket does not exist. " "Execute the command to create a new bucket" "\n" "aws s3 mb s3://{bucket_name}" - - class S3Uploader: """ Class to upload objects to S3 bucket that use versioning. If bucket @@ -99,6 +91,8 @@ def upload(self, file_name, remote_path): additional_args["Metadata"] = self.artifact_metadata print_progress_callback = ProgressPercentage(file_name, remote_path) + if not self.bucket_name: + raise BucketNotSpecifiedError() future = self.transfer_manager.upload( file_name, self.bucket_name, remote_path, additional_args, [print_progress_callback] ) @@ -125,7 +119,6 @@ def upload_with_dedup(self, file_name, extension=None): # uploads of same object. Uploader will check if the file exists in S3 # and re-upload only if necessary. 
So the template points to same file # in multiple places, this will upload only once - filemd5 = self.file_checksum(file_name) remote_path = filemd5 if extension: @@ -143,6 +136,8 @@ def file_exists(self, remote_path): try: # Find the object that matches this ETag + if not self.bucket_name: + raise BucketNotSpecifiedError() self.s3.head_object(Bucket=self.bucket_name, Key=remote_path) return True except botocore.exceptions.ClientError: @@ -151,6 +146,8 @@ def file_exists(self, remote_path): return False def make_url(self, obj_path): + if not self.bucket_name: + raise BucketNotSpecifiedError() return "s3://{0}/{1}".format(self.bucket_name, obj_path) def file_checksum(self, file_name): @@ -208,3 +205,5 @@ def on_progress(self, bytes_transferred, **kwargs): "\rUploading to %s %s / %s (%.2f%%)" % (self._remote_path, self._seen_so_far, self._size, percentage) ) sys.stderr.flush() + if int(percentage) == 100: + sys.stderr.write("\n") diff --git a/samcli/lib/samlib/cloudformation_command.py b/samcli/lib/samlib/cloudformation_command.py deleted file mode 100644 index e9bdbb4304..0000000000 --- a/samcli/lib/samlib/cloudformation_command.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -Utility to call cloudformation command with args -""" - -import os -import logging -import platform -import subprocess -import sys - -from samcli.cli.global_config import GlobalConfig - -LOG = logging.getLogger(__name__) - - -def execute_command(command, args, template_file): - LOG.debug("%s command is called", command) - try: - aws_cmd = find_executable("aws") - - # Add SAM CLI information for AWS CLI to know about the caller. 
- gc = GlobalConfig() - env = os.environ.copy() - if gc.telemetry_enabled: - env["AWS_EXECUTION_ENV"] = "SAM-" + gc.installation_id - - args = list(args) - if template_file: - # Since --template-file was parsed separately, add it here manually - args.extend(["--template-file", template_file]) - - subprocess.check_call([aws_cmd, "cloudformation", command] + args, env=env) - LOG.debug("%s command successful", command) - except subprocess.CalledProcessError as e: - # Underlying aws command will print the exception to the user - LOG.debug("Exception: %s", e) - sys.exit(e.returncode) - - -def find_executable(execname): - - if platform.system().lower() == "windows": - options = ["{}.cmd".format(execname), "{}.exe".format(execname), execname] - else: - options = [execname] - - for name in options: - try: - subprocess.Popen([name], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - # No exception. Let's pick this - return name - except OSError as ex: - LOG.debug("Unable to find executable %s", name, exc_info=ex) - - raise OSError("Cannot find AWS CLI installation, was looking at executables with names: {}".format(options)) diff --git a/samcli/lib/utils/sam_logging.py b/samcli/lib/utils/sam_logging.py index bf40d2ccaf..2222640ab4 100644 --- a/samcli/lib/utils/sam_logging.py +++ b/samcli/lib/utils/sam_logging.py @@ -28,3 +28,22 @@ def configure_logger(logger, formatter, level): logger.setLevel(level) logger.propagate = False logger.addHandler(log_stream_handler) + + @staticmethod + def configure_null_logger(logger): + """ + Configure a Logger with a NullHandler + + Useful for libraries that do not follow: https://docs.python.org/3.6/howto/logging.html#configuring-logging-for-a-library + + Parameters + ---------- + logger logging.getLogger + Logger to configure + + Returns + ------- + None + """ + logger.propagate = False + logger.addHandler(logging.NullHandler()) diff --git a/samcli/lib/utils/temp_file_utils.py b/samcli/lib/utils/temp_file_utils.py new file mode 100644 index 
0000000000..20f094b024 --- /dev/null +++ b/samcli/lib/utils/temp_file_utils.py @@ -0,0 +1,26 @@ +""" +Helper functions for temporary files +""" +import os +import contextlib +import tempfile + + +def remove(path): + if path: + try: + os.remove(path) + except OSError: + pass + + +@contextlib.contextmanager +def tempfile_platform_independent(): + # NOTE(TheSriram): Setting delete=False is specific to windows. + # https://docs.python.org/3/library/tempfile.html#tempfile.NamedTemporaryFile + _tempfile = tempfile.NamedTemporaryFile(delete=False) + try: + yield _tempfile + finally: + _tempfile.close() + remove(_tempfile.name) diff --git a/samcli/lib/utils/time.py b/samcli/lib/utils/time.py index 3989cc03ad..02b078337f 100644 --- a/samcli/lib/utils/time.py +++ b/samcli/lib/utils/time.py @@ -65,6 +65,16 @@ def to_timestamp(some_time): return int((some_time - datetime.datetime(1970, 1, 1)).total_seconds() * 1000.0) +def utc_to_timestamp(utc): + """ + Converts utc timestamp with tz_info set to utc to Unix timestamp + :param utc: datetime.datetime + :return: UNIX timestamp + """ + + return to_timestamp(utc.replace(tzinfo=None)) + + def to_utc(some_time): """ Convert the given date to UTC, if the date contains a timezone. diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/README.md index 17d9cf30ec..63c74c5d4e 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-dotnet/{{cookiecutter.project_name}}/README.md @@ -23,44 +23,26 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. 
-* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * .NET Core - [Install .NET Core](https://www.microsoft.com/net/download) * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. 
+* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
+* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
 
-To deploy the application, use the `sam deploy` command.
-
-```bash
-{{ cookiecutter.project_name }}$ sam deploy \
-    --template-file packaged.yaml \
-    --stack-name {{ cookiecutter.project_name }} \
-    --capabilities CAPABILITY_IAM
-```
-
-After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL:
-
-```bash
-{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \
-    --stack-name {{ cookiecutter.project_name }} \
-    --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \
-    --output table
-```
+You can find your API Gateway Endpoint URL in the output values displayed after deployment.
 
 ## Use the SAM CLI to build and test locally
 
@@ -123,11 +105,10 @@ Tests are defined in the `test` folder in this project.
 
 ## Cleanup
 
-To delete the sample application and the bucket that you created, use the AWS CLI.
+To delete the sample application that you created, use the AWS CLI. 
Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-golang/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-golang/{{cookiecutter.project_name}}/README.md index 2d783abbc2..8645d7d669 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-golang/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-golang/{{cookiecutter.project_name}}/README.md @@ -77,38 +77,21 @@ AWS Lambda Python runtime requires a flat folder with all dependencies including ... ``` -First and foremost, we need a `S3 bucket` where we can upload our Lambda functions packaged as ZIP before we deploy anything - If you don't have a S3 bucket to store code artifacts then this is a good time to create one: +To deploy your application for the first time, run the following in your shell: ```bash -aws s3 mb s3://BUCKET_NAME +sam deploy --guided ``` -Next, run the following command to package our Lambda function to S3: +The command will package and deploy your application to AWS, with a series of prompts: -```bash -sam package \ - --output-template-file packaged.yaml \ - --s3-bucket REPLACE_THIS_WITH_YOUR_S3_BUCKET_NAME -``` - -Next, the following command will create a Cloudformation Stack and deploy your SAM resources. 
-
-```bash
-sam deploy \
-    --template-file packaged.yaml \
-    --stack-name {{ cookiecutter.project_name.lower().replace(' ', '-') }} \
-    --capabilities CAPABILITY_IAM
-```
-
-> **See [Serverless Application Model (SAM) HOWTO Guide](https://github.com/awslabs/serverless-application-model/blob/master/HOWTO.md) for more details in how to get started.**
+* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name.
+* **AWS Region**: The AWS region you want to deploy your app to.
+* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes.
+* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
+* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
 
-After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL:
-
-```bash
-aws cloudformation describe-stacks \
-    --stack-name {{ cookiecutter.project_name.lower().replace(' ', '-') }} \
-    --query 'Stacks[].Outputs'
-```
+You can find your API Gateway Endpoint URL in the output values displayed after deployment. 
### Testing @@ -153,25 +136,6 @@ If it's already installed, run the following command to ensure it's the latest v ```shell choco upgrade golang ``` -## AWS CLI commands - -AWS CLI commands to package, deploy and describe outputs defined within the cloudformation stack: - -```bash -sam package \ - --template-file template.yaml \ - --output-template-file packaged.yaml \ - --s3-bucket REPLACE_THIS_WITH_YOUR_S3_BUCKET_NAME - -sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name.lower().replace(' ', '-') }} \ - --capabilities CAPABILITY_IAM \ - --parameter-overrides MyParameterSample=MySampleValue - -aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name.lower().replace(' ', '-') }} --query 'Stacks[].Outputs' -``` ## Bringing to the next level diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-gradle/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-gradle/{{cookiecutter.project_name}}/README.md index a1729ec9d0..f1b9712296 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-gradle/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-gradle/{{cookiecutter.project_name}}/README.md @@ -23,44 +23,26 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. -* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. 
* SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * Java8 - [Install the Java SE Development Kit 8](http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html) * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. 
By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
+* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
 
-To deploy the application, use the `sam deploy` command.
-
-```bash
-{{ cookiecutter.project_name }}$ sam deploy \
-    --template-file packaged.yaml \
-    --stack-name {{ cookiecutter.project_name }} \
-    --capabilities CAPABILITY_IAM
-```
-
-After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL:
-
-```bash
-{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \
-    --stack-name {{ cookiecutter.project_name }} \
-    --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \
-    --output table
-```
+You can find your API Gateway Endpoint URL in the output values displayed after deployment.
 
 ## Use the SAM CLI to build and test locally
 
@@ -124,11 +106,10 @@ HelloWorldFunction$ gradle test
 
 ## Cleanup
 
-To delete the sample application and the bucket that you created, use the AWS CLI.
+To delete the sample application that you created, use the AWS CLI. 
Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-maven/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-maven/{{cookiecutter.project_name}}/README.md index 5b46ee17f5..34c8f025f2 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-maven/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-java-maven/{{cookiecutter.project_name}}/README.md @@ -23,45 +23,27 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. -* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * Java8 - [Install the Java SE Development Kit 8](http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html) * Maven - [Install Maven](https://maven.apache.org/install.html) * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. 
+To build and deploy your application for the first time, run the following in your shell:
 
 ```bash
-{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME
+sam build
+sam deploy --guided
 ```
 
-To prepare the application for deployment, use the `sam package` command.
+The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts:
 
-```bash
-{{ cookiecutter.project_name }}$ sam package \
-    --output-template-file packaged.yaml \
-    --s3-bucket BUCKET_NAME
-```
-
-The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket.
+* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name.
+* **AWS Region**: The AWS region you want to deploy your app to.
+* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes.
+* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
+* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. 
-To deploy the application, use the `sam deploy` command. - -```bash -{{ cookiecutter.project_name }}$ sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name }} \ - --capabilities CAPABILITY_IAM -``` - -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name }} \ - --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \ - --output table -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. ## Use the SAM CLI to build and test locally @@ -125,11 +107,10 @@ HelloWorldFunction$ mvn test ## Cleanup -To delete the sample application and the bucket that you created, use the AWS CLI. +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-nodejs/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-nodejs/{{cookiecutter.project_name}}/README.md index baab85559a..aa68a714ed 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-nodejs/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-nodejs/{{cookiecutter.project_name}}/README.md @@ -23,44 +23,26 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. 
-* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * Node.js - [Install Node.js 10](https://nodejs.org/en/), including the NPM package management tool. * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. 
+* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
+* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
 
-To deploy the application, use the `sam deploy` command.
-
-```bash
-{{ cookiecutter.project_name }}$ sam deploy \
-    --template-file packaged.yaml \
-    --stack-name {{ cookiecutter.project_name }} \
-    --capabilities CAPABILITY_IAM
-```
-
-After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL:
-
-```bash
-{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \
-    --stack-name {{ cookiecutter.project_name }} \
-    --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \
-    --output table
-```
+You can find your API Gateway Endpoint URL in the output values displayed after deployment.
 
 ## Use the SAM CLI to build and test locally
 
@@ -125,15 +107,14 @@ hello-world$ npm run test
 
 ## Cleanup
 
-To delete the sample application and the bucket that you created, use the AWS CLI.
+To delete the sample application that you created, use the AWS CLI. 
Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts. -Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/) \ No newline at end of file +Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/) diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-python/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-python/{{cookiecutter.project_name}}/README.md index a12489cb39..0fe6ea9a37 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-python/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-python/{{cookiecutter.project_name}}/README.md @@ -23,7 +23,6 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. -* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. 
* SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) {%- if cookiecutter.runtime == 'python2.7' %} * [Python 2.7 installed](https://www.python.org/downloads/) @@ -32,39 +31,22 @@ To use the SAM CLI, you need the following tools. {%- endif %} * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. +The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. 
+* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. -To deploy the application, use the `sam deploy` command. - -```bash -{{ cookiecutter.project_name }}$ sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name }} \ - --capabilities CAPABILITY_IAM -``` - -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name }} \ - --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \ - --output table -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. ## Use the SAM CLI to build and test locally @@ -128,11 +110,10 @@ Tests are defined in the `tests` folder in this project. Use PIP to install the ## Cleanup -To delete the sample application and the bucket that you created, use the AWS CLI. +To delete the sample application that you created, use the AWS CLI. 
Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources diff --git a/samcli/local/init/templates/cookiecutter-aws-sam-hello-ruby/{{cookiecutter.project_name}}/README.md b/samcli/local/init/templates/cookiecutter-aws-sam-hello-ruby/{{cookiecutter.project_name}}/README.md index 8c426579f1..0ffe88b084 100644 --- a/samcli/local/init/templates/cookiecutter-aws-sam-hello-ruby/{{cookiecutter.project_name}}/README.md +++ b/samcli/local/init/templates/cookiecutter-aws-sam-hello-ruby/{{cookiecutter.project_name}}/README.md @@ -23,44 +23,26 @@ The Serverless Application Model Command Line Interface (SAM CLI) is an extensio To use the SAM CLI, you need the following tools. -* AWS CLI - [Install the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) and [configure it with your AWS credentials]. * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) * Ruby - [Install Ruby 2.5](https://www.ruby-lang.org/en/documentation/installation/) * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) -The SAM CLI uses an Amazon S3 bucket to store your application's deployment artifacts. If you don't have a bucket suitable for this purpose, create one. Replace `BUCKET_NAME` in the commands in this section with a unique bucket name. +To build and deploy your application for the first time, run the following in your shell: ```bash -{{ cookiecutter.project_name }}$ aws s3 mb s3://BUCKET_NAME +sam build +sam deploy --guided ``` -To prepare the application for deployment, use the `sam package` command. 
+The first command will build the source of your application. The second command will package and deploy your application to AWS, with a series of prompts: -```bash -{{ cookiecutter.project_name }}$ sam package \ - --output-template-file packaged.yaml \ - --s3-bucket BUCKET_NAME -``` - -The SAM CLI creates deployment packages, uploads them to the S3 bucket, and creates a new version of the template that refers to the artifacts in the bucket. +* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. +* **AWS Region**: The AWS region you want to deploy your app to. +* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. +* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. +* **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. -To deploy the application, use the `sam deploy` command. 
- -```bash -{{ cookiecutter.project_name }}$ sam deploy \ - --template-file packaged.yaml \ - --stack-name {{ cookiecutter.project_name }} \ - --capabilities CAPABILITY_IAM -``` - -After deployment is complete you can run the following command to retrieve the API Gateway Endpoint URL: - -```bash -{{ cookiecutter.project_name }}$ aws cloudformation describe-stacks \ - --stack-name {{ cookiecutter.project_name }} \ - --query 'Stacks[].Outputs[?OutputKey==`HelloWorldApi`]' \ - --output table -``` +You can find your API Gateway Endpoint URL in the output values displayed after deployment. ## Use the SAM CLI to build and test locally @@ -123,11 +105,10 @@ Tests are defined in the `tests` folder in this project. ## Cleanup -To delete the sample application and the bucket that you created, use the AWS CLI. +To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: ```bash -{{ cookiecutter.project_name }}$ aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} -{{ cookiecutter.project_name }}$ aws s3 rb s3://BUCKET_NAME +aws cloudformation delete-stack --stack-name {{ cookiecutter.project_name }} ``` ## Resources diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index 4f44590ab7..13e37aebb5 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py @@ -1,10 +1,11 @@ import sys import os -import subprocess import logging from unittest import skipIf from pathlib import Path from parameterized import parameterized +from subprocess import Popen, PIPE, TimeoutExpired + import pytest from .build_integ_base import BuildIntegBase @@ -13,6 +14,8 @@ LOG = logging.getLogger(__name__) +TIMEOUT = 300 + @skipIf( ((IS_WINDOWS and RUNNING_ON_CI) and not CI_OVERRIDE), @@ -31,6 +34,7 @@ class TestBuildCommand_PythonFunctions(BuildIntegBase): FUNCTION_LOGICAL_ID = "Function" 
+ @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("python2.7", False), @@ -48,8 +52,12 @@ def test_with_default_requirements(self, runtime, use_container): cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}", cmdlist) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST @@ -98,15 +106,20 @@ def _get_python_version(self): "Skip build tests on windows when running in CI unless overridden", ) class TestBuildCommand_ErrorCases(BuildIntegBase): + @pytest.mark.flaky(reruns=3) def test_unsupported_runtime(self): overrides = {"Runtime": "unsupportedpython", "CodeUri": "Python"} cmdlist = self.get_command_list(parameter_overrides=overrides) LOG.info("Running Command: {}", cmdlist) - process = subprocess.Popen(cmdlist, cwd=self.working_dir, stdout=subprocess.PIPE) - process.wait() - - process_stdout = b"".join(process.stdout.readlines()).strip().decode("utf-8") + process = Popen(cmdlist, cwd=self.working_dir, stdout=PIPE) + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip().decode("utf-8") self.assertEqual(1, process.returncode) self.assertIn("Build Failed", process_stdout) @@ -124,6 +137,7 @@ class TestBuildCommand_NodeFunctions(BuildIntegBase): FUNCTION_LOGICAL_ID = "Function" + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("nodejs6.10", False), @@ -141,8 +155,12 @@ def test_with_default_package_json(self, runtime, use_container): cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}", cmdlist) - process = subprocess.Popen(cmdlist, 
cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, @@ -198,11 +216,11 @@ class TestBuildCommand_RubyFunctions(BuildIntegBase): FUNCTION_LOGICAL_ID = "Function" @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") @parameterized.expand([("ruby2.5")]) def test_building_ruby_in_container(self, runtime): self._test_with_default_gemfile(runtime, "use_container") + @pytest.mark.flaky(reruns=3) @parameterized.expand([("ruby2.5")]) def test_building_ruby_in_process(self, runtime): self._test_with_default_gemfile(runtime, False) @@ -212,8 +230,12 @@ def _test_with_default_gemfile(self, runtime, use_container): cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}".format(cmdlist)) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, @@ -283,7 +305,6 @@ class TestBuildCommand_Java(BuildIntegBase): UNIX_LINE_ENDING = b"\n" @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") @parameterized.expand( [ ("java8", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE), @@ -301,6 +322,7 @@ class TestBuildCommand_Java(BuildIntegBase): def test_building_java_in_container(self, runtime, code_path, expected_files): self._test_with_building_java(runtime, code_path, expected_files, "use_container") + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("java8", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE), @@ -313,6 +335,7 @@ def test_building_java_in_container(self, runtime, code_path, expected_files): def test_building_java8_in_process(self, 
runtime, code_path, expected_files): self._test_with_building_java(runtime, code_path, expected_files, False) + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("java11", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE), @@ -333,8 +356,12 @@ def _test_with_building_java(self, runtime, code_path, expected_files, use_conta self._change_to_unix_line_ending(os.path.join(self.test_data_path, self.USING_GRADLEW_PATH, "gradlew")) LOG.info("Running Command: {}".format(cmdlist)) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, self.FUNCTION_LOGICAL_ID, expected_files, self.EXPECTED_DEPENDENCIES @@ -408,6 +435,7 @@ class TestBuildCommand_Dotnet_cli_package(BuildIntegBase): "HelloWorld.dll", } + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("dotnetcore2.0", "Dotnetcore2.0", None), @@ -431,8 +459,12 @@ def test_with_dotnetcore(self, runtime, code_uri, mode): if mode: newenv["SAM_BUILD_MODE"] = mode - process = subprocess.Popen(cmdlist, cwd=self.working_dir, env=newenv) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir, env=newenv) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact( self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST @@ -455,6 +487,7 @@ def test_with_dotnetcore(self, runtime, code_uri, mode): self.verify_docker_container_cleanedup(runtime) + @pytest.mark.flaky(reruns=3) @parameterized.expand([("dotnetcore2.0", "Dotnetcore2.0"), ("dotnetcore2.1", "Dotnetcore2.1")]) def test_must_fail_with_container(self, runtime, code_uri): use_container = True @@ -466,8 +499,12 @@ def test_must_fail_with_container(self, runtime, code_uri): cmdlist = self.get_command_list(use_container=use_container, 
parameter_overrides=overrides) LOG.info("Running Command: {}".format(cmdlist)) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise # Must error out, because container builds are not supported self.assertEqual(process.returncode, 1) @@ -507,16 +544,18 @@ class TestBuildCommand_SingleFunctionBuilds(BuildIntegBase): "requirements.txt", } + @pytest.mark.flaky(reruns=3) def test_function_not_found(self): overrides = {"Runtime": "python3.7", "CodeUri": "Python", "Handler": "main.handler"} cmdlist = self.get_command_list(parameter_overrides=overrides, function_identifier="FunctionNotInTemplate") - process = subprocess.Popen(cmdlist, cwd=self.working_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = process.communicate() + process = Popen(cmdlist, cwd=self.working_dir, stderr=PIPE) + _, stderr = process.communicate(timeout=TIMEOUT) self.assertEqual(process.returncode, 1) self.assertIn("FunctionNotInTemplate not found", str(stderr.decode("utf8"))) + @pytest.mark.flaky(reruns=3) @parameterized.expand( [ ("python3.7", False, "FunctionOne"), @@ -532,8 +571,12 @@ def test_build_single_function(self, runtime, use_container, function_identifier ) LOG.info("Running Command: {}", cmdlist) - process = subprocess.Popen(cmdlist, cwd=self.working_dir) - process.wait() + process = Popen(cmdlist, cwd=self.working_dir) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self._verify_built_artifact(self.default_build_dir, function_identifier, self.EXPECTED_FILES_PROJECT_MANIFEST) diff --git a/tests/integration/deploy/__init__.py b/tests/integration/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integration/deploy/deploy_integ_base.py b/tests/integration/deploy/deploy_integ_base.py new file mode 100644 index 
0000000000..00165f4adf --- /dev/null +++ b/tests/integration/deploy/deploy_integ_base.py @@ -0,0 +1,89 @@ +import os +import uuid +import json +import time +from pathlib import Path +from unittest import TestCase + +import boto3 + + +class DeployIntegBase(TestCase): + @classmethod + def setUpClass(cls): + pass + + def setUp(self): + super(DeployIntegBase, self).setUp() + + def tearDown(self): + super(DeployIntegBase, self).tearDown() + + def base_command(self): + command = "sam" + if os.getenv("SAM_CLI_DEV"): + command = "samdev" + + return command + + def get_deploy_command_list( + self, + s3_bucket=None, + stack_name=None, + template=None, + template_file=None, + s3_prefix=None, + capabilities=None, + force_upload=False, + notification_arns=None, + fail_on_empty_changeset=False, + confirm_changeset=False, + no_execute_changeset=False, + parameter_overrides=None, + role_arn=None, + kms_key_id=None, + tags=None, + profile=None, + region=None, + guided=False, + ): + command_list = [self.base_command(), "deploy"] + + if guided: + command_list = command_list + ["--guided"] + if s3_bucket: + command_list = command_list + ["--s3-bucket", str(s3_bucket)] + if capabilities: + command_list = command_list + ["--capabilities", str(capabilities)] + if parameter_overrides: + command_list = command_list + ["--parameter-overrides", str(parameter_overrides)] + if role_arn: + command_list = command_list + ["--role-arn", str(role_arn)] + if notification_arns: + command_list = command_list + ["--notification-arns", str(notification_arns)] + if stack_name: + command_list = command_list + ["--stack-name", str(stack_name)] + if template: + command_list = command_list + ["--template", str(template)] + if template_file: + command_list = command_list + ["--template-file", str(template_file)] + if s3_prefix: + command_list = command_list + ["--s3-prefix", str(s3_prefix)] + if kms_key_id: + command_list = command_list + ["--kms-key-id", str(kms_key_id)] + if no_execute_changeset: + 
command_list = command_list + ["--no-execute-changeset"] + if force_upload: + command_list = command_list + ["--force-upload"] + if fail_on_empty_changeset: + command_list = command_list + ["--fail-on-empty-changeset"] + if confirm_changeset: + command_list = command_list + ["--confirm-changeset"] + if tags: + command_list = command_list + ["--tags", str(tags)] + if region: + command_list = command_list + ["--region", str(region)] + if profile: + command_list = command_list + ["--profile", str(profile)] + + return command_list diff --git a/tests/integration/deploy/test_deploy_command.py b/tests/integration/deploy/test_deploy_command.py new file mode 100644 index 0000000000..bd2b92970e --- /dev/null +++ b/tests/integration/deploy/test_deploy_command.py @@ -0,0 +1,423 @@ +import os +import tempfile +import uuid +import time +from subprocess import Popen, PIPE, TimeoutExpired +from unittest import skipIf + +import boto3 +from parameterized import parameterized + +from samcli.lib.config.samconfig import DEFAULT_CONFIG_FILE_NAME +from samcli.lib.bootstrap.bootstrap import SAM_CLI_STACK_NAME +from tests.integration.deploy.deploy_integ_base import DeployIntegBase +from tests.integration.package.package_integ_base import PackageIntegBase +from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI + +# Deploy tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. +# This is to restrict deploy tests to run outside of CI/CD and when the branch is not master. 
+SKIP_DEPLOY_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI +CFN_SLEEP = 3 +TIMEOUT = 300 + + +@skipIf(SKIP_DEPLOY_TESTS, "Skip deploy tests in CI/CD only") +class TestDeploy(PackageIntegBase, DeployIntegBase): + def setUp(self): + self.cf_client = boto3.client("cloudformation") + self.sns_arn = os.environ.get("AWS_SNS") + self.stack_names = [] + time.sleep(CFN_SLEEP) + super(TestDeploy, self).setUp() + + def tearDown(self): + for stack_name in self.stack_names: + self.cf_client.delete_stack(StackName=stack_name) + super(TestDeploy, self).tearDown() + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_package_and_deploy_no_s3_bucket_all_args(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + with tempfile.NamedTemporaryFile(delete=False) as output_template_file: + # Package necessary artifacts. + package_command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, template=template_path, output_template_file=output_template_file.name + ) + + package_process = Popen(package_command_list, stdout=PIPE) + try: + package_process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + package_process.kill() + raise + + self.assertEqual(package_process.returncode, 0) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Deploy and only show changeset. 
+ deploy_command_list_no_execute = self.get_deploy_command_list( + template_file=output_template_file.name, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=True, + tags="integ=true clarity=yes", + ) + + deploy_process_no_execute = Popen(deploy_command_list_no_execute, stdout=PIPE) + try: + deploy_process_no_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_no_execute.kill() + raise + self.assertEqual(deploy_process_no_execute.returncode, 0) + + # Deploy the given stack with the changeset. + deploy_command_list_execute = self.get_deploy_command_list( + template_file=output_template_file.name, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + tags="integ=true clarity=yes", + ) + + deploy_process = Popen(deploy_command_list_execute, stdout=PIPE) + try: + deploy_process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process.kill() + raise + self.assertEqual(deploy_process.returncode, 0) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_no_package_and_deploy_with_s3_bucket_all_args(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE) + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise + self.assertEqual(deploy_process_execute.returncode, 0) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_no_package_and_deploy_with_s3_bucket_all_args_confirm_changeset(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.s3_bucket.name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=True, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate("Y".encode(), timeout=TIMEOUT) + self.assertEqual(deploy_process_execute.returncode, 0) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_without_s3_bucket(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + try: + _, stderr = deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise + # Error asking for s3 bucket + self.assertEqual(deploy_process_execute.returncode, 1) + stderr = stderr.strip() + self.assertIn( + bytes( + f"S3 Bucket not specified, use --s3-bucket to specify a bucket name or run sam deploy --guided", + encoding="utf-8", + ), + stderr, + ) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_without_stack_name(self, template_file): + template_path = 
self.test_data_path.joinpath(template_file) + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise + # Error no stack name present + self.assertEqual(deploy_process_execute.returncode, 2) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_without_capabilities(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + s3_prefix="integ_deploy", + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise + # Error capabilities not specified + self.assertEqual(deploy_process_execute.returncode, 1) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_without_template_file(self, template_file): + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list( + stack_name=stack_name, + s3_prefix="integ_deploy", + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise + # Error template file not specified + self.assertEqual(deploy_process_execute.returncode, 1) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_s3_bucket_switch_region(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE) + try: + deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise + # Deploy should succeed + self.assertEqual(deploy_process_execute.returncode, 0) + + # Try to deploy to another region. 
+ deploy_command_list = self.get_deploy_command_list( + template_file=template_path, + stack_name=stack_name, + capabilities="CAPABILITY_IAM", + s3_prefix="integ_deploy", + s3_bucket=self.bucket_name, + force_upload=True, + notification_arns=self.sns_arn, + parameter_overrides="Parameter=Clarity", + kms_key_id=self.kms_key, + no_execute_changeset=False, + tags="integ=true clarity=yes", + confirm_changeset=False, + region="eu-west-2", + ) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE) + try: + _, stderr = deploy_process_execute.communicate(timeout=TIMEOUT) + except TimeoutExpired: + deploy_process_execute.kill() + raise + # Deploy should fail, asking for s3 bucket + self.assertEqual(deploy_process_execute.returncode, 1) + stderr = stderr.strip() + self.assertIn( + bytes( + f"Error: Failed to create/update stack {stack_name} : " + f"deployment s3 bucket is in a different region, try sam deploy --guided", + encoding="utf-8", + ), + stderr, + ) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_guided(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate("{}\n\n\n\n\n\n".format(stack_name).encode()) + + # Deploy should succeed with a managed stack + self.assertEqual(deploy_process_execute.returncode, 0) + self.stack_names.append(SAM_CLI_STACK_NAME) + # Remove samconfig.toml + os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_guided_set_parameter(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate("{}\n\nSuppliedParameter\n\n\n\n".format(stack_name).encode()) + + # Deploy should succeed with a managed stack + self.assertEqual(deploy_process_execute.returncode, 0) + self.stack_names.append(SAM_CLI_STACK_NAME) + # Remove samconfig.toml + os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_guided_set_capabilities(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. 
+ deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate( + "{}\n\nSuppliedParameter\n\nn\nCAPABILITY_IAM CAPABILITY_NAMED_IAM\n\n".format(stack_name).encode() + ) + + # Deploy should succeed with a managed stack + self.assertEqual(deploy_process_execute.returncode, 0) + self.stack_names.append(SAM_CLI_STACK_NAME) + # Remove samconfig.toml + os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_guided_set_confirm_changeset(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + # Package and Deploy in one go without confirming change set. + deploy_command_list = self.get_deploy_command_list(template_file=template_path, guided=True) + + deploy_process_execute = Popen(deploy_command_list, stdout=PIPE, stderr=PIPE, stdin=PIPE) + deploy_process_execute.communicate("{}\n\nSuppliedParameter\nY\n\n\nY\n".format(stack_name).encode()) + + # Deploy should succeed with a managed stack + self.assertEqual(deploy_process_execute.returncode, 0) + self.stack_names.append(SAM_CLI_STACK_NAME) + # Remove samconfig.toml + os.remove(self.test_data_path.joinpath(DEFAULT_CONFIG_FILE_NAME)) diff --git a/tests/integration/init/test_init_command.py b/tests/integration/init/test_init_command.py index bad04e227f..cb1499ab83 100644 --- a/tests/integration/init/test_init_command.py +++ b/tests/integration/init/test_init_command.py @@ -1,8 +1,10 @@ from unittest import TestCase -from subprocess import Popen +from subprocess import Popen, TimeoutExpired import os import tempfile +TIMEOUT = 300 + class TestBasicInitCommand(TestCase): def test_init_command_passes_and_dir_created(self): @@ -24,9 +26,13 @@ def 
test_init_command_passes_and_dir_created(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/sam-app")) def test_init_new_app_template(self): @@ -48,9 +54,13 @@ def test_init_new_app_template(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/qs-scratch")) def test_init_command_java_maven(self): @@ -72,9 +82,13 @@ def test_init_command_java_maven(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/sam-app-maven")) def test_init_command_java_gradle(self): @@ -96,9 +110,13 @@ def test_init_command_java_gradle(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/sam-app-gradle")) def test_init_command_with_extra_context_parameter(self): @@ -122,9 +140,13 @@ def test_init_command_with_extra_context_parameter(self): temp, ] ) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertTrue(os.path.isdir(temp + "/sam-app-maven")) @staticmethod diff --git a/tests/integration/local/generate_event/test_cli_integ.py b/tests/integration/local/generate_event/test_cli_integ.py index a5f33885b1..f5b463ad57 
100644 --- a/tests/integration/local/generate_event/test_cli_integ.py +++ b/tests/integration/local/generate_event/test_cli_integ.py @@ -6,8 +6,8 @@ class Test_EventGeneration_Integ(TestCase): def test_generate_event_substitution(self): process = Popen([Test_EventGeneration_Integ._get_command(), "local", "generate-event", "s3", "put"]) - return_code = process.wait() - self.assertEqual(return_code, 0) + process.communicate() + self.assertEqual(process.returncode, 0) @staticmethod def _get_command(): diff --git a/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py b/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py index 3b6177e1ad..88581995fa 100644 --- a/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py +++ b/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py @@ -3,13 +3,15 @@ import os import tempfile -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from nose_parameterized import parameterized, param import pytest from tests.integration.local.invoke.invoke_integ_base import InvokeIntegBase from pathlib import Path +TIMEOUT = 300 + class TestWithDifferentLambdaRuntimeZips(InvokeIntegBase): template = Path("runtimes", "template.yaml") @@ -35,10 +37,14 @@ def test_runtime_zip(self, function_name): ) process = Popen(command_list, stdout=PIPE) - return_code = process.wait() - - self.assertEqual(return_code, 0) - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + self.assertEqual(process.returncode, 0) + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello World"') @pytest.mark.timeout(timeout=300, method="thread") @@ -50,8 +56,12 @@ def test_custom_provided_runtime(self): command_list = command_list + ["--skip-pull-image"] process = Popen(command_list, stdout=PIPE) - return_code = process.wait() - - 
self.assertEqual(return_code, 0) - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + self.assertEqual(process.returncode, 0) + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '{"body":"hello 曰有冥 world 🐿","statusCode":200,"headers":{}}') diff --git a/tests/integration/local/invoke/test_integrations_cli.py b/tests/integration/local/invoke/test_integrations_cli.py index f9c6d66587..9a5279a3e7 100644 --- a/tests/integration/local/invoke/test_integrations_cli.py +++ b/tests/integration/local/invoke/test_integrations_cli.py @@ -6,7 +6,7 @@ from unittest import skipIf from nose_parameterized import parameterized -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from timeit import default_timer as timer import pytest import docker @@ -21,59 +21,75 @@ from pathlib import Path +TIMEOUT = 300 + class TestSamPython36HelloWorldIntegration(InvokeIntegBase): template = Path("template.yml") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_returncode_is_zero(self): command_list = self.get_command_list( "HelloWorldServerlessFunction", template_path=self.template_path, event_path=self.event_path ) process = Popen(command_list, stdout=PIPE) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_function_with_metadata(self): command_list = self.get_command_list("FunctionWithMetadata", template_path=self.template_path, no_event=True) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) 
+ except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello World in a different dir"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_returns_execpted_results(self): command_list = self.get_command_list( "HelloWorldServerlessFunction", template_path=self.template_path, event_path=self.event_path ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello world"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_of_lambda_function(self): command_list = self.get_command_list( "HelloWorldLambdaFunction", template_path=self.template_path, event_path=self.event_path ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello world"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") @parameterized.expand( [("TimeoutFunction"), ("TimeoutFunctionWithParameter"), ("TimeoutFunctionWithStringParameter")] ) @@ -84,18 +100,23 @@ def test_invoke_with_timeout_set(self, function_name): start = timer() process = Popen(command_list, stdout=PIPE) - return_code = process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + end = timer() wall_clock_cli_duration = end - start - process_stdout = b"".join(process.stdout.readlines()).strip() + process_stdout = stdout.strip() # validate the 
time of the cli (timeout is set to 5s) self.assertGreater(wall_clock_cli_duration, 5) self.assertLess(wall_clock_cli_duration, 20) - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) self.assertEqual( process_stdout.decode("utf-8"), "", @@ -103,7 +124,6 @@ def test_invoke_with_timeout_set(self, function_name): ) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_env_vars(self): command_list = self.get_command_list( "EchoCustomEnvVarFunction", @@ -113,53 +133,66 @@ def test_invoke_with_env_vars(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"MyVar"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_when_function_writes_stdout(self): command_list = self.get_command_list( "WriteToStdoutFunction", template_path=self.template_path, event_path=self.event_path ) process = Popen(command_list, stdout=PIPE, stderr=PIPE) - process.wait() + try: + stdout, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()).strip() - process_stderr = b"".join(process.stderr.readlines()).strip() + process_stdout = stdout.strip() + process_stderr = stderr.strip() self.assertIn("Docker Lambda is writing to stdout", process_stderr.decode("utf-8")) self.assertIn("wrote to stdout", process_stdout.decode("utf-8")) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_when_function_writes_stderr(self): command_list = self.get_command_list( "WriteToStderrFunction", template_path=self.template_path, event_path=self.event_path ) process = Popen(command_list, 
stderr=PIPE) - process.wait() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stderr = b"".join(process.stderr.readlines()).strip() + process_stderr = stderr.strip() self.assertIn("Docker Lambda is writing to stderr", process_stderr.decode("utf-8")) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_returns_expected_result_when_no_event_given(self): command_list = self.get_command_list("EchoEventFunction", template_path=self.template_path) process = Popen(command_list, stdout=PIPE) - return_code = process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + process_stdout = stdout.strip() + + self.assertEqual(process.returncode, 0) self.assertEqual("{}", process_stdout.decode("utf-8")) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_env_using_parameters(self): command_list = self.get_command_list( "EchoEnvWithParameters", @@ -169,8 +202,13 @@ def test_invoke_with_env_using_parameters(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["Region"], "us-east-1") @@ -187,7 +225,6 @@ def test_invoke_with_env_using_parameters(self): self.assertEqual(environ["MyRuntimeVersion"], "v0") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_env_using_parameters_with_custom_region(self): custom_region = "my-custom-region" @@ -196,14 +233,18 @@ def 
test_invoke_with_env_using_parameters_with_custom_region(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["Region"], custom_region) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_env_with_aws_creds(self): custom_region = "my-custom-region" key = "key" @@ -222,8 +263,13 @@ def test_invoke_with_env_with_aws_creds(self): env["AWS_SESSION_TOKEN"] = session process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["AWS_DEFAULT_REGION"], custom_region) @@ -233,7 +279,6 @@ def test_invoke_with_env_with_aws_creds(self): self.assertEqual(environ["AWS_SESSION_TOKEN"], session) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_docker_network_of_host(self): command_list = self.get_command_list( "HelloWorldServerlessFunction", @@ -243,12 +288,15 @@ def test_invoke_with_docker_network_of_host(self): ) process = Popen(command_list, stdout=PIPE) - return_code = process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - self.assertEqual(return_code, 0) + self.assertEqual(process.returncode, 0) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") @skipIf(IS_WINDOWS, "The test hangs on Windows due to trying to attach to a non-existing network") def 
test_invoke_with_docker_network_of_host_in_env_var(self): command_list = self.get_command_list( @@ -259,13 +307,17 @@ def test_invoke_with_docker_network_of_host_in_env_var(self): env["SAM_DOCKER_NETWORK"] = "non-existing-network" process = Popen(command_list, stderr=PIPE, env=env) - process.wait() - process_stderr = b"".join(process.stderr.readlines()).strip() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stderr = stderr.strip() self.assertIn('Not Found ("network non-existing-network not found")', process_stderr.decode("utf-8")) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_sam_template_file_env_var_set(self): command_list = self.get_command_list("HelloWorldFunctionInNonDefaultTemplate", event_path=self.event_path) @@ -274,13 +326,18 @@ def test_sam_template_file_env_var_set(self): env["SAM_TEMPLATE_FILE"] = str(self.test_data_path.joinpath("invoke", "sam-template.yaml")) process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() self.assertEqual(process_stdout.decode("utf-8"), '"Hello world"') @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") + @pytest.mark.timeout(timeout=TIMEOUT, method="thread") def test_skip_pull_image_in_env_var(self): docker.from_env().api.pull("lambci/lambda:python3.6") @@ -292,8 +349,13 @@ def test_skip_pull_image_in_env_var(self): env["SAM_SKIP_PULL_IMAGE"] = "True" process = Popen(command_list, stderr=PIPE, env=env) - process.wait() - process_stderr = b"".join(process.stderr.readlines()).strip() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stderr = stderr.strip() self.assertIn("Requested to skip 
pulling images", process_stderr.decode("utf-8")) @@ -307,7 +369,6 @@ def tearDown(self): shutil.rmtree(self.config_dir, ignore_errors=True) @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_existing_env_variables_precedence_over_profiles(self): profile = "default" custom_config = self._create_config_file(profile) @@ -331,8 +392,13 @@ def test_existing_env_variables_precedence_over_profiles(self): env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) # Environment variables we explicitly set take priority over profiles. @@ -343,7 +409,6 @@ def test_existing_env_variables_precedence_over_profiles(self): self.assertEqual(environ["AWS_SESSION_TOKEN"], "priority_secret_token") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_default_profile_with_custom_configs(self): profile = "default" custom_config = self._create_config_file(profile) @@ -365,8 +430,13 @@ def test_default_profile_with_custom_configs(self): env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["AWS_DEFAULT_REGION"], "us-west-1") @@ -376,7 +446,6 @@ def test_default_profile_with_custom_configs(self): self.assertEqual(environ["AWS_SESSION_TOKEN"], "sessiontoken") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def 
test_custom_profile_with_custom_configs(self): custom_config = self._create_config_file("custom") custom_cred = self._create_cred_file("custom") @@ -397,8 +466,13 @@ def test_custom_profile_with_custom_configs(self): env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["AWS_DEFAULT_REGION"], "us-west-1") @@ -408,7 +482,6 @@ def test_custom_profile_with_custom_configs(self): self.assertEqual(environ["AWS_SESSION_TOKEN"], "sessiontoken") @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") def test_custom_profile_through_envrionment_variables(self): # When using a custom profile in a custom location, you need both the config # and credential file otherwise we fail to find a region or the profile (depending @@ -434,8 +507,12 @@ def test_custom_profile_through_envrionment_variables(self): env["AWS_PROFILE"] = "custom" process = Popen(command_list, stdout=PIPE, env=env) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() environ = json.loads(process_stdout.decode("utf-8")) self.assertEqual(environ["AWS_DEFAULT_REGION"], "us-west-1") @@ -523,9 +600,13 @@ def test_reference_of_layer_version(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()).strip() + process_stdout = stdout.strip() expected_output = '"This is a Layer Ping from 
simple_python"' @@ -543,12 +624,16 @@ def test_download_one_layer(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()[-1:]).strip() + process_stdout = stdout.decode("utf-8").strip().split(os.linesep)[-1] expected_output = '"Layer1"' - self.assertEqual(process_stdout.decode("utf-8"), expected_output) + self.assertEqual(process_stdout, expected_output) @parameterized.expand([("ChangedLayerVersionServerlessFunction"), ("ChangedLayerVersionLambdaFunction")]) def test_publish_changed_download_layer(self, function_logical_id): @@ -565,12 +650,16 @@ def test_publish_changed_download_layer(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()[-1:]).strip() + process_stdout = stdout.decode("utf-8").strip().split(os.linesep)[-1] expected_output = '"Layer1"' - self.assertEqual(process_stdout.decode("utf-8"), expected_output) + self.assertEqual(process_stdout, expected_output) self.layer_utils.upsert_layer( layer_name=layer_name, ref_layer_name="ChangedLayerArn", layer_zip="changedlayer1.zip" @@ -586,12 +675,16 @@ def test_publish_changed_download_layer(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stdout = b"".join(process.stdout.readlines()[-1:]).strip() + process_stdout = stdout.decode("utf-8").strip().split(os.linesep)[-1] expected_output = '"Changed_Layer_1"' - self.assertEqual(process_stdout.decode("utf-8"), expected_output) + self.assertEqual(process_stdout, expected_output)
@parameterized.expand([("TwoLayerVersionServerlessFunction"), ("TwoLayerVersionLambdaFunction")]) def test_download_two_layers(self, function_logical_id): @@ -606,14 +699,18 @@ def test_download_two_layers(self, function_logical_id): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - stdout = process.stdout.readlines() + stdout = stdout - process_stdout = b"".join(stdout[-1:]).strip() + process_stdout = stdout.decode("utf-8").strip().split(os.linesep)[-1] expected_output = '"Layer2"' - self.assertEqual(process_stdout.decode("utf-8"), expected_output) + self.assertEqual(process_stdout, expected_output) def test_caching_two_layers(self): @@ -627,7 +724,11 @@ def test_caching_two_layers(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(2, len(os.listdir(str(self.layer_cache)))) @@ -645,7 +746,11 @@ def test_caching_two_layers_with_layer_cache_env_set(self): env["SAM_LAYER_CACHE_BASEDIR"] = str(self.layer_cache) process = Popen(command_list, stdout=PIPE, env=env) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(2, len(os.listdir(str(self.layer_cache)))) @@ -680,9 +785,13 @@ def test_layer_does_not_exist(self): ) process = Popen(command_list, stderr=PIPE) - process.wait() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stderr = b"".join(process.stderr.readlines()).strip() + process_stderr = stderr.strip() error_output = process_stderr.decode("utf-8") expected_error_output = "{} was not found.".format(non_existent_layer_arn) @@ -699,9 +808,13 @@ def test_account_does_not_exist_for_layer(self): ) process = Popen(command_list, stderr=PIPE) - process.wait() + try: + _, stderr = 
process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise - process_stderr = b"".join(process.stderr.readlines()).strip() + process_stderr = stderr.strip() error_output = process_stderr.decode("utf-8") expected_error_output = ( diff --git a/tests/integration/package/package_integ_base.py b/tests/integration/package/package_integ_base.py index 35d8ff47c0..88e966419a 100644 --- a/tests/integration/package/package_integ_base.py +++ b/tests/integration/package/package_integ_base.py @@ -1,35 +1,44 @@ import os import uuid import json -import tempfile import time from pathlib import Path from unittest import TestCase import boto3 +S3_SLEEP = 3 + class PackageIntegBase(TestCase): @classmethod def setUpClass(cls): cls.region_name = os.environ.get("AWS_DEFAULT_REGION") - cls.bucket_name = str(uuid.uuid4()) + cls.pre_created_bucket = os.environ.get("AWS_S3", False) + cls.bucket_name = cls.pre_created_bucket if cls.pre_created_bucket else str(uuid.uuid4()) cls.test_data_path = Path(__file__).resolve().parents[1].joinpath("testdata", "package") - # Create S3 bucket + # Initialize S3 client s3 = boto3.resource("s3") # Use a pre-created KMS Key cls.kms_key = os.environ.get("AWS_KMS_KEY") + # Use a pre-created S3 Bucket if present else create a new one cls.s3_bucket = s3.Bucket(cls.bucket_name) - cls.s3_bucket.create() + if not cls.pre_created_bucket: + cls.s3_bucket.create() + time.sleep(S3_SLEEP) + + def setUp(self): + super(PackageIntegBase, self).setUp() - # Given 3 seconds for all the bucket creation to complete - time.sleep(3) + def tearDown(self): + super(PackageIntegBase, self).tearDown() @classmethod def tearDownClass(cls): cls.s3_bucket.objects.all().delete() - cls.s3_bucket.delete() + if not cls.pre_created_bucket: + cls.s3_bucket.delete() def base_command(self): command = "sam" diff --git a/tests/integration/package/test_package_command.py b/tests/integration/package/test_package_command.py index 3d3bb60dfc..c9ddce1ea8 100644 ---
a/tests/integration/package/test_package_command.py +++ b/tests/integration/package/test_package_command.py @@ -1,4 +1,4 @@ -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired import tempfile from unittest import skipIf @@ -10,6 +10,7 @@ # Package tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. # This is to restrict package tests to run outside of CI/CD and when the branch is not master. SKIP_PACKAGE_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI +TIMEOUT = 300 @skipIf(SKIP_PACKAGE_TESTS, "Skip package tests in CI/CD only") @@ -26,8 +27,12 @@ def test_package_template_flag(self, template_file): command_list = self.get_command_list(s3_bucket=self.s3_bucket.name, template=template_path) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn("{bucket_name}".format(bucket_name=self.s3_bucket.name), process_stdout.decode("utf-8")) @@ -54,8 +59,12 @@ def test_package_barebones(self, template_file): command_list = self.get_command_list(s3_bucket=self.s3_bucket.name, template_file=template_path) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn("{bucket_name}".format(bucket_name=self.s3_bucket.name), process_stdout.decode("utf-8")) @@ -63,7 +72,11 @@ def test_package_without_required_args(self): command_list = self.get_command_list() process = Popen(command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertNotEqual(process.returncode, 
0) @parameterized.expand( @@ -92,8 +105,12 @@ def test_package_with_prefix(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn("{bucket_name}".format(bucket_name=self.s3_bucket.name), process_stdout.decode("utf-8")) @@ -131,8 +148,12 @@ def test_package_with_output_template_file(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( @@ -177,8 +198,12 @@ def test_package_with_json(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( @@ -225,8 +250,12 @@ def test_package_with_force_upload(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( @@ -271,8 +300,12 @@ def test_package_with_kms_key(self, template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( @@ -317,8 +350,12 @@ def test_package_with_metadata(self, 
template_file): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() self.assertIn( bytes( diff --git a/tests/integration/publish/publish_app_integ_base.py b/tests/integration/publish/publish_app_integ_base.py index 1b430a2375..cf0a1e2d12 100644 --- a/tests/integration/publish/publish_app_integ_base.py +++ b/tests/integration/publish/publish_app_integ_base.py @@ -2,34 +2,42 @@ import json import uuid import shutil -import tempfile import time +import tempfile from unittest import TestCase import boto3 from pathlib import Path +S3_SLEEP = 3 + class PublishAppIntegBase(TestCase): @classmethod def setUpClass(cls): cls.region_name = os.environ.get("AWS_DEFAULT_REGION") - cls.bucket_name = str(uuid.uuid4()) + cls.pre_created_bucket = os.environ.get("AWS_S3", False) + cls.bucket_name = cls.pre_created_bucket if cls.pre_created_bucket else str(uuid.uuid4()) cls.bucket_name_placeholder = "" cls.application_name_placeholder = "" cls.temp_dir = Path(tempfile.mkdtemp()) cls.test_data_path = Path(__file__).resolve().parents[1].joinpath("testdata", "publish") cls.sar_client = boto3.client("serverlessrepo", region_name=cls.region_name) - # Create S3 bucket + # Initialize S3 client s3 = boto3.resource("s3") + # Use a pre-created S3 Bucket if present else create a new one cls.s3_bucket = s3.Bucket(cls.bucket_name) - cls.s3_bucket.create() - - # Grant serverlessrepo read access to the bucket - bucket_policy_template = cls.test_data_path.joinpath("s3_bucket_policy.json").read_text(encoding="utf-8") - bucket_policy = bucket_policy_template.replace(cls.bucket_name_placeholder, cls.bucket_name) - cls.s3_bucket.Policy().put(Policy=bucket_policy) + if not cls.pre_created_bucket: + cls.s3_bucket.create() + # Wait for bucket to be created. 
+ time.sleep(S3_SLEEP) + # Grant serverlessrepo read access to the bucket + bucket_policy_template = cls.test_data_path.joinpath("s3_bucket_policy.json").read_text(encoding="utf-8") + bucket_policy = bucket_policy_template.replace(cls.bucket_name_placeholder, cls.bucket_name) + cls.s3_bucket.Policy().put(Policy=bucket_policy) + # Wait for bucket policy to be applied. + time.sleep(S3_SLEEP) # Upload test files to S3 root_path = Path(__file__).resolve().parents[3] @@ -43,9 +51,6 @@ def setUpClass(cls): code_body = cls.test_data_path.joinpath("main.py").read_text(encoding="utf-8") cls.s3_bucket.put_object(Key="main.py", Body=code_body) - # Given 3 seconds for all the bucket creation to complete - time.sleep(3) - @classmethod def tearDownClass(cls): cls.s3_bucket.delete_objects( @@ -53,7 +58,8 @@ def tearDownClass(cls): "Objects": [{"Key": "LICENSE"}, {"Key": "README.md"}, {"Key": "README_UPDATE.md"}, {"Key": "main.py"}] } ) - cls.s3_bucket.delete() + if not cls.pre_created_bucket: + cls.s3_bucket.delete() @classmethod def replace_template_placeholder(cls, placeholder, replace_text): diff --git a/tests/integration/publish/test_command_integ.py b/tests/integration/publish/test_command_integ.py index 0b509b9d73..b96e106b89 100644 --- a/tests/integration/publish/test_command_integ.py +++ b/tests/integration/publish/test_command_integ.py @@ -1,7 +1,7 @@ import re import time import json -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from unittest import skipIf @@ -12,6 +12,7 @@ # Publish tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. # This is to restrict publish tests to run outside of CI/CD and when the branch is not master. 
SKIP_PUBLISH_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI +TIMEOUT = 300 @skipIf(SKIP_PUBLISH_TESTS, "Skip publish tests in CI/CD only") @@ -38,8 +39,12 @@ def test_update_application(self): command_list = self.get_command_list(template_path=template_path, region=self.region_name) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = 'The following metadata of application "{}" has been updated:'.format(self.application_id) self.assertIn(expected_msg, process_stdout.decode("utf-8")) @@ -53,8 +58,12 @@ def test_create_application_version(self): command_list = self.get_command_list(template_path=template_path, region=self.region_name) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = 'The following metadata of application "{}" has been updated:'.format(self.application_id) self.assertIn(expected_msg, process_stdout.decode("utf-8")) @@ -70,8 +79,12 @@ def test_create_application_version_with_semantic_version_option(self): ) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = 'The following metadata of application "{}" has been updated:'.format(self.application_id) self.assertIn(expected_msg, process_stdout.decode("utf-8")) @@ -87,6 +100,8 @@ class TestPublishNewApp(PublishAppIntegBase): def setUp(self): super(TestPublishNewApp, self).setUp() self.application_id = None + # Sleep for a 
little bit to make server happy + time.sleep(2) def tearDown(self): super(TestPublishNewApp, self).tearDown() @@ -99,8 +114,12 @@ def test_create_application(self): command_list = self.get_command_list(template_path=template_path, region=self.region_name) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = "Created new application with the following metadata:" self.assertIn(expected_msg, process_stdout.decode("utf-8")) @@ -119,8 +138,12 @@ def test_publish_not_packaged_template(self): command_list = self.get_command_list(template_path=template_path, region=self.region_name) process = Popen(command_list, stderr=PIPE) - process.wait() - process_stderr = b"".join(process.stderr.readlines()).strip() + try: + _, stderr = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stderr = stderr.strip() expected_msg = "Please make sure that you have uploaded application artifacts to S3" self.assertIn(expected_msg, process_stderr.decode("utf-8")) @@ -130,8 +153,12 @@ def test_create_application_infer_region_from_env(self): command_list = self.get_command_list(template_path=template_path) process = Popen(command_list, stdout=PIPE) - process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip() + try: + stdout, _ = process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + process_stdout = stdout.strip() expected_msg = "Created new application with the following metadata:" self.assertIn(expected_msg, process_stdout.decode("utf-8")) diff --git a/tests/integration/telemetry/test_installed_metric.py b/tests/integration/telemetry/test_installed_metric.py index fa4d3b679b..c9b7434f16 100644 --- a/tests/integration/telemetry/test_installed_metric.py +++ 
b/tests/integration/telemetry/test_installed_metric.py @@ -16,10 +16,9 @@ def test_send_installed_metric_on_first_run(self): # Start the CLI process = self.run_cmd() - (_, stderrdata) = process.communicate() + _, stderrdata = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + self.assertEqual(process.returncode, 0, "Command should successfully complete") # Make sure the prompt was printed. Otherwise this test is not valid self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) @@ -66,10 +65,9 @@ def test_must_not_send_installed_metric_when_prompt_is_disabled(self): # Start the CLI process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + stdoutdata, stderrdata = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + self.assertEqual(process.returncode, 0, "Command should successfully complete") self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) @@ -88,9 +86,8 @@ def test_must_not_send_installed_metric_on_second_run(self): # First Run process1 = self.run_cmd() - (_, stderrdata) = process1.communicate() - retcode = process1.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + _, stderrdata = process1.communicate() + self.assertEqual(process1.returncode, 0, "Command should successfully complete") self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) self.assertEqual( 1, len(filter_installed_metric_requests(server.get_all_requests())), "'installed' metric should be sent" @@ -98,9 +95,8 @@ def test_must_not_send_installed_metric_on_second_run(self): # Second Run process2 = self.run_cmd() - (stdoutdata, stderrdata) = process2.communicate() - retcode = process2.poll() - self.assertEqual(retcode, 0) + stdoutdata, stderrdata = process2.communicate() + self.assertEqual(process2.returncode, 0) 
self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) self.assertEqual( diff --git a/tests/integration/telemetry/test_prompt.py b/tests/integration/telemetry/test_prompt.py index 876dd326cf..270c813fc5 100644 --- a/tests/integration/telemetry/test_prompt.py +++ b/tests/integration/telemetry/test_prompt.py @@ -10,7 +10,7 @@ def test_must_prompt_if_config_is_not_set(self): self.unset_config() process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + _, stderrdata = process.communicate() # Telemetry prompt should be printed to the terminal self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) @@ -25,7 +25,7 @@ def test_must_not_prompt_if_config_is_set(self, telemetry_enabled, msg): self.set_config(telemetry_enabled=telemetry_enabled) process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + stdoutdata, stderrdata = process.communicate() self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) @@ -38,11 +38,11 @@ def test_prompt_must_not_display_on_second_run(self): # First Run process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + _, stderrdata = process.communicate() self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) # Second Run process = self.run_cmd() - (stdoutdata, stderrdata) = process.communicate() + stdoutdata, stderrdata = process.communicate() self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) diff --git a/tests/integration/telemetry/test_telemetry_contract.py b/tests/integration/telemetry/test_telemetry_contract.py index 0d7f40afb8..08b3585b99 100644 --- a/tests/integration/telemetry/test_telemetry_contract.py +++ b/tests/integration/telemetry/test_telemetry_contract.py @@ -16,17 +16,17 @@ def test_must_not_send_metrics_if_disabled_using_envvar(self): 
with TelemetryServer() as server: # Start the CLI, but opt-out of Telemetry using env var process = self.run_cmd(optout_envvar_value="0") - (_, stderrdata) = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + process.communicate() + + self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() self.assertEqual(0, len(all_requests), "No metrics should be sent") # Now run again without the Env Var Opt out process = self.run_cmd() - (_, stderrdata) = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + process.communicate() + + self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() self.assertEqual(1, len(all_requests), "Command run metric should be sent") @@ -40,17 +40,17 @@ def test_must_send_metrics_if_enabled_via_envvar(self): with TelemetryServer() as server: # Run without any envvar.Should not publish metrics process = self.run_cmd() - (_, stderrdata) = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + process.communicate() + + self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() self.assertEqual(0, len(all_requests), "No metric should be sent") # Opt-in via env var process = self.run_cmd(optout_envvar_value="1") - (_, stderrdata) = process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + process.communicate() + + self.assertEqual(process.returncode, 0, "Command should successfully complete") all_requests = server.get_all_requests() self.assertEqual(1, len(all_requests), "Command run metric must be sent") @@ -66,7 +66,6 @@ def test_must_not_crash_when_offline(self): # Start the CLI process = 
self.run_cmd() - (_, stderrdata) = process.communicate() + process.communicate() - retcode = process.poll() - self.assertEqual(retcode, 0, "Command should successfully complete") + self.assertEqual(process.returncode, 0, "Command should successfully complete") diff --git a/tests/integration/testdata/package/aws-serverless-function.yaml b/tests/integration/testdata/package/aws-serverless-function.yaml index 1691cffe8e..ef8b30c245 100644 --- a/tests/integration/testdata/package/aws-serverless-function.yaml +++ b/tests/integration/testdata/package/aws-serverless-function.yaml @@ -2,6 +2,12 @@ AWSTemplateFormatVersion : '2010-09-09' Transform: AWS::Serverless-2016-10-31 Description: A hello world application. +Parameters: + Parameter: + Type: String + Default: Sample + Description: A custom parameter + Resources: HelloWorldFunction: Type: AWS::Serverless::Function diff --git a/tests/integration/testdata/publish/s3_bucket_policy.json b/tests/integration/testdata/publish/s3_bucket_policy.json index 1eb9115c3c..abf3a0cc64 100644 --- a/tests/integration/testdata/publish/s3_bucket_policy.json +++ b/tests/integration/testdata/publish/s3_bucket_policy.json @@ -10,4 +10,4 @@ "Resource": "arn:aws:s3:::/*" } ] -} +} \ No newline at end of file diff --git a/tests/regression/deploy/__init__.py b/tests/regression/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/regression/deploy/regression_deploy_base.py b/tests/regression/deploy/regression_deploy_base.py new file mode 100644 index 0000000000..9c482d7a3c --- /dev/null +++ b/tests/regression/deploy/regression_deploy_base.py @@ -0,0 +1,109 @@ +import os +from subprocess import Popen, PIPE, TimeoutExpired +from unittest import TestCase + +TIMEOUT = 300 + + +class DeployRegressionBase(TestCase): + @classmethod + def setUpClass(cls): + pass + + @classmethod + def tearDownClass(cls): + pass + + def base_command(self, base): + command = [base] + if os.getenv("SAM_CLI_DEV") and base == "sam": + 
command = ["samdev"] + elif base == "aws": + command = [base, "cloudformation"] + + return command + + def get_deploy_command_list( + self, + base="sam", + s3_bucket=None, + stack_name=None, + template=None, + template_file=None, + s3_prefix=None, + capabilities=None, + force_upload=False, + notification_arns=None, + fail_on_empty_changeset=False, + no_execute_changeset=False, + parameter_overrides=None, + role_arn=None, + kms_key_id=None, + tags=None, + profile=None, + region=None, + ): + command_list = self.base_command(base=base) + + command_list = command_list + ["deploy"] + + if s3_bucket: + command_list = command_list + ["--s3-bucket", str(s3_bucket)] + if capabilities: + command_list = command_list + ["--capabilities", str(capabilities)] + if parameter_overrides: + command_list = command_list + ["--parameter-overrides", str(parameter_overrides)] + if role_arn: + command_list = command_list + ["--role-arn", str(role_arn)] + if notification_arns: + command_list = command_list + ["--notification-arns", str(notification_arns)] + if stack_name: + command_list = command_list + ["--stack-name", str(stack_name)] + if template: + command_list = command_list + ["--template", str(template)] + if template_file: + command_list = command_list + ["--template-file", str(template_file)] + if s3_prefix: + command_list = command_list + ["--s3-prefix", str(s3_prefix)] + if kms_key_id: + command_list = command_list + ["--kms-key-id", str(kms_key_id)] + if no_execute_changeset: + command_list = command_list + ["--no-execute-changeset"] + if force_upload: + command_list = command_list + ["--force-upload"] + if fail_on_empty_changeset: + command_list = command_list + ["--fail-on-empty-changeset"] + if tags: + command_list = command_list + ["--tags", str(tags)] + if region: + command_list = command_list + ["--region", str(region)] + if profile: + command_list = command_list + ["--profile", str(profile)] + + return command_list + + def deploy_regression_check(self, args, 
sam_return_code=0, aws_return_code=0, commands=[]): + sam_stack_name = args.get("sam_stack_name", None) + aws_stack_name = args.get("aws_stack_name", None) + if sam_stack_name: + del args["sam_stack_name"] + if aws_stack_name: + del args["aws_stack_name"] + + aws_command_list = self.get_deploy_command_list(base="aws", stack_name=aws_stack_name, **args) + process = Popen(aws_command_list, stdout=PIPE) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + self.assertEqual(process.returncode, aws_return_code) + + sam_command_list = self.get_deploy_command_list(stack_name=sam_stack_name, **args) + process = Popen(sam_command_list, stdout=PIPE) + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise + self.assertEqual(process.returncode, sam_return_code) diff --git a/tests/regression/deploy/test_deploy_regression.py b/tests/regression/deploy/test_deploy_regression.py new file mode 100644 index 0000000000..f585ec450b --- /dev/null +++ b/tests/regression/deploy/test_deploy_regression.py @@ -0,0 +1,161 @@ +import os +import tempfile +import uuid +import time +from subprocess import Popen, PIPE, TimeoutExpired +from unittest import skipIf + +import boto3 +from parameterized import parameterized + +from tests.regression.deploy.regression_deploy_base import DeployRegressionBase +from tests.regression.package.regression_package_base import PackageRegressionBase +from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI + +# Package Regression tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. +# This is to restrict package tests to run outside of CI/CD and when the branch is not master. 
+SKIP_DEPLOY_REGRESSION_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI +CFN_SLEEP = 3 +TIMEOUT = 300 +# Only testing return codes to be equivalent + + +@skipIf(SKIP_DEPLOY_REGRESSION_TESTS, "Skip deploy regression tests in CI/CD only") +class TestDeployRegression(PackageRegressionBase, DeployRegressionBase): + def setUp(self): + self.sns_arn = os.environ.get("AWS_SNS") + self.kms_key = os.environ.get("AWS_KMS_KEY") + self.stack_names = [] + self.cf_client = boto3.client("cloudformation") + time.sleep(CFN_SLEEP) + super(TestDeployRegression, self).setUp() + + def tearDown(self): + for stack_name in self.stack_names: + self.cf_client.delete_stack(StackName=stack_name) + super(TestDeployRegression, self).tearDown() + + def prepare_package(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + output_template_file = tempfile.NamedTemporaryFile(delete=False) + package_command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, template_file=template_path, output_template_file=output_template_file.name + ) + + package_process = Popen(package_command_list, stdout=PIPE) + try: + stdout, _ = package_process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + package_process.kill() + raise + self.assertEqual(package_process.returncode, 0) + return output_template_file.name + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_all_args(self, template_file): + + output_template_file = self.prepare_package(template_file=template_file) + + sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(sam_stack_name) + + aws_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(aws_stack_name) + + arguments = { + "template_file": output_template_file, + "aws_stack_name": aws_stack_name, + "sam_stack_name": sam_stack_name, + "capabilities": "CAPABILITY_IAM", + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": 
self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_no_stack_name(self, template_file): + output_template_file = self.prepare_package(template_file=template_file) + + arguments = { + "template_file": output_template_file, + "capabilities": "CAPABILITY_IAM", + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=2, aws_return_code=2) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_no_capabilities(self, template_file): + output_template_file = self.prepare_package(template_file=template_file) + + sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(sam_stack_name) + + aws_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(aws_stack_name) + + arguments = { + "template_file": output_template_file, + "aws_stack_name": aws_stack_name, + "sam_stack_name": sam_stack_name, + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=1, aws_return_code=255) + + def test_deploy_with_no_template_file(self): + sam_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(sam_stack_name) + + aws_stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(aws_stack_name) + + arguments = { + "aws_stack_name": aws_stack_name, + "sam_stack_name": sam_stack_name, + "s3_prefix": "regress_deploy", + "force_upload": 
True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + # if no template file is specified, sam cli looks for a template.yaml in the current working directory. + self.deploy_regression_check(arguments, sam_return_code=1, aws_return_code=2) + + @parameterized.expand(["aws-serverless-function.yaml"]) + def test_deploy_with_no_changes(self, template_file): + output_template_file = self.prepare_package(template_file=template_file) + + stack_name = "a" + str(uuid.uuid4()).replace("-", "")[:10] + self.stack_names.append(stack_name) + + arguments = { + "template_file": output_template_file, + "capabilities": "CAPABILITY_IAM", + "sam_stack_name": stack_name, + "aws_stack_name": stack_name, + "s3_prefix": "regress_deploy", + "force_upload": True, + "notification_arns": self.sns_arn, + "parameter_overrides": "Parameter=Clarity", + "kms_key_id": self.kms_key, + "tags": "integ=true clarity=yes", + } + + self.deploy_regression_check(arguments, sam_return_code=0, aws_return_code=0) diff --git a/tests/regression/package/regression_package_base.py b/tests/regression/package/regression_package_base.py index 7f80dbb1ba..b39e4fcbb9 100644 --- a/tests/regression/package/regression_package_base.py +++ b/tests/regression/package/regression_package_base.py @@ -1,34 +1,39 @@ import os -import uuid import json -import tempfile import time +import tempfile +import uuid from pathlib import Path -from subprocess import Popen, PIPE +from subprocess import Popen, PIPE, TimeoutExpired from unittest import TestCase import boto3 +S3_SLEEP = 3 +TIMEOUT = 300 + class PackageRegressionBase(TestCase): @classmethod def setUpClass(cls): cls.region_name = os.environ.get("AWS_DEFAULT_REGION") - cls.bucket_name = str(uuid.uuid4()) + cls.pre_created_bucket = os.environ.get("AWS_S3", False) + cls.bucket_name = cls.pre_created_bucket if cls.pre_created_bucket else str(uuid.uuid4()) cls.test_data_path = 
Path(__file__).resolve().parents[2].joinpath("integration", "testdata", "package") - # Create S3 bucket + # Initialize S3 client s3 = boto3.resource("s3") + # Use a pre-created S3 Bucket if present else create a new one cls.s3_bucket = s3.Bucket(cls.bucket_name) - cls.s3_bucket.create() - - # Given 3 seconds for all the bucket creation to complete - time.sleep(3) + if not cls.pre_created_bucket: + cls.s3_bucket.create() + time.sleep(S3_SLEEP) @classmethod def tearDownClass(cls): cls.s3_bucket.objects.all().delete() - cls.s3_bucket.delete() + if not cls.pre_created_bucket: + cls.s3_bucket.delete() def base_command(self, base): command = [base] @@ -81,7 +86,11 @@ def regression_check(self, args): with tempfile.NamedTemporaryFile(delete=False) as output_template_file_sam: sam_command_list = self.get_command_list(output_template_file=output_template_file_sam.name, **args) process = Popen(sam_command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(process.returncode, 0) output_sam = output_template_file_sam.read() @@ -90,7 +99,11 @@ def regression_check(self, args): base="aws", output_template_file=output_template_file_aws.name, **args ) process = Popen(aws_command_list, stdout=PIPE) - process.wait() + try: + process.communicate(timeout=TIMEOUT) + except TimeoutExpired: + process.kill() + raise self.assertEqual(process.returncode, 0) output_aws = output_template_file_aws.read() diff --git a/tests/regression/package/test_package_regression.py b/tests/regression/package/test_package_regression.py index ac55205f09..88bb144af1 100644 --- a/tests/regression/package/test_package_regression.py +++ b/tests/regression/package/test_package_regression.py @@ -1,6 +1,3 @@ -from subprocess import Popen, PIPE -import tempfile - from unittest import skipIf from parameterized import parameterized diff --git a/tests/testing_utils.py b/tests/testing_utils.py index 94df3a9418..41011b761a 
100644 --- a/tests/testing_utils.py +++ b/tests/testing_utils.py @@ -3,6 +3,7 @@ import tempfile import shutil + IS_WINDOWS = platform.system().lower() == "windows" RUNNING_ON_CI = os.environ.get("APPVEYOR", False) RUNNING_TEST_FOR_MASTER_ON_CI = os.environ.get("APPVEYOR_REPO_BRANCH", "master") != "master" diff --git a/tests/unit/cli/test_cli_config_file.py b/tests/unit/cli/test_cli_config_file.py new file mode 100644 index 0000000000..63caae55a9 --- /dev/null +++ b/tests/unit/cli/test_cli_config_file.py @@ -0,0 +1,124 @@ +import tempfile + +from pathlib import Path +from unittest import TestCase +from unittest.mock import MagicMock + +from samcli.commands.exceptions import ConfigException +from samcli.cli.cli_config_file import TomlProvider, configuration_option, configuration_callback, get_ctx_defaults +from samcli.lib.config.samconfig import SamConfig + + +class MockContext: + def __init__(self, info_name, parent, params=None, command=None): + self.info_name = info_name + self.parent = parent + self.params = params + self.command = command + + +class TestTomlProvider(TestCase): + def setUp(self): + self.toml_provider = TomlProvider() + self.config_env = "config_env" + self.parameters = "parameters" + self.cmd_name = "topic" + + def test_toml_valid_with_section(self): + config_dir = tempfile.gettempdir() + configpath = Path(config_dir, "samconfig.toml") + configpath.write_text("version=0.1\n[config_env.topic.parameters]\nword='clarity'\n") + self.assertEqual( + TomlProvider(section=self.parameters)(config_dir, self.config_env, [self.cmd_name]), {"word": "clarity"} + ) + + def test_toml_valid_with_no_version(self): + config_dir = tempfile.gettempdir() + configpath = Path(config_dir, "samconfig.toml") + configpath.write_text("[config_env.topic.parameters]\nword='clarity'\n") + with self.assertRaises(ConfigException): + TomlProvider(section=self.parameters)(config_dir, self.config_env, [self.cmd_name]) + + def test_toml_valid_with_invalid_version(self): + config_dir 
= tempfile.gettempdir() + configpath = Path(config_dir, "samconfig.toml") + configpath.write_text("version='abc'\n[config_env.topic.parameters]\nword='clarity'\n") + with self.assertRaises(ConfigException): + TomlProvider(section=self.parameters)(config_dir, self.config_env, [self.cmd_name]) + + def test_toml_invalid_empty_dict(self): + config_dir = tempfile.gettempdir() + configpath = Path(config_dir, "samconfig.toml") + configpath.write_text("[topic]\nword=clarity\n") + + self.assertEqual(self.toml_provider(config_dir, self.config_env, [self.cmd_name]), {}) + + +class TestCliConfiguration(TestCase): + def setUp(self): + self.cmd_name = "test_cmd" + self.option_name = "test_option" + self.config_env = "test_config_env" + self.saved_callback = MagicMock() + self.provider = MagicMock() + self.ctx = MagicMock() + self.param = MagicMock() + self.value = MagicMock() + + class Dummy: + pass + + def test_callback_with_valid_config_env(self): + mock_context1 = MockContext(info_name="sam", parent=None) + mock_context2 = MockContext(info_name="local", parent=mock_context1) + mock_context3 = MockContext(info_name="start-api", parent=mock_context2) + self.ctx.parent = mock_context3 + self.ctx.info_name = "test_info" + configuration_callback( + cmd_name=self.cmd_name, + option_name=self.option_name, + config_env_name=self.config_env, + saved_callback=self.saved_callback, + provider=self.provider, + ctx=self.ctx, + param=self.param, + value=self.value, + ) + self.assertEqual(self.saved_callback.call_count, 1) + for arg in [self.ctx, self.param, self.value]: + self.assertIn(arg, self.saved_callback.call_args[0]) + + def test_configuration_option(self): + toml_provider = TomlProvider() + click_option = configuration_option(provider=toml_provider) + clc = click_option(self.Dummy()) + self.assertEqual(clc.__click_params__[0].is_eager, True) + self.assertEqual(clc.__click_params__[0].help, "Read config-env from Configuration File.") + self.assertEqual(clc.__click_params__[0].hidden, 
True) + self.assertEqual(clc.__click_params__[0].expose_value, False) + self.assertEqual(clc.__click_params__[0].callback.args, (None, "--config-env", "default", None, toml_provider)) + + def test_get_ctx_defaults_non_nested(self): + provider = MagicMock() + + mock_context1 = MockContext(info_name="sam", parent=None) + mock_context2 = MockContext(info_name="local", parent=mock_context1) + mock_context3 = MockContext(info_name="start-api", parent=mock_context2) + + get_ctx_defaults("start-api", provider, mock_context3, "default") + + provider.assert_called_with(SamConfig.config_dir(), "default", ["local", "start-api"]) + + def test_get_ctx_defaults_nested(self): + provider = MagicMock() + + mock_context1 = MockContext(info_name="sam", parent=None) + mock_context2 = MockContext(info_name="local", parent=mock_context1) + mock_context3 = MockContext(info_name="generate-event", parent=mock_context2) + mock_context4 = MockContext(info_name="alexa-skills-kit", parent=mock_context3) + + get_ctx_defaults("intent-answer", provider, mock_context4, "default") + + provider.assert_called_with( + SamConfig.config_dir(), "default", ["local", "generate-event", "alexa-skills-kit", "intent-answer"] + ) diff --git a/tests/unit/cli/test_types.py b/tests/unit/cli/test_types.py index b0cd383db9..f1c5b12297 100644 --- a/tests/unit/cli/test_types.py +++ b/tests/unit/cli/test_types.py @@ -2,7 +2,7 @@ from unittest.mock import Mock, ANY from nose_parameterized import parameterized -from samcli.cli.types import CfnParameterOverridesType +from samcli.cli.types import CfnParameterOverridesType, CfnTags from samcli.cli.types import CfnMetadataType @@ -12,19 +12,12 @@ def setUp(self): @parameterized.expand( [ - ("some string"), - # Key must not contain spaces - ('ParameterKey="Ke y",ParameterValue=Value'), - # No value - ("ParameterKey=Key,ParameterValue="), - # No key - ("ParameterKey=,ParameterValue=Value"), - # Case sensitive - ("parameterkey=Key,ParameterValue=Value"), - # No space after 
comma - ("ParameterKey=Key, ParameterValue=Value"), + # Random string + ("some string",), + # Only commas + (",,",), # Bad separator - ("ParameterKey:Key,ParameterValue:Value"), + ("ParameterKey:Key,ParameterValue:Value",), ] ) def test_must_fail_on_invalid_format(self, input): @@ -36,44 +29,48 @@ def test_must_fail_on_invalid_format(self, input): @parameterized.expand( [ ( - "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro", + ("ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro",), {"KeyPairName": "MyKey", "InstanceType": "t1.micro"}, ), - ('ParameterKey="Key",ParameterValue=Val\\ ue', {"Key": "Val ue"}), - ('ParameterKey="Key",ParameterValue="Val\\"ue"', {"Key": 'Val"ue'}), - ("ParameterKey=Key,ParameterValue=Value", {"Key": "Value"}), - ('ParameterKey=Key,ParameterValue=""', {"Key": ""}), + (("KeyPairName=MyKey InstanceType=t1.micro",), {"KeyPairName": "MyKey", "InstanceType": "t1.micro"}), + (("KeyPairName=MyKey, InstanceType=t1.micro,",), {"KeyPairName": "MyKey,", "InstanceType": "t1.micro,"}), + (('ParameterKey="Ke y",ParameterValue=Value',), {"ParameterKey": "Ke y"}), + ((("ParameterKey=Key,ParameterValue="),), {"ParameterKey": "Key,ParameterValue="}), + (('ParameterKey="Key",ParameterValue=Val\\ ue',), {"Key": "Val ue"}), + (('ParameterKey="Key",ParameterValue="Val\\"ue"',), {"Key": 'Val"ue'}), + (("ParameterKey=Key,ParameterValue=Value",), {"Key": "Value"}), + (('ParameterKey=Key,ParameterValue=""',), {"Key": ""}), ( # Trailing and leading whitespaces - " ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2 ", + (" ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2 ",), {"Key": "Value", "Key2": "Value2"}, ), ( # Quotes at the end - 'ParameterKey=Key,ParameterValue=Value\\"', + ('ParameterKey=Key,ParameterValue=Value\\"',), {"Key": 'Value"'}, ), ( # Quotes at the start - 'ParameterKey=Key,ParameterValue=\\"Value', 
+ ('ParameterKey=Key,ParameterValue=\\"Value',), {"Key": '"Value'}, ), ( # Value is spacial characters - "ParameterKey=Key,ParameterValue==-_)(*&^%$#@!`~:;,. ParameterKey=Key2,ParameterValue=Value2", + ("ParameterKey=Key,ParameterValue==-_)(*&^%$#@!`~:;,. ParameterKey=Key2,ParameterValue=Value2",), {"Key": "=-_)(*&^%$#@!`~:;,.", "Key2": "Value2"}, ), - ('ParameterKey=Key1230,ParameterValue="{\\"a\\":\\"b\\"}"', {"Key1230": '{"a":"b"}'}), + (('ParameterKey=Key1230,ParameterValue="{\\"a\\":\\"b\\"}"',), {"Key1230": '{"a":"b"}'}), ( # Must ignore empty inputs - "", + ("",), {}, ), ] ) def test_successful_parsing(self, input, expected): result = self.param_type.convert(input, None, None) - self.assertEqual(result, expected, msg="Failed with Input = " + input) + self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) class TestCfnMetadataType(TestCase): @@ -97,9 +94,9 @@ def setUp(self): # Non-string ("{1:1}"), # Wrong notation - ("a==b"), + # ("a==b"), # Wrong multi-key notation - ("a==b,c==d"), + # ("a==b,c==d"), ] ) def test_must_fail_on_invalid_format(self, input): @@ -120,3 +117,29 @@ def test_must_fail_on_invalid_format(self, input): def test_successful_parsing(self, input, expected): result = self.param_type.convert(input, None, None) self.assertEqual(result, expected, msg="Failed with Input = " + input) + + +class TestCfnTags(TestCase): + def setUp(self): + self.param_type = CfnTags() + + @parameterized.expand( + [ + # Just a string + ("some string"), + # Wrong notation + # ("a==b"), + # Wrong multi-key notation + # ("a==b,c==d"), + ] + ) + def test_must_fail_on_invalid_format(self, input): + self.param_type.fail = Mock() + self.param_type.convert(input, "param", "ctx") + + self.param_type.fail.assert_called_with(ANY, "param", "ctx") + + @parameterized.expand([(("a=b",), {"a": "b"}), (("a=b", "c=d"), {"a": "b", "c": "d"}), (("",), {})]) + def test_successful_parsing(self, input, expected): + result = self.param_type.convert(input, None, None) 
+ self.assertEqual(result, expected, msg="Failed with Input = " + str(input)) diff --git a/tests/unit/commands/_utils/custom_options/__init__.py b/tests/unit/commands/_utils/custom_options/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/_utils/custom_options/test_option_nargs.py b/tests/unit/commands/_utils/custom_options/test_option_nargs.py new file mode 100644 index 0000000000..18472b3e97 --- /dev/null +++ b/tests/unit/commands/_utils/custom_options/test_option_nargs.py @@ -0,0 +1,40 @@ +from unittest import TestCase +from unittest.mock import MagicMock + +from samcli.commands._utils.custom_options.option_nargs import OptionNargs + + +class MockRArgs: + def __init__(self, rargs): + self.rargs = rargs + + +class TestOptionNargs(TestCase): + def setUp(self): + self.name = "test" + self.opt = "--use" + self.prefixes = ["--", "-"] + self.arg = "first" + self.rargs_list = ["second", "third", "--nextopt"] + self.expected_args = tuple([self.arg] + self.rargs_list[:-1]) + self.option_nargs = OptionNargs(param_decls=(self.name, self.opt)) + + def test_option(self): + parser = MagicMock() + ctx = MagicMock() + self.option_nargs.add_to_parser(parser=parser, ctx=ctx) + # Get option parser + + parser._long_opt.get.assert_called_with(self.opt) + self.assertEqual(self.option_nargs._nargs_parser, parser._long_opt.get()) + + # set prefixes + self.option_nargs._nargs_parser.prefixes = self.prefixes + + # create new state with remaining args + state = MockRArgs(self.rargs_list) + # call process with the monkey patched `parser_process` within `add_to_process` + parser._long_opt.get().process(self.arg, state) + + # finally call parser.process with ("first", "second", "third") + self.option_nargs._previous_parser_process.assert_called_with(self.expected_args, state) diff --git a/tests/unit/commands/_utils/test_options.py b/tests/unit/commands/_utils/test_options.py index 43276c824c..e7ebb65c41 100644 --- 
a/tests/unit/commands/_utils/test_options.py +++ b/tests/unit/commands/_utils/test_options.py @@ -5,8 +5,20 @@ import os from unittest import TestCase -from unittest.mock import patch -from samcli.commands._utils.options import get_or_default_template_file_name, _TEMPLATE_OPTION_DEFAULT_VALUE +from unittest.mock import patch, MagicMock + +import click + +from samcli.commands._utils.options import ( + get_or_default_template_file_name, + _TEMPLATE_OPTION_DEFAULT_VALUE, + guided_deploy_stack_name, +) +from tests.unit.cli.test_cli_config_file import MockContext + + +class Mock: + pass class TestGetOrDefaultTemplateFileName(TestCase): @@ -50,3 +62,62 @@ def test_must_return_built_template(self, os_mock): result = get_or_default_template_file_name(None, None, _TEMPLATE_OPTION_DEFAULT_VALUE, include_build=True) self.assertEqual(result, "absPath") os_mock.path.abspath.assert_called_with(expected) + + @patch("samcli.commands._utils.options.os") + def test_verify_ctx(self, os_mock): + + ctx = Mock() + + expected = os.path.join(".aws-sam", "build", "template.yaml") + + os_mock.path.exists.return_value = True + os_mock.path.join = os.path.join # Use the real method + os_mock.path.abspath.return_value = "a/b/c/absPath" + os_mock.path.dirname.return_value = "a/b/c" + + result = get_or_default_template_file_name(ctx, None, _TEMPLATE_OPTION_DEFAULT_VALUE, include_build=True) + self.assertEqual(result, "a/b/c/absPath") + self.assertEqual(ctx.samconfig_dir, "a/b/c") + os_mock.path.abspath.assert_called_with(expected) + + +class TestGuidedDeployStackName(TestCase): + def test_must_return_provided_value_guided(self): + stack_name = "provided-stack" + mock_params = MagicMock() + mock_params.get = MagicMock(return_value=True) + result = guided_deploy_stack_name( + ctx=MockContext(info_name="test", parent=None, params=mock_params), + param=MagicMock(), + provided_value=stack_name, + ) + self.assertEqual(result, stack_name) + + def test_must_return_default_value_guided(self): + 
stack_name = None + mock_params = MagicMock() + mock_params.get = MagicMock(return_value=True) + result = guided_deploy_stack_name( + ctx=MockContext(info_name="test", parent=None, params=mock_params), + param=MagicMock(), + provided_value=stack_name, + ) + self.assertEqual(result, "sam-app") + + def test_must_return_provided_value_non_guided(self): + stack_name = "provided-stack" + mock_params = MagicMock() + mock_params.get = MagicMock(return_value=False) + result = guided_deploy_stack_name(ctx=MagicMock(), param=MagicMock(), provided_value=stack_name) + self.assertEqual(result, "provided-stack") + + def test_exception_missing_parameter_no_value_non_guided(self): + stack_name = None + mock_params = MagicMock() + mock_params.get = MagicMock(return_value=False) + with self.assertRaises(click.BadOptionUsage): + guided_deploy_stack_name( + ctx=MockContext(info_name="test", parent=None, params=mock_params), + param=MagicMock(), + provided_value=stack_name, + ) diff --git a/tests/unit/commands/_utils/test_table_print.py b/tests/unit/commands/_utils/test_table_print.py new file mode 100644 index 0000000000..518a30e43a --- /dev/null +++ b/tests/unit/commands/_utils/test_table_print.py @@ -0,0 +1,83 @@ +import io +from contextlib import redirect_stdout +from collections import OrderedDict +from unittest import TestCase + +from samcli.commands._utils.table_print import pprint_column_names, pprint_columns + +TABLE_FORMAT_STRING = "{Alpha:<{0}} {Beta:<{1}} {Gamma:<{2}}" +TABLE_FORMAT_ARGS = OrderedDict({"Alpha": "Alpha", "Beta": "Beta", "Gamma": "Gamma"}) + + +class TestTablePrint(TestCase): + def setUp(self): + self.redirect_out = io.StringIO() + + def test_pprint_column_names(self): + @pprint_column_names(TABLE_FORMAT_STRING, TABLE_FORMAT_ARGS) + def to_be_decorated(*args, **kwargs): + pass + + with redirect_stdout(self.redirect_out): + to_be_decorated() + output = ( + "------------------------------------------------------------------------------------------------\n" + 
"Alpha Beta Gamma \n" + "------------------------------------------------------------------------------------------------\n" + "------------------------------------------------------------------------------------------------\n" + ) + + self.assertEqual(output, self.redirect_out.getvalue()) + + def test_pprint_column_names_and_text(self): + @pprint_column_names(TABLE_FORMAT_STRING, TABLE_FORMAT_ARGS) + def to_be_decorated(*args, **kwargs): + pprint_columns( + columns=["A", "B", "C"], + width=kwargs["width"], + margin=kwargs["margin"], + format_args=kwargs["format_args"], + format_string=TABLE_FORMAT_STRING, + columns_dict=TABLE_FORMAT_ARGS.copy(), + ) + + with redirect_stdout(self.redirect_out): + to_be_decorated() + + output = ( + "------------------------------------------------------------------------------------------------\n" + "Alpha Beta Gamma \n" + "------------------------------------------------------------------------------------------------\n" + "A B C \n" + "------------------------------------------------------------------------------------------------\n" + ) + self.assertEqual(output, self.redirect_out.getvalue()) + + def test_pprint_exceptions_with_no_column_names(self): + with self.assertRaises(ValueError): + + @pprint_column_names(TABLE_FORMAT_STRING, {}) + def to_be_decorated(*args, **kwargs): + pprint_columns( + columns=["A", "B", "C"], + width=kwargs["width"], + margin=kwargs["margin"], + format_args=kwargs["format_args"], + format_string=TABLE_FORMAT_STRING, + columns_dict=TABLE_FORMAT_ARGS.copy(), + ) + + def test_pprint_exceptions_with_too_many_column_names(self): + massive_dictionary = {str(i): str(i) for i in range(100)} + with self.assertRaises(ValueError): + + @pprint_column_names(TABLE_FORMAT_STRING, massive_dictionary) + def to_be_decorated(*args, **kwargs): + pprint_columns( + columns=["A", "B", "C"], + width=kwargs["width"], + margin=kwargs["margin"], + format_args=kwargs["format_args"], + format_string=TABLE_FORMAT_STRING, + 
columns_dict=TABLE_FORMAT_ARGS.copy(), + ) diff --git a/tests/unit/commands/_utils/test_template.py b/tests/unit/commands/_utils/test_template.py index fe80c7dc79..4a773f2b37 100644 --- a/tests/unit/commands/_utils/test_template.py +++ b/tests/unit/commands/_utils/test_template.py @@ -12,6 +12,7 @@ RESOURCES_WITH_LOCAL_PATHS, _update_relative_paths, move_template, + get_template_parameters, ) @@ -45,6 +46,26 @@ def test_must_read_file_and_parse(self, pathlib_mock, yaml_parse_mock): m.assert_called_with(filename, "r") yaml_parse_mock.assert_called_with(file_data) + @patch("samcli.commands._utils.template.yaml_parse") + @patch("samcli.commands._utils.template.pathlib") + def test_must_read_file_and_get_parameters(self, pathlib_mock, yaml_parse_mock): + filename = "filename" + file_data = "contents of the file" + parse_result = {"Parameters": {"Myparameter": "String"}} + + pathlib_mock.Path.return_value.exists.return_value = True # Fake that the file exists + + m = mock_open(read_data=file_data) + yaml_parse_mock.return_value = parse_result + + with patch("samcli.commands._utils.template.open", m): + result = get_template_parameters(filename) + + self.assertEqual(result, {"Myparameter": "String"}) + + m.assert_called_with(filename, "r") + yaml_parse_mock.assert_called_with(file_data) + @parameterized.expand([param(ValueError()), param(yaml.YAMLError())]) @patch("samcli.commands._utils.template.yaml_parse") @patch("samcli.commands._utils.template.pathlib") diff --git a/tests/unit/commands/deploy/__init__.py b/tests/unit/commands/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/deploy/test_command.py b/tests/unit/commands/deploy/test_command.py new file mode 100644 index 0000000000..4ea77dc5d5 --- /dev/null +++ b/tests/unit/commands/deploy/test_command.py @@ -0,0 +1,500 @@ +from unittest import TestCase +from unittest.mock import patch, Mock, ANY, MagicMock, call + +from samcli.commands.deploy.command import do_cli 
+from tests.unit.cli.test_cli_config_file import MockContext
+
+
+class TestDeployCliCommand(TestCase):
+    def setUp(self):
+
+        self.template_file = "input-template-file"
+        self.stack_name = "stack-name"
+        self.s3_bucket = "s3-bucket"
+        self.s3_prefix = "s3-prefix"
+        self.kms_key_id = "kms-key-id"
+        self.no_execute_changeset = False
+        self.notification_arns = []
+        self.parameter_overrides = {"a": "b"}
+        self.capabilities = ("CAPABILITY_IAM",)
+        self.tags = {"c": "d"}
+        self.fail_on_empty_changset = True
+        self.role_arn = "role_arn"
+        self.force_upload = False
+        self.metadata = {"abc": "def"}
+        self.region = None
+        self.profile = None
+        self.use_json = True
+        self.metadata = {}
+        self.guided = False
+        self.confirm_changeset = False
+
+    @patch("samcli.commands.package.command.click")
+    @patch("samcli.commands.package.package_context.PackageContext")
+    @patch("samcli.commands.deploy.command.click")
+    @patch("samcli.commands.deploy.deploy_context.DeployContext")
+    def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_context, mock_package_click):
+
+        context_mock = Mock()
+        mock_deploy_context.return_value.__enter__.return_value = context_mock
+
+        do_cli(
+            template_file=self.template_file,
+            stack_name=self.stack_name,
+            s3_bucket=self.s3_bucket,
+            force_upload=self.force_upload,
+            s3_prefix=self.s3_prefix,
+            kms_key_id=self.kms_key_id,
+            parameter_overrides=self.parameter_overrides,
+            capabilities=self.capabilities,
+            no_execute_changeset=self.no_execute_changeset,
+            role_arn=self.role_arn,
+            notification_arns=self.notification_arns,
+            fail_on_empty_changeset=self.fail_on_empty_changset,
+            tags=self.tags,
+            region=self.region,
+            profile=self.profile,
+            use_json=self.use_json,
+            metadata=self.metadata,
+            guided=self.guided,
+            confirm_changeset=self.confirm_changeset,
+        )
+
+        mock_deploy_context.assert_called_with(
+            template_file=ANY,
+            stack_name=self.stack_name,
+            s3_bucket=self.s3_bucket,
+            force_upload=self.force_upload,
+
s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + confirm_changeset=self.confirm_changeset, + ) + + context_mock.run.assert_called_with() + self.assertEqual(context_mock.run.call_count, 1) + + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.command.save_config") + @patch("samcli.commands.deploy.command.manage_stack") + @patch("samcli.commands.deploy.command.get_template_parameters") + @patch("samcli.commands.deploy.command.get_config_ctx") + def test_all_args_guided( + self, + mock_get_config_ctx, + mock_get_template_parameters, + mock_managed_stack, + mock_save_config, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + + context_mock = Mock() + mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + mock_get_config_ctx.return_value = (None, mock_sam_config) + mock_get_template_parameters.return_value = { + "Myparameter": {"Type": "String"}, + "MyNoEchoParameter": {"Type": "String", "NoEcho": True}, + } + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_deploy_click.prompt = MagicMock( + side_effect=["sam-app", "us-east-1", "guidedParameter", "secure", ("CAPABILITY_IAM",)] + ) + mock_deploy_click.confirm = MagicMock(side_effect=[True, False, True]) + + mock_managed_stack.return_value = "managed-s3-bucket" + mock_save_config.return_value = True + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + 
s3_bucket=None, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, + guided=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix="sam-app", + kms_key_id=self.kms_key_id, + parameter_overrides={"Myparameter": "guidedParameter", "MyNoEchoParameter": "secure"}, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, + ) + + context_mock.run.assert_called_with() + mock_save_config.assert_called_with( + "input-template-file", + capabilities=("CAPABILITY_IAM",), + confirm_changeset=True, + profile=self.profile, + region="us-east-1", + s3_bucket="managed-s3-bucket", + stack_name="sam-app", + s3_prefix="sam-app", + parameter_overrides={ + "Myparameter": {"Value": "guidedParameter", "Hidden": False}, + "MyNoEchoParameter": {"Value": "secure", "Hidden": True}, + }, + ) + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) + + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.command.manage_stack") 
+ @patch("samcli.commands.deploy.command.get_template_parameters") + @patch("samcli.commands.deploy.command.get_config_ctx") + def test_all_args_guided_no_save_echo_param_to_config( + self, + mock_get_config_ctx, + mock_get_template_parameters, + mock_managed_stack, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + + context_mock = Mock() + mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + mock_get_config_ctx.return_value = (MockContext(info_name="deploy", parent=None), mock_sam_config) + mock_get_template_parameters.return_value = { + "Myparameter": {"Type": "String"}, + "MyNoEchoParameter": {"Type": "String", "NoEcho": True}, + } + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_deploy_click.prompt = MagicMock( + side_effect=["sam-app", "us-east-1", "guidedParameter", "secure", ("CAPABILITY_IAM",)] + ) + mock_deploy_click.confirm = MagicMock(side_effect=[True, False, True]) + + mock_managed_stack.return_value = "managed-s3-bucket" + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=None, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, + guided=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix="sam-app", + kms_key_id=self.kms_key_id, + parameter_overrides={"Myparameter": "guidedParameter", "MyNoEchoParameter": "secure"}, + capabilities=self.capabilities, 
+ no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, + ) + + context_mock.run.assert_called_with() + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) + + self.assertEqual(mock_sam_config.put.call_count, 7) + self.assertEqual( + mock_sam_config.put.call_args_list, + [ + call(["deploy"], "parameters", "stack_name", "sam-app"), + call(["deploy"], "parameters", "s3_bucket", "managed-s3-bucket"), + call(["deploy"], "parameters", "s3_prefix", "sam-app"), + call(["deploy"], "parameters", "region", "us-east-1"), + call(["deploy"], "parameters", "confirm_changeset", True), + call(["deploy"], "parameters", "capabilities", "CAPABILITY_IAM"), + call(["deploy"], "parameters", "parameter_overrides", "Myparameter=guidedParameter"), + ], + ) + + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.command.manage_stack") + @patch("samcli.commands.deploy.command.get_template_parameters") + @patch("samcli.commands.deploy.command.SamConfig") + @patch("samcli.commands.deploy.command.get_cmd_names") + def test_all_args_guided_no_params_save_config( + self, + mock_get_cmd_names, + mock_sam_config, + mock_get_template_parameters, + mock_managed_stack, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + + context_mock = Mock() + + mock_get_template_parameters.return_value = {} + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", ("CAPABILITY_IAM",)]) + 
mock_deploy_click.confirm = MagicMock(side_effect=[True, False, True]) + mock_get_cmd_names.return_value = ["deploy"] + mock_managed_stack.return_value = "managed-s3-bucket" + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=None, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, + guided=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix="sam-app", + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, + ) + + context_mock.run.assert_called_with() + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) + + @patch("samcli.commands.package.command.click") + @patch("samcli.commands.package.package_context.PackageContext") + @patch("samcli.commands.deploy.command.click") + @patch("samcli.commands.deploy.deploy_context.DeployContext") + @patch("samcli.commands.deploy.command.save_config") + @patch("samcli.commands.deploy.command.manage_stack") + @patch("samcli.commands.deploy.command.get_template_parameters") + @patch("samcli.commands.deploy.command.get_config_ctx") + def 
test_all_args_guided_no_params_no_save_config( + self, + mock_get_config_ctx, + mock_get_template_parameters, + mock_managed_stack, + mock_save_config, + mock_deploy_context, + mock_deploy_click, + mock_package_context, + mock_package_click, + ): + + context_mock = Mock() + mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + mock_get_config_ctx.return_value = (None, mock_sam_config) + mock_get_template_parameters.return_value = {} + mock_deploy_context.return_value.__enter__.return_value = context_mock + mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", ("CAPABILITY_IAM",)]) + mock_deploy_click.confirm = MagicMock(side_effect=[True, False, False]) + + mock_managed_stack.return_value = "managed-s3-bucket" + + do_cli( + template_file=self.template_file, + stack_name=self.stack_name, + s3_bucket=None, + force_upload=self.force_upload, + s3_prefix=self.s3_prefix, + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region=self.region, + profile=self.profile, + use_json=self.use_json, + metadata=self.metadata, + guided=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix="sam-app", + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, + ) + + context_mock.run.assert_called_with() + 
self.assertEqual(mock_save_config.call_count, 0)
+        mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1")
+        self.assertEqual(context_mock.run.call_count, 1)
+
+    @patch("samcli.commands.package.command.click")
+    @patch("samcli.commands.package.package_context.PackageContext")
+    @patch("samcli.commands.deploy.command.click")
+    @patch("samcli.commands.deploy.deploy_context.DeployContext")
+    @patch("samcli.commands.deploy.command.save_config")
+    @patch("samcli.commands.deploy.command.manage_stack")
+    @patch("samcli.commands.deploy.command.get_template_parameters")
+    @patch("samcli.commands.deploy.command.get_config_ctx")
+    def test_all_args_guided_no_params_no_save_config_2(
+        self,
+        mock_get_config_ctx,
+        mock_get_template_parameters,
+        mock_managed_stack,
+        mock_save_config,
+        mock_deploy_context,
+        mock_deploy_click,
+        mock_package_context,
+        mock_package_click,
+    ):
+
+        context_mock = Mock()
+        mock_sam_config = MagicMock()
+        mock_sam_config.exists = MagicMock(return_value=True)
+        mock_get_config_ctx.return_value = (None, mock_sam_config)
+        mock_get_template_parameters.return_value = {}
+        mock_deploy_context.return_value.__enter__.return_value = context_mock
+        mock_deploy_click.prompt = MagicMock(side_effect=["sam-app", "us-east-1", ("CAPABILITY_IAM",)])
+        mock_deploy_click.confirm = MagicMock(side_effect=[True, False, False])
+
+        mock_managed_stack.return_value = "managed-s3-bucket"
+
+        do_cli(
+            template_file=self.template_file,
+            stack_name=self.stack_name,
+            s3_bucket=None,
+            force_upload=self.force_upload,
+            s3_prefix=self.s3_prefix,
+            kms_key_id=self.kms_key_id,
+            parameter_overrides=self.parameter_overrides,
+            capabilities=self.capabilities,
+            no_execute_changeset=self.no_execute_changeset,
+            role_arn=self.role_arn,
+            notification_arns=self.notification_arns,
+            fail_on_empty_changeset=self.fail_on_empty_changset,
+            tags=self.tags,
+            region=self.region,
+            profile=self.profile,
+            use_json=self.use_json,
+            metadata=self.metadata,
+
guided=True, + confirm_changeset=True, + ) + + mock_deploy_context.assert_called_with( + template_file=ANY, + stack_name="sam-app", + s3_bucket="managed-s3-bucket", + force_upload=self.force_upload, + s3_prefix="sam-app", + kms_key_id=self.kms_key_id, + parameter_overrides=self.parameter_overrides, + capabilities=self.capabilities, + no_execute_changeset=self.no_execute_changeset, + role_arn=self.role_arn, + notification_arns=self.notification_arns, + fail_on_empty_changeset=self.fail_on_empty_changset, + tags=self.tags, + region="us-east-1", + profile=self.profile, + confirm_changeset=True, + ) + + context_mock.run.assert_called_with() + self.assertEqual(mock_save_config.call_count, 0) + mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1") + self.assertEqual(context_mock.run.call_count, 1) diff --git a/tests/unit/commands/deploy/test_deploy_context.py b/tests/unit/commands/deploy/test_deploy_context.py new file mode 100644 index 0000000000..d2859537ae --- /dev/null +++ b/tests/unit/commands/deploy/test_deploy_context.py @@ -0,0 +1,142 @@ +"""Test sam deploy command""" +from unittest import TestCase +from unittest.mock import patch, MagicMock +import tempfile + +from samcli.lib.deploy.deployer import Deployer +from samcli.commands.deploy.deploy_context import DeployContext +from samcli.commands.deploy.exceptions import DeployBucketRequiredError, DeployFailedError, ChangeEmptyError + + +class TestPackageCommand(TestCase): + def setUp(self): + self.deploy_command_context = DeployContext( + template_file="template-file", + stack_name="stack-name", + s3_bucket="s3-bucket", + force_upload=True, + s3_prefix="s3-prefix", + kms_key_id="kms-key-id", + parameter_overrides={"a": "b"}, + capabilities="CAPABILITY_IAM", + no_execute_changeset=False, + role_arn="role-arn", + notification_arns=[], + fail_on_empty_changeset=False, + tags={"a": "b"}, + region=None, + profile=None, + confirm_changeset=False, + ) + + def test_template_improper(self): + with 
tempfile.NamedTemporaryFile(delete=False) as template_file: + with self.assertRaises(DeployFailedError): + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.run() + + def test_template_size_large_no_s3_bucket(self): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b" " * 51200) + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.s3_bucket = None + with self.assertRaises(DeployBucketRequiredError): + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_size_large_and_s3_bucket(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b" " * 51200) + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.run() + + @patch("boto3.Session") + def test_template_valid(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.deploy = MagicMock() + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object( + Deployer, "create_and_wait_for_changeset", MagicMock(side_effect=ChangeEmptyError(stack_name="stack-name")) + ) + def test_template_valid_change_empty(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.fail_on_empty_changeset = True + self.deploy_command_context.template_file = 
template_file.name + + with self.assertRaises(ChangeEmptyError): + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object( + Deployer, "create_and_wait_for_changeset", MagicMock(side_effect=ChangeEmptyError(stack_name="stack-name")) + ) + def test_template_valid_change_empty_no_fail_on_empty_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.run() + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_valid_execute_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + + self.deploy_command_context.run() + self.assertEqual(self.deploy_command_context.deployer.create_and_wait_for_changeset.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.execute_changeset.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.wait_for_execute.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.get_stack_outputs.call_count, 1) + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_valid_no_execute_changeset(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b"{}") + template_file.flush() + self.deploy_command_context.template_file = template_file.name + 
self.deploy_command_context.no_execute_changeset = True + + self.deploy_command_context.run() + self.assertEqual(self.deploy_command_context.deployer.create_and_wait_for_changeset.call_count, 1) + self.assertEqual(self.deploy_command_context.deployer.execute_changeset.call_count, 0) + self.assertEqual(self.deploy_command_context.deployer.wait_for_execute.call_count, 0) + + @patch("boto3.Session") + @patch.object(Deployer, "create_and_wait_for_changeset", MagicMock(return_value=({"Id": "test"}, "CREATE"))) + @patch.object(Deployer, "execute_changeset", MagicMock()) + @patch.object(Deployer, "wait_for_execute", MagicMock()) + def test_template_valid_execute_changeset_with_parameters(self, patched_boto): + with tempfile.NamedTemporaryFile(delete=False) as template_file: + template_file.write(b'{"Parameters": {"a":"b","c":"d"}}') + template_file.flush() + self.deploy_command_context.template_file = template_file.name + self.deploy_command_context.run() + self.assertEqual(self.deploy_command_context.deployer.create_and_wait_for_changeset.call_count, 1) + self.assertEqual( + self.deploy_command_context.deployer.create_and_wait_for_changeset.call_args[1]["parameter_values"], + [{"ParameterKey": "a", "ParameterValue": "b"}, {"ParameterKey": "c", "UsePreviousValue": True}], + ) diff --git a/tests/unit/commands/init/test_cli.py b/tests/unit/commands/init/test_cli.py index 3c7a9b1274..90088f19b8 100644 --- a/tests/unit/commands/init/test_cli.py +++ b/tests/unit/commands/init/test_cli.py @@ -133,12 +133,14 @@ def test_init_cli_interactive_multiple_dep_mgrs(self, generate_project_patch, sd # 2: gradle as the dependency manager # test-project: response to name # N: Don't clone/update the source repo + # 1: first app template user_input = """ 1 5 2 test-project N +1 """ runner = CliRunner() result = runner.invoke(init_cmd, input=user_input) diff --git a/tests/unit/commands/local/generate_event/test_event_generation.py b/tests/unit/commands/local/generate_event/test_event_generation.py index 
d719fb45db..74e998ad52 100644 --- a/tests/unit/commands/local/generate_event/test_event_generation.py +++ b/tests/unit/commands/local/generate_event/test_event_generation.py @@ -124,7 +124,11 @@ def test_subcommand_get_command_return_value(self, click_mock, functools_mock, o s = EventTypeSubCommand(self.events_lib_mock, "hello", all_commands) s.get_command(None, "hi") click_mock.Command.assert_called_once_with( - name="hi", short_help="Generates a hello Event", params=[], callback=callback_object_mock + name="hi", + short_help="Generates a hello Event", + params=[], + callback=callback_object_mock, + context_settings={"default_map": {}}, ) def test_subcommand_list_return_value(self): diff --git a/tests/unit/commands/samconfig/__init__.py b/tests/unit/commands/samconfig/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/samconfig/test_samconfig.py b/tests/unit/commands/samconfig/test_samconfig.py new file mode 100644 index 0000000000..e34f8231f2 --- /dev/null +++ b/tests/unit/commands/samconfig/test_samconfig.py @@ -0,0 +1,687 @@ +""" +Tests whether SAM Config is being read by all CLI commands +""" + +import json +import os +import shutil +import tempfile +from pathlib import Path +from contextlib import contextmanager +from samcli.lib.config.samconfig import SamConfig, DEFAULT_ENV + +from click.testing import CliRunner + +from unittest import TestCase +from unittest.mock import patch, ANY +import logging + +LOG = logging.getLogger() +logging.basicConfig() + + +class TestSamConfigForAllCommands(TestCase): + def setUp(self): + self._old_cwd = os.getcwd() + + self.scratch_dir = tempfile.mkdtemp() + Path(self.scratch_dir, "envvar.json").write_text("{}") + + os.chdir(self.scratch_dir) + + def tearDown(self): + os.chdir(self._old_cwd) + shutil.rmtree(self.scratch_dir) + self.scratch_dir = None + + @patch("samcli.commands.init.do_cli") + def test_init(self, do_cli_mock): + config_values = { + "no_interactive": True, + 
"location": "github.com", + "runtime": "nodejs10.x", + "dependency_manager": "maven", + "output_dir": "myoutput", + "name": "myname", + "app_template": "apptemplate", + "no_input": True, + "extra_context": '{"key": "value", "key2": "value2"}', + } + + with samconfig_parameters(["init"], self.scratch_dir, **config_values) as config_path: + from samcli.commands.init import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + True, + "github.com", + "nodejs10.x", + "maven", + "myoutput", + "myname", + "apptemplate", + True, + '{"key": "value", "key2": "value2"}', + ) + + @patch("samcli.commands.validate.validate.do_cli") + def test_validate(self, do_cli_mock): + config_values = {"template_file": "mytemplate.yaml"} + + with samconfig_parameters(["validate"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.validate.validate import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml"))) + + @patch("samcli.commands.build.command.do_cli") + def test_build(self, do_cli_mock): + config_values = { + "function_identifier": "foo", + "template_file": "mytemplate.yaml", + "base_dir": "basedir", + "build_dir": "builddir", + "use_container": True, + "manifest": "requirements.txt", + "docker_network": "mynetwork", + "skip_pull_image": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2", + } + + with 
samconfig_parameters(["build"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.build.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + "foo", + str(Path(os.getcwd(), "mytemplate.yaml")), + "basedir", + "builddir", + True, + True, + "requirements.txt", + "mynetwork", + True, + {"Key": "Value", "Key2": "Value2"}, + None, + ) + + @patch("samcli.commands.local.invoke.cli.do_cli") + def test_local_invoke(self, do_cli_mock): + config_values = { + "function_identifier": "foo", + "template_file": "mytemplate.yaml", + "event": "event", + "no_event": False, + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + "layer_cache_basedir": "basedir", + "skip_pull_image": True, + "force_image_build": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2", + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["invoke"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.invoke.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "foo", + str(Path(os.getcwd(), "mytemplate.yaml")), + "event", + False, + "envvar.json", + (1, 2, 3), + "args", + "mypath", + "basedir", + 
"mynetwork", + "logfile", + "basedir", + True, + True, + {"Key": "Value", "Key2": "Value2"}, + ) + + @patch("samcli.commands.local.start_api.cli.do_cli") + def test_local_start_api(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "host": "127.0.0.1", + "port": 12345, + "static_dir": "static_dir", + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + "layer_cache_basedir": "basedir", + "skip_pull_image": True, + "force_image_build": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2", + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["start-api"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.start_api.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "127.0.0.1", + 12345, + "static_dir", + str(Path(os.getcwd(), "mytemplate.yaml")), + "envvar.json", + (1, 2, 3), + "args", + "mypath", + "basedir", + "mynetwork", + "logfile", + "basedir", + True, + True, + {"Key": "Value", "Key2": "Value2"}, + ) + + @patch("samcli.commands.local.start_lambda.cli.do_cli") + def test_local_start_lambda(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "host": "127.0.0.1", + "port": 12345, + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + 
"layer_cache_basedir": "basedir", + "skip_pull_image": True, + "force_image_build": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value", + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["start-lambda"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.start_lambda.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "127.0.0.1", + 12345, + str(Path(os.getcwd(), "mytemplate.yaml")), + "envvar.json", + (1, 2, 3), + "args", + "mypath", + "basedir", + "mynetwork", + "logfile", + "basedir", + True, + True, + {"Key": "Value"}, + ) + + @patch("samcli.commands.package.command.do_cli") + def test_package(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "s3_bucket": "mybucket", + "force_upload": True, + "s3_prefix": "myprefix", + "kms_key_id": "mykms", + "use_json": True, + "metadata": '{"m1": "value1", "m2": "value2"}', + "region": "myregion", + "output_template_file": "output.yaml", + } + + with samconfig_parameters(["package"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.package.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + str(Path(os.getcwd(), "mytemplate.yaml")), + "mybucket", + "myprefix", + "mykms", + "output.yaml", + True, + True, + {"m1": "value1", "m2": "value2"}, + "myregion", + None, + ) + + 
@patch("samcli.commands.deploy.command.do_cli") + def test_deploy(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "stack_name": "mystack", + "s3_bucket": "mybucket", + "force_upload": True, + "s3_prefix": "myprefix", + "kms_key_id": "mykms", + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value", + "capabilities": "cap1 cap2", + "no_execute_changeset": True, + "role_arn": "arn", + "notification_arns": "notify1 notify2", + "fail_on_empty_changeset": True, + "use_json": True, + "tags": 'a=tag1 b="tag with spaces"', + "metadata": '{"m1": "value1", "m2": "value2"}', + "guided": True, + "confirm_changeset": True, + "region": "myregion", + } + + with samconfig_parameters(["deploy"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.deploy.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + str(Path(os.getcwd(), "mytemplate.yaml")), + "mystack", + "mybucket", + True, + "myprefix", + "mykms", + {"Key": "Value"}, + ["cap1", "cap2"], + True, + "arn", + ["notify1", "notify2"], + True, + True, + {"a": "tag1", "b": '"tag with spaces"'}, + {"m1": "value1", "m2": "value2"}, + True, + True, + "myregion", + None, + ) + + @patch("samcli.commands.logs.command.do_cli") + def test_logs(self, do_cli_mock): + config_values = { + "name": "myfunction", + "stack_name": "mystack", + "filter": "myfilter", + "tail": True, + "start_time": "starttime", + "end_time": "endtime", + } + + with samconfig_parameters(["logs"], self.scratch_dir, **config_values) as config_path: + from samcli.commands.logs.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + 
LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with("myfunction", "mystack", "myfilter", True, "starttime", "endtime") + + @patch("samcli.commands.publish.command.do_cli") + def test_publish(self, do_cli_mock): + config_values = {"template_file": "mytemplate.yaml", "semantic_version": "0.1.1"} + + with samconfig_parameters(["publish"], self.scratch_dir, **config_values) as config_path: + from samcli.commands.publish.command import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, []) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with(ANY, str(Path(os.getcwd(), "mytemplate.yaml")), "0.1.1") + + def test_info_must_not_read_from_config(self): + config_values = {"a": "b"} + + with samconfig_parameters([], self.scratch_dir, **config_values) as config_path: + from samcli.cli.main import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke(cli, ["--info"]) + + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + info_result = json.loads(result.output) + self.assertTrue("version" in info_result) + + +class TestSamConfigWithOverrides(TestCase): + def setUp(self): + self._old_cwd = os.getcwd() + + self.scratch_dir = tempfile.mkdtemp() + Path(self.scratch_dir, "otherenvvar.json").write_text("{}") + + os.chdir(self.scratch_dir) + + def tearDown(self): + os.chdir(self._old_cwd) + shutil.rmtree(self.scratch_dir) + self.scratch_dir = None + + @patch("samcli.commands.local.start_lambda.cli.do_cli") + def test_override_with_cli_params(self, do_cli_mock): + + config_values = { + "template_file": 
"mytemplate.yaml", + "host": "127.0.0.1", + "port": 12345, + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + "layer_cache_basedir": "basedir", + "skip_pull_image": True, + "force_image_build": True, + "parameter_overrides": "ParameterKey=Key,ParameterValue=Value", + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["start-lambda"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.start_lambda.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke( + cli, + [ + "--template-file", + "othertemplate.yaml", + "--host", + "otherhost", + "--port", + 9999, + "--env-vars", + "otherenvvar.json", + "--debug-port", + 9, + "--debug-port", + 8, + "--debug-port", + 7, + "--debug-args", + "otherargs", + "--debugger-path", + "otherpath", + "--docker-volume-basedir", + "otherbasedir", + "--docker-network", + "othernetwork", + "--log-file", + "otherlogfile", + "--layer-cache-basedir", + "otherbasedir", + "--skip-pull-image", + "--force-image-build", + "--parameter-overrides", + "A=123 C=D E=F12! 
G=H", + ], + ) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "otherhost", + 9999, + str(Path(os.getcwd(), "othertemplate.yaml")), + "otherenvvar.json", + (9, 8, 7), + "otherargs", + "otherpath", + "otherbasedir", + "othernetwork", + "otherlogfile", + "otherbasedir", + True, + True, + {"A": "123", "C": "D", "E": "F12!", "G": "H"}, + ) + + @patch("samcli.commands.local.start_lambda.cli.do_cli") + def test_override_with_cli_params_and_envvars(self, do_cli_mock): + + config_values = { + "template_file": "mytemplate.yaml", + "host": "127.0.0.1", + "port": 12345, + "env_vars": "envvar.json", + "debug_port": [1, 2, 3], + "debug_args": "args", + "debugger_path": "mypath", + "docker_volume_basedir": "basedir", + "docker_network": "mynetwork", + "log_file": "logfile", + "layer_cache_basedir": "basedir", + "skip_pull_image": True, + "force_image_build": False, + } + + # NOTE: Because we don't load the full Click BaseCommand here, this is mounted as top-level command + with samconfig_parameters(["start-lambda"], self.scratch_dir, **config_values) as config_path: + + from samcli.commands.local.start_lambda.cli import cli + + LOG.debug(Path(config_path).read_text()) + runner = CliRunner() + result = runner.invoke( + cli, + env={ + "SAM_TEMPLATE_FILE": "envtemplate.yaml", + "SAM_SKIP_PULL_IMAGE": "False", + "SAM_FORCE_IMAGE_BUILD": "False", + "SAM_DOCKER_NETWORK": "envnetwork", + # Debug port is exclusively provided through envvars and not thru CLI args + "SAM_DEBUG_PORT": "13579", + "DEBUGGER_ARGS": "envargs", + "SAM_DOCKER_VOLUME_BASEDIR": "envbasedir", + "SAM_LAYER_CACHE_BASEDIR": "envlayercache", + }, + args=[ + "--host", + "otherhost", + "--port", + 9999, + "--env-vars", + "otherenvvar.json", + "--debugger-path", + "otherpath", + "--log-file", + "otherlogfile", + # this is a case where cli args takes 
precedence over both + # config file and envvar + "--force-image-build", + # Parameter overrides is exclusively provided through CLI args and not config + "--parameter-overrides", + "A=123 C=D E=F12! G=H", + ], + ) + + LOG.info(result.output) + LOG.info(result.exception) + if result.exception: + LOG.exception("Command failed", exc_info=result.exc_info) + self.assertIsNone(result.exception) + + do_cli_mock.assert_called_with( + ANY, + "otherhost", + 9999, + str(Path(os.getcwd(), "envtemplate.yaml")), + "otherenvvar.json", + (13579,), + "envargs", + "otherpath", + "envbasedir", + "envnetwork", + "otherlogfile", + "envlayercache", + False, + True, + {"A": "123", "C": "D", "E": "F12!", "G": "H"}, + ) + + +@contextmanager +def samconfig_parameters(cmd_names, config_dir=None, env=None, **kwargs): + """ + ContextManager to write a new SAM Config and remove the file after the contextmanager exits + + Parameters + ---------- + cmd_names : list(str) + Name of the full command split as a list: ["generate-event", "s3", "put"] + + config_dir : str + Path where the SAM config file should be written to. Defaults to os.getcwd() + + env : str + Optional name of the config environment. This is currently unused + + kwargs : dict + Parameter names and values to be written to the file. 
+ + Returns + ------- + Path to the config file + """ + + env = env or DEFAULT_ENV + section = "parameters" + samconfig = SamConfig(config_dir=config_dir) + + try: + for k, v in kwargs.items(): + samconfig.put(cmd_names, section, k, v, env=env) + + samconfig.flush() + yield samconfig.path() + finally: + Path(samconfig.path()).unlink() diff --git a/tests/unit/commands/test_deploy.py b/tests/unit/commands/test_deploy.py deleted file mode 100644 index 90387d5095..0000000000 --- a/tests/unit/commands/test_deploy.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -Tests Deploy CLI command -""" - -from unittest import TestCase -from unittest.mock import patch - -from samcli.commands.deploy import do_cli as deploy_cli - - -class TestCli(TestCase): - def setUp(self): - self.args = ("--force-upload",) - self.expected_args = self.args + ("--stack-name", "stackName") - - @patch("samcli.commands.deploy.execute_command") - def test_deploy_must_pass_args(self, execute_command_mock): - execute_command_mock.return_value = True - deploy_cli(self.args, "file.yaml", "stackName") - execute_command_mock.assert_called_with("deploy", self.expected_args, template_file="file.yaml") diff --git a/tests/unit/lib/bootstrap/test_bootstrap.py b/tests/unit/lib/bootstrap/test_bootstrap.py new file mode 100644 index 0000000000..9c17c198f0 --- /dev/null +++ b/tests/unit/lib/bootstrap/test_bootstrap.py @@ -0,0 +1,208 @@ +from unittest import TestCase +from unittest.mock import patch, Mock + +import botocore.session + +from botocore.exceptions import ClientError, NoCredentialsError, NoRegionError +from botocore.stub import Stubber + +from samcli.commands.bootstrap.exceptions import ManagedStackError +from samcli.commands.exceptions import UserException, CredentialsError, RegionError +from samcli.lib.bootstrap.bootstrap import manage_stack, _create_or_get_stack, _get_stack_template, SAM_CLI_STACK_NAME + + +class TestBootstrapManagedStack(TestCase): + def _stubbed_cf_client(self): + cf = 
botocore.session.get_session().create_client("cloudformation") + return [cf, Stubber(cf)] + + @patch("boto3.Session") + def test_client_missing_credentials(self, boto_mock): + session_mock = Mock() + session_mock.client.side_effect = NoCredentialsError() + boto_mock.return_value = session_mock + with self.assertRaises(CredentialsError): + manage_stack("testprofile", "fake-region") + + @patch("boto3.Session") + def test_client_missing_region(self, boto_mock): + session_mock = Mock() + session_mock.client.side_effect = NoRegionError() + boto_mock.return_value = session_mock + with self.assertRaises(RegionError): + manage_stack("testprofile", "fake-region") + + def test_new_stack(self): + stub_cf, stubber = self._stubbed_cf_client() + # first describe_stacks call will fail + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_client_error("describe_stacks", service_error_code="ClientError", expected_params=ds_params) + # creating change set + ccs_params = { + "StackName": SAM_CLI_STACK_NAME, + "TemplateBody": _get_stack_template(), + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "ChangeSetType": "CREATE", + "ChangeSetName": "InitialCreation", + } + ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-default"} + stubber.add_response("create_change_set", ccs_resp, ccs_params) + # describe change set creation status for waiter + dcs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} + dcs_resp = {"Status": "CREATE_COMPLETE"} + stubber.add_response("describe_change_set", dcs_resp, dcs_params) + # executing change set + ecs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} + ecs_resp = {} + stubber.add_response("execute_change_set", ecs_resp, ecs_params) + # two describe_stacks calls will succeed - one for waiter, one direct + post_create_ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": 
[{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "Outputs": [{"OutputKey": "SourceBucket", "OutputValue": "generated-src-bucket"}], + } + ] + } + stubber.add_response("describe_stacks", post_create_ds_resp, ds_params) + stubber.add_response("describe_stacks", post_create_ds_resp, ds_params) + stubber.activate() + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_stack_exists(self): + stub_cf, stubber = self._stubbed_cf_client() + ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "Outputs": [{"OutputKey": "SourceBucket", "OutputValue": "generated-src-bucket"}], + } + ] + } + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_response("describe_stacks", ds_resp, ds_params) + stubber.activate() + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_stack_missing_bucket(self): + stub_cf, stubber = self._stubbed_cf_client() + ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "Outputs": [], + } + ] + } + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_response("describe_stacks", ds_resp, ds_params) + stubber.activate() + with self.assertRaises(UserException): + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_stack_missing_tag(self): + stub_cf, stubber = self._stubbed_cf_client() + ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": [], + "Outputs": [{"OutputKey": "SourceBucket", "OutputValue": "generated-src-bucket"}], + } + ] + } + ds_params = {"StackName": SAM_CLI_STACK_NAME} + 
stubber.add_response("describe_stacks", ds_resp, ds_params) + stubber.activate() + with self.assertRaises(UserException): + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_stack_wrong_tag(self): + stub_cf, stubber = self._stubbed_cf_client() + ds_resp = { + "Stacks": [ + { + "StackName": SAM_CLI_STACK_NAME, + "CreationTime": "2019-11-13", + "StackStatus": "CREATE_COMPLETE", + "Tags": [{"Key": "ManagedStackSource", "Value": "WHY WOULD YOU EVEN DO THIS"}], + "Outputs": [{"OutputKey": "SourceBucket", "OutputValue": "generated-src-bucket"}], + } + ] + } + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_response("describe_stacks", ds_resp, ds_params) + stubber.activate() + with self.assertRaises(UserException): + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_change_set_creation_fails(self): + stub_cf, stubber = self._stubbed_cf_client() + # first describe_stacks call will fail + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_client_error("describe_stacks", service_error_code="ClientError", expected_params=ds_params) + # creating change set - fails + ccs_params = { + "StackName": SAM_CLI_STACK_NAME, + "TemplateBody": _get_stack_template(), + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "ChangeSetType": "CREATE", + "ChangeSetName": "InitialCreation", + } + stubber.add_client_error("create_change_set", service_error_code="ClientError", expected_params=ccs_params) + stubber.activate() + with self.assertRaises(ManagedStackError): + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() + + def test_change_set_execution_fails(self): + stub_cf, stubber = self._stubbed_cf_client() + # first describe_stacks call will fail + ds_params = {"StackName": SAM_CLI_STACK_NAME} + stubber.add_client_error("describe_stacks", service_error_code="ClientError", expected_params=ds_params) + # 
creating change set + ccs_params = { + "StackName": SAM_CLI_STACK_NAME, + "TemplateBody": _get_stack_template(), + "Tags": [{"Key": "ManagedStackSource", "Value": "AwsSamCli"}], + "ChangeSetType": "CREATE", + "ChangeSetName": "InitialCreation", + } + ccs_resp = {"Id": "id", "StackId": "aws-sam-cli-managed-default"} + stubber.add_response("create_change_set", ccs_resp, ccs_params) + # describe change set creation status for waiter + dcs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} + dcs_resp = {"Status": "CREATE_COMPLETE"} + stubber.add_response("describe_change_set", dcs_resp, dcs_params) + # executing change set - fails + ecs_params = {"ChangeSetName": "InitialCreation", "StackName": SAM_CLI_STACK_NAME} + stubber.add_client_error( + "execute_change_set", service_error_code="InsufficientCapabilities", expected_params=ecs_params + ) + stubber.activate() + with self.assertRaises(ManagedStackError): + _create_or_get_stack(stub_cf) + stubber.assert_no_pending_responses() + stubber.deactivate() diff --git a/tests/unit/lib/deploy/__init__.py b/tests/unit/lib/deploy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/deploy/test_deployer.py b/tests/unit/lib/deploy/test_deployer.py new file mode 100644 index 0000000000..1eb1567cec --- /dev/null +++ b/tests/unit/lib/deploy/test_deployer.py @@ -0,0 +1,593 @@ +import uuid +import time +from datetime import datetime, timedelta +from unittest import TestCase +from unittest.mock import patch, MagicMock, ANY + +from botocore.exceptions import ClientError, WaiterError, BotoCoreError + +from samcli.commands.deploy.exceptions import ( + DeployFailedError, + ChangeSetError, + DeployStackOutPutFailedError, + DeployBucketInDifferentRegionError, +) +from samcli.lib.deploy.deployer import Deployer +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.utils.time import utc_to_timestamp, to_datetime + + +class MockPaginator: + def __init__(self, 
resp): + self.resp = resp + + def paginate(self, ChangeSetName=None, StackName=None): + return self.resp + + + class MockChangesetWaiter: + def __init__(self, ex=None): + self.ex = ex + + def wait(self, ChangeSetName, StackName, WaiterConfig): + if self.ex: + raise self.ex + return + + + class MockCreateUpdateWaiter: + def __init__(self, ex=None): + self.ex = ex + + def wait(self, StackName, WaiterConfig): + if self.ex: + raise self.ex + return + + + class TestDeployer(TestCase): + def setUp(self): + self.session = MagicMock() + self.cloudformation_client = self.session.client("cloudformation") + self.s3_client = self.session.client("s3") + self.deployer = Deployer(self.cloudformation_client) + + def test_deployer_init(self): + self.assertEqual(self.deployer._client, self.cloudformation_client) + self.assertEqual(self.deployer.changeset_prefix, "samcli-deploy") + + def test_deployer_has_no_stack(self): + self.deployer._client.describe_stacks = MagicMock(return_value={"Stacks": []}) + self.assertEqual(self.deployer.has_stack("test"), False) + + def test_deployer_has_stack_in_review(self): + self.deployer._client.describe_stacks = MagicMock( + return_value={"Stacks": [{"StackStatus": "REVIEW_IN_PROGRESS"}]} + ) + self.assertEqual(self.deployer.has_stack("test"), False) + + def test_deployer_has_stack_exception_non_existent(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) + self.assertEqual(self.deployer.has_stack("test"), False) + + def test_deployer_has_stack_exception(self): + self.deployer._client.describe_stacks = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.deployer.has_stack("test") + + def test_deployer_has_stack_exception_botocore(self): + self.deployer._client.describe_stacks = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeployFailedError): + 
self.deployer.has_stack("test") + + def test_create_changeset(self): + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + self.assertEqual(self.deployer._client.create_change_set.call_count, 1) + self.deployer._client.create_change_set.assert_called_with( + Capabilities=["CAPABILITY_IAM"], + ChangeSetName=ANY, + ChangeSetType="CREATE", + Description=ANY, + NotificationARNs=[], + Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}], + RoleARN="role-arn", + StackName="test", + Tags={"unit": "true"}, + TemplateURL=ANY, + ) + + def test_update_changeset(self): + self.deployer.has_stack = MagicMock(return_value=True) + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + self.assertEqual(self.deployer._client.create_change_set.call_count, 1) + self.deployer._client.create_change_set.assert_called_with( + Capabilities=["CAPABILITY_IAM"], + ChangeSetName=ANY, + ChangeSetType="UPDATE", + Description=ANY, + NotificationARNs=[], + Parameters=[{"ParameterKey": "a", "ParameterValue": "b"}], + RoleARN="role-arn", + StackName="test", + Tags={"unit": "true"}, + TemplateURL=ANY, + ) + + def test_create_changeset_exception(self): + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer._client.create_change_set = 
MagicMock(side_effect=Exception) + with self.assertRaises(ChangeSetError): + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + def test_create_changeset_ClientErrorException(self): + error_message = ( + "An error occurred (ValidationError) when calling the CreateChangeSet " + "operation: S3 error: The bucket you are attempting to access must be " + "addressed using the specified endpoint. " + "Please send all future requests to this " + "endpoint.\nFor more information " + "check http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html" + ) + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer._client.create_change_set = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": error_message}}, operation_name="create_changeset" + ) + ) + with self.assertRaises(DeployBucketInDifferentRegionError): + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + def test_create_changeset_ClientErrorException_generic(self): + self.deployer.has_stack = MagicMock(return_value=False) + self.deployer._client.create_change_set = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Message"}}, operation_name="create_changeset") + ) + with self.assertRaises(ChangeSetError): + self.deployer.create_changeset( + stack_name="test", + cfn_template=" ", 
+ parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + def test_describe_changeset_with_changes(self): + response = [ + { + "Changes": [ + {"ResourceChange": {"LogicalResourceId": "resource_id1", "ResourceType": "s3", "Action": "Add"}} + ] + }, + { + "Changes": [ + {"ResourceChange": {"LogicalResourceId": "resource_id2", "ResourceType": "kms", "Action": "Add"}} + ] + }, + { + "Changes": [ + {"ResourceChange": {"LogicalResourceId": "resource_id3", "ResourceType": "lambda", "Action": "Add"}} + ] + }, + ] + self.deployer._client.get_paginator = MagicMock(return_value=MockPaginator(resp=response)) + changes = self.deployer.describe_changeset("change_id", "test") + self.assertEqual( + changes, + { + "Add": [ + {"LogicalResourceId": "resource_id1", "ResourceType": "s3"}, + {"LogicalResourceId": "resource_id2", "ResourceType": "kms"}, + {"LogicalResourceId": "resource_id3", "ResourceType": "lambda"}, + ], + "Modify": [], + "Remove": [], + }, + ) + + def test_describe_changeset_with_no_changes(self): + response = [{"Changes": []}] + self.deployer._client.get_paginator = MagicMock(return_value=MockPaginator(resp=response)) + changes = self.deployer.describe_changeset("change_id", "test") + self.assertEqual(changes, {"Add": [], "Modify": [], "Remove": []}) + + def test_wait_for_changeset(self): + self.deployer._client.get_waiter = MagicMock(return_value=MockChangesetWaiter()) + self.deployer.wait_for_changeset("test-id", "test-stack") + + def test_wait_for_changeset_exception_ChangeEmpty(self): + self.deployer._client.get_waiter = MagicMock( + return_value=MockChangesetWaiter( + ex=WaiterError( + name="wait_for_changeset", + reason="unit-test", + last_response={"Status": "Failed", "StatusReason": "It's a unit 
test"}, + ) + ) + ) + with self.assertRaises(ChangeSetError): + self.deployer.wait_for_changeset("test-id", "test-stack") + + def test_execute_changeset(self): + self.deployer.execute_changeset("id", "test") + self.deployer._client.execute_change_set.assert_called_with(ChangeSetName="id", StackName="test") + + def test_execute_changeset_exception(self): + self.deployer._client.execute_change_set = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="execute_changeset") + ) + with self.assertRaises(DeployFailedError): + self.deployer.execute_changeset("id", "test") + + def test_get_last_event_time(self): + timestamp = datetime.utcnow() + self.deployer._client.describe_stack_events = MagicMock( + return_value={"StackEvents": [{"Timestamp": timestamp}]} + ) + self.assertEqual(self.deployer.get_last_event_time("test"), utc_to_timestamp(timestamp)) + + def test_get_last_event_time_unknown_last_time(self): + current_timestamp = datetime.utcnow() + self.deployer._client.describe_stack_events = MagicMock(side_effect=KeyError) + # Convert to milliseconds from seconds + last_stack_event_timestamp = to_datetime(self.deployer.get_last_event_time("test") * 1000) + self.assertEqual(last_stack_event_timestamp.year, current_timestamp.year) + self.assertEqual(last_stack_event_timestamp.month, current_timestamp.month) + self.assertEqual(last_stack_event_timestamp.day, current_timestamp.day) + self.assertEqual(last_stack_event_timestamp.hour, current_timestamp.hour) + self.assertEqual(last_stack_event_timestamp.minute, current_timestamp.minute) + self.assertEqual(last_stack_event_timestamp.second, current_timestamp.second) + + @patch("time.sleep") + def test_describe_stack_events(self, patched_time): + current_timestamp = datetime.utcnow() + + self.deployer._client.describe_stacks = MagicMock( + side_effect=[ + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": 
[{"StackStatus": "CREATE_COMPLETE_CLEANUP_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE"}]}, + ] + ) + self.deployer._client.get_paginator = MagicMock( + return_value=MockPaginator( + [ + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + ] + ) + ) + + self.deployer.describe_stack_events("test", time.time() - 1) + + @patch("time.sleep") + def test_describe_stack_events_exceptions(self, patched_time): + + self.deployer._client.describe_stacks = MagicMock( + side_effect=[ + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ] + ) + with self.assertRaises(ClientError): + self.deployer.describe_stack_events("test", time.time()) + + @patch("time.sleep") + def test_describe_stack_events_resume_after_exceptions(self, patched_time): + current_timestamp = datetime.utcnow() + + 
self.deployer._client.describe_stacks = MagicMock( + side_effect=[ + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + ClientError( + error_response={"Error": {"Message": "Rate Exceeded"}}, operation_name="describe_stack_events" + ), + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE_CLEANUP_IN_PROGRESS"}]}, + {"Stacks": [{"StackStatus": "CREATE_COMPLETE"}]}, + ] + ) + + self.deployer._client.get_paginator = MagicMock( + return_value=MockPaginator( + [ + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_IN_PROGRESS", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "s3", + "LogicalResourceId": "mybucket", + } + ] + }, + { + "StackEvents": [ + { + "EventId": str(uuid.uuid4()), + "Timestamp": current_timestamp, + "ResourceStatus": "CREATE_COMPLETE", + "ResourceType": "kms", + "LogicalResourceId": "mykms", + } + ] + }, + ] + ) + ) + + self.deployer.describe_stack_events("test", time.time()) + + def test_check_stack_status(self): + self.assertEqual(self.deployer._check_stack_complete("CREATE_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("CREATE_FAILED"), False) + self.assertEqual(self.deployer._check_stack_complete("CREATE_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("DELETE_COMPLETE"), 
True) + self.assertEqual(self.deployer._check_stack_complete("DELETE_FAILED"), False) + self.assertEqual(self.deployer._check_stack_complete("DELETE_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("REVIEW_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("ROLLBACK_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("ROLLBACK_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_COMPLETE"), True) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_COMPLETE_CLEANUP_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_ROLLBACK_FAILED"), False) + self.assertEqual(self.deployer._check_stack_complete("UPDATE_ROLLBACK_IN_PROGRESS"), False) + + @patch("time.sleep") + def test_wait_for_execute(self, patched_time): + self.deployer.describe_stack_events = MagicMock() + self.deployer._client.get_waiter = MagicMock(return_value=MockCreateUpdateWaiter()) + self.deployer.wait_for_execute("test", "CREATE") + self.deployer.wait_for_execute("test", "UPDATE") + with self.assertRaises(RuntimeError): + self.deployer.wait_for_execute("test", "DESTRUCT") + + self.deployer._client.get_waiter = MagicMock( + return_value=MockCreateUpdateWaiter( + ex=WaiterError( + name="create_changeset", + reason="unit-test", + last_response={"Status": "Failed", "StatusReason": "It's a unit test"}, + ) + ) + ) + with self.assertRaises(DeployFailedError): + self.deployer.wait_for_execute("test", "CREATE") + + def test_create_and_wait_for_changeset(self): + self.deployer.create_changeset = MagicMock(return_value=({"Id": "test"}, "create")) + self.deployer.wait_for_changeset = MagicMock() + self.deployer.describe_changeset = MagicMock() + + result = 
self.deployer.create_and_wait_for_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + self.assertEqual(result, ({"Id": "test"}, "create")) + + def test_create_and_wait_for_changeset_exception(self): + self.deployer.create_changeset = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Something Wrong"}}, operation_name="create_changeset" + ) + ) + with self.assertRaises(DeployFailedError): + self.deployer.create_and_wait_for_changeset( + stack_name="test", + cfn_template=" ", + parameter_values=[ + {"ParameterKey": "a", "ParameterValue": "b"}, + {"ParameterKey": "c", "UsePreviousValue": True}, + ], + capabilities=["CAPABILITY_IAM"], + role_arn="role-arn", + notification_arns=[], + s3_uploader=S3Uploader(s3_client=self.s3_client, bucket_name="test_bucket"), + tags={"unit": "true"}, + ) + + def test_get_stack_outputs(self): + outputs = { + "Stacks": [ + { + "Outputs": [ + {"OutputKey": "Key1", "OutputValue": "Value1", "Description": "output for s3"}, + {"OutputKey": "Key2", "OutputValue": "Value2", "Description": "output for kms"}, + ] + } + ] + } + self.deployer._client.describe_stacks = MagicMock(return_value=outputs) + + self.assertEqual(outputs["Stacks"][0]["Outputs"], self.deployer.get_stack_outputs(stack_name="test")) + self.deployer._client.describe_stacks.assert_called_with(StackName="test") + + @patch("samcli.lib.deploy.deployer.pprint_columns") + def test_get_stack_outputs_no_echo(self, mock_pprint_columns): + outputs = { + "Stacks": [ + { + "Outputs": [ + {"OutputKey": "Key1", "OutputValue": "Value1", "Description": "output for s3"}, + {"OutputKey": "Key2", "OutputValue": "Value2", "Description": "output for kms"}, + 
] + } + ] + } + self.deployer._client.describe_stacks = MagicMock(return_value=outputs) + + self.assertEqual( + outputs["Stacks"][0]["Outputs"], self.deployer.get_stack_outputs(stack_name="test", echo=False) + ) + self.deployer._client.describe_stacks.assert_called_with(StackName="test") + self.assertEqual(mock_pprint_columns.call_count, 0) + + def test_get_stack_outputs_no_outputs_no_exception(self): + outputs = {"Stacks": [{"SomeOtherKey": "Value"}]} + self.deployer._client.describe_stacks = MagicMock(return_value=outputs) + + self.assertEqual(None, self.deployer.get_stack_outputs(stack_name="test")) + self.deployer._client.describe_stacks.assert_called_with(StackName="test") + + def test_get_stack_outputs_exception(self): + self.deployer._client.describe_stacks = MagicMock( + side_effect=ClientError(error_response={"Error": {"Message": "Error"}}, operation_name="describe_stacks") + ) + + with self.assertRaises(DeployStackOutPutFailedError): + self.deployer.get_stack_outputs(stack_name="test") diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index 1b47871e0e..496d9db08c 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -7,7 +7,8 @@ from pathlib import Path from botocore.exceptions import ClientError -from samcli.lib.package.s3_uploader import S3Uploader, NoSuchBucketError +from samcli.commands.package.exceptions import NoSuchBucketError, BucketNotSpecifiedError +from samcli.lib.package.s3_uploader import S3Uploader class TestS3Uploader(TestCase): @@ -143,6 +144,21 @@ def test_s3_upload(self): s3_url = s3_uploader.upload(f.name, remote_path) self.assertEqual(s3_url, "s3://{0}/{1}/{2}".format(self.bucket_name, self.prefix, remote_path)) + def test_s3_upload_no_bucket(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + 
s3_uploader.artifact_metadata = {"a": "b"} + remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp")) + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + s3_uploader.upload(f.name, remote_path) + self.assertEqual(BucketNotSpecifiedError().message, str(ex)) + def test_s3_upload_with_dedup(self): s3_uploader = S3Uploader( s3_client=self.s3, diff --git a/tests/unit/lib/samconfig/__init__.py b/tests/unit/lib/samconfig/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/samconfig/test_samconfig.py b/tests/unit/lib/samconfig/test_samconfig.py new file mode 100644 index 0000000000..d6277db9ed --- /dev/null +++ b/tests/unit/lib/samconfig/test_samconfig.py @@ -0,0 +1,68 @@ +import os +from pathlib import Path + +from unittest import TestCase + +from samcli.lib.config.exceptions import SamConfigVersionException +from samcli.lib.config.version import VERSION_KEY, SAM_CONFIG_VERSION +from samcli.lib.config.samconfig import SamConfig, DEFAULT_CONFIG_FILE_NAME + + +class TestSamConfig(TestCase): + def setUp(self): + self.config_dir = os.getcwd() + self.samconfig = SamConfig(self.config_dir) + + def tearDown(self): + if self.samconfig.exists(): + os.remove(self.samconfig.path()) + + def _setup_config(self): + self.samconfig.put(cmd_names=["local", "start", "api"], section="parameters", key="port", value=5401) + self.samconfig.flush() + self.assertTrue(self.samconfig.exists()) + self.assertTrue(self.samconfig.sanity_check()) + self.assertEqual(SAM_CONFIG_VERSION, self.samconfig.document.get(VERSION_KEY)) + + def test_init(self): + self.assertEqual(self.samconfig.filepath, Path(self.config_dir, DEFAULT_CONFIG_FILE_NAME)) + + def test_check_config_get(self): + self._setup_config() + self.assertEqual( + {"port": 5401}, self.samconfig.get_all(cmd_names=["local", "start", "api"], section="parameters") + ) + + def test_check_config_exists(self): + self._setup_config() + 
self.assertTrue(self.samconfig.exists()) + + def test_check_sanity(self): + self.assertTrue(self.samconfig.sanity_check()) + + def test_check_version_non_supported_type(self): + self._setup_config() + self.samconfig.document.remove(VERSION_KEY) + self.samconfig.document.add(VERSION_KEY, "aadeff") + with self.assertRaises(SamConfigVersionException): + self.samconfig.sanity_check() + + def test_check_version_no_version_exists(self): + self._setup_config() + self.samconfig.document.remove(VERSION_KEY) + with self.assertRaises(SamConfigVersionException): + self.samconfig.sanity_check() + + def test_check_version_float(self): + self._setup_config() + self.samconfig.document.remove(VERSION_KEY) + self.samconfig.document.add(VERSION_KEY, 0.2) + self.samconfig.sanity_check() + + def test_write_config_file_non_standard_version(self): + self._setup_config() + self.samconfig.document.remove(VERSION_KEY) + self.samconfig.document.add(VERSION_KEY, 0.2) + self.samconfig.put(cmd_names=["local", "start", "api"], section="parameters", key="skip_pull_image", value=True) + self.samconfig.sanity_check() + self.assertEqual(self.samconfig.document.get(VERSION_KEY), 0.2) diff --git a/tests/unit/lib/samlib/test_cloudformation_command.py b/tests/unit/lib/samlib/test_cloudformation_command.py deleted file mode 100644 index e846570c96..0000000000 --- a/tests/unit/lib/samlib/test_cloudformation_command.py +++ /dev/null @@ -1,166 +0,0 @@ -""" -Tests Deploy CLI -""" - -import os -from subprocess import CalledProcessError, PIPE - -from unittest import TestCase -from unittest.mock import patch, call, ANY - -from samcli.lib.samlib.cloudformation_command import execute_command, find_executable - - -class TestExecuteCommand(TestCase): - def setUp(self): - self.args = ("--arg1", "value1", "different args", "more") - - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - def test_must_add_template_file(self, find_executable_mock, check_call_mock): - 
find_executable_mock.return_value = "mycmd" - check_call_mock.return_value = True - execute_command("command", self.args, "/path/to/template") - - check_call_mock.assert_called_with( - ["mycmd", "cloudformation", "command"] - + ["--arg1", "value1", "different args", "more", "--template-file", "/path/to/template"], - env=ANY, - ) - - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - @patch("samcli.lib.samlib.cloudformation_command.GlobalConfig") - def test_must_add_sam_cli_info_to_execution_env_var_if_telemetry_is_on( - self, global_config_mock, find_executable_mock, check_call_mock - ): - installation_id = "testtest" - global_config_mock.return_value.installation_id = installation_id - global_config_mock.return_value.telemetry_enabled = True - - expected_env = os.environ.copy() - expected_env["AWS_EXECUTION_ENV"] = "SAM-" + installation_id - - find_executable_mock.return_value = "mycmd" - check_call_mock.return_value = True - execute_command("command", self.args, "/path/to/template") - - check_call_mock.assert_called() - kwargs = check_call_mock.call_args[1] - self.assertIn("env", kwargs) - self.assertEqual(kwargs["env"], expected_env) - - @patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - @patch("samcli.lib.samlib.cloudformation_command.GlobalConfig") - def test_must_not_set_exec_env(self, global_config_mock, find_executable_mock, check_call_mock): - global_config_mock.return_value.telemetry_enabled = False - - # Expected to pass just a copy of the environment variables without modification - expected_env = os.environ.copy() - - find_executable_mock.return_value = "mycmd" - check_call_mock.return_value = True - execute_command("command", self.args, "/path/to/template") - - check_call_mock.assert_called() - kwargs = check_call_mock.call_args[1] - self.assertIn("env", kwargs) - self.assertEqual(kwargs["env"], expected_env) - - @patch("sys.exit") - 
@patch("subprocess.check_call") - @patch("samcli.lib.samlib.cloudformation_command.find_executable") - def test_command_must_exit_with_status_code(self, find_executable_mock, check_call_mock, exit_mock): - find_executable_mock.return_value = "mycmd" - check_call_mock.side_effect = CalledProcessError(2, "Error") - exit_mock.return_value = True - execute_command("command", self.args, None) - exit_mock.assert_called_with(2) - - -class TestFindExecutable(TestCase): - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_raw_name(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "Linux" - execname = "foo" - - find_executable(execname) - - self.assertEqual(popen_mock.mock_calls, [call([execname], stdout=PIPE, stderr=PIPE)]) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_name_with_cmd_extension_on_windows(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - expected = "foo.cmd" - - result = find_executable(execname) - self.assertEqual(result, expected) - - self.assertEqual(popen_mock.mock_calls, [call(["foo.cmd"], stdout=PIPE, stderr=PIPE)]) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_name_with_exe_extension_on_windows(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - expected = "foo.exe" - - popen_mock.side_effect = [OSError, "success"] # fail on .cmd extension - - result = find_executable(execname) - self.assertEqual(result, expected) - - self.assertEqual( - popen_mock.mock_calls, - [call(["foo.cmd"], stdout=PIPE, stderr=PIPE), call(["foo.exe"], stdout=PIPE, stderr=PIPE)], - ) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_use_name_with_no_extension_on_windows(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - expected = "foo" - - popen_mock.side_effect = 
[OSError, OSError, "success"] # fail on .cmd and .exe extension - - result = find_executable(execname) - self.assertEqual(result, expected) - - self.assertEqual( - popen_mock.mock_calls, - [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE), - call(["foo.exe"], stdout=PIPE, stderr=PIPE), - call(["foo"], stdout=PIPE, stderr=PIPE), - ], - ) - - @patch("subprocess.Popen") - @patch("platform.system") - def test_must_raise_error_if_executable_not_found(self, platform_system_mock, popen_mock): - platform_system_mock.return_value = "windows" - execname = "foo" - - popen_mock.side_effect = [OSError, OSError, OSError, "success"] # fail on all executable names - - with self.assertRaises(OSError) as ctx: - find_executable(execname) - - expected = "Cannot find AWS CLI installation, was looking at executables with names: {}".format( - ["foo.cmd", "foo.exe", "foo"] - ) - self.assertEqual(expected, str(ctx.exception)) - - self.assertEqual( - popen_mock.mock_calls, - [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE), - call(["foo.exe"], stdout=PIPE, stderr=PIPE), - call(["foo"], stdout=PIPE, stderr=PIPE), - ], - ) diff --git a/tests/unit/lib/utils/test_file_utils.py b/tests/unit/lib/utils/test_file_utils.py new file mode 100644 index 0000000000..c26eb3c7de --- /dev/null +++ b/tests/unit/lib/utils/test_file_utils.py @@ -0,0 +1,21 @@ +import os +import tempfile +from unittest import TestCase + +from samcli.lib.utils.temp_file_utils import remove, tempfile_platform_independent + + +class TestFile(TestCase): + def test_file_remove(self): + _file = tempfile.NamedTemporaryFile(delete=False) + _file.close() + remove(_file.name) + self.assertFalse(os.path.exists(_file.name)) + # No Exception thrown + remove(os.path.join(os.getcwd(), "random")) + + def test_temp_file(self): + _path = None + with tempfile_platform_independent() as _tempf: + _path = _tempf.name + self.assertFalse(os.path.exists(_path)) diff --git a/tests/unit/lib/utils/test_sam_logging.py b/tests/unit/lib/utils/test_sam_logging.py 
index b2fb1654ce..9a4824478a 100644 --- a/tests/unit/lib/utils/test_sam_logging.py +++ b/tests/unit/lib/utils/test_sam_logging.py @@ -22,3 +22,13 @@ def test_configure_samcli_logger(self, logging_patch): logger_mock.addHandler.assert_called_once_with(stream_handler_mock) stream_handler_mock.setLevel.assert_called_once_with(2) stream_handler_mock.setFormatter.assert_called_once_with(formatter_mock) + + @patch("samcli.lib.utils.sam_logging.logging") + def test_configure_null_logger(self, logging_patch): + logger_mock = Mock() + + SamCliLogger.configure_null_logger(logger_mock) + + self.assertFalse(logger_mock.propagate) + + logger_mock.addHandler.assert_called_once_with(logging_patch.NullHandler()) diff --git a/tests/unit/lib/utils/test_time.py b/tests/unit/lib/utils/test_time.py index 14b879b0e7..df63d2cc81 100644 --- a/tests/unit/lib/utils/test_time.py +++ b/tests/unit/lib/utils/test_time.py @@ -1,8 +1,10 @@ +import time import datetime from unittest import TestCase -from samcli.lib.utils.time import to_timestamp, timestamp_to_iso, parse_date, to_utc +from samcli.lib.utils.time import to_timestamp, timestamp_to_iso, parse_date, to_utc, utc_to_timestamp +from dateutil.tz import tzutc class TestTimestampToIso(TestCase): @@ -26,6 +28,12 @@ def test_must_convert_to_timestamp(self): self.assertEqual(expected, to_timestamp(date)) + def test_convert_utc_to_timestamp(self): + timestamp = time.time() + utc = datetime.datetime.utcfromtimestamp(timestamp) + # compare in milliseconds + self.assertEqual(int(timestamp * 1000), utc_to_timestamp(utc)) + class TestToUtc(TestCase): def test_with_timezone(self):