From 6d1dd9215c2c71ddf9341342908449539417e5af Mon Sep 17 00:00:00 2001 From: Alex Wood Date: Thu, 17 Oct 2019 14:15:28 -0700 Subject: [PATCH 1/7] Make --no-event the default for local invoke If no explicit `--event` is passed in, the default assumption is that no event is desired. STDIN events must be explicit. This is a breaking change. --- samcli/commands/local/invoke/cli.py | 24 +++++------ tests/unit/commands/local/invoke/test_cli.py | 42 ++------------------ 2 files changed, 15 insertions(+), 51 deletions(-) diff --git a/samcli/commands/local/invoke/cli.py b/samcli/commands/local/invoke/cli.py index 95b97f27b8..d12f6e73fd 100644 --- a/samcli/commands/local/invoke/cli.py +++ b/samcli/commands/local/invoke/cli.py @@ -14,14 +14,17 @@ HELP_TEXT = """ You can use this command to execute your function in a Lambda-like environment locally. -You can pass in the event body via stdin or by using the -e (--event) parameter. -Logs from the Lambda function will be output via stdout.\n +You can pass in an event body using the -e (--event) parameter. +Logs from the Lambda function will be written to stdout.\n +\b +Invoking a Lambda function without an input event +$ sam local invoke "HelloWorldFunction"\n \b Invoking a Lambda function using an event file $ sam local invoke "HelloWorldFunction" -e event.json\n \b Invoking a Lambda function using input from stdin -$ echo '{"message": "Hey, are you there?" }' | sam local invoke "HelloWorldFunction" \n +$ echo '{"message": "Hey, are you there?" }' | sam local invoke "HelloWorldFunction" --event - \n """ STDIN_FILE_NAME = "-" @@ -31,11 +34,10 @@ "--event", "-e", type=click.Path(), - default=STDIN_FILE_NAME, # Defaults to stdin help="JSON file containing event data passed to the Lambda function during invoke. If this option " - "is not specified, we will default to reading JSON from stdin", + "is not specified, no event is assumed. Pass in the value '-' to input JSON via stdin", ) -@click.option("--no-event", is_flag=True, default=False, help="Invoke Function with an empty event") +@click.option("--no-event", is_flag=True, default=True, help="DEPRECATED: By default no event is assumed.", hidden=True) @invoke_common_options @cli_framework_options @aws_creds_options @@ -116,14 +118,10 @@ def do_cli( # pylint: disable=R0914 LOG.debug("local invoke command is called") - if no_event and event != STDIN_FILE_NAME: - # Do not know what the user wants. no_event and event both passed in. - raise UserException("no_event and event cannot be used together. Please provide only one.") - - if no_event: - event_data = "{}" - else: + if event: event_data = _get_event(event) + else: + event_data = "{}" # Pass all inputs to setup necessary context to invoke function locally. 
# Handler exception raised by the processor for invalid args and print errors diff --git a/tests/unit/commands/local/invoke/test_cli.py b/tests/unit/commands/local/invoke/test_cli.py index a15448833b..a8cfaad6ae 100644 --- a/tests/unit/commands/local/invoke/test_cli.py +++ b/tests/unit/commands/local/invoke/test_cli.py @@ -32,7 +32,7 @@ def setUp(self): self.docker_network = "network" self.log_file = "logfile" self.skip_pull_image = True - self.no_event = False + self.no_event = True self.parameter_overrides = {} self.layer_cache_basedir = "/some/layers/path" self.force_image_build = True @@ -98,7 +98,7 @@ def test_cli_must_setup_context_and_invoke(self, get_event_mock, InvokeContextMo @patch("samcli.commands.local.cli_common.invoke_context.InvokeContext") @patch("samcli.commands.local.invoke.cli._get_event") def test_cli_must_invoke_with_no_event(self, get_event_mock, InvokeContextMock): - self.no_event = True + self.event = None ctx_mock = Mock() ctx_mock.region = self.region_name @@ -111,7 +111,7 @@ def test_cli_must_invoke_with_no_event(self, get_event_mock, InvokeContextMock): ctx=ctx_mock, function_identifier=self.function_id, template=self.template, - event=STDIN_FILE_NAME, + event=self.event, no_event=self.no_event, env_vars=self.env_vars, debug_port=self.debug_port, @@ -144,44 +144,10 @@ def test_cli_must_invoke_with_no_event(self, get_event_mock, InvokeContextMock): aws_profile=self.profile, ) + get_event_mock.assert_not_called() context_mock.local_lambda_runner.invoke.assert_called_with( context_mock.function_name, event="{}", stdout=context_mock.stdout, stderr=context_mock.stderr ) - get_event_mock.assert_not_called() - - @patch("samcli.commands.local.cli_common.invoke_context.InvokeContext") - @patch("samcli.commands.local.invoke.cli._get_event") - def test_must_raise_user_exception_on_no_event_and_event(self, get_event_mock, InvokeContextMock): - self.no_event = True - - ctx_mock = Mock() - ctx_mock.region = self.region_name - ctx_mock.profile = self.profile - - with self.assertRaises(UserException) as ex_ctx: - - invoke_cli( - ctx=ctx_mock, - function_identifier=self.function_id, - template=self.template, - event=self.eventfile, - no_event=self.no_event, - env_vars=self.env_vars, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build, - ) - - msg = str(ex_ctx.exception) - self.assertEqual(msg, "no_event and event cannot be used together. Please provide only one.") - @parameterized.expand( [ param(FunctionNotFound("not found"), "Function id not found in template"), From 0812b24d5f12ea7b16b8e84af182a7934ccbc0d2 Mon Sep 17 00:00:00 2001 From: Alex Wood Date: Wed, 30 Oct 2019 16:01:53 -0700 Subject: [PATCH 2/7] Remove Exception Test for Local Invoke We don't want an exception, the test is invalid. 
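For context, an illustration (not part of the diff): after PATCH 1, `--no-event` is a hidden, deprecated no-op, and the event source is governed entirely by `--event`, so the flag combination this test guarded against can no longer conflict. The three supported invocation styles are now:

$ sam local invoke "HelloWorldFunction"                      # no event; the function receives "{}"
$ sam local invoke "HelloWorldFunction" --event event.json   # event body read from a file
$ echo '{"message": "hi"}' | sam local invoke "HelloWorldFunction" --event -   # event body read from stdin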
--- .../local/invoke/test_integrations_cli.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/tests/integration/local/invoke/test_integrations_cli.py b/tests/integration/local/invoke/test_integrations_cli.py index 9814c71b39..f9c6d66587 100644 --- a/tests/integration/local/invoke/test_integrations_cli.py +++ b/tests/integration/local/invoke/test_integrations_cli.py @@ -151,7 +151,6 @@ def test_invoke_when_function_writes_stderr(self): @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_returns_expected_result_when_no_event_given(self): command_list = self.get_command_list("EchoEventFunction", template_path=self.template_path) - command_list.append("--no-event") process = Popen(command_list, stdout=PIPE) return_code = process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() @@ -159,20 +158,6 @@ def test_invoke_returns_expected_result_when_no_event_given(self): self.assertEqual(return_code, 0) self.assertEqual("{}", process_stdout.decode("utf-8")) - @pytest.mark.flaky(reruns=3) - @pytest.mark.timeout(timeout=300, method="thread") - def test_invoke_raises_exception_with_noargs_and_event(self): - command_list = self.get_command_list( - "HelloWorldLambdaFunction", template_path=self.template_path, event_path=self.event_path - ) - command_list.append("--no-event") - process = Popen(command_list, stderr=PIPE) - process.wait() - - process_stderr = b"".join(process.stderr.readlines()).strip() - error_output = process_stderr.decode("utf-8") - self.assertIn("no_event and event cannot be used together. Please provide only one.", error_output) - @pytest.mark.flaky(reruns=3) @pytest.mark.timeout(timeout=300, method="thread") def test_invoke_with_env_using_parameters(self): From a54a4ffdbcb698d5b23b6b3986a090b50e607362 Mon Sep 17 00:00:00 2001 From: gomi_ningen Date: Tue, 5 Nov 2019 06:34:39 +0900 Subject: [PATCH 3/7] fix: slack channel link (#1498) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1171112229..1989d97b14 100644 --- a/README.md +++ b/README.md @@ -86,7 +86,7 @@ Read the [SAM Documentation Contribution Guide](https://github.com/awsdocs/aws-s started. ### Join the SAM Community on Slack -[Join the SAM developers channel (#samdev)](https://join.slack.com/t/awsdevelopers/shared_invite/enQtMzg3NTc5OTM2MzcxLTdjYTdhYWE3OTQyYTU4Njk1ZWY4Y2ZjYjBhMTUxNGYzNDg5MWQ1ZTc5MTRlOGY0OTI4NTdlZTMwNmI5YTgwOGM/) on Slack to collaborate with fellow community members and the AWS SAM team. +[Join the SAM developers channel (#samdev)](https://join.slack.com/t/awsdevelopers/shared_invite/enQtMzg3NTc5OTM2MzcxLTIxNjc0ZTJkNmYyNWY3OWE4NTFiNzU1ZTM2Y2VkNmFlNjQ2YjI3YTE1ZDA5YjE5NDE2MjVmYWFlYWIxNjE2NjU) on Slack to collaborate with fellow community members and the AWS SAM team. 
From 7cbcfffa4c4c8eac237cfa8613912504d61a8d01 Mon Sep 17 00:00:00 2001
From: Sriram Madapusi Vasudevan <3770774+TheSriram@users.noreply.github.com>
Date: Tue, 5 Nov 2019 10:49:59 -0800
Subject: [PATCH 4/7] feat: sam package without awscli pre-installed (#1437)

* feat: sam package without awscli pre-installed
  - rely on boto3 instead
  - port over code from aws cli
* tests: port artifact exporter unit tests
* tests: add package command unit tests
* refactor: package context class
  - unit tests for package context class
  - remove old `test_package.py`
* feat: s3 uploader changes
  - add unit tests for s3 uploader
* refactor: sam package - artifact exporter
  - refactor the port from `aws-cli` for the artifact exporter
  - add unit tests for artifact exporter
* refactor: unit tests for `package context` class
* feat: sam package `click` wiring
  - added unit tests for the click wiring
* tests: fix unit tests on windows
  - fix for smoke tests
* tests: fix temporary file usage for windows
  - https://docs.python.org/3/library/tempfile.html
* refactor: clean up package command
  - remove unneeded variables
  - remove unneeded exceptions; inherit package-specific exceptions from UserException
  - refactor resources and their respective property enums into their own file, which is cross-referenced across package and other commands
  - include jmespath as an explicit dependency
  - refactor unit tests as required based on code structure changes
* fix: resources and property locations in `sam package` help text

Usage: samdev package [OPTIONS]

  The SAM package command creates a zip of your code and dependencies and
  uploads it to S3. The command returns a copy of your template, replacing
  references to local artifacts with the S3 location where the command
  uploaded the artifacts.

  The following resources and their property locations are supported.
  Resource : AWS::ServerlessRepo::Application | Location : LicenseUrl
  Resource : AWS::ServerlessRepo::Application | Location : ReadmeUrl
  Resource : AWS::Serverless::Function | Location : CodeUri
  Resource : AWS::Serverless::Api | Location : DefinitionUri
  Resource : AWS::AppSync::GraphQLSchema | Location : DefinitionS3Location
  Resource : AWS::AppSync::Resolver | Location : RequestMappingTemplateS3Location
  Resource : AWS::AppSync::Resolver | Location : ResponseMappingTemplateS3Location
  Resource : AWS::AppSync::FunctionConfiguration | Location : RequestMappingTemplateS3Location
  Resource : AWS::AppSync::FunctionConfiguration | Location : ResponseMappingTemplateS3Location
  Resource : AWS::Lambda::Function | Location : Code
  Resource : AWS::ApiGateway::RestApi | Location : BodyS3Location
  Resource : AWS::ElasticBeanstalk::ApplicationVersion | Location : SourceBundle
  Resource : AWS::CloudFormation::Stack | Location : TemplateURL
  Resource : AWS::Serverless::Application | Location : Location
  Resource : AWS::Lambda::LayerVersion | Location : Content
  Resource : AWS::Serverless::LayerVersion | Location : ContentUri
  Resource : AWS::Glue::Job | Location : Command.ScriptLocation

* chore: change pylint line similarity
* tests: seed integration tests for sam package
  - add one barebones test to check `sam package` works
* fix: resource naming for resources enum
* tests: sam package integration tests
  - with the AWS::Serverless::Function resource
* fix: port code base to remove python2 quirks
  - rebased on top of develop, which removes python2 support
* tests: add all supported resources for sam package integ tests
* feat: sam package supports --metadata
  - additional metadata click parameter type
  - added unit and integration tests for `--metadata`
* fix: stricter checking on the `--metadata` flag of `sam package`
  - don't allow a list or dict in the values portion of a metadata dict
* fix: missed integ test for `sam package`
  - added tests for the `AWS::ServerlessRepo::Application` Metadata field
* tests: regression test suite for `sam package`
  - compare the output template file from `sam package` and `aws cloudformation package`
* fix: better comments, exception handling
  - the template can now be specified as `-t`, `--template-file`, or `--template`; a future PR will standardize this across the codebase, and the current commit fixes it only in `package` space
  - exception handling on passing an unknown profile name
  - refactor catching of OSError within package_context.py
  - new exception class for package failures
* refactor: regression test suite - move regression checking logic to base class - address comments on exceptions, appveyor file, encoding * lint: linter fixes --- .pylintrc | 2 +- appveyor-windows.yml | 4 + appveyor.yml | 9 + requirements/base.txt | 1 + samcli/cli/context.py | 8 +- samcli/cli/types.py | 49 + samcli/commands/_utils/options.py | 14 +- samcli/commands/_utils/resources.py | 46 + samcli/commands/_utils/template.py | 27 +- samcli/commands/exceptions.py | 6 + samcli/commands/package/__init__.py | 63 +- samcli/commands/package/command.py | 144 +++ samcli/commands/package/exceptions.py | 74 ++ samcli/commands/package/package_context.py | 120 ++ samcli/lib/package/__init__.py | 0 samcli/lib/package/artifact_exporter.py | 625 ++++++++++ samcli/lib/package/s3_uploader.py | 210 ++++ tests/integration/package/__init__.py | 0 .../integration/package/package_integ_base.py | 76 ++ .../package/test_package_command.py | 331 +++++ .../integration/testdata/package/LICENSE.txt | 1 + tests/integration/testdata/package/README.md | 1 + .../package/aws-apigateway-restapi.yaml | 11 + .../aws-appsync-functionconfiguration.yaml | 11 + .../package/aws-appsync-graphqlschema.yaml | 10 + .../package/aws-appsync-resolver.yaml | 11 + .../package/aws-cloudformation-stack.yaml | 9 + ...s-elasticbeanstalk-applicationversion.yaml | 11 + .../testdata/package/aws-glue-job.yaml | 43 + .../testdata/package/aws-lambda-function.yaml | 16 + .../package/aws-lambda-layerversion.yaml | 14 + .../testdata/package/aws-serverless-api.yaml | 17 + .../package/aws-serverless-application.yaml | 9 + .../package/aws-serverless-function.yaml | 12 + .../package/aws-serverless-layerversion.yaml | 16 + .../aws-serverlessrepo-application.yaml | 16 + .../testdata/package/graphql.schema | 5 + .../testdata/package/input-mapping-template | 25 + .../integration/testdata/package/openapi.json | 16 + .../testdata/package/output-mapping-template | 21 + .../integration/testdata/package/swagger.yaml | 13 + tests/regression/__init__.py | 0 tests/regression/package/__init__.py | 0 .../package/regression_package_base.py | 97 ++ .../package/test_package_regression.py | 105 ++ tests/testing_utils.py | 53 + tests/unit/cli/test_types.py | 47 + tests/unit/commands/_utils/test_template.py | 8 +- tests/unit/commands/package/__init__.py | 0 tests/unit/commands/package/test_command.py | 55 + .../commands/package/test_package_context.py | 86 ++ tests/unit/commands/test_package.py | 20 - tests/unit/lib/package/__init__.py | 0 .../lib/package/test_artifact_exporter.py | 1100 +++++++++++++++++ tests/unit/lib/package/test_s3_uploader.py | 159 +++ 55 files changed, 3718 insertions(+), 109 deletions(-) create mode 100644 samcli/commands/_utils/resources.py create mode 100644 samcli/commands/package/command.py create mode 100644 samcli/commands/package/exceptions.py create mode 100644 samcli/commands/package/package_context.py create mode 100644 samcli/lib/package/__init__.py create mode 100644 samcli/lib/package/artifact_exporter.py create mode 100644 samcli/lib/package/s3_uploader.py create mode 100644 tests/integration/package/__init__.py create mode 100644 tests/integration/package/package_integ_base.py create mode 100644 tests/integration/package/test_package_command.py create mode 100644 tests/integration/testdata/package/LICENSE.txt create mode 100644 tests/integration/testdata/package/README.md create mode 100644 tests/integration/testdata/package/aws-apigateway-restapi.yaml create mode 100644 
tests/integration/testdata/package/aws-appsync-functionconfiguration.yaml create mode 100644 tests/integration/testdata/package/aws-appsync-graphqlschema.yaml create mode 100644 tests/integration/testdata/package/aws-appsync-resolver.yaml create mode 100644 tests/integration/testdata/package/aws-cloudformation-stack.yaml create mode 100644 tests/integration/testdata/package/aws-elasticbeanstalk-applicationversion.yaml create mode 100644 tests/integration/testdata/package/aws-glue-job.yaml create mode 100644 tests/integration/testdata/package/aws-lambda-function.yaml create mode 100644 tests/integration/testdata/package/aws-lambda-layerversion.yaml create mode 100644 tests/integration/testdata/package/aws-serverless-api.yaml create mode 100644 tests/integration/testdata/package/aws-serverless-application.yaml create mode 100644 tests/integration/testdata/package/aws-serverless-function.yaml create mode 100644 tests/integration/testdata/package/aws-serverless-layerversion.yaml create mode 100644 tests/integration/testdata/package/aws-serverlessrepo-application.yaml create mode 100644 tests/integration/testdata/package/graphql.schema create mode 100644 tests/integration/testdata/package/input-mapping-template create mode 100644 tests/integration/testdata/package/openapi.json create mode 100644 tests/integration/testdata/package/output-mapping-template create mode 100644 tests/integration/testdata/package/swagger.yaml create mode 100644 tests/regression/__init__.py create mode 100644 tests/regression/package/__init__.py create mode 100644 tests/regression/package/regression_package_base.py create mode 100644 tests/regression/package/test_package_regression.py create mode 100644 tests/unit/commands/package/__init__.py create mode 100644 tests/unit/commands/package/test_command.py create mode 100644 tests/unit/commands/package/test_package_context.py delete mode 100644 tests/unit/commands/test_package.py create mode 100644 tests/unit/lib/package/__init__.py create mode 100644 tests/unit/lib/package/test_artifact_exporter.py create mode 100644 tests/unit/lib/package/test_s3_uploader.py diff --git a/.pylintrc b/.pylintrc index 82e54ec0c3..767f748fba 100644 --- a/.pylintrc +++ b/.pylintrc @@ -221,7 +221,7 @@ notes=FIXME,XXX [SIMILARITIES] # Minimum lines number of a similarity. -min-similarity-lines=6 +min-similarity-lines=12 # Ignore comments when computing similarities. ignore-comments=yes diff --git a/appveyor-windows.yml b/appveyor-windows.yml index e4a64727ec..d7631f4053 100644 --- a/appveyor-windows.yml +++ b/appveyor-windows.yml @@ -49,6 +49,9 @@ install: # Upgrade setuptools, wheel and virtualenv - "python -m pip install --upgrade setuptools wheel virtualenv" + # Install AWS CLI Globally via pip3 + - "pip install awscli" + # Create new virtual environment with chosen python version and activate it - "python -m virtualenv venv" - "venv\\Scripts\\activate" @@ -72,6 +75,7 @@ test_script: - "venv\\Scripts\\activate" - "docker system prune -a -f" - "pytest -vv tests/integration" + - "pytest -vv tests/regression" # Uncomment for RDP # on_finish: diff --git a/appveyor.yml b/appveyor.yml index 03aa53763d..f464399ad2 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -37,6 +37,9 @@ for: - "echo %PATH%" - "python -m pip install --upgrade setuptools wheel virtualenv" + # Install AWS CLI Globally outside of a venv. 
+ - "pip install awscli" + # Create new virtual environment and activate it - "rm -rf venv" - "python -m virtualenv venv" @@ -90,6 +93,11 @@ for: - sh: "chmod +x /tmp/black" - sh: "/tmp/black --version" + # Install AWS CLI + - sh: "virtualenv aws_cli" + - sh: "./aws_cli/bin/python -m pip install awscli" + - sh: "PATH=$(echo $PWD'/aws_cli/bin'):$PATH" + build_script: - "python -c \"import sys; print(sys.executable)\"" - "pip install -e \".[dev]\"" @@ -103,6 +111,7 @@ for: # Runs only in Linux - sh: "pytest -vv tests/integration" + - sh: "pytest -vv -n 4 tests/regression" - sh: "/tmp/black --check setup.py tests samcli scripts" - sh: "python scripts/check-isolated-needs-update.py" diff --git a/requirements/base.txt b/requirements/base.txt index 0c4dac42ca..bd43830f76 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -2,6 +2,7 @@ chevron~=0.12 click~=7.0 Flask~=1.0.2 boto3~=1.9, >=1.9.56 +jmespath~=0.9.4 PyYAML~=5.1 cookiecutter~=1.6.0 aws-sam-translator==1.15.1 diff --git a/samcli/cli/context.py b/samcli/cli/context.py index f1d220598e..488c6a1ee0 100644 --- a/samcli/cli/context.py +++ b/samcli/cli/context.py @@ -5,8 +5,11 @@ import uuid import logging import boto3 +import botocore import click +from samcli.commands.exceptions import CredentialsError + class Context: """ @@ -139,4 +142,7 @@ def _refresh_session(self): the Boto3's session object are read-only. Therefore when Click parses new AWS session related properties (like region & profile), it will call this method to create a new session with latest values for these properties. """ - boto3.setup_default_session(region_name=self._aws_region, profile_name=self._aws_profile) + try: + boto3.setup_default_session(region_name=self._aws_region, profile_name=self._aws_profile) + except botocore.exceptions.ProfileNotFound as ex: + raise CredentialsError(str(ex)) diff --git a/samcli/cli/types.py b/samcli/cli/types.py index 1a147fa4dc..faacbacf70 100644 --- a/samcli/cli/types.py +++ b/samcli/cli/types.py @@ -3,6 +3,9 @@ """ import re +import json +from json import JSONDecodeError + import click @@ -64,3 +67,49 @@ def _unquote(value): value = value.strip('"') return value.replace("\\ ", " ").replace('\\"', '"') + + +class CfnMetadataType(click.ParamType): + """ + Custom Click options type to accept values for metadata parameters. + metadata parameters can be of the type KeyName1=string,KeyName2=string or {"string":"string"} + """ + + _EXAMPLE = 'KeyName1=string,KeyName2=string or {"string":"string"}' + + _pattern = r"([A-Za-z0-9\"]+)=([A-Za-z0-9\"]+)" + + # NOTE(TheSriram): name needs to be added to click.ParamType requires it. + name = "CfnMetadata" + + def convert(self, value, param, ctx): + result = {} + fail = False + if not value: + return result + try: + # Look to load the value into json if we can. + result = json.loads(value) + for val in result.values(): + if isinstance(val, (dict, list)): + # Need a non nested dictionary or a dictionary with non list values, + # If either is found, fail the conversion. + fail = True + except JSONDecodeError: + # if looking for a json format failed, look at if the specified value follows + # KeyName1=string,KeyName2=string format + groups = re.findall(self._pattern, value) + + if not groups: + fail = True + for group in groups: + key, value = group + # assign to result['KeyName1'] = string and so on. + result[key] = value + + if fail: + return self.fail( + "{} is not in valid format. 
It must look something like '{}'".format(value, self._EXAMPLE), param, ctx + ) + + return result diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index 0b3362d867..6537f6cabe 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -7,7 +7,7 @@ from functools import partial import click -from samcli.cli.types import CfnParameterOverridesType +from samcli.cli.types import CfnParameterOverridesType, CfnMetadataType _TEMPLATE_OPTION_DEFAULT_VALUE = "template.[yaml|yml]" @@ -122,3 +122,15 @@ def parameter_override_click_option(): def parameter_override_option(f): return parameter_override_click_option()(f) + + +def metadata_click_option(): + return click.option( + "--metadata", + type=CfnMetadataType(), + help="Optional. A map of metadata to attach to ALL the artifacts that are referenced in your template.", + ) + + +def metadata_override_option(f): + return metadata_click_option()(f) diff --git a/samcli/commands/_utils/resources.py b/samcli/commands/_utils/resources.py new file mode 100644 index 0000000000..221a19a664 --- /dev/null +++ b/samcli/commands/_utils/resources.py @@ -0,0 +1,46 @@ +""" +Enums for Resources and thier Location Properties, along with utlity functions +""" + +AWS_SERVERLESSREPO_APPLICATION = "AWS::ServerlessRepo::Application" +AWS_SERVERLESS_FUNCTION = "AWS::Serverless::Function" +AWS_SERVERLESS_API = "AWS::Serverless::Api" +AWS_APPSYNC_GRAPHQLSCHEMA = "AWS::AppSync::GraphQLSchema" +AWS_APPSYNC_RESOLVER = "AWS::AppSync::Resolver" +AWS_APPSYNC_FUNCTIONCONFIGURATION = "AWS::AppSync::FunctionConfiguration" +AWS_LAMBDA_FUNCTION = "AWS::Lambda::Function" +AWS_APIGATEWAY_RESTAPI = "AWS::ApiGateway::RestApi" +AWS_ELASTICBEANSTALK_APPLICATIONVERSION = "AWS::ElasticBeanstalk::ApplicationVersion" +AWS_CLOUDFORMATION_STACK = "AWS::CloudFormation::Stack" +AWS_SERVERLESS_APPLICATION = "AWS::Serverless::Application" +AWS_LAMBDA_LAYERVERSION = "AWS::Lambda::LayerVersion" +AWS_SERVERLESS_LAYERVERSION = "AWS::Serverless::LayerVersion" +AWS_GLUE_JOB = "AWS::Glue::Job" + +METADATA_WITH_LOCAL_PATHS = {AWS_SERVERLESSREPO_APPLICATION: ["LicenseUrl", "ReadmeUrl"]} + +RESOURCES_WITH_LOCAL_PATHS = { + AWS_SERVERLESS_FUNCTION: ["CodeUri"], + AWS_SERVERLESS_API: ["DefinitionUri"], + AWS_APPSYNC_GRAPHQLSCHEMA: ["DefinitionS3Location"], + AWS_APPSYNC_RESOLVER: ["RequestMappingTemplateS3Location", "ResponseMappingTemplateS3Location"], + AWS_APPSYNC_FUNCTIONCONFIGURATION: ["RequestMappingTemplateS3Location", "ResponseMappingTemplateS3Location"], + AWS_LAMBDA_FUNCTION: ["Code"], + AWS_APIGATEWAY_RESTAPI: ["BodyS3Location"], + AWS_ELASTICBEANSTALK_APPLICATIONVERSION: ["SourceBundle"], + AWS_CLOUDFORMATION_STACK: ["TemplateURL"], + AWS_SERVERLESS_APPLICATION: ["Location"], + AWS_LAMBDA_LAYERVERSION: ["Content"], + AWS_SERVERLESS_LAYERVERSION: ["ContentUri"], + AWS_GLUE_JOB: ["Command.ScriptLocation"], +} + + +def resources_generator(): + """ + Generator to yield set of resources and their locations that are supported for package operations + :return: + """ + for resource, locations in dict({**METADATA_WITH_LOCAL_PATHS, **RESOURCES_WITH_LOCAL_PATHS}).items(): + for location in locations: + yield resource, location diff --git a/samcli/commands/_utils/template.py b/samcli/commands/_utils/template.py index 370fac617d..164a90cbf2 100644 --- a/samcli/commands/_utils/template.py +++ b/samcli/commands/_utils/template.py @@ -7,24 +7,7 @@ import yaml from samcli.yamlhelper import yaml_parse, yaml_dump - - -_METADATA_WITH_LOCAL_PATHS = 
{"AWS::ServerlessRepo::Application": ["LicenseUrl", "ReadmeUrl"]} - -_RESOURCES_WITH_LOCAL_PATHS = { - "AWS::Serverless::Function": ["CodeUri"], - "AWS::Serverless::Api": ["DefinitionUri"], - "AWS::AppSync::GraphQLSchema": ["DefinitionS3Location"], - "AWS::AppSync::Resolver": ["RequestMappingTemplateS3Location", "ResponseMappingTemplateS3Location"], - "AWS::AppSync::FunctionConfiguration": ["RequestMappingTemplateS3Location", "ResponseMappingTemplateS3Location"], - "AWS::Lambda::Function": ["Code"], - "AWS::ApiGateway::RestApi": ["BodyS3Location"], - "AWS::ElasticBeanstalk::ApplicationVersion": ["SourceBundle"], - "AWS::CloudFormation::Stack": ["TemplateURL"], - "AWS::Serverless::Application": ["Location"], - "AWS::Lambda::LayerVersion": ["Content"], - "AWS::Serverless::LayerVersion": ["ContentUri"], -} +from samcli.commands._utils.resources import METADATA_WITH_LOCAL_PATHS, RESOURCES_WITH_LOCAL_PATHS def get_template_data(template_file): @@ -126,11 +109,11 @@ def _update_relative_paths(template_dict, original_root, new_root): for resource_type, properties in template_dict.get("Metadata", {}).items(): - if resource_type not in _METADATA_WITH_LOCAL_PATHS: + if resource_type not in METADATA_WITH_LOCAL_PATHS: # Unknown resource. Skipping continue - for path_prop_name in _METADATA_WITH_LOCAL_PATHS[resource_type]: + for path_prop_name in METADATA_WITH_LOCAL_PATHS[resource_type]: path = properties.get(path_prop_name) updated_path = _resolve_relative_to(path, original_root, new_root) @@ -143,11 +126,11 @@ def _update_relative_paths(template_dict, original_root, new_root): for _, resource in template_dict.get("Resources", {}).items(): resource_type = resource.get("Type") - if resource_type not in _RESOURCES_WITH_LOCAL_PATHS: + if resource_type not in RESOURCES_WITH_LOCAL_PATHS: # Unknown resource. Skipping continue - for path_prop_name in _RESOURCES_WITH_LOCAL_PATHS[resource_type]: + for path_prop_name in RESOURCES_WITH_LOCAL_PATHS[resource_type]: properties = resource.get("Properties", {}) path = properties.get(path_prop_name) diff --git a/samcli/commands/exceptions.py b/samcli/commands/exceptions.py index 1fb557df32..3912ab9424 100644 --- a/samcli/commands/exceptions.py +++ b/samcli/commands/exceptions.py @@ -12,3 +12,9 @@ class UserException(click.ClickException): """ exit_code = 1 + + +class CredentialsError(UserException): + """ + Exception class when credentials that have been passed are invalid. + """ diff --git a/samcli/commands/package/__init__.py b/samcli/commands/package/__init__.py index 03864917fa..6e45897bbb 100644 --- a/samcli/commands/package/__init__.py +++ b/samcli/commands/package/__init__.py @@ -1,63 +1,6 @@ """ -CLI command for "package" command +`sam package` command """ -from functools import partial -import click - -from samcli.cli.main import pass_context, common_options -from samcli.commands._utils.options import get_or_default_template_file_name, _TEMPLATE_OPTION_DEFAULT_VALUE -from samcli.lib.samlib.cloudformation_command import execute_command -from samcli.commands.exceptions import UserException -from samcli.lib.telemetry.metrics import track_command - - -SHORT_HELP = "Package an AWS SAM application. This is an alias for 'aws cloudformation package'." - - -HELP_TEXT = """The SAM package command creates a zip of your code and dependencies and uploads it to S3. The command -returns a copy of your template, replacing references to local artifacts with the S3 location where the command -uploaded the artifacts. - -\b -e.g. 
sam package --template-file template.yaml --output-template-file packaged.yaml ---s3-bucket REPLACE_THIS_WITH_YOUR_S3_BUCKET_NAME - -\b -This is an alias for aws cloudformation package. To learn about other parameters you can use, -run aws cloudformation package help. -""" - - -@click.command("package", short_help=SHORT_HELP, context_settings={"ignore_unknown_options": True}, help=HELP_TEXT) -@click.option( - "--template-file", - default=_TEMPLATE_OPTION_DEFAULT_VALUE, - type=click.Path(), - callback=partial(get_or_default_template_file_name, include_build=True), - show_default=False, - help="The path where your AWS SAM template is located", -) -@click.option( - "--s3-bucket", - required=True, - help="The name of the S3 bucket where this command uploads the artifacts that " "are referenced in your template.", -) -@click.argument("args", nargs=-1, type=click.UNPROCESSED) -@common_options -@pass_context -@track_command -def cli(ctx, args, template_file, s3_bucket): - - # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - - do_cli(args, template_file, s3_bucket) # pragma: no cover - - -def do_cli(args, template_file, s3_bucket): - args = args + ("--s3-bucket", s3_bucket) - - try: - execute_command("package", args, template_file) - except OSError as ex: - raise UserException(str(ex)) +# Expose the cli object here +from .command import cli # noqa diff --git a/samcli/commands/package/command.py b/samcli/commands/package/command.py new file mode 100644 index 0000000000..3966bd2460 --- /dev/null +++ b/samcli/commands/package/command.py @@ -0,0 +1,144 @@ +""" +CLI command for "package" command +""" +from functools import partial + +import click + +from samcli.cli.main import pass_context, common_options, aws_creds_options +from samcli.commands._utils.options import ( + metadata_override_option, + _TEMPLATE_OPTION_DEFAULT_VALUE, + get_or_default_template_file_name, +) +from samcli.commands._utils.resources import resources_generator +from samcli.lib.telemetry.metrics import track_command + +SHORT_HELP = "Package an AWS SAM application." + + +def resources_and_properties_help_string(): + """ + Total list of resources and their property locations that are supported for `sam package` + :return: str + """ + return "".join( + f"\nResource : {resource} | Location : {location}\n".format(resource=resource, location=location) + for resource, location in resources_generator() + ) + + +HELP_TEXT = ( + """The SAM package command creates a zip of your code and dependencies and uploads it to S3. The command +returns a copy of your template, replacing references to local artifacts with the S3 location where the command +uploaded the artifacts. + +The following resources and their property locations are supported. 
+""" + + resources_and_properties_help_string() +) + + +@click.command("package", short_help=SHORT_HELP, help=HELP_TEXT, context_settings=dict(max_content_width=120)) +# TODO(TheSriram): Move to template_common_option across aws-sam-cli +@click.option( + "--template", + "--template-file", + "-t", + default=_TEMPLATE_OPTION_DEFAULT_VALUE, + type=click.Path(), + envvar="SAM_TEMPLATE_FILE", + callback=partial(get_or_default_template_file_name, include_build=True), + show_default=True, + help="AWS SAM template file", +) +@click.option( + "--s3-bucket", + required=True, + help="The name of the S3 bucket where this command uploads the artifacts that are referenced in your template.", +) +@click.option( + "--s3-prefix", + required=False, + help="A prefix name that the command adds to the artifacts " + "name when it uploads them to the S3 bucket. The prefix name is a " + "path name (folder name) for the S3 bucket.", +) +@click.option( + "--kms-key-id", + required=False, + help="The ID of an AWS KMS key that the command uses to encrypt artifacts that are at rest in the S3 bucket.", +) +@click.option( + "--output-template-file", + required=False, + type=click.Path(), + help="The path to the file where the command " + "writes the output AWS CloudFormation template. If you don't specify a " + "path, the command writes the template to the standard output.", +) +@click.option( + "--use-json", + required=False, + is_flag=True, + help="Indicates whether to use JSON as the format for " + "the output AWS CloudFormation template. YAML is used by default.", +) +@click.option( + "--force-upload", + required=False, + is_flag=True, + help="Indicates whether to override existing files " + "in the S3 bucket. Specify this flag to upload artifacts even if they " + "match existing artifacts in the S3 bucket.", +) +@metadata_override_option +@common_options +@aws_creds_options +@pass_context +@track_command +def cli(ctx, template, s3_bucket, s3_prefix, kms_key_id, output_template_file, use_json, force_upload, metadata): + + # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing + + do_cli( + template, + s3_bucket, + s3_prefix, + kms_key_id, + output_template_file, + use_json, + force_upload, + metadata, + ctx.region, + ctx.profile, + ) # pragma: no cover + + +def do_cli( + template_file, + s3_bucket, + s3_prefix, + kms_key_id, + output_template_file, + use_json, + force_upload, + metadata, + region, + profile, +): + from samcli.commands.package.package_context import PackageContext + + with PackageContext( + template_file=template_file, + s3_bucket=s3_bucket, + s3_prefix=s3_prefix, + kms_key_id=kms_key_id, + output_template_file=output_template_file, + use_json=use_json, + force_upload=force_upload, + metadata=metadata, + region=region, + profile=profile, + ) as package_context: + package_context.run() diff --git a/samcli/commands/package/exceptions.py b/samcli/commands/package/exceptions.py new file mode 100644 index 0000000000..c0aa9bd7f4 --- /dev/null +++ b/samcli/commands/package/exceptions.py @@ -0,0 +1,74 @@ +""" +Exceptions that are raised by sam package +""" +from samcli.commands.exceptions import UserException + + +class InvalidLocalPathError(UserException): + def __init__(self, resource_id, property_name, local_path): + self.resource_id = resource_id + self.property_name = property_name + self.local_path = local_path + message_fmt = ( + "Parameter {property_name} of resource {resource_id} refers " + "to a file or folder that does not exist {local_path}" + ) + super(InvalidLocalPathError, self).__init__( + message=message_fmt.format( + resource_id=self.resource_id, property_name=self.property_name, local_path=self.local_path + ) + ) + + +class InvalidTemplateUrlParameterError(UserException): + def __init__(self, resource_id, property_name, template_path): + self.resource_id = resource_id + self.property_name = property_name + self.template_path = template_path + + message_fmt = ( + "{property_name} parameter of {resource_id} resource is invalid. " + "It must be a S3 URL or path to CloudFormation " + "template file. Actual: {template_path}" + ) + super(InvalidTemplateUrlParameterError, self).__init__( + message=message_fmt.format( + property_name=self.property_name, resource_id=self.resource_id, template_path=self.template_path + ) + ) + + +class ExportFailedError(UserException): + def __init__(self, resource_id, property_name, property_value, ex): + self.resource_id = resource_id + self.property_name = property_name + self.property_value = property_value + self.ex = ex + + message_fmt = ( + "Unable to upload artifact {property_value} referenced " + "by {property_name} parameter of {resource_id} resource." + "\n" + "{ex}" + ) + + super(ExportFailedError, self).__init__( + message=message_fmt.format( + property_value=self.property_value, + property_name=self.property_name, + resource_id=self.resource_id, + ex=self.ex, + ) + ) + + +class PackageFailedError(UserException): + def __init__(self, template_file, ex): + self.template_file = template_file + self.ex = ex + + message_fmt = "Failed to package template: {template_file}. \n {ex}" + + super(PackageFailedError, self).__init__( + message=message_fmt.format(template_file=self.template_file, ex=self.ex) + ) diff --git a/samcli/commands/package/package_context.py b/samcli/commands/package/package_context.py new file mode 100644 index 0000000000..27c9332ab6 --- /dev/null +++ b/samcli/commands/package/package_context.py @@ -0,0 +1,120 @@ +""" +Logic for uploading to s3 based on supplied template file and s3 bucket +""" + +# Copyright 2012-2015 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import os +import logging +import json + +import boto3 +from botocore.config import Config +import click + +from samcli.commands.package.exceptions import PackageFailedError +from samcli.lib.package.artifact_exporter import Template +from samcli.yamlhelper import yaml_dump +from samcli.lib.package.s3_uploader import S3Uploader + +LOG = logging.getLogger(__name__) + + +class PackageContext: + + MSG_PACKAGED_TEMPLATE_WRITTEN = ( + "Successfully packaged artifacts and wrote output template " + "to file {output_file_name}." + "\n" + "Execute the following command to deploy the packaged template" + "\n" + "sam deploy --template-file {output_file_path} " + "--stack-name " + "\n" + ) + + def __init__( + self, + template_file, + s3_bucket, + s3_prefix, + kms_key_id, + output_template_file, + use_json, + force_upload, + metadata, + region, + profile, + ): + self.template_file = template_file + self.s3_bucket = s3_bucket + self.s3_prefix = s3_prefix + self.kms_key_id = kms_key_id + self.output_template_file = output_template_file + self.use_json = use_json + self.force_upload = force_upload + self.metadata = metadata + self.region = region + self.profile = profile + self.s3_uploader = None + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def run(self): + + session = boto3.Session(profile_name=self.profile if self.profile else None) + s3_client = session.client( + "s3", config=Config(signature_version="s3v4", region_name=self.region if self.region else None) + ) + + self.s3_uploader = S3Uploader(s3_client, self.s3_bucket, self.s3_prefix, self.kms_key_id, self.force_upload) + # attach the given metadata to the artifacts to be uploaded + self.s3_uploader.artifact_metadata = self.metadata + + try: + exported_str = self._export(self.template_file, self.use_json) + + self.write_output(self.output_template_file, exported_str) + + if self.output_template_file: + msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format( + output_file_name=self.output_template_file, + output_file_path=os.path.abspath(self.output_template_file), + ) + click.echo(msg) + except OSError as ex: + raise PackageFailedError(template_file=self.template_file, ex=str(ex)) + + def _export(self, template_path, use_json): + template = Template(template_path, os.getcwd(), self.s3_uploader) + exported_template = template.export() + + if use_json: + exported_str = json.dumps(exported_template, indent=4, ensure_ascii=False) + else: + exported_str = yaml_dump(exported_template) + + return exported_str + + def write_output(self, output_file_name, data): + if output_file_name is None: + click.echo(data) + return + + with open(output_file_name, "w") as fp: + fp.write(data) diff --git a/samcli/lib/package/__init__.py b/samcli/lib/package/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py new file mode 100644 index 0000000000..8b11ea8b02 --- /dev/null +++ 
b/samcli/lib/package/artifact_exporter.py @@ -0,0 +1,625 @@ +""" +Logic for uploading to S3 per Cloudformation Specific Resource +""" +# pylint: disable=no-member + +# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import logging +import os +import tempfile +import zipfile +import contextlib +from contextlib import contextmanager +import uuid +from urllib.parse import urlparse, parse_qs +import shutil +from botocore.utils import set_value_from_jmespath +import jmespath + +from samcli.commands._utils.resources import ( + AWS_SERVERLESSREPO_APPLICATION, + AWS_SERVERLESS_FUNCTION, + AWS_SERVERLESS_API, + AWS_APPSYNC_GRAPHQLSCHEMA, + AWS_APPSYNC_RESOLVER, + AWS_APPSYNC_FUNCTIONCONFIGURATION, + AWS_LAMBDA_FUNCTION, + AWS_APIGATEWAY_RESTAPI, + AWS_ELASTICBEANSTALK_APPLICATIONVERSION, + AWS_CLOUDFORMATION_STACK, + AWS_SERVERLESS_APPLICATION, + AWS_LAMBDA_LAYERVERSION, + AWS_SERVERLESS_LAYERVERSION, + AWS_GLUE_JOB, +) + +from samcli.commands._utils.template import METADATA_WITH_LOCAL_PATHS, RESOURCES_WITH_LOCAL_PATHS +from samcli.commands.package import exceptions +from samcli.yamlhelper import yaml_dump, yaml_parse + + +LOG = logging.getLogger(__name__) + + +def is_path_value_valid(path): + return isinstance(path, str) + + +def make_abs_path(directory, path): + if is_path_value_valid(path) and not os.path.isabs(path): + return os.path.normpath(os.path.join(directory, path)) + return path + + +def is_s3_url(url): + try: + parse_s3_url(url) + return True + except ValueError: + return False + + +def is_local_folder(path): + return is_path_value_valid(path) and os.path.isdir(path) + + +def is_local_file(path): + return is_path_value_valid(path) and os.path.isfile(path) + + +def is_zip_file(path): + return is_path_value_valid(path) and zipfile.is_zipfile(path) + + +def parse_s3_url(url, bucket_name_property="Bucket", object_key_property="Key", version_property=None): + + if isinstance(url, str) and url.startswith("s3://"): + + parsed = urlparse(url) + query = parse_qs(parsed.query) + + if parsed.netloc and parsed.path: + result = dict() + result[bucket_name_property] = parsed.netloc + result[object_key_property] = parsed.path.lstrip("/") + + # If there is a query string that has a single versionId field, + # set the object version and return + if version_property is not None and "versionId" in query and len(query["versionId"]) == 1: + result[version_property] = query["versionId"][0] + + return result + + raise ValueError("URL given to the parse method is not a valid S3 url " "{0}".format(url)) + + +def upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, uploader): + """ + Upload local artifacts referenced by the property at given resource and + return S3 URL of the uploaded object. It is the responsibility of callers + to ensure property value is a valid string + + If path refers to a file, this method will upload the file. If path refers + to a folder, this method will zip the folder and upload the zip to S3. 
+ If path is omitted, this method will zip the current working folder and + upload. + + If path is already a path to S3 object, this method does nothing. + + :param resource_id: Id of the CloudFormation resource + :param resource_dict: Dictionary containing resource definition + :param property_name: Property name of CloudFormation resource where this + local path is present + :param parent_dir: Resolve all relative paths with respect to this + directory + :param uploader: Method to upload files to S3 + + :return: S3 URL of the uploaded object + :raise: ValueError if path is not a S3 URL or a local path + """ + + local_path = jmespath.search(property_name, resource_dict) + + if local_path is None: + # Build the root directory and upload to S3 + local_path = parent_dir + + if is_s3_url(local_path): + # A valid CloudFormation template will specify artifacts as S3 URLs. + # This check is supporting the case where your resource does not + # refer to local artifacts + # Nothing to do if property value is an S3 URL + LOG.debug("Property %s of %s is already a S3 URL", property_name, resource_id) + return local_path + + local_path = make_abs_path(parent_dir, local_path) + + # Or, pointing to a folder. Zip the folder and upload + if is_local_folder(local_path): + return zip_and_upload(local_path, uploader) + + # Path could be pointing to a file. Upload the file + if is_local_file(local_path): + return uploader.upload_with_dedup(local_path) + + raise exceptions.InvalidLocalPathError(resource_id=resource_id, property_name=property_name, local_path=local_path) + + +def zip_and_upload(local_path, uploader): + with zip_folder(local_path) as zip_file: + return uploader.upload_with_dedup(zip_file) + + +@contextmanager +def zip_folder(folder_path): + """ + Zip the entire folder and return a file to the zip. Use this inside + a "with" statement to cleanup the zipfile after it is used. + + :param folder_path: + :return: Name of the zipfile + """ + + filename = os.path.join(tempfile.gettempdir(), "data-" + uuid.uuid4().hex) + + zipfile_name = make_zip(filename, folder_path) + try: + yield zipfile_name + finally: + if os.path.exists(zipfile_name): + os.remove(zipfile_name) + + +def make_zip(file_name, source_root): + zipfile_name = "{0}.zip".format(file_name) + source_root = os.path.abspath(source_root) + with open(zipfile_name, "wb") as f: + zip_file = zipfile.ZipFile(f, "w", zipfile.ZIP_DEFLATED) + with contextlib.closing(zip_file) as zf: + for root, _, files in os.walk(source_root, followlinks=True): + for filename in files: + full_path = os.path.join(root, filename) + relative_path = os.path.relpath(full_path, source_root) + zf.write(full_path, relative_path) + + return zipfile_name + + +@contextmanager +def mktempfile(): + directory = tempfile.gettempdir() + filename = os.path.join(directory, uuid.uuid4().hex) + + try: + with open(filename, "w+") as handle: + yield handle + finally: + if os.path.exists(filename): + os.remove(filename) + + +def copy_to_temp_dir(filepath): + tmp_dir = tempfile.mkdtemp() + dst = os.path.join(tmp_dir, os.path.basename(filepath)) + shutil.copyfile(filepath, dst) + return tmp_dir + + +class Resource: + """ + Base class representing a CloudFormation resource that can be exported + """ + + RESOURCE_TYPE = None + PROPERTY_NAME = None + PACKAGE_NULL_PROPERTY = True + # Set this property to True in base class if you want the exporter to zip + # up the file before uploading This is useful for Lambda functions. 
+ FORCE_ZIP = False + + def __init__(self, uploader): + self.uploader = uploader + + def export(self, resource_id, resource_dict, parent_dir): + if resource_dict is None: + return + + property_value = jmespath.search(self.PROPERTY_NAME, resource_dict) + + if not property_value and not self.PACKAGE_NULL_PROPERTY: + return + + if isinstance(property_value, dict): + LOG.debug("Property %s of %s resource is not a URL", self.PROPERTY_NAME, resource_id) + return + + # If property is a file but not a zip file, place file in temp + # folder and send the temp folder to be zipped + temp_dir = None + if is_local_file(property_value) and not is_zip_file(property_value) and self.FORCE_ZIP: + temp_dir = copy_to_temp_dir(property_value) + set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, temp_dir) + + try: + self.do_export(resource_id, resource_dict, parent_dir) + + except Exception as ex: + LOG.debug("Unable to export", exc_info=ex) + raise exceptions.ExportFailedError( + resource_id=resource_id, property_name=self.PROPERTY_NAME, property_value=property_value, ex=ex + ) + finally: + if temp_dir: + shutil.rmtree(temp_dir) + + def do_export(self, resource_id, resource_dict, parent_dir): + """ + Default export action is to upload artifacts and set the property to + S3 URL of the uploaded object + """ + uploaded_url = upload_local_artifacts(resource_id, resource_dict, self.PROPERTY_NAME, parent_dir, self.uploader) + set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url) + + +class ResourceWithS3UrlDict(Resource): + """ + Represents CloudFormation resources that need the S3 URL to be specified as + an dict like {Bucket: "", Key: "", Version: ""} + """ + + BUCKET_NAME_PROPERTY = None + OBJECT_KEY_PROPERTY = None + VERSION_PROPERTY = None + + def do_export(self, resource_id, resource_dict, parent_dir): + """ + Upload to S3 and set property to an dict representing the S3 url + of the uploaded object + """ + + artifact_s3_url = upload_local_artifacts( + resource_id, resource_dict, self.PROPERTY_NAME, parent_dir, self.uploader + ) + + parsed_url = parse_s3_url( + artifact_s3_url, + bucket_name_property=self.BUCKET_NAME_PROPERTY, + object_key_property=self.OBJECT_KEY_PROPERTY, + version_property=self.VERSION_PROPERTY, + ) + set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, parsed_url) + + +class ServerlessFunctionResource(Resource): + RESOURCE_TYPE = AWS_SERVERLESS_FUNCTION + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + FORCE_ZIP = True + + +class ServerlessApiResource(Resource): + RESOURCE_TYPE = AWS_SERVERLESS_API + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + # Don't package the directory if DefinitionUri is omitted. + # Necessary to support DefinitionBody + PACKAGE_NULL_PROPERTY = False + + +class GraphQLSchemaResource(Resource): + RESOURCE_TYPE = AWS_APPSYNC_GRAPHQLSCHEMA + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + # Don't package the directory if DefinitionS3Location is omitted. + # Necessary to support Definition + PACKAGE_NULL_PROPERTY = False + + +class AppSyncResolverRequestTemplateResource(Resource): + RESOURCE_TYPE = AWS_APPSYNC_RESOLVER + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + # Don't package the directory if RequestMappingTemplateS3Location is omitted. 
+ # Necessary to support RequestMappingTemplate + PACKAGE_NULL_PROPERTY = False + + +class AppSyncResolverResponseTemplateResource(Resource): + RESOURCE_TYPE = AWS_APPSYNC_RESOLVER + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][1] + # Don't package the directory if ResponseMappingTemplateS3Location is omitted. + # Necessary to support ResponseMappingTemplate + PACKAGE_NULL_PROPERTY = False + + +class AppSyncFunctionConfigurationRequestTemplateResource(Resource): + RESOURCE_TYPE = AWS_APPSYNC_FUNCTIONCONFIGURATION + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + # Don't package the directory if RequestMappingTemplateS3Location is omitted. + # Necessary to support RequestMappingTemplate + PACKAGE_NULL_PROPERTY = False + + +class AppSyncFunctionConfigurationResponseTemplateResource(Resource): + RESOURCE_TYPE = AWS_APPSYNC_FUNCTIONCONFIGURATION + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][1] + # Don't package the directory if ResponseMappingTemplateS3Location is omitted. + # Necessary to support ResponseMappingTemplate + PACKAGE_NULL_PROPERTY = False + + +class LambdaFunctionResource(ResourceWithS3UrlDict): + RESOURCE_TYPE = AWS_LAMBDA_FUNCTION + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + BUCKET_NAME_PROPERTY = "S3Bucket" + OBJECT_KEY_PROPERTY = "S3Key" + VERSION_PROPERTY = "S3ObjectVersion" + FORCE_ZIP = True + + +class ApiGatewayRestApiResource(ResourceWithS3UrlDict): + RESOURCE_TYPE = AWS_APIGATEWAY_RESTAPI + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + PACKAGE_NULL_PROPERTY = False + BUCKET_NAME_PROPERTY = "Bucket" + OBJECT_KEY_PROPERTY = "Key" + VERSION_PROPERTY = "Version" + + +class ElasticBeanstalkApplicationVersion(ResourceWithS3UrlDict): + RESOURCE_TYPE = AWS_ELASTICBEANSTALK_APPLICATIONVERSION + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + BUCKET_NAME_PROPERTY = "S3Bucket" + OBJECT_KEY_PROPERTY = "S3Key" + VERSION_PROPERTY = None + + +class LambdaLayerVersionResource(ResourceWithS3UrlDict): + RESOURCE_TYPE = AWS_LAMBDA_LAYERVERSION + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + BUCKET_NAME_PROPERTY = "S3Bucket" + OBJECT_KEY_PROPERTY = "S3Key" + VERSION_PROPERTY = "S3ObjectVersion" + FORCE_ZIP = True + + +class ServerlessLayerVersionResource(Resource): + RESOURCE_TYPE = AWS_SERVERLESS_LAYERVERSION + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + FORCE_ZIP = True + + +class ServerlessRepoApplicationLicense(Resource): + RESOURCE_TYPE = AWS_SERVERLESSREPO_APPLICATION + PROPERTY_NAME = METADATA_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + PACKAGE_NULL_PROPERTY = False + + +class ServerlessRepoApplicationReadme(Resource): + RESOURCE_TYPE = AWS_SERVERLESSREPO_APPLICATION + PROPERTY_NAME = METADATA_WITH_LOCAL_PATHS[RESOURCE_TYPE][1] + PACKAGE_NULL_PROPERTY = False + + +class CloudFormationStackResource(Resource): + """ + Represents CloudFormation::Stack resource that can refer to a nested + stack template via TemplateURL property. 
+ """ + + RESOURCE_TYPE = AWS_CLOUDFORMATION_STACK + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0] + + def do_export(self, resource_id, resource_dict, parent_dir): + """ + If the nested stack template is valid, this method will + export on the nested template, upload the exported template to S3 + and set property to URL of the uploaded S3 template + """ + + template_path = resource_dict.get(self.PROPERTY_NAME, None) + + if ( + template_path is None + or is_s3_url(template_path) + or template_path.startswith(self.uploader.s3.meta.endpoint_url) + or template_path.startswith("https://s3.amazonaws.com/") + ): + # Nothing to do + return + + abs_template_path = make_abs_path(parent_dir, template_path) + if not is_local_file(abs_template_path): + raise exceptions.InvalidTemplateUrlParameterError( + property_name=self.PROPERTY_NAME, resource_id=resource_id, template_path=abs_template_path + ) + + exported_template_dict = Template(template_path, parent_dir, self.uploader).export() + + exported_template_str = yaml_dump(exported_template_dict) + + with mktempfile() as temporary_file: + temporary_file.write(exported_template_str) + temporary_file.flush() + + url = self.uploader.upload_with_dedup(temporary_file.name, "template") + + # TemplateUrl property requires S3 URL to be in path-style format + parts = parse_s3_url(url, version_property="Version") + s3_path_url = self.uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) + set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, s3_path_url) + + +class ServerlessApplicationResource(CloudFormationStackResource): + """ + Represents Serverless::Application resource that can refer to a nested + app template via Location property. + """ + + RESOURCE_TYPE = AWS_SERVERLESS_APPLICATION + PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[AWS_SERVERLESS_APPLICATION][0] + + +class GlueJobCommandScriptLocationResource(Resource): + """ + Represents Glue::Job resource. + """ + + RESOURCE_TYPE = AWS_GLUE_JOB + # Note the PROPERTY_NAME includes a '.' implying it's nested. 
+    PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[AWS_GLUE_JOB][0]
+
+
+RESOURCES_EXPORT_LIST = [
+    ServerlessFunctionResource,
+    ServerlessApiResource,
+    GraphQLSchemaResource,
+    AppSyncResolverRequestTemplateResource,
+    AppSyncResolverResponseTemplateResource,
+    AppSyncFunctionConfigurationRequestTemplateResource,
+    AppSyncFunctionConfigurationResponseTemplateResource,
+    ApiGatewayRestApiResource,
+    LambdaFunctionResource,
+    ElasticBeanstalkApplicationVersion,
+    CloudFormationStackResource,
+    ServerlessApplicationResource,
+    ServerlessLayerVersionResource,
+    LambdaLayerVersionResource,
+    GlueJobCommandScriptLocationResource,
+]
+
+METADATA_EXPORT_LIST = [ServerlessRepoApplicationReadme, ServerlessRepoApplicationLicense]
+
+
+def include_transform_export_handler(template_dict, uploader, parent_dir):
+    if template_dict.get("Name", None) != "AWS::Include":
+        return template_dict
+
+    include_location = template_dict.get("Parameters", {}).get("Location", None)
+    if not include_location or not is_path_value_valid(include_location) or is_s3_url(include_location):
+        # `include_location` is either empty, not a string, or already an S3 URI
+        return template_dict
+
+    # We are confident at this point that `include_location` is a string containing a local path
+    abs_include_location = os.path.join(parent_dir, include_location)
+    if is_local_file(abs_include_location):
+        template_dict["Parameters"]["Location"] = uploader.upload_with_dedup(abs_include_location)
+    else:
+        raise exceptions.InvalidLocalPathError(
+            resource_id="AWS::Include", property_name="Location", local_path=abs_include_location
+        )
+
+    return template_dict
+
+
+GLOBAL_EXPORT_DICT = {"Fn::Transform": include_transform_export_handler}
+
+
+class Template:
+    """
+    Class to export a CloudFormation template
+    """
+
+    def __init__(
+        self,
+        template_path,
+        parent_dir,
+        uploader,
+        resources_to_export=frozenset(RESOURCES_EXPORT_LIST),
+        metadata_to_export=frozenset(METADATA_EXPORT_LIST),
+    ):
+        """
+        Reads the template and makes it ready for export
+        """
+
+        if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)):
+            raise ValueError("parent_dir parameter must be an absolute path to a folder: {0}".format(parent_dir))
+
+        abs_template_path = make_abs_path(parent_dir, template_path)
+        template_dir = os.path.dirname(abs_template_path)
+
+        with open(abs_template_path, "r") as handle:
+            template_str = handle.read()
+
+        self.template_dict = yaml_parse(template_str)
+        self.template_dir = template_dir
+        self.resources_to_export = resources_to_export
+        self.metadata_to_export = metadata_to_export
+        self.uploader = uploader
+
+    def export_global_artifacts(self, template_dict):
+        """
+        Template parameters such as AWS::Include transforms are not specific
+        to any resource type but may contain artifacts that should be
+        exported. Here we iterate through the template dict and export
+        parameters with a handler defined in GLOBAL_EXPORT_DICT.
+        """
+        for key, val in template_dict.items():
+            if key in GLOBAL_EXPORT_DICT:
+                template_dict[key] = GLOBAL_EXPORT_DICT[key](val, self.uploader, self.template_dir)
+            elif isinstance(val, dict):
+                self.export_global_artifacts(val)
+            elif isinstance(val, list):
+                for item in val:
+                    if isinstance(item, dict):
+                        self.export_global_artifacts(item)
+        return template_dict
+
+    def export_metadata(self, template_dict):
+        """
+        Exports the local artifacts referenced by the metadata section in
+        the given template to an S3 bucket.
+
+        :return: The template with references to artifacts that have been
+            exported to S3.
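+
+        A minimal sketch of the shape this walks (assumed, mirroring the
+        test data added in this patch):
+
+            Metadata:
+              AWS::ServerlessRepo::Application:
+                ReadmeUrl: ./README.md   # rewritten to an S3 URL on export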
+ """ + if "Metadata" not in template_dict: + return template_dict + + for metadata_type, metadata_dict in template_dict["Metadata"].items(): + for exporter_class in self.metadata_to_export: + if exporter_class.RESOURCE_TYPE != metadata_type: + continue + + exporter = exporter_class(self.uploader) + exporter.export(metadata_type, metadata_dict, self.template_dir) + + return template_dict + + def export(self): + """ + Exports the local artifacts referenced by the given template to an + s3 bucket. + + :return: The template with references to artifacts that have been + exported to s3. + """ + self.template_dict = self.export_metadata(self.template_dict) + + if "Resources" not in self.template_dict: + return self.template_dict + + self.template_dict = self.export_global_artifacts(self.template_dict) + + for resource_id, resource in self.template_dict["Resources"].items(): + + resource_type = resource.get("Type", None) + resource_dict = resource.get("Properties", None) + + for exporter_class in self.resources_to_export: + if exporter_class.RESOURCE_TYPE != resource_type: + continue + + # Export code resources + exporter = exporter_class(self.uploader) + exporter.export(resource_id, resource_dict, self.template_dir) + + return self.template_dict diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py new file mode 100644 index 0000000000..8b55e24a39 --- /dev/null +++ b/samcli/lib/package/s3_uploader.py @@ -0,0 +1,210 @@ +""" +Client for uploading packaged artifacts to s3 +""" + +# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import hashlib +import logging +import threading +import os +import sys +from collections import abc + +import botocore +import botocore.exceptions + +from boto3.s3 import transfer + + +LOG = logging.getLogger(__name__) + + +class NoSuchBucketError(Exception): + def __init__(self, **kwargs): + msg = self.fmt.format(**kwargs) + Exception.__init__(self, msg) + self.kwargs = kwargs + + fmt = "S3 Bucket does not exist. " "Execute the command to create a new bucket" "\n" "aws s3 mb s3://{bucket_name}" + + +class S3Uploader: + """ + Class to upload objects to S3 bucket that use versioning. If bucket + does not already use versioning, this class will turn on versioning. + """ + + @property + def artifact_metadata(self): + """ + Metadata to attach to the object(s) uploaded by the uploader. 
+ """ + return self._artifact_metadata + + @artifact_metadata.setter + def artifact_metadata(self, val): + if val is not None and not isinstance(val, abc.Mapping): + raise TypeError("Artifact metadata should be in dict type") + self._artifact_metadata = val + + def __init__(self, s3_client, bucket_name, prefix=None, kms_key_id=None, force_upload=False): + self.s3 = s3_client + self.bucket_name = bucket_name + self.prefix = prefix + self.kms_key_id = kms_key_id or None + self.force_upload = force_upload + self.transfer_manager = transfer.create_transfer_manager(self.s3, transfer.TransferConfig()) + + self._artifact_metadata = None + + def upload(self, file_name, remote_path): + """ + Uploads given file to S3 + :param file_name: Path to the file that will be uploaded + :param remote_path: be uploaded + :return: VersionId of the latest upload + """ + + if self.prefix: + remote_path = "{0}/{1}".format(self.prefix, remote_path) + + # Check if a file with same data exists + if not self.force_upload and self.file_exists(remote_path): + LOG.debug("File with same data is already exists at %s. " "Skipping upload", remote_path) + return self.make_url(remote_path) + + try: + + # Default to regular server-side encryption unless customer has + # specified their own KMS keys + additional_args = {"ServerSideEncryption": "AES256"} + + if self.kms_key_id: + additional_args["ServerSideEncryption"] = "aws:kms" + additional_args["SSEKMSKeyId"] = self.kms_key_id + + if self.artifact_metadata: + additional_args["Metadata"] = self.artifact_metadata + + print_progress_callback = ProgressPercentage(file_name, remote_path) + future = self.transfer_manager.upload( + file_name, self.bucket_name, remote_path, additional_args, [print_progress_callback] + ) + future.result() + + return self.make_url(remote_path) + + except botocore.exceptions.ClientError as ex: + error_code = ex.response["Error"]["Code"] + if error_code == "NoSuchBucket": + raise NoSuchBucketError(bucket_name=self.bucket_name) + raise ex + + def upload_with_dedup(self, file_name, extension=None): + """ + Makes and returns name of the S3 object based on the file's MD5 sum + + :param file_name: file to upload + :param extension: String of file extension to append to the object + :return: S3 URL of the uploaded object + """ + + # This construction of remote_path is critical to preventing duplicate + # uploads of same object. Uploader will check if the file exists in S3 + # and re-upload only if necessary. So the template points to same file + # in multiple places, this will upload only once + + filemd5 = self.file_checksum(file_name) + remote_path = filemd5 + if extension: + remote_path = remote_path + "." + extension + + return self.upload(file_name, remote_path) + + def file_exists(self, remote_path): + """ + Check if the file we are trying to upload already exists in S3 + + :param remote_path: + :return: True, if file exists. False, otherwise + """ + + try: + # Find the object that matches this ETag + self.s3.head_object(Bucket=self.bucket_name, Key=remote_path) + return True + except botocore.exceptions.ClientError: + # Either File does not exist or we are unable to get + # this information. 
+            return False
+
+    def make_url(self, obj_path):
+        return "s3://{0}/{1}".format(self.bucket_name, obj_path)
+
+    def file_checksum(self, file_name):
+
+        with open(file_name, "rb") as file_handle:
+            md5 = hashlib.md5()
+            # Read file in chunks of 4096 bytes
+            block_size = 4096
+
+            # Save current cursor position and reset cursor to start of file
+            curpos = file_handle.tell()
+            file_handle.seek(0)
+
+            buf = file_handle.read(block_size)
+            while buf:
+                md5.update(buf)
+                buf = file_handle.read(block_size)
+
+            # Restore file cursor's position
+            file_handle.seek(curpos)
+
+            return md5.hexdigest()
+
+    def to_path_style_s3_url(self, key, version=None):
+        """
+        This link describes the format of path-style URLs:
+        http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingBucket.html#access-bucket-intro
+        """
+        base = self.s3.meta.endpoint_url
+        result = "{0}/{1}/{2}".format(base, self.bucket_name, key)
+        if version:
+            result = "{0}?versionId={1}".format(result, version)
+
+        return result
+
+
+class ProgressPercentage:
+    # This class was copied directly from the S3Transfer docs
+
+    def __init__(self, filename, remote_path):
+        self._filename = filename
+        self._remote_path = remote_path
+        self._size = float(os.path.getsize(filename))
+        self._seen_so_far = 0
+        self._lock = threading.Lock()
+
+    def on_progress(self, bytes_transferred, **kwargs):
+
+        # To simplify, we'll assume this is hooked up
+        # to a single filename.
+        with self._lock:
+            self._seen_so_far += bytes_transferred
+            percentage = (self._seen_so_far / self._size) * 100
+            sys.stderr.write(
+                "\rUploading to %s %s / %s (%.2f%%)" % (self._remote_path, self._seen_so_far, self._size, percentage)
+            )
+            sys.stderr.flush()
diff --git a/tests/integration/package/__init__.py b/tests/integration/package/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integration/package/package_integ_base.py b/tests/integration/package/package_integ_base.py
new file mode 100644
index 0000000000..35d8ff47c0
--- /dev/null
+++ b/tests/integration/package/package_integ_base.py
@@ -0,0 +1,76 @@
+import os
+import uuid
+import json
+import tempfile
+import time
+from pathlib import Path
+from unittest import TestCase
+
+import boto3
+
+
+class PackageIntegBase(TestCase):
+    @classmethod
+    def setUpClass(cls):
+        cls.region_name = os.environ.get("AWS_DEFAULT_REGION")
+        cls.bucket_name = str(uuid.uuid4())
+        cls.test_data_path = Path(__file__).resolve().parents[1].joinpath("testdata", "package")
+
+        # Create S3 bucket
+        s3 = boto3.resource("s3")
+        # Use a pre-created KMS Key
+        cls.kms_key = os.environ.get("AWS_KMS_KEY")
+        cls.s3_bucket = s3.Bucket(cls.bucket_name)
+        cls.s3_bucket.create()
+
+        # Give the bucket creation 3 seconds to complete
+        time.sleep(3)
+
+    @classmethod
+    def tearDownClass(cls):
+        cls.s3_bucket.objects.all().delete()
+        cls.s3_bucket.delete()
+
+    def base_command(self):
+        command = "sam"
+        if os.getenv("SAM_CLI_DEV"):
+            command = "samdev"
+
+        return command
+
+    def get_command_list(
+        self,
+        s3_bucket=None,
+        template=None,
+        template_file=None,
+        s3_prefix=None,
+        output_template_file=None,
+        use_json=False,
+        force_upload=False,
+        kms_key_id=None,
+        metadata=None,
+    ):
+        command_list = [self.base_command(), "package"]
+
+        if s3_bucket:
+            command_list = command_list + ["--s3-bucket", str(s3_bucket)]
+        if template:
+            command_list = command_list + ["--template", str(template)]
+        if template_file:
+            command_list = command_list + ["--template-file", str(template_file)]
+
+        if s3_prefix:
+            command_list = command_list + ["--s3-prefix", str(s3_prefix)]
+
+        if output_template_file:
+            command_list = command_list + ["--output-template-file", str(output_template_file)]
+        if kms_key_id:
+            command_list = command_list + ["--kms-key-id", str(kms_key_id)]
+        if use_json:
+            command_list = command_list + ["--use-json"]
+        if force_upload:
+            command_list = command_list + ["--force-upload"]
+        if metadata:
+            command_list = command_list + ["--metadata", json.dumps(metadata)]
+
+        return command_list
diff --git a/tests/integration/package/test_package_command.py b/tests/integration/package/test_package_command.py
new file mode 100644
index 0000000000..3d3bb60dfc
--- /dev/null
+++ b/tests/integration/package/test_package_command.py
@@ -0,0 +1,331 @@
+from subprocess import Popen, PIPE
+import tempfile
+
+from unittest import skipIf
+from parameterized import parameterized
+
+from .package_integ_base import PackageIntegBase
+from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI
+
+# Package tests require credentials, and CI/CD will only add credentials to the env if the PR is from the same repo.
+# The condition below therefore skips package tests on CI/CD when the branch is not master.
+SKIP_PACKAGE_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI
+
+
+@skipIf(SKIP_PACKAGE_TESTS, "Skip package tests in CI/CD only")
+class TestPackage(PackageIntegBase):
+    def setUp(self):
+        super(TestPackage, self).setUp()
+
+    def tearDown(self):
+        super(TestPackage, self).tearDown()
+
+    @parameterized.expand(["aws-serverless-function.yaml"])
+    def test_package_template_flag(self, template_file):
+        template_path = self.test_data_path.joinpath(template_file)
+        command_list = self.get_command_list(s3_bucket=self.s3_bucket.name, template=template_path)
+
+        process = Popen(command_list, stdout=PIPE)
+        process.wait()
+        process_stdout = b"".join(process.stdout.readlines()).strip()
+
+        self.assertIn("{bucket_name}".format(bucket_name=self.s3_bucket.name), process_stdout.decode("utf-8"))
+
+    @parameterized.expand(
+        [
+            "aws-serverless-function.yaml",
+            "aws-serverless-api.yaml",
+            "aws-appsync-graphqlschema.yaml",
+            "aws-appsync-resolver.yaml",
+            "aws-appsync-functionconfiguration.yaml",
+            "aws-lambda-function.yaml",
+            "aws-apigateway-restapi.yaml",
+            "aws-elasticbeanstalk-applicationversion.yaml",
+            "aws-cloudformation-stack.yaml",
+            "aws-serverless-application.yaml",
+            "aws-lambda-layerversion.yaml",
+            "aws-serverless-layerversion.yaml",
+            "aws-glue-job.yaml",
+            "aws-serverlessrepo-application.yaml",
+        ]
+    )
+    def test_package_barebones(self, template_file):
+        template_path = self.test_data_path.joinpath(template_file)
+        command_list = self.get_command_list(s3_bucket=self.s3_bucket.name, template_file=template_path)
+
+        process = Popen(command_list, stdout=PIPE)
+        process.wait()
+        process_stdout = b"".join(process.stdout.readlines()).strip()
+
+        self.assertIn("{bucket_name}".format(bucket_name=self.s3_bucket.name), process_stdout.decode("utf-8"))
+
+    def test_package_without_required_args(self):
+        command_list = self.get_command_list()
+
+        process = Popen(command_list, stdout=PIPE)
+        process.wait()
+        self.assertNotEqual(process.returncode, 0)
+
+    @parameterized.expand(
+        [
+            "aws-serverless-function.yaml",
+            "aws-serverless-api.yaml",
+            "aws-appsync-graphqlschema.yaml",
+            "aws-appsync-resolver.yaml",
+            "aws-appsync-functionconfiguration.yaml",
+            "aws-lambda-function.yaml",
+            "aws-apigateway-restapi.yaml",
+            "aws-elasticbeanstalk-applicationversion.yaml",
+            "aws-cloudformation-stack.yaml",
+            "aws-serverless-application.yaml",
+
"aws-lambda-layerversion.yaml", + "aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-serverlessrepo-application.yaml", + ] + ) + def test_package_with_prefix(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + s3_prefix = "integ_test_prefix" + command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, template_file=template_path, s3_prefix=s3_prefix + ) + + process = Popen(command_list, stdout=PIPE) + process.wait() + process_stdout = b"".join(process.stdout.readlines()).strip() + + self.assertIn("{bucket_name}".format(bucket_name=self.s3_bucket.name), process_stdout.decode("utf-8")) + + self.assertIn("{s3_prefix}".format(s3_prefix=s3_prefix), process_stdout.decode("utf-8")) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + "aws-serverless-api.yaml", + "aws-appsync-graphqlschema.yaml", + "aws-appsync-resolver.yaml", + "aws-appsync-functionconfiguration.yaml", + "aws-lambda-function.yaml", + "aws-apigateway-restapi.yaml", + "aws-elasticbeanstalk-applicationversion.yaml", + "aws-cloudformation-stack.yaml", + "aws-serverless-application.yaml", + "aws-lambda-layerversion.yaml", + "aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-serverlessrepo-application.yaml", + ] + ) + def test_package_with_output_template_file(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + s3_prefix = "integ_test_prefix" + + with tempfile.NamedTemporaryFile(delete=False) as output_template: + + command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, + template_file=template_path, + s3_prefix=s3_prefix, + output_template_file=output_template.name, + ) + + process = Popen(command_list, stdout=PIPE) + process.wait() + process_stdout = b"".join(process.stdout.readlines()).strip() + + self.assertIn( + bytes( + "Successfully packaged artifacts and wrote output template to file {output_template_file}".format( + output_template_file=str(output_template.name) + ), + encoding="utf-8", + ), + process_stdout, + ) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + "aws-serverless-api.yaml", + "aws-appsync-graphqlschema.yaml", + "aws-appsync-resolver.yaml", + "aws-appsync-functionconfiguration.yaml", + "aws-lambda-function.yaml", + "aws-apigateway-restapi.yaml", + "aws-elasticbeanstalk-applicationversion.yaml", + "aws-cloudformation-stack.yaml", + "aws-serverless-application.yaml", + "aws-lambda-layerversion.yaml", + "aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-serverlessrepo-application.yaml", + ] + ) + def test_package_with_json(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + s3_prefix = "integ_test_prefix" + + with tempfile.NamedTemporaryFile(delete=False) as output_template: + + command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, + template_file=template_path, + s3_prefix=s3_prefix, + output_template_file=output_template.name, + use_json=True, + ) + + process = Popen(command_list, stdout=PIPE) + process.wait() + process_stdout = b"".join(process.stdout.readlines()).strip() + + self.assertIn( + bytes( + "Successfully packaged artifacts and wrote output template to file {output_template_file}".format( + output_template_file=str(output_template.name) + ), + encoding="utf-8", + ), + process_stdout, + ) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + "aws-serverless-api.yaml", + "aws-appsync-graphqlschema.yaml", + "aws-appsync-resolver.yaml", + 
"aws-appsync-functionconfiguration.yaml", + "aws-lambda-function.yaml", + "aws-apigateway-restapi.yaml", + "aws-elasticbeanstalk-applicationversion.yaml", + "aws-cloudformation-stack.yaml", + "aws-serverless-application.yaml", + "aws-lambda-layerversion.yaml", + "aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-serverlessrepo-application.yaml", + ] + ) + def test_package_with_force_upload(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + s3_prefix = "integ_test_prefix" + + with tempfile.NamedTemporaryFile(delete=False) as output_template: + # Upload twice and see the string to have packaged artifacts both times. + for _ in range(2): + + command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, + template_file=template_path, + s3_prefix=s3_prefix, + output_template_file=output_template.name, + force_upload=True, + ) + + process = Popen(command_list, stdout=PIPE) + process.wait() + process_stdout = b"".join(process.stdout.readlines()).strip() + + self.assertIn( + bytes( + "Successfully packaged artifacts and wrote output template to file {output_template_file}".format( + output_template_file=str(output_template.name) + ), + encoding="utf-8", + ), + process_stdout, + ) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + "aws-serverless-api.yaml", + "aws-appsync-graphqlschema.yaml", + "aws-appsync-resolver.yaml", + "aws-appsync-functionconfiguration.yaml", + "aws-lambda-function.yaml", + "aws-apigateway-restapi.yaml", + "aws-elasticbeanstalk-applicationversion.yaml", + "aws-cloudformation-stack.yaml", + "aws-serverless-application.yaml", + "aws-lambda-layerversion.yaml", + "aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-serverlessrepo-application.yaml", + ] + ) + def test_package_with_kms_key(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + s3_prefix = "integ_test_prefix" + + with tempfile.NamedTemporaryFile(delete=False) as output_template: + command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, + template_file=template_path, + s3_prefix=s3_prefix, + output_template_file=output_template.name, + force_upload=True, + kms_key_id=self.kms_key, + ) + + process = Popen(command_list, stdout=PIPE) + process.wait() + process_stdout = b"".join(process.stdout.readlines()).strip() + + self.assertIn( + bytes( + "Successfully packaged artifacts and wrote output template to file {output_template_file}".format( + output_template_file=str(output_template.name) + ), + encoding="utf-8", + ), + process_stdout, + ) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + "aws-serverless-api.yaml", + "aws-appsync-graphqlschema.yaml", + "aws-appsync-resolver.yaml", + "aws-appsync-functionconfiguration.yaml", + "aws-lambda-function.yaml", + "aws-apigateway-restapi.yaml", + "aws-elasticbeanstalk-applicationversion.yaml", + "aws-cloudformation-stack.yaml", + "aws-serverless-application.yaml", + "aws-lambda-layerversion.yaml", + "aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-serverlessrepo-application.yaml", + ] + ) + def test_package_with_metadata(self, template_file): + template_path = self.test_data_path.joinpath(template_file) + s3_prefix = "integ_test_prefix" + + with tempfile.NamedTemporaryFile(delete=False) as output_template: + command_list = self.get_command_list( + s3_bucket=self.s3_bucket.name, + template_file=template_path, + s3_prefix=s3_prefix, + output_template_file=output_template.name, + force_upload=True, + 
metadata={"integ": "yes"}, + ) + + process = Popen(command_list, stdout=PIPE) + process.wait() + process_stdout = b"".join(process.stdout.readlines()).strip() + + self.assertIn( + bytes( + "Successfully packaged artifacts and wrote output template to file {output_template_file}".format( + output_template_file=str(output_template.name) + ), + encoding="utf-8", + ), + process_stdout, + ) diff --git a/tests/integration/testdata/package/LICENSE.txt b/tests/integration/testdata/package/LICENSE.txt new file mode 100644 index 0000000000..806dfcbabf --- /dev/null +++ b/tests/integration/testdata/package/LICENSE.txt @@ -0,0 +1 @@ +Sample License \ No newline at end of file diff --git a/tests/integration/testdata/package/README.md b/tests/integration/testdata/package/README.md new file mode 100644 index 0000000000..b8ea75275e --- /dev/null +++ b/tests/integration/testdata/package/README.md @@ -0,0 +1 @@ +Sample Readme \ No newline at end of file diff --git a/tests/integration/testdata/package/aws-apigateway-restapi.yaml b/tests/integration/testdata/package/aws-apigateway-restapi.yaml new file mode 100644 index 0000000000..6dd4ef38ef --- /dev/null +++ b/tests/integration/testdata/package/aws-apigateway-restapi.yaml @@ -0,0 +1,11 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Simple Rest API + +Resources: + MyRestApi: + Type: AWS::ApiGateway::RestApi + Properties: + BodyS3Location: ./openapi.json + Description: A test API + Name: MyRestAPI \ No newline at end of file diff --git a/tests/integration/testdata/package/aws-appsync-functionconfiguration.yaml b/tests/integration/testdata/package/aws-appsync-functionconfiguration.yaml new file mode 100644 index 0000000000..ad035f2baa --- /dev/null +++ b/tests/integration/testdata/package/aws-appsync-functionconfiguration.yaml @@ -0,0 +1,11 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Simple Appsync Function Configuration + +Resources: + FunctionConfiguration: + Type: AWS::AppSync::FunctionConfiguration + Properties: + ApiId: "sam package integ test function configuration" + RequestMappingTemplateS3Location: ./input-mapping-template + ResponseMappingTemplateS3Location: ./output-mapping-template \ No newline at end of file diff --git a/tests/integration/testdata/package/aws-appsync-graphqlschema.yaml b/tests/integration/testdata/package/aws-appsync-graphqlschema.yaml new file mode 100644 index 0000000000..67daa55546 --- /dev/null +++ b/tests/integration/testdata/package/aws-appsync-graphqlschema.yaml @@ -0,0 +1,10 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Simple GraphQL Schema + +Resources: + Schema: + Type: AWS::AppSync::GraphQLSchema + Properties: + ApiId: "sam package integ test schema" + DefinitionS3Location: ./graphql.schema \ No newline at end of file diff --git a/tests/integration/testdata/package/aws-appsync-resolver.yaml b/tests/integration/testdata/package/aws-appsync-resolver.yaml new file mode 100644 index 0000000000..4f07b0a194 --- /dev/null +++ b/tests/integration/testdata/package/aws-appsync-resolver.yaml @@ -0,0 +1,11 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Simple Appsync Resolver + +Resources: + Resolver: + Type: AWS::AppSync::Resolver + Properties: + ApiId: "sam package integ test resolver" + RequestMappingTemplateS3Location: ./input-mapping-template + ResponseMappingTemplateS3Location: ./output-mapping-template \ No newline at end 
of file diff --git a/tests/integration/testdata/package/aws-cloudformation-stack.yaml b/tests/integration/testdata/package/aws-cloudformation-stack.yaml new file mode 100644 index 0000000000..4563f504ee --- /dev/null +++ b/tests/integration/testdata/package/aws-cloudformation-stack.yaml @@ -0,0 +1,9 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Simple Stack + +Resources: + myStack: + Type: AWS::CloudFormation::Stack + Properties: + TemplateURL: ./aws-apigateway-restapi.yaml \ No newline at end of file diff --git a/tests/integration/testdata/package/aws-elasticbeanstalk-applicationversion.yaml b/tests/integration/testdata/package/aws-elasticbeanstalk-applicationversion.yaml new file mode 100644 index 0000000000..dffdff756a --- /dev/null +++ b/tests/integration/testdata/package/aws-elasticbeanstalk-applicationversion.yaml @@ -0,0 +1,11 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Simple ElasticBeanStalk Application Version + +Resources: + myAppVersion: + Type: AWS::ElasticBeanstalk::ApplicationVersion + Properties: + ApplicationName: "my app" + Description: "my sample version" + SourceBundle: ./sample.zip \ No newline at end of file diff --git a/tests/integration/testdata/package/aws-glue-job.yaml b/tests/integration/testdata/package/aws-glue-job.yaml new file mode 100644 index 0000000000..cf417118ef --- /dev/null +++ b/tests/integration/testdata/package/aws-glue-job.yaml @@ -0,0 +1,43 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Simple Glue Job + +Resources: + MyJobRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - + Effect: "Allow" + Principal: + Service: + - "glue.amazonaws.com" + Action: + - "sts:AssumeRole" + Path: "/" + Policies: + - + PolicyName: "root" + PolicyDocument: + Version: "2012-10-17" + Statement: + - + Effect: "Allow" + Action: "*" + Resource: "*" + + MyJob: + Type: AWS::Glue::Job + Properties: + Command: + Name: glueetl + ScriptLocation: . + DefaultArguments: + "--job-bookmark-option": "job-bookmark-enable" + ExecutionProperty: + MaxConcurrentRuns: 2 + MaxRetries: 0 + Name: cf-job1 + Role: !Ref MyJobRole \ No newline at end of file diff --git a/tests/integration/testdata/package/aws-lambda-function.yaml b/tests/integration/testdata/package/aws-lambda-function.yaml new file mode 100644 index 0000000000..cbeb67da99 --- /dev/null +++ b/tests/integration/testdata/package/aws-lambda-function.yaml @@ -0,0 +1,16 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Simple AWS Lambda Function + +Resources: + MyLambdaFunction: + Type: AWS::Lambda::Function + Properties: + Handler: index.handler + Role: + Fn::GetAtt: + - "LambdaExecutionRole" + - "Arn" + Code: "." + Runtime: nodejs8.10 + Timeout: 25 diff --git a/tests/integration/testdata/package/aws-lambda-layerversion.yaml b/tests/integration/testdata/package/aws-lambda-layerversion.yaml new file mode 100644 index 0000000000..70315e8360 --- /dev/null +++ b/tests/integration/testdata/package/aws-lambda-layerversion.yaml @@ -0,0 +1,14 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Simple AWS Lambda Layer Version + +Resources: + MyLayer: + Type: AWS::Lambda::LayerVersion + Properties: + CompatibleRuntimes: + - python3.7 + Content: "." 
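+      # Illustrative note: on package, "." is zipped and uploaded, and
+      # Content is replaced with S3Bucket/S3Key/S3ObjectVersion fields
+      # (see LambdaLayerVersionResource in this patch).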
+      Description: My layer
+      LayerName: my-layer
+      LicenseInfo: MIT
\ No newline at end of file
diff --git a/tests/integration/testdata/package/aws-serverless-api.yaml b/tests/integration/testdata/package/aws-serverless-api.yaml
new file mode 100644
index 0000000000..1d5f91e7c9
--- /dev/null
+++ b/tests/integration/testdata/package/aws-serverless-api.yaml
@@ -0,0 +1,17 @@
+AWSTemplateFormatVersion : '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: Simple Rest API
+
+Resources:
+  MyApi:
+    Type: AWS::Serverless::Api
+    Properties:
+      StageName: dev
+      Variables:
+        VarName: varValue
+      Cors:
+        AllowOrigin: "'*'"
+        AllowMethods: "'GET'"
+        AllowHeaders: "'origin, x-requested-with'"
+        MaxAge: "'510'"
+      DefinitionUri: ./swagger.yaml
\ No newline at end of file
diff --git a/tests/integration/testdata/package/aws-serverless-application.yaml b/tests/integration/testdata/package/aws-serverless-application.yaml
new file mode 100644
index 0000000000..4cbee5ea8d
--- /dev/null
+++ b/tests/integration/testdata/package/aws-serverless-application.yaml
@@ -0,0 +1,9 @@
+AWSTemplateFormatVersion : '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: Simple Serverless Application
+
+Resources:
+  myApp:
+    Type: AWS::Serverless::Application
+    Properties:
+      Location: ./aws-serverless-function.yaml
\ No newline at end of file
diff --git a/tests/integration/testdata/package/aws-serverless-function.yaml b/tests/integration/testdata/package/aws-serverless-function.yaml
new file mode 100644
index 0000000000..1691cffe8e
--- /dev/null
+++ b/tests/integration/testdata/package/aws-serverless-function.yaml
@@ -0,0 +1,12 @@
+AWSTemplateFormatVersion : '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: A hello world application.
+
+Resources:
+  HelloWorldFunction:
+    Type: AWS::Serverless::Function
+    Properties:
+      Handler: main.handler
+      Runtime: python3.6
+      CodeUri: .
+      Timeout: 600
diff --git a/tests/integration/testdata/package/aws-serverless-layerversion.yaml b/tests/integration/testdata/package/aws-serverless-layerversion.yaml
new file mode 100644
index 0000000000..1a3af0efbc
--- /dev/null
+++ b/tests/integration/testdata/package/aws-serverless-layerversion.yaml
@@ -0,0 +1,16 @@
+AWSTemplateFormatVersion : '2010-09-09'
+Transform: AWS::Serverless-2016-10-31
+Description: AWS Serverless LayerVersion Example
+
+Resources:
+  MyLayerVersion:
+    Type: AWS::Serverless::LayerVersion
+    Properties:
+      LayerName: MyLayer
+      Description: Layer description
+      ContentUri: "."
+      CompatibleRuntimes:
+        - nodejs6.10
+        - nodejs8.10
+      LicenseInfo: 'Available under the MIT-0 license.'
+ RetentionPolicy: Retain \ No newline at end of file diff --git a/tests/integration/testdata/package/aws-serverlessrepo-application.yaml b/tests/integration/testdata/package/aws-serverlessrepo-application.yaml new file mode 100644 index 0000000000..4bec75fee9 --- /dev/null +++ b/tests/integration/testdata/package/aws-serverlessrepo-application.yaml @@ -0,0 +1,16 @@ +AWSTemplateFormatVersion : '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Sample ServerlessRepo Application + +Metadata: + AWS::ServerlessRepo::Application: + Name: my-app + Description: hello world + Author: user1 + SpdxLicenseId: Apache-2.0 + LicenseUrl: ./LICENSE.txt + ReadmeUrl: ./README.md + Labels: ['tests'] + HomePageUrl: https://github.com/user1/my-app-project + SemanticVersion: 0.0.1 + SourceCodeUrl: https://github.com/user1/my-app-project \ No newline at end of file diff --git a/tests/integration/testdata/package/graphql.schema b/tests/integration/testdata/package/graphql.schema new file mode 100644 index 0000000000..a2df3e809b --- /dev/null +++ b/tests/integration/testdata/package/graphql.schema @@ -0,0 +1,5 @@ +type Post { + id: ID! + name: String! + length(unit: LengthUnit = METER): Float +} \ No newline at end of file diff --git a/tests/integration/testdata/package/input-mapping-template b/tests/integration/testdata/package/input-mapping-template new file mode 100644 index 0000000000..a006924c38 --- /dev/null +++ b/tests/integration/testdata/package/input-mapping-template @@ -0,0 +1,25 @@ +#set($inputRoot = $input.path('$')) +{ + "photos": { + "page": $inputRoot.photos.page, + "pages": "$inputRoot.photos.pages", + "perpage": $inputRoot.photos.perpage, + "total": "$inputRoot.photos.total", + "photo": [ +#foreach($elem in $inputRoot.photos.photo) + { + "id": "$elem.id", + "owner": "$elem.owner", + "secret": "$elem.secret", + "server": "$elem.server", + "farm": $elem.farm, + "title": "$elem.title", + "ispublic": $elem.ispublic, + "isfriend": $elem.isfriend, + "isfamily": $elem.isfamily + }#if($foreach.hasNext),#end + +#end + ] + } +} \ No newline at end of file diff --git a/tests/integration/testdata/package/openapi.json b/tests/integration/testdata/package/openapi.json new file mode 100644 index 0000000000..726b4d83a7 --- /dev/null +++ b/tests/integration/testdata/package/openapi.json @@ -0,0 +1,16 @@ +{ + "swagger": 2, + "info": { + "version": "0.0.1", + "title": "test" + }, + "basePath": "/pete", + "schemes": [ + "https" + ], + "definitions": { + "Empty": { + "type": "object" + } + } +} \ No newline at end of file diff --git a/tests/integration/testdata/package/output-mapping-template b/tests/integration/testdata/package/output-mapping-template new file mode 100644 index 0000000000..14961d482a --- /dev/null +++ b/tests/integration/testdata/package/output-mapping-template @@ -0,0 +1,21 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "PhotosOutputModel", + "type": "object", + "properties": { + "photos": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { "type": "string" }, + "owner": { "type": "string" }, + "title": { "type": "string" }, + "ispublic": { "type": "integer" }, + "isfriend": { "type": "integer" }, + "isfamily": { "type": "integer" } + } + } + } + } +} \ No newline at end of file diff --git a/tests/integration/testdata/package/swagger.yaml b/tests/integration/testdata/package/swagger.yaml new file mode 100644 index 0000000000..7638bec54e --- /dev/null +++ b/tests/integration/testdata/package/swagger.yaml @@ -0,0 +1,13 @@ 
+swagger: "2.0" +info: + title: + Ref: AWS::StackName +paths: + "/anyandall": + x-amazon-apigateway-any-method: + x-amazon-apigateway-integration: + httpMethod: POST + type: aws_proxy + uri: + Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${MyLambdaFunction.Arn}/invocations + responses: {} \ No newline at end of file diff --git a/tests/regression/__init__.py b/tests/regression/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/regression/package/__init__.py b/tests/regression/package/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/regression/package/regression_package_base.py b/tests/regression/package/regression_package_base.py new file mode 100644 index 0000000000..c893a52ba8 --- /dev/null +++ b/tests/regression/package/regression_package_base.py @@ -0,0 +1,97 @@ +import os +import uuid +import json +import tempfile +import time +from pathlib import Path +from subprocess import Popen, PIPE +from unittest import TestCase + +import boto3 + + +class PackageRegressionBase(TestCase): + @classmethod + def setUpClass(cls): + cls.region_name = os.environ.get("AWS_DEFAULT_REGION") + cls.bucket_name = str(uuid.uuid4()) + cls.test_data_path = Path(__file__).resolve().parents[2].joinpath("integration", "testdata", "package") + + # Create S3 bucket + s3 = boto3.resource("s3") + cls.s3_bucket = s3.Bucket(cls.bucket_name) + cls.s3_bucket.create() + + # Given 3 seconds for all the bucket creation to complete + time.sleep(3) + + @classmethod + def tearDownClass(cls): + cls.s3_bucket.objects.all().delete() + cls.s3_bucket.delete() + + def base_command(self, base): + command = [base] + if os.getenv("SAM_CLI_DEV") and base == "sam": + command = ["samdev"] + elif base == "aws": + command = [base, "cloudformation"] + + return command + + def get_command_list( + self, + base="sam", + s3_bucket=None, + template_file=None, + s3_prefix=None, + output_template_file=None, + use_json=False, + force_upload=False, + kms_key_id=None, + metadata=None, + ): + command_list = self.base_command(base=base) + + command_list = command_list + ["package"] + + if s3_bucket: + command_list = command_list + ["--s3-bucket", str(s3_bucket)] + + if template_file: + command_list = command_list + ["--template-file", str(template_file)] + + if s3_prefix: + command_list = command_list + ["--s3-prefix", str(s3_prefix)] + + if output_template_file: + command_list = command_list + ["--output-template-file", str(output_template_file)] + if kms_key_id: + command_list = command_list + ["--kms-key-id", str(kms_key_id)] + if use_json: + command_list = command_list + ["--use-json"] + if force_upload: + command_list = command_list + ["--force-upload"] + if metadata: + command_list = command_list + ["--metadata", json.dumps(metadata)] + + return command_list + + def regression_check(self, args): + with tempfile.NamedTemporaryFile(delete=False) as output_template_file_sam: + sam_command_list = self.get_command_list(output_template_file=output_template_file_sam.name, **args) + process = Popen(sam_command_list, stdout=PIPE) + process.wait() + self.assertEqual(process.returncode, 0) + output_sam = output_template_file_sam.read() + + with tempfile.NamedTemporaryFile(delete=False) as output_template_file_aws: + aws_command_list = self.get_command_list( + base="aws", output_template_file=output_template_file_aws.name, **args + ) + process = Popen(aws_command_list, stdout=PIPE) + process.wait() + self.assertEqual(process.returncode, 0) + output_aws = 
output_template_file_aws.read() + + self.assertEqual(output_sam, output_aws) diff --git a/tests/regression/package/test_package_regression.py b/tests/regression/package/test_package_regression.py new file mode 100644 index 0000000000..ac55205f09 --- /dev/null +++ b/tests/regression/package/test_package_regression.py @@ -0,0 +1,105 @@ +from subprocess import Popen, PIPE +import tempfile + +from unittest import skipIf +from parameterized import parameterized + +from .regression_package_base import PackageRegressionBase +from tests.testing_utils import RUNNING_ON_CI, RUNNING_TEST_FOR_MASTER_ON_CI + +# Package Regression tests require credentials and CI/CD will only add credentials to the env if the PR is from the same repo. +# This is to restrict package tests to run outside of CI/CD and when the branch is not master. +SKIP_PACKAGE_REGRESSION_TESTS = RUNNING_ON_CI and RUNNING_TEST_FOR_MASTER_ON_CI + + +# Only tested cases where the output template file changes, adding metadata or kms keys does not change the output. + + +@skipIf(SKIP_PACKAGE_REGRESSION_TESTS, "Skip package regression tests in CI/CD only") +class TestPackageRegression(PackageRegressionBase): + def setUp(self): + super(TestPackageRegression, self).setUp() + + def tearDown(self): + super(TestPackageRegression, self).tearDown() + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + "aws-serverless-api.yaml", + "aws-appsync-graphqlschema.yaml", + "aws-appsync-resolver.yaml", + "aws-appsync-functionconfiguration.yaml", + "aws-lambda-function.yaml", + "aws-apigateway-restapi.yaml", + "aws-elasticbeanstalk-applicationversion.yaml", + "aws-cloudformation-stack.yaml", + "aws-serverless-application.yaml", + "aws-lambda-layerversion.yaml", + "aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-serverlessrepo-application.yaml", + ] + ) + def test_package_with_output_template_file(self, template_file): + + arguments = {"s3_bucket": self.s3_bucket.name, "template_file": self.test_data_path.joinpath(template_file)} + + self.regression_check(arguments) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + "aws-serverless-api.yaml", + "aws-appsync-graphqlschema.yaml", + "aws-appsync-resolver.yaml", + "aws-appsync-functionconfiguration.yaml", + "aws-lambda-function.yaml", + "aws-apigateway-restapi.yaml", + "aws-elasticbeanstalk-applicationversion.yaml", + "aws-cloudformation-stack.yaml", + "aws-serverless-application.yaml", + "aws-lambda-layerversion.yaml", + "aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-serverlessrepo-application.yaml", + ] + ) + def test_package_with_output_template_file_and_prefix(self, template_file): + + arguments = { + "s3_bucket": self.s3_bucket.name, + "template_file": self.test_data_path.joinpath(template_file), + "s3_prefix": "regression/tests", + } + + self.regression_check(arguments) + + @parameterized.expand( + [ + "aws-serverless-function.yaml", + "aws-serverless-api.yaml", + "aws-appsync-graphqlschema.yaml", + "aws-appsync-resolver.yaml", + "aws-appsync-functionconfiguration.yaml", + "aws-lambda-function.yaml", + "aws-apigateway-restapi.yaml", + "aws-elasticbeanstalk-applicationversion.yaml", + "aws-cloudformation-stack.yaml", + "aws-serverless-application.yaml", + "aws-lambda-layerversion.yaml", + "aws-serverless-layerversion.yaml", + "aws-glue-job.yaml", + "aws-serverlessrepo-application.yaml", + ] + ) + def test_package_with_output_template_file_json_and_prefix(self, template_file): + + arguments = { + "s3_bucket": self.s3_bucket.name, + 
"template_file": self.test_data_path.joinpath(template_file), + "s3_prefix": "regression/tests", + "use_json": True, + } + + self.regression_check(arguments) diff --git a/tests/testing_utils.py b/tests/testing_utils.py index 293e6e9676..94df3a9418 100644 --- a/tests/testing_utils.py +++ b/tests/testing_utils.py @@ -1,7 +1,60 @@ import os import platform +import tempfile +import shutil IS_WINDOWS = platform.system().lower() == "windows" RUNNING_ON_CI = os.environ.get("APPVEYOR", False) RUNNING_TEST_FOR_MASTER_ON_CI = os.environ.get("APPVEYOR_REPO_BRANCH", "master") != "master" CI_OVERRIDE = os.environ.get("APPVEYOR_CI_OVERRIDE", False) + + +class FileCreator(object): + def __init__(self): + self.rootdir = tempfile.mkdtemp() + + def remove_all(self): + if os.path.exists(self.rootdir): + shutil.rmtree(self.rootdir) + + def create_file(self, filename, contents, mtime=None, mode="w"): + """Creates a file in a tmpdir + ``filename`` should be a relative path, e.g. "foo/bar/baz.txt" + It will be translated into a full path in a tmp dir. + If the ``mtime`` argument is provided, then the file's + mtime will be set to the provided value (must be an epoch time). + Otherwise the mtime is left untouched. + ``mode`` is the mode the file should be opened either as ``w`` or + `wb``. + Returns the full path to the file. + """ + full_path = os.path.join(self.rootdir, filename) + if not os.path.isdir(os.path.dirname(full_path)): + os.makedirs(os.path.dirname(full_path)) + with open(full_path, mode) as f: + f.write(contents) + current_time = os.path.getmtime(full_path) + # Subtract a few years off the last modification date. + os.utime(full_path, (current_time, current_time - 100000000)) + if mtime is not None: + os.utime(full_path, (mtime, mtime)) + return full_path + + def append_file(self, filename, contents): + """Append contents to a file + ``filename`` should be a relative path, e.g. "foo/bar/baz.txt" + It will be translated into a full path in a tmp dir. + Returns the full path to the file. + """ + full_path = os.path.join(self.rootdir, filename) + if not os.path.isdir(os.path.dirname(full_path)): + os.makedirs(os.path.dirname(full_path)) + with open(full_path, "a") as f: + f.write(contents) + return full_path + + def full_path(self, filename): + """Translate relative path to full path in temp dir. 
+ f.full_path('foo/bar.txt') -> /tmp/asdfasd/foo/bar.txt + """ + return os.path.join(self.rootdir, filename) diff --git a/tests/unit/cli/test_types.py b/tests/unit/cli/test_types.py index 1251392c88..b0cd383db9 100644 --- a/tests/unit/cli/test_types.py +++ b/tests/unit/cli/test_types.py @@ -3,6 +3,7 @@ from nose_parameterized import parameterized from samcli.cli.types import CfnParameterOverridesType +from samcli.cli.types import CfnMetadataType class TestCfnParameterOverridesType(TestCase): @@ -73,3 +74,49 @@ def test_must_fail_on_invalid_format(self, input): def test_successful_parsing(self, input, expected): result = self.param_type.convert(input, None, None) self.assertEqual(result, expected, msg="Failed with Input = " + input) + + +class TestCfnMetadataType(TestCase): + def setUp(self): + self.param_type = CfnMetadataType() + + @parameterized.expand( + [ + # Just a string + ("some string"), + # Unfinished dict with just a key + ("{'a'}"), + # Unfinished dict just a key and : + ("{'a'}:"), + # Dict with nested dict: + ("{'a':{'b':'c'}}"), + # Dict with list value: + ("{'a':['b':'c']}"), + # Just a list: + ("['b':'c']"), + # Non-string + ("{1:1}"), + # Wrong notation + ("a==b"), + # Wrong multi-key notation + ("a==b,c==d"), + ] + ) + def test_must_fail_on_invalid_format(self, input): + self.param_type.fail = Mock() + self.param_type.convert(input, "param", "ctx") + + self.param_type.fail.assert_called_with(ANY, "param", "ctx") + + @parameterized.expand( + [ + ("a=b", {"a": "b"}), + ("a=b,c=d", {"a": "b", "c": "d"}), + ('{"a":"b"}', {"a": "b"}), + ('{"a":"b", "c":"d"}', {"a": "b", "c": "d"}), + ("", {}), + ] + ) + def test_successful_parsing(self, input, expected): + result = self.param_type.convert(input, None, None) + self.assertEqual(result, expected, msg="Failed with Input = " + input) diff --git a/tests/unit/commands/_utils/test_template.py b/tests/unit/commands/_utils/test_template.py index 04cd9b0b5c..fe80c7dc79 100644 --- a/tests/unit/commands/_utils/test_template.py +++ b/tests/unit/commands/_utils/test_template.py @@ -8,8 +8,8 @@ from samcli.commands._utils.template import ( get_template_data, - _METADATA_WITH_LOCAL_PATHS, - _RESOURCES_WITH_LOCAL_PATHS, + METADATA_WITH_LOCAL_PATHS, + RESOURCES_WITH_LOCAL_PATHS, _update_relative_paths, move_template, ) @@ -77,7 +77,7 @@ def setUp(self): self.expected_result = os.path.join("..", "foo", "bar") - @parameterized.expand([(resource_type, props) for resource_type, props in _METADATA_WITH_LOCAL_PATHS.items()]) + @parameterized.expand([(resource_type, props) for resource_type, props in METADATA_WITH_LOCAL_PATHS.items()]) def test_must_update_relative_metadata_paths(self, resource_type, properties): for propname in properties: @@ -96,7 +96,7 @@ def test_must_update_relative_metadata_paths(self, resource_type, properties): self.maxDiff = None self.assertEqual(result, expected_template_dict) - @parameterized.expand([(resource_type, props) for resource_type, props in _RESOURCES_WITH_LOCAL_PATHS.items()]) + @parameterized.expand([(resource_type, props) for resource_type, props in RESOURCES_WITH_LOCAL_PATHS.items()]) def test_must_update_relative_resource_paths(self, resource_type, properties): for propname in properties: diff --git a/tests/unit/commands/package/__init__.py b/tests/unit/commands/package/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/package/test_command.py b/tests/unit/commands/package/test_command.py new file mode 100644 index 0000000000..484d827d04 --- /dev/null +++ 
b/tests/unit/commands/package/test_command.py
@@ -0,0 +1,55 @@
+from unittest import TestCase
+from unittest.mock import patch, Mock
+
+from samcli.commands.package.command import do_cli
+
+
+class TestPackageCliCommand(TestCase):
+    def setUp(self):
+
+        self.template_file = "input-template-file"
+        self.s3_bucket = "s3-bucket"
+        self.s3_prefix = "s3-prefix"
+        self.kms_key_id = "kms-key-id"
+        self.output_template_file = "output-template-file"
+        self.use_json = True
+        self.force_upload = False
+        self.metadata = {"abc": "def"}
+        self.region = None
+        self.profile = None
+
+    @patch("samcli.commands.package.command.click")
+    @patch("samcli.commands.package.package_context.PackageContext")
+    def test_all_args(self, package_command_context, click_mock):
+
+        context_mock = Mock()
+        package_command_context.return_value.__enter__.return_value = context_mock
+
+        do_cli(
+            template_file=self.template_file,
+            s3_bucket=self.s3_bucket,
+            s3_prefix=self.s3_prefix,
+            kms_key_id=self.kms_key_id,
+            output_template_file=self.output_template_file,
+            use_json=self.use_json,
+            force_upload=self.force_upload,
+            metadata=self.metadata,
+            region=self.region,
+            profile=self.profile,
+        )
+
+        package_command_context.assert_called_with(
+            template_file=self.template_file,
+            s3_bucket=self.s3_bucket,
+            s3_prefix=self.s3_prefix,
+            kms_key_id=self.kms_key_id,
+            output_template_file=self.output_template_file,
+            use_json=self.use_json,
+            force_upload=self.force_upload,
+            metadata=self.metadata,
+            region=self.region,
+            profile=self.profile,
+        )
+
+        context_mock.run.assert_called_with()
+        self.assertEqual(context_mock.run.call_count, 1)
diff --git a/tests/unit/commands/package/test_package_context.py b/tests/unit/commands/package/test_package_context.py
new file mode 100644
index 0000000000..361a98391f
--- /dev/null
+++ b/tests/unit/commands/package/test_package_context.py
@@ -0,0 +1,86 @@
+"""Test sam package command"""
+from unittest import TestCase
+from unittest.mock import patch, MagicMock
+import tempfile
+
+
+from samcli.commands.package.package_context import PackageContext
+from samcli.commands.package.exceptions import PackageFailedError
+from samcli.lib.package.artifact_exporter import Template
+
+
+class TestPackageCommand(TestCase):
+    def setUp(self):
+        self.package_command_context = PackageContext(
+            template_file="template-file",
+            s3_bucket="s3-bucket",
+            s3_prefix="s3-prefix",
+            kms_key_id="kms-key-id",
+            output_template_file=None,
+            use_json=True,
+            force_upload=True,
+            metadata={},
+            region=None,
+            profile=None,
+        )
+
+    @patch.object(Template, "export", MagicMock(side_effect=OSError))
+    @patch("boto3.Session")
+    def test_template_permissions_error(self, patched_boto):
+        with self.assertRaises(PackageFailedError):
+            self.package_command_context.run()
+
+    @patch.object(Template, "export", MagicMock(return_value={}))
+    @patch("boto3.Session")
+    def test_template_path_valid_with_output_template(self, patched_boto):
+        with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_template_file:
+            with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_output_template_file:
+                package_command_context = PackageContext(
+                    template_file=temp_template_file.name,
+                    s3_bucket="s3-bucket",
+                    s3_prefix="s3-prefix",
+                    kms_key_id="kms-key-id",
+                    output_template_file=temp_output_template_file.name,
+                    use_json=True,
+                    force_upload=True,
+                    metadata={},
+                    region=None,
+                    profile=None,
+                )
+                package_command_context.run()
+
+    @patch.object(Template, "export", MagicMock(return_value={}))
+    @patch("boto3.Session")
+    def
test_template_path_valid(self, patched_boto): + with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_template_file: + package_command_context = PackageContext( + template_file=temp_template_file.name, + s3_bucket="s3-bucket", + s3_prefix="s3-prefix", + kms_key_id="kms-key-id", + output_template_file=None, + use_json=True, + force_upload=True, + metadata={}, + region=None, + profile=None, + ) + package_command_context.run() + + @patch.object(Template, "export", MagicMock(return_value={})) + @patch("boto3.Session") + def test_template_path_valid_no_json(self, patched_boto): + with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_template_file: + package_command_context = PackageContext( + template_file=temp_template_file.name, + s3_bucket="s3-bucket", + s3_prefix="s3-prefix", + kms_key_id="kms-key-id", + output_template_file=None, + use_json=False, + force_upload=True, + metadata={}, + region=None, + profile=None, + ) + package_command_context.run() diff --git a/tests/unit/commands/test_package.py b/tests/unit/commands/test_package.py deleted file mode 100644 index 2e368f4d80..0000000000 --- a/tests/unit/commands/test_package.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -Tests Package CLI command -""" - -from unittest import TestCase -from unittest.mock import patch - -from samcli.commands.package import do_cli as package_cli - - -class TestCli(TestCase): - def setUp(self): - self.args = (" --use - json",) - self.expected_args = self.args + ("--s3-bucket", "bucketName") - - @patch("samcli.commands.package.execute_command") - def test_package_must_pass_args(self, execute_command_mock): - execute_command_mock.return_value = True - package_cli(self.args, "template_file", "bucketName") - execute_command_mock.assert_called_with("package", self.expected_args, "template_file") diff --git a/tests/unit/lib/package/__init__.py b/tests/unit/lib/package/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/package/test_artifact_exporter.py b/tests/unit/lib/package/test_artifact_exporter.py new file mode 100644 index 0000000000..236fc28d9b --- /dev/null +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -0,0 +1,1100 @@ +import tempfile +import os +import string +import random +import zipfile +import unittest + +from contextlib import contextmanager, closing +from unittest import mock +from unittest.mock import patch, Mock +from tests.testing_utils import FileCreator +from samcli.commands.package import exceptions +from samcli.lib.package.artifact_exporter import ( + is_s3_url, + parse_s3_url, + is_local_file, + is_local_folder, + upload_local_artifacts, + zip_folder, + make_abs_path, + make_zip, + Template, + Resource, + ResourceWithS3UrlDict, + ServerlessApiResource, + ServerlessFunctionResource, + GraphQLSchemaResource, + LambdaFunctionResource, + ApiGatewayRestApiResource, + ElasticBeanstalkApplicationVersion, + CloudFormationStackResource, + ServerlessApplicationResource, + LambdaLayerVersionResource, + copy_to_temp_dir, + include_transform_export_handler, + GLOBAL_EXPORT_DICT, + ServerlessLayerVersionResource, + ServerlessRepoApplicationLicense, + ServerlessRepoApplicationReadme, + AppSyncResolverRequestTemplateResource, + AppSyncResolverResponseTemplateResource, + AppSyncFunctionConfigurationRequestTemplateResource, + AppSyncFunctionConfigurationResponseTemplateResource, + GlueJobCommandScriptLocationResource, +) + + +class TestArtifactExporter(unittest.TestCase): + def setUp(self): + self.s3_uploader_mock = Mock() + 
self.s3_uploader_mock.s3.meta.endpoint_url = "https://s3.some-valid-region.amazonaws.com"
+
+    def test_all_resources_export(self):
+        uploaded_s3_url = "s3://foo/bar?versionId=baz"
+
+        setup = [
+            {"class": ServerlessFunctionResource, "expected_result": uploaded_s3_url},
+            {"class": ServerlessApiResource, "expected_result": uploaded_s3_url},
+            {"class": GraphQLSchemaResource, "expected_result": uploaded_s3_url},
+            {"class": AppSyncResolverRequestTemplateResource, "expected_result": uploaded_s3_url},
+            {"class": AppSyncResolverResponseTemplateResource, "expected_result": uploaded_s3_url},
+            {"class": AppSyncFunctionConfigurationRequestTemplateResource, "expected_result": uploaded_s3_url},
+            {"class": AppSyncFunctionConfigurationResponseTemplateResource, "expected_result": uploaded_s3_url},
+            {"class": ApiGatewayRestApiResource, "expected_result": {"Bucket": "foo", "Key": "bar", "Version": "baz"}},
+            {
+                "class": LambdaFunctionResource,
+                "expected_result": {"S3Bucket": "foo", "S3Key": "bar", "S3ObjectVersion": "baz"},
+            },
+            {"class": ElasticBeanstalkApplicationVersion, "expected_result": {"S3Bucket": "foo", "S3Key": "bar"}},
+            {
+                "class": LambdaLayerVersionResource,
+                "expected_result": {"S3Bucket": "foo", "S3Key": "bar", "S3ObjectVersion": "baz"},
+            },
+            {"class": ServerlessLayerVersionResource, "expected_result": uploaded_s3_url},
+            {"class": ServerlessRepoApplicationReadme, "expected_result": uploaded_s3_url},
+            {"class": ServerlessRepoApplicationLicense, "expected_result": uploaded_s3_url},
+            {"class": GlueJobCommandScriptLocationResource, "expected_result": {"ScriptLocation": uploaded_s3_url}},
+        ]
+
+        with patch("samcli.lib.package.artifact_exporter.upload_local_artifacts") as upload_local_artifacts_mock:
+            for test in setup:
+                self._helper_verify_export_resources(
+                    test["class"], uploaded_s3_url, upload_local_artifacts_mock, test["expected_result"]
+                )
+
+    def _helper_verify_export_resources(
+        self, test_class, uploaded_s3_url, upload_local_artifacts_mock, expected_result
+    ):
+
+        s3_uploader_mock = Mock()
+        upload_local_artifacts_mock.reset_mock()
+
+        resource_id = "id"
+
+        if "." in test_class.PROPERTY_NAME:
+            reversed_property_names = test_class.PROPERTY_NAME.split(".")
+            reversed_property_names.reverse()
+            property_dict = {reversed_property_names[0]: "foo"}
+            for sub_property_name in reversed_property_names[1:]:
+                property_dict = {sub_property_name: property_dict}
+            resource_dict = property_dict
+        else:
+            resource_dict = {test_class.PROPERTY_NAME: "foo"}
+        parent_dir = "dir"
+
+        upload_local_artifacts_mock.return_value = uploaded_s3_url
+
+        resource_obj = test_class(uploader=s3_uploader_mock)
+
+        resource_obj.export(resource_id, resource_dict, parent_dir)
+
+        upload_local_artifacts_mock.assert_called_once_with(
+            resource_id, resource_dict, test_class.PROPERTY_NAME, parent_dir, s3_uploader_mock
+        )
+        if "." in test_class.PROPERTY_NAME:
+            top_level_property_name = test_class.PROPERTY_NAME.split(".")[0]
+            result = resource_dict[top_level_property_name]
+        else:
+            result = resource_dict[test_class.PROPERTY_NAME]
+        self.assertEqual(result, expected_result)
+
+    def test_is_s3_url(self):
+        valid = [
+            "s3://foo/bar",
+            "s3://foo/bar/baz/cat/dog",
+            "s3://foo/bar?versionId=abc",
+            "s3://foo/bar/baz?versionId=abc&versionId=123",
+            "s3://foo/bar/baz?versionId=abc",
+            "s3://www.amazon.com/foo/bar",
+            "s3://my-new-bucket/foo/bar?a=1&a=2&a=3&b=1",
+        ]
+
+        invalid = [
+            # For purposes of exporter, we need S3 URLs to point to an object
+            # and not a bucket
+            "s3://foo",
+            # Not S3 URLs at all
+            "https://s3-eu-west-1.amazonaws.com/bucket/key",
+            "https://www.amazon.com",
+        ]
+
+        for url in valid:
+            self._assert_is_valid_s3_url(url)
+
+        for url in invalid:
+            self._assert_is_invalid_s3_url(url)
+
+    def _assert_is_valid_s3_url(self, url):
+        self.assertTrue(is_s3_url(url), "{0} should be valid".format(url))
+
+    def _assert_is_invalid_s3_url(self, url):
+        self.assertFalse(is_s3_url(url), "{0} should be invalid".format(url))
+
+    def test_parse_s3_url(self):
+
+        valid = [
+            {"url": "s3://foo/bar", "result": {"Bucket": "foo", "Key": "bar"}},
+            {"url": "s3://foo/bar/cat/dog", "result": {"Bucket": "foo", "Key": "bar/cat/dog"}},
+            {
+                "url": "s3://foo/bar/baz?versionId=abc&param1=val1&param2=val2",
+                "result": {"Bucket": "foo", "Key": "bar/baz", "VersionId": "abc"},
+            },
+            {
+                # VersionId is not returned if there are more than one versionId
+                # keys in query parameter
+                "url": "s3://foo/bar/baz?versionId=abc&versionId=123",
+                "result": {"Bucket": "foo", "Key": "bar/baz"},
+            },
+        ]
+
+        invalid = [
+            # For purposes of exporter, we need S3 URLs to point to an object
+            # and not a bucket
+            "s3://foo",
+            # Not S3 URLs at all
+            "https://s3-eu-west-1.amazonaws.com/bucket/key",
+            "https://www.amazon.com",
+        ]
+
+        for config in valid:
+            result = parse_s3_url(
+                config["url"], bucket_name_property="Bucket", object_key_property="Key", version_property="VersionId"
+            )
+
+            self.assertEqual(result, config["result"])
+
+        for url in invalid:
+            with self.assertRaises(ValueError):
+                parse_s3_url(url)
+
+    def test_is_local_file(self):
+        with tempfile.NamedTemporaryFile() as handle:
+            self.assertTrue(is_local_file(handle.name))
+            self.assertFalse(is_local_folder(handle.name))
+
+    def test_is_local_folder(self):
+        with self.make_temp_dir() as filename:
+            self.assertTrue(is_local_folder(filename))
+            self.assertFalse(is_local_file(filename))
+
+    @patch("samcli.lib.package.artifact_exporter.zip_and_upload")
+    def test_upload_local_artifacts_local_file(self, zip_and_upload_mock):
+        # Case 1: Artifact path is a relative path
+        # Verifies that we package local artifacts appropriately
+        property_name = "property"
+        resource_id = "resource_id"
+        expected_s3_url = "s3://foo/bar?versionId=baz"
+
+        self.s3_uploader_mock.upload_with_dedup.return_value = expected_s3_url
+
+        with tempfile.NamedTemporaryFile() as handle:
+            # Artifact is a file in the temporary directory
+            artifact_path = handle.name
+            parent_dir = tempfile.gettempdir()
+
+            resource_dict = {property_name: artifact_path}
+            result = upload_local_artifacts(
+                resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock
+            )
+            self.assertEqual(result, expected_s3_url)
+
+            # Internally the method would convert relative paths to absolute
+            # path, with respect to the parent directory
+            absolute_artifact_path = make_abs_path(parent_dir, artifact_path)
+            
self.s3_uploader_mock.upload_with_dedup.assert_called_with(absolute_artifact_path) + + zip_and_upload_mock.assert_not_called() + + @patch("samcli.lib.package.artifact_exporter.zip_and_upload") + def test_upload_local_artifacts_local_file_abs_path(self, zip_and_upload_mock): + # Case 2: Artifact path is an absolute path + # Verifies that we package local artifacts appropriately + property_name = "property" + resource_id = "resource_id" + expected_s3_url = "s3://foo/bar?versionId=baz" + + self.s3_uploader_mock.upload_with_dedup.return_value = expected_s3_url + + with tempfile.NamedTemporaryFile() as handle: + parent_dir = tempfile.gettempdir() + artifact_path = make_abs_path(parent_dir, handle.name) + + resource_dict = {property_name: artifact_path} + result = upload_local_artifacts( + resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock + ) + self.assertEqual(result, expected_s3_url) + + self.s3_uploader_mock.upload_with_dedup.assert_called_with(artifact_path) + zip_and_upload_mock.assert_not_called() + + @patch("samcli.lib.package.artifact_exporter.zip_and_upload") + def test_upload_local_artifacts_local_folder(self, zip_and_upload_mock): + property_name = "property" + resource_id = "resource_id" + expected_s3_url = "s3://foo/bar?versionId=baz" + + zip_and_upload_mock.return_value = expected_s3_url + + # Artifact path is a Directory + with self.make_temp_dir() as artifact_path: + # Artifact is a file in the temporary directory + parent_dir = tempfile.gettempdir() + resource_dict = {property_name: artifact_path} + + result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, Mock()) + self.assertEqual(result, expected_s3_url) + + absolute_artifact_path = make_abs_path(parent_dir, artifact_path) + + zip_and_upload_mock.assert_called_once_with(absolute_artifact_path, mock.ANY) + + @patch("samcli.lib.package.artifact_exporter.zip_and_upload") + def test_upload_local_artifacts_no_path(self, zip_and_upload_mock): + property_name = "property" + resource_id = "resource_id" + expected_s3_url = "s3://foo/bar?versionId=baz" + + zip_and_upload_mock.return_value = expected_s3_url + + # If you don't specify a path, we will default to Current Working Dir + resource_dict = {} + parent_dir = tempfile.gettempdir() + + result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock) + self.assertEqual(result, expected_s3_url) + + zip_and_upload_mock.assert_called_once_with(parent_dir, mock.ANY) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + @patch("samcli.lib.package.artifact_exporter.zip_and_upload") + def test_upload_local_artifacts_s3_url(self, zip_and_upload_mock): + property_name = "property" + resource_id = "resource_id" + object_s3_url = "s3://foo/bar?versionId=baz" + + # If URL is already S3 URL, this will be returned without zip/upload + resource_dict = {property_name: object_s3_url} + parent_dir = tempfile.gettempdir() + + result = upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock) + self.assertEqual(result, object_s3_url) + + zip_and_upload_mock.assert_not_called() + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + @patch("samcli.lib.package.artifact_exporter.zip_and_upload") + def test_upload_local_artifacts_invalid_value(self, zip_and_upload_mock): + property_name = "property" + resource_id = "resource_id" + parent_dir = tempfile.gettempdir() + + with self.assertRaises(exceptions.InvalidLocalPathError): + 
non_existent_file = "some_random_filename" + resource_dict = {property_name: non_existent_file} + upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock) + + with self.assertRaises(exceptions.InvalidLocalPathError): + non_existent_file = ["invalid datatype"] + resource_dict = {property_name: non_existent_file} + upload_local_artifacts(resource_id, resource_dict, property_name, parent_dir, self.s3_uploader_mock) + + zip_and_upload_mock.assert_not_called() + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + @patch("samcli.lib.package.artifact_exporter.make_zip") + def test_zip_folder(self, make_zip_mock): + zip_file_name = "name.zip" + make_zip_mock.return_value = zip_file_name + + with self.make_temp_dir() as dirname: + with zip_folder(dirname) as actual_zip_file_name: + self.assertEqual(actual_zip_file_name, zip_file_name) + + make_zip_mock.assert_called_once_with(mock.ANY, dirname) + + @patch("samcli.lib.package.artifact_exporter.upload_local_artifacts") + def test_resource(self, upload_local_artifacts_mock): + # Property value is a path to file + + class MockResource(Resource): + PROPERTY_NAME = "foo" + + resource = MockResource(self.s3_uploader_mock) + + resource_id = "id" + resource_dict = {} + resource_dict[resource.PROPERTY_NAME] = "/path/to/file" + parent_dir = "dir" + s3_url = "s3://foo/bar" + + upload_local_artifacts_mock.return_value = s3_url + + resource.export(resource_id, resource_dict, parent_dir) + + upload_local_artifacts_mock.assert_called_once_with( + resource_id, resource_dict, resource.PROPERTY_NAME, parent_dir, self.s3_uploader_mock + ) + + self.assertEqual(resource_dict[resource.PROPERTY_NAME], s3_url) + + @patch("shutil.rmtree") + @patch("zipfile.is_zipfile") + @patch("samcli.lib.package.artifact_exporter.copy_to_temp_dir") + @patch("samcli.lib.package.artifact_exporter.zip_and_upload") + @patch("samcli.lib.package.artifact_exporter.is_local_file") + def test_resource_with_force_zip_on_regular_file( + self, is_local_file_mock, zip_and_upload_mock, copy_to_temp_dir_mock, is_zipfile_mock, rmtree_mock + ): + # Property value is a path to file and FORCE_ZIP is True + + class MockResource(Resource): + PROPERTY_NAME = "foo" + FORCE_ZIP = True + + resource = MockResource(self.s3_uploader_mock) + + resource_id = "id" + resource_dict = {} + original_path = "/path/to/file" + resource_dict[resource.PROPERTY_NAME] = original_path + parent_dir = "dir" + s3_url = "s3://foo/bar" + + zip_and_upload_mock.return_value = s3_url + is_local_file_mock.return_value = True + + with self.make_temp_dir() as tmp_dir: + + copy_to_temp_dir_mock.return_value = tmp_dir + + # This is not a zip file + is_zipfile_mock.return_value = False + + resource.export(resource_id, resource_dict, parent_dir) + + zip_and_upload_mock.assert_called_once_with(tmp_dir, mock.ANY) + rmtree_mock.assert_called_once_with(tmp_dir) + is_zipfile_mock.assert_called_once_with(original_path) + self.assertEqual(resource_dict[resource.PROPERTY_NAME], s3_url) + + @patch("shutil.rmtree") + @patch("zipfile.is_zipfile") + @patch("samcli.lib.package.artifact_exporter.copy_to_temp_dir") + @patch("samcli.lib.package.artifact_exporter.zip_and_upload") + @patch("samcli.lib.package.artifact_exporter.is_local_file") + def test_resource_with_force_zip_on_zip_file( + self, is_local_file_mock, zip_and_upload_mock, copy_to_temp_dir_mock, is_zipfile_mock, rmtree_mock + ): + # Property value is a path to zip file and FORCE_ZIP is True + # We should *not* re-zip an existing zip + + class 
MockResource(Resource): + PROPERTY_NAME = "foo" + FORCE_ZIP = True + + resource = MockResource(self.s3_uploader_mock) + + resource_id = "id" + resource_dict = {} + original_path = "/path/to/zip_file" + resource_dict[resource.PROPERTY_NAME] = original_path + parent_dir = "dir" + s3_url = "s3://foo/bar" + + # When the file is actually a zip-file, no additional zipping has to happen + is_zipfile_mock.return_value = True + is_local_file_mock.return_value = True + zip_and_upload_mock.return_value = s3_url + self.s3_uploader_mock.upload_with_dedup.return_value = s3_url + + resource.export(resource_id, resource_dict, parent_dir) + + copy_to_temp_dir_mock.assert_not_called() + zip_and_upload_mock.assert_not_called() + rmtree_mock.assert_not_called() + is_zipfile_mock.assert_called_once_with(original_path) + self.assertEqual(resource_dict[resource.PROPERTY_NAME], s3_url) + + @patch("shutil.rmtree") + @patch("zipfile.is_zipfile") + @patch("samcli.lib.package.artifact_exporter.copy_to_temp_dir") + @patch("samcli.lib.package.artifact_exporter.zip_and_upload") + @patch("samcli.lib.package.artifact_exporter.is_local_file") + def test_resource_without_force_zip( + self, is_local_file_mock, zip_and_upload_mock, copy_to_temp_dir_mock, is_zipfile_mock, rmtree_mock + ): + class MockResourceNoForceZip(Resource): + PROPERTY_NAME = "foo" + + resource = MockResourceNoForceZip(self.s3_uploader_mock) + + resource_id = "id" + resource_dict = {} + original_path = "/path/to/file" + resource_dict[resource.PROPERTY_NAME] = original_path + parent_dir = "dir" + s3_url = "s3://foo/bar" + + # This is not a zip file, but a valid local file. Since FORCE_ZIP is NOT set, this will not be zipped + is_zipfile_mock.return_value = False + is_local_file_mock.return_value = True + zip_and_upload_mock.return_value = s3_url + self.s3_uploader_mock.upload_with_dedup.return_value = s3_url + + resource.export(resource_id, resource_dict, parent_dir) + + copy_to_temp_dir_mock.assert_not_called() + zip_and_upload_mock.assert_not_called() + rmtree_mock.assert_not_called() + is_zipfile_mock.assert_called_once_with(original_path) + self.assertEqual(resource_dict[resource.PROPERTY_NAME], s3_url) + + @patch("samcli.lib.package.artifact_exporter.upload_local_artifacts") + def test_resource_empty_property_value(self, upload_local_artifacts_mock): + # Property value is empty + + class MockResource(Resource): + PROPERTY_NAME = "foo" + + resource = MockResource(self.s3_uploader_mock) + + resource_id = "id" + resource_dict = {} + resource_dict[resource.PROPERTY_NAME] = "/path/to/file" + parent_dir = "dir" + s3_url = "s3://foo/bar" + + upload_local_artifacts_mock.return_value = s3_url + resource_dict = {} + resource.export(resource_id, resource_dict, parent_dir) + upload_local_artifacts_mock.assert_called_once_with( + resource_id, resource_dict, resource.PROPERTY_NAME, parent_dir, self.s3_uploader_mock + ) + self.assertEqual(resource_dict[resource.PROPERTY_NAME], s3_url) + + @patch("samcli.lib.package.artifact_exporter.upload_local_artifacts") + def test_resource_property_value_dict(self, upload_local_artifacts_mock): + # Property value is a dictionary. 
Export should not upload anything
+
+        class MockResource(Resource):
+            PROPERTY_NAME = "foo"
+
+        resource = MockResource(self.s3_uploader_mock)
+        resource_id = "id"
+        resource_dict = {}
+        resource_dict[resource.PROPERTY_NAME] = "/path/to/file"
+        parent_dir = "dir"
+        s3_url = "s3://foo/bar"
+
+        upload_local_artifacts_mock.return_value = s3_url
+        resource_dict = {}
+        resource_dict[resource.PROPERTY_NAME] = {"a": "b"}
+        resource.export(resource_id, resource_dict, parent_dir)
+        upload_local_artifacts_mock.assert_not_called()
+        self.assertEqual(resource_dict, {"foo": {"a": "b"}})
+
+    @patch("samcli.lib.package.artifact_exporter.upload_local_artifacts")
+    def test_resource_has_package_null_property_to_false(self, upload_local_artifacts_mock):
+        # Should not upload anything if PACKAGE_NULL_PROPERTY is set to False
+
+        class MockResource(Resource):
+            PROPERTY_NAME = "foo"
+            PACKAGE_NULL_PROPERTY = False
+
+        resource = MockResource(self.s3_uploader_mock)
+        resource_id = "id"
+        resource_dict = {}
+        parent_dir = "dir"
+        s3_url = "s3://foo/bar"
+
+        upload_local_artifacts_mock.return_value = s3_url
+
+        resource.export(resource_id, resource_dict, parent_dir)
+
+        upload_local_artifacts_mock.assert_not_called()
+        self.assertNotIn(resource.PROPERTY_NAME, resource_dict)
+
+    @patch("samcli.lib.package.artifact_exporter.upload_local_artifacts")
+    def test_resource_export_fails(self, upload_local_artifacts_mock):
+        class MockResource(Resource):
+            PROPERTY_NAME = "foo"
+
+        resource = MockResource(self.s3_uploader_mock)
+        resource_id = "id"
+        resource_dict = {}
+        resource_dict[resource.PROPERTY_NAME] = "/path/to/file"
+        parent_dir = "dir"
+        s3_url = "s3://foo/bar"
+
+        upload_local_artifacts_mock.side_effect = RuntimeError
+        resource_dict = {}
+
+        with self.assertRaises(exceptions.ExportFailedError):
+            resource.export(resource_id, resource_dict, parent_dir)
+
+    @patch("samcli.lib.package.artifact_exporter.upload_local_artifacts")
+    def test_resource_with_s3_url_dict(self, upload_local_artifacts_mock):
+        """
+        Checks if we properly export from the Resource class
+        """
+
+        self.assertTrue(issubclass(ResourceWithS3UrlDict, Resource))
+
+        class MockResource(ResourceWithS3UrlDict):
+            PROPERTY_NAME = "foo"
+            BUCKET_NAME_PROPERTY = "b"
+            OBJECT_KEY_PROPERTY = "o"
+            VERSION_PROPERTY = "v"
+
+        resource = MockResource(self.s3_uploader_mock)
+
+        # Case 1: Property value is a path to file
+        resource_id = "id"
+        resource_dict = {}
+        resource_dict[resource.PROPERTY_NAME] = "/path/to/file"
+        parent_dir = "dir"
+        s3_url = "s3://bucket/key1/key2?versionId=SomeVersionNumber"
+
+        upload_local_artifacts_mock.return_value = s3_url
+
+        resource.export(resource_id, resource_dict, parent_dir)
+
+        upload_local_artifacts_mock.assert_called_once_with(
+            resource_id, resource_dict, resource.PROPERTY_NAME, parent_dir, self.s3_uploader_mock
+        )
+
+        self.assertEqual(
+            resource_dict[resource.PROPERTY_NAME], {"b": "bucket", "o": "key1/key2", "v": "SomeVersionNumber"}
+        )
+
+    @patch("samcli.lib.package.artifact_exporter.Template")
+    def test_export_cloudformation_stack(self, TemplateMock):
+        stack_resource = CloudFormationStackResource(self.s3_uploader_mock)
+
+        resource_id = "id"
+        property_name = stack_resource.PROPERTY_NAME
+        exported_template_dict = {"foo": "bar"}
+        result_s3_url = "s3://hello/world"
+        result_path_style_s3_url = "http://s3.amazonws.com/hello/world"
+
+        template_instance_mock = Mock()
+        TemplateMock.return_value = template_instance_mock
+        template_instance_mock.export.return_value = exported_template_dict
+
+        
self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.to_path_style_s3_url.return_value = result_path_style_s3_url + + with tempfile.NamedTemporaryFile() as handle: + template_path = handle.name + resource_dict = {property_name: template_path} + parent_dir = tempfile.gettempdir() + + stack_resource.export(resource_id, resource_dict, parent_dir) + + self.assertEqual(resource_dict[property_name], result_path_style_s3_url) + + TemplateMock.assert_called_once_with(template_path, parent_dir, self.s3_uploader_mock) + template_instance_mock.export.assert_called_once_with() + self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) + + def test_export_cloudformation_stack_no_upload_path_is_s3url(self): + stack_resource = CloudFormationStackResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + s3_url = "s3://hello/world" + resource_dict = {property_name: s3_url} + + # Case 1: Path is already S3 url + stack_resource.export(resource_id, resource_dict, "dir") + self.assertEqual(resource_dict[property_name], s3_url) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + def test_export_cloudformation_stack_no_upload_path_is_httpsurl(self): + stack_resource = CloudFormationStackResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + s3_url = "https://s3.amazonaws.com/hello/world" + resource_dict = {property_name: s3_url} + + # Case 1: Path is already S3 url + stack_resource.export(resource_id, resource_dict, "dir") + self.assertEqual(resource_dict[property_name], s3_url) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + def test_export_cloudformation_stack_no_upload_path_is_s3_region_httpsurl(self): + stack_resource = CloudFormationStackResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + + s3_url = "https://s3.some-valid-region.amazonaws.com/hello/world" + resource_dict = {property_name: s3_url} + + stack_resource.export(resource_id, resource_dict, "dir") + self.assertEqual(resource_dict[property_name], s3_url) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + def test_export_cloudformation_stack_no_upload_path_is_empty(self): + stack_resource = CloudFormationStackResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + s3_url = "s3://hello/world" + resource_dict = {property_name: s3_url} + + # Case 2: Path is empty + resource_dict = {} + stack_resource.export(resource_id, resource_dict, "dir") + self.assertEqual(resource_dict, {}) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + def test_export_cloudformation_stack_no_upload_path_not_file(self): + stack_resource = CloudFormationStackResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + s3_url = "s3://hello/world" + + # Case 3: Path is not a file + with self.make_temp_dir() as dirname: + resource_dict = {property_name: dirname} + with self.assertRaises(exceptions.ExportFailedError): + stack_resource.export(resource_id, resource_dict, "dir") + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + @patch("samcli.lib.package.artifact_exporter.Template") + def test_export_serverless_application(self, TemplateMock): + stack_resource = ServerlessApplicationResource(self.s3_uploader_mock) + + resource_id = 
"id" + property_name = stack_resource.PROPERTY_NAME + exported_template_dict = {"foo": "bar"} + result_s3_url = "s3://hello/world" + result_path_style_s3_url = "http://s3.amazonws.com/hello/world" + + template_instance_mock = Mock() + TemplateMock.return_value = template_instance_mock + template_instance_mock.export.return_value = exported_template_dict + + self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.to_path_style_s3_url.return_value = result_path_style_s3_url + + with tempfile.NamedTemporaryFile() as handle: + template_path = handle.name + resource_dict = {property_name: template_path} + parent_dir = tempfile.gettempdir() + + stack_resource.export(resource_id, resource_dict, parent_dir) + + self.assertEqual(resource_dict[property_name], result_path_style_s3_url) + + TemplateMock.assert_called_once_with(template_path, parent_dir, self.s3_uploader_mock) + template_instance_mock.export.assert_called_once_with() + self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) + + def test_export_serverless_application_no_upload_path_is_s3url(self): + stack_resource = ServerlessApplicationResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + s3_url = "s3://hello/world" + resource_dict = {property_name: s3_url} + + # Case 1: Path is already S3 url + stack_resource.export(resource_id, resource_dict, "dir") + self.assertEqual(resource_dict[property_name], s3_url) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + def test_export_serverless_application_no_upload_path_is_httpsurl(self): + stack_resource = ServerlessApplicationResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + s3_url = "https://s3.amazonaws.com/hello/world" + resource_dict = {property_name: s3_url} + + # Case 1: Path is already S3 url + stack_resource.export(resource_id, resource_dict, "dir") + self.assertEqual(resource_dict[property_name], s3_url) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + def test_export_serverless_application_no_upload_path_is_empty(self): + stack_resource = ServerlessApplicationResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + + # Case 2: Path is empty + resource_dict = {} + stack_resource.export(resource_id, resource_dict, "dir") + self.assertEqual(resource_dict, {}) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + def test_export_serverless_application_no_upload_path_not_file(self): + stack_resource = ServerlessApplicationResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + + # Case 3: Path is not a file + with self.make_temp_dir() as dirname: + resource_dict = {property_name: dirname} + with self.assertRaises(exceptions.ExportFailedError): + stack_resource.export(resource_id, resource_dict, "dir") + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + def test_export_serverless_application_no_upload_path_is_dictionary(self): + stack_resource = ServerlessApplicationResource(self.s3_uploader_mock) + resource_id = "id" + property_name = stack_resource.PROPERTY_NAME + + # Case 4: Path is dictionary + location = {"ApplicationId": "id", "SemanticVersion": "1.0.1"} + resource_dict = {property_name: location} + stack_resource.export(resource_id, resource_dict, "dir") + 
self.assertEqual(resource_dict[property_name], location) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + + @patch("samcli.lib.package.artifact_exporter.yaml_parse") + def test_template_export_metadata(self, yaml_parse_mock): + parent_dir = os.path.sep + template_dir = os.path.join(parent_dir, "foo", "bar") + template_path = os.path.join(template_dir, "path") + template_str = self.example_yaml_template() + + metadata_type1_class = Mock() + metadata_type1_class.RESOURCE_TYPE = "metadata_type1" + metadata_type1_class.PROPERTY_NAME = "property_1" + metadata_type1_instance = Mock() + metadata_type1_class.return_value = metadata_type1_instance + + metadata_type2_class = Mock() + metadata_type2_class.RESOURCE_TYPE = "metadata_type2" + metadata_type2_class.PROPERTY_NAME = "property_2" + metadata_type2_instance = Mock() + metadata_type2_class.return_value = metadata_type2_instance + + metadata_to_export = [metadata_type1_class, metadata_type2_class] + + template_dict = {"Metadata": {"metadata_type1": {"property_1": "abc"}, "metadata_type2": {"property_2": "def"}}} + open_mock = mock.mock_open() + yaml_parse_mock.return_value = template_dict + + # Patch the file open method to return template string + with patch("samcli.lib.package.artifact_exporter.open", open_mock(read_data=template_str)) as open_mock: + + template_exporter = Template( + template_path, parent_dir, self.s3_uploader_mock, metadata_to_export=metadata_to_export + ) + exported_template = template_exporter.export() + self.assertEqual(exported_template, template_dict) + + open_mock.assert_called_once_with(make_abs_path(parent_dir, template_path), "r") + + self.assertEqual(1, yaml_parse_mock.call_count) + + metadata_type1_class.assert_called_once_with(self.s3_uploader_mock) + metadata_type1_instance.export.assert_called_once_with("metadata_type1", mock.ANY, template_dir) + metadata_type2_class.assert_called_once_with(self.s3_uploader_mock) + metadata_type2_instance.export.assert_called_once_with("metadata_type2", mock.ANY, template_dir) + + @patch("samcli.lib.package.artifact_exporter.yaml_parse") + def test_template_export(self, yaml_parse_mock): + parent_dir = os.path.sep + template_dir = os.path.join(parent_dir, "foo", "bar") + template_path = os.path.join(template_dir, "path") + template_str = self.example_yaml_template() + + resource_type1_class = Mock() + resource_type1_class.RESOURCE_TYPE = "resource_type1" + resource_type1_instance = Mock() + resource_type1_class.return_value = resource_type1_instance + resource_type2_class = Mock() + resource_type2_class.RESOURCE_TYPE = "resource_type2" + resource_type2_instance = Mock() + resource_type2_class.return_value = resource_type2_instance + + resources_to_export = [resource_type1_class, resource_type2_class] + + properties = {"foo": "bar"} + template_dict = { + "Resources": { + "Resource1": {"Type": "resource_type1", "Properties": properties}, + "Resource2": {"Type": "resource_type2", "Properties": properties}, + "Resource3": {"Type": "some-other-type", "Properties": properties}, + } + } + + open_mock = mock.mock_open() + yaml_parse_mock.return_value = template_dict + + # Patch the file open method to return template string + with patch("samcli.lib.package.artifact_exporter.open", open_mock(read_data=template_str)) as open_mock: + + template_exporter = Template(template_path, parent_dir, self.s3_uploader_mock, resources_to_export) + exported_template = template_exporter.export() + self.assertEqual(exported_template, template_dict) + + 
open_mock.assert_called_once_with(make_abs_path(parent_dir, template_path), "r") + + self.assertEqual(1, yaml_parse_mock.call_count) + + resource_type1_class.assert_called_once_with(self.s3_uploader_mock) + resource_type1_instance.export.assert_called_once_with("Resource1", mock.ANY, template_dir) + resource_type2_class.assert_called_once_with(self.s3_uploader_mock) + resource_type2_instance.export.assert_called_once_with("Resource2", mock.ANY, template_dir) + + @patch("samcli.lib.package.artifact_exporter.yaml_parse") + def test_template_global_export(self, yaml_parse_mock): + parent_dir = os.path.sep + template_dir = os.path.join(parent_dir, "foo", "bar") + template_path = os.path.join(template_dir, "path") + template_str = self.example_yaml_template() + + resource_type1_class = Mock() + resource_type1_instance = Mock() + resource_type1_class.return_value = resource_type1_instance + resource_type2_class = Mock() + resource_type2_instance = Mock() + resource_type2_class.return_value = resource_type2_instance + + resources_to_export = {"resource_type1": resource_type1_class, "resource_type2": resource_type2_class} + properties1 = {"foo": "bar", "Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "foo.yaml"}}} + properties2 = {"foo": "bar", "Fn::Transform": {"Name": "AWS::OtherTransform"}} + properties_in_list = {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "bar.yaml"}}} + template_dict = { + "Resources": { + "Resource1": {"Type": "resource_type1", "Properties": properties1}, + "Resource2": {"Type": "resource_type2", "Properties": properties2}, + }, + "List": ["foo", properties_in_list], + } + open_mock = mock.mock_open() + include_transform_export_handler_mock = Mock() + include_transform_export_handler_mock.return_value = { + "Name": "AWS::Include", + "Parameters": {"Location": "s3://foo"}, + } + yaml_parse_mock.return_value = template_dict + + with patch("samcli.lib.package.artifact_exporter.open", open_mock(read_data=template_str)) as open_mock: + with patch.dict(GLOBAL_EXPORT_DICT, {"Fn::Transform": include_transform_export_handler_mock}): + template_exporter = Template(template_path, parent_dir, self.s3_uploader_mock, resources_to_export) + + exported_template = template_exporter.export_global_artifacts(template_exporter.template_dict) + + first_call_args, kwargs = include_transform_export_handler_mock.call_args_list[0] + second_call_args, kwargs = include_transform_export_handler_mock.call_args_list[1] + third_call_args, kwargs = include_transform_export_handler_mock.call_args_list[2] + call_args = [first_call_args[0], second_call_args[0], third_call_args[0]] + self.assertTrue({"Name": "AWS::Include", "Parameters": {"Location": "foo.yaml"}} in call_args) + self.assertTrue({"Name": "AWS::OtherTransform"} in call_args) + self.assertTrue({"Name": "AWS::Include", "Parameters": {"Location": "bar.yaml"}} in call_args) + self.assertTrue(template_dir in first_call_args) + self.assertTrue(template_dir in second_call_args) + self.assertTrue(template_dir in third_call_args) + self.assertEqual(include_transform_export_handler_mock.call_count, 3) + # new s3 url is added to include location + self.assertEqual( + exported_template["Resources"]["Resource1"]["Properties"]["Fn::Transform"], + {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}}, + ) + self.assertEqual( + exported_template["List"][1]["Fn::Transform"], + {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}}, + ) + + 
@patch("samcli.lib.package.artifact_exporter.is_local_file")
+    def test_include_transform_export_handler_with_relative_file_path(self, is_local_file_mock):
+        # exports transform
+        parent_dir = os.path.abspath("someroot")
+        self.s3_uploader_mock.upload_with_dedup.return_value = "s3://foo"
+        is_local_file_mock.return_value = True
+        abs_file_path = os.path.join(parent_dir, "foo.yaml")
+
+        handler_output = include_transform_export_handler(
+            {"Name": "AWS::Include", "Parameters": {"Location": "foo.yaml"}}, self.s3_uploader_mock, parent_dir
+        )
+        self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(abs_file_path)
+        is_local_file_mock.assert_called_with(abs_file_path)
+        self.assertEqual(handler_output, {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}})
+
+    @patch("samcli.lib.package.artifact_exporter.is_local_file")
+    def test_include_transform_export_handler_with_absolute_file_path(self, is_local_file_mock):
+        # exports transform
+        parent_dir = os.path.abspath("someroot")
+        self.s3_uploader_mock.upload_with_dedup.return_value = "s3://foo"
+        is_local_file_mock.return_value = True
+        abs_file_path = os.path.abspath(os.path.join("my", "file.yaml"))
+
+        handler_output = include_transform_export_handler(
+            {"Name": "AWS::Include", "Parameters": {"Location": abs_file_path}}, self.s3_uploader_mock, parent_dir
+        )
+        self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(abs_file_path)
+        is_local_file_mock.assert_called_with(abs_file_path)
+        self.assertEqual(handler_output, {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}})
+
+    @patch("samcli.lib.package.artifact_exporter.is_local_file")
+    def test_include_transform_export_handler_with_s3_uri(self, is_local_file_mock):
+
+        handler_output = include_transform_export_handler(
+            {"Name": "AWS::Include", "Parameters": {"Location": "s3://bucket/foo.yaml"}},
+            self.s3_uploader_mock,
+            "parent_dir",
+        )
+        # Input is returned unmodified
+        self.assertEqual(handler_output, {"Name": "AWS::Include", "Parameters": {"Location": "s3://bucket/foo.yaml"}})
+
+        is_local_file_mock.assert_not_called()
+        self.s3_uploader_mock.assert_not_called()
+
+    @patch("samcli.lib.package.artifact_exporter.is_local_file")
+    def test_include_transform_export_handler_with_no_path(self, is_local_file_mock):
+
+        handler_output = include_transform_export_handler(
+            {"Name": "AWS::Include", "Parameters": {"Location": ""}}, self.s3_uploader_mock, "parent_dir"
+        )
+        # Input is returned unmodified
+        self.assertEqual(handler_output, {"Name": "AWS::Include", "Parameters": {"Location": ""}})
+
+        is_local_file_mock.assert_not_called()
+        self.s3_uploader_mock.assert_not_called()
+
+    @patch("samcli.lib.package.artifact_exporter.is_local_file")
+    def test_include_transform_export_handler_with_dict_value_for_location(self, is_local_file_mock):
+
+        handler_output = include_transform_export_handler(
+            {"Name": "AWS::Include", "Parameters": {"Location": {"Fn::Sub": "${S3Bucket}/file.txt"}}},
+            self.s3_uploader_mock,
+            "parent_dir",
+        )
+        # Input is returned unmodified
+        self.assertEqual(
+            handler_output, {"Name": "AWS::Include", "Parameters": {"Location": {"Fn::Sub": "${S3Bucket}/file.txt"}}}
+        )
+
+        is_local_file_mock.assert_not_called()
+        self.s3_uploader_mock.assert_not_called()
+
+    @patch("samcli.lib.package.artifact_exporter.is_local_file")
+    def test_include_transform_export_handler_non_local_file(self, is_local_file_mock):
+        # raises InvalidLocalPathError if the transform location is neither a local file nor an S3 URI
+        is_local_file_mock.return_value = False
+
+        
with self.assertRaises(exceptions.InvalidLocalPathError): + include_transform_export_handler( + {"Name": "AWS::Include", "Parameters": {"Location": "http://foo.yaml"}}, + self.s3_uploader_mock, + "parent_dir", + ) + is_local_file_mock.assert_called_with("http://foo.yaml") + self.s3_uploader_mock.assert_not_called() + + @patch("samcli.lib.package.artifact_exporter.is_local_file") + def test_include_transform_export_handler_non_include_transform(self, is_local_file_mock): + # ignores transform that is not aws::include + handler_output = include_transform_export_handler( + {"Name": "AWS::OtherTransform", "Parameters": {"Location": "foo.yaml"}}, self.s3_uploader_mock, "parent_dir" + ) + self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.assertEqual(handler_output, {"Name": "AWS::OtherTransform", "Parameters": {"Location": "foo.yaml"}}) + + def test_template_export_path_be_folder(self): + + template_path = "/path/foo" + # Set parent_dir to be a non-existent folder + with self.assertRaises(ValueError): + Template(template_path, "somefolder", self.s3_uploader_mock) + + # Set parent_dir to be a real folder, but just a relative path + with self.make_temp_dir() as dirname: + with self.assertRaises(ValueError): + Template(template_path, os.path.relpath(dirname), self.s3_uploader_mock) + + def test_make_zip(self): + test_file_creator = FileCreator() + test_file_creator.append_file( + "index.js", "exports handler = (event, context, callback) => {callback(null, event);}" + ) + + dirname = test_file_creator.rootdir + + expected_files = {"index.js"} + + random_name = "".join(random.choice(string.ascii_letters) for _ in range(10)) + outfile = os.path.join(tempfile.gettempdir(), random_name) + + zipfile_name = None + try: + zipfile_name = make_zip(outfile, dirname) + + test_zip_file = zipfile.ZipFile(zipfile_name, "r") + with closing(test_zip_file) as zf: + files_in_zip = set() + for info in zf.infolist(): + files_in_zip.add(info.filename) + + self.assertEqual(files_in_zip, expected_files) + + finally: + if zipfile_name: + os.remove(zipfile_name) + test_file_creator.remove_all() + + @patch("shutil.copyfile") + @patch("tempfile.mkdtemp") + def test_copy_to_temp_dir(self, mkdtemp_mock, copyfile_mock): + temp_dir = "/tmp/foo/" + filename = "test.js" + mkdtemp_mock.return_value = temp_dir + + returned_dir = copy_to_temp_dir(filename) + + self.assertEqual(returned_dir, temp_dir) + copyfile_mock.assert_called_once_with(filename, temp_dir + filename) + + @contextmanager + def make_temp_dir(self): + filename = tempfile.mkdtemp() + try: + yield filename + finally: + if filename: + os.rmdir(filename) + + def example_yaml_template(self): + return """ + AWSTemplateFormatVersion: '2010-09-09' + Description: Simple CRUD webservice. State is stored in a SimpleTable (DynamoDB) resource. 
+ Resources: + MyFunction: + Type: AWS::Lambda::Function + Properties: + Code: ./handler + Handler: index.get + Role: + Fn::GetAtt: + - MyFunctionRole + - Arn + Timeout: 20 + Runtime: nodejs4.3 + """ diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py new file mode 100644 index 0000000000..1b47871e0e --- /dev/null +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -0,0 +1,159 @@ +import os + +from unittest import TestCase +from unittest.mock import MagicMock +import tempfile + +from pathlib import Path +from botocore.exceptions import ClientError + +from samcli.lib.package.s3_uploader import S3Uploader, NoSuchBucketError + + +class TestS3Uploader(TestCase): + def setUp(self): + self.s3 = MagicMock() + self.bucket_name = "mock-bucket" + self.prefix = "mock-prefix" + self.kms_key_id = "mock-kms-key-id" + self.force_upload = False + + def test_s3_uploader_init(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + self.assertEqual(s3_uploader.s3, self.s3) + self.assertEqual(s3_uploader.bucket_name, self.bucket_name) + self.assertEqual(s3_uploader.prefix, self.prefix) + self.assertEqual(s3_uploader.kms_key_id, self.kms_key_id) + self.assertEqual(s3_uploader.force_upload, self.force_upload) + self.assertEqual(s3_uploader.artifact_metadata, None) + + def test_s3_uploader_artifact_metadata(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + s3_uploader.artifact_metadata = {} + self.assertEqual(s3_uploader.artifact_metadata, {}) + with self.assertRaises(TypeError): + s3_uploader.artifact_metadata = "Not a dict" + + def test_s3_upload_skip_upload(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=None, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + with tempfile.NamedTemporaryFile() as f: + s3_url = s3_uploader.upload("package.zip", f.name) + self.assertEqual(s3_url, "s3://{0}/{1}".format(self.bucket_name, f.name)) + + def test_s3_upload_skip_upload_with_prefix(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + with tempfile.NamedTemporaryFile() as f: + s3_url = s3_uploader.upload("package.zip", f.name) + self.assertEqual(s3_url, "s3://{0}/{1}/{2}".format(self.bucket_name, self.prefix, f.name)) + + def test_s3_upload_bucket_not_found(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=True, + ) + remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp")) + s3_uploader.transfer_manager.upload = MagicMock( + side_effect=ClientError(error_response={"Error": {"Code": "NoSuchBucket"}}, operation_name="create_object") + ) + with tempfile.NamedTemporaryFile() as f: + with self.assertRaises(NoSuchBucketError): + s3_uploader.upload(f.name, remote_path) + + def test_s3_upload_general_error(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=True, + ) + remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp")) + s3_uploader.transfer_manager.upload = MagicMock( + side_effect=ClientError(error_response={"Error": 
{"Code": "Unknown"}}, operation_name="create_object") + ) + with tempfile.NamedTemporaryFile() as f: + with self.assertRaises(ClientError): + s3_uploader.upload(f.name, remote_path) + + def test_file_checksum(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + with tempfile.NamedTemporaryFile(mode="wb", delete=False) as f: + f.write(b"Hello World!") + f.seek(0) + self.assertEqual("ed076287532e86365e841e92bfc50d8c", s3_uploader.file_checksum(f.name)) + + def test_path_style_s3_url(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + self.s3.meta.endpoint_url = "s3_url" + self.assertEqual( + s3_uploader.to_path_style_s3_url("package.zip", version="1"), "s3_url/mock-bucket/package.zip?versionId=1" + ) + + def test_s3_upload(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + s3_uploader.artifact_metadata = {"a": "b"} + remote_path = Path.joinpath(Path(os.getcwd()), Path("tmp")) + self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object")) + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + s3_url = s3_uploader.upload(f.name, remote_path) + self.assertEqual(s3_url, "s3://{0}/{1}/{2}".format(self.bucket_name, self.prefix, remote_path)) + + def test_s3_upload_with_dedup(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + ) + self.s3.head_object = MagicMock(side_effect=ClientError(error_response={}, operation_name="head_object")) + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + s3_url = s3_uploader.upload_with_dedup(f.name, "zip") + self.assertEqual( + s3_url, "s3://{0}/{1}/{2}.zip".format(self.bucket_name, self.prefix, s3_uploader.file_checksum(f.name)) + ) From aa265029e07a54106ff2dc967ec737bc6651d108 Mon Sep 17 00:00:00 2001 From: Alex Wood Date: Tue, 5 Nov 2019 14:47:06 -0800 Subject: [PATCH 5/7] Init User Flow Rework (#1501) * Experience Improvements to sam init * Add Numeric Key Prompt for Dependency Managers * Remove Output Dir Prompt, Clean Up Formatting * Improve Prompt Formatting * Improve Init User Flows * Additional Changes to Init Flow * Linter Changes * Constrain python-dateutil Dependency botocore sets a maximum version number and pip can't figure that out. 
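The interactive prompts reworked in this patch all follow one numbered-selection pattern: print a 1-based menu, accept only the printed numbers via click.Choice, and map the answer back to a zero-based list index. A minimal sketch of that pattern follows; the helper name and its arguments are illustrative stand-ins, not part of the patch itself:

    import click

    def prompt_numbered_choice(question, options):
        # Print a 1-based menu of the available options.
        click.echo("\n{0}".format(question))
        for index, option in enumerate(options, start=1):
            click.echo("\t{0} - {1}".format(index, option))
        # Accept only the printed numbers; suppress click's own choice listing.
        choices = list(map(str, range(1, len(options) + 1)))
        answer = click.prompt("Choice", type=click.Choice(choices), show_choices=False)
        # Convert the 1-based answer back to a zero-based list index.
        return options[int(answer) - 1]

For example, prompt_numbered_choice("Which runtime would you like to use?", INIT_RUNTIMES) reproduces the runtime menu that the reworked tests below drive with the input "3" to select ruby2.5.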
--- requirements/base.txt | 2 +- samcli/commands/init/__init__.py | 2 +- samcli/commands/init/init_generator.py | 40 --------- samcli/commands/init/init_templates.py | 51 +++++++----- samcli/commands/init/interactive_init_flow.py | 81 +++++++++++++++---- samcli/local/common/runtime_template.py | 15 +++- tests/unit/commands/init/test_cli.py | 54 +++++++++---- tests/unit/commands/init/test_templates.py | 6 +- 8 files changed, 153 insertions(+), 98 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index bd43830f76..31fb7b0c94 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -8,7 +8,7 @@ cookiecutter~=1.6.0 aws-sam-translator==1.15.1 docker~=4.0 dateparser~=0.7 -python-dateutil~=2.6 +python-dateutil~=2.6, <2.8.1 requests==2.22.0 serverlessrepo==0.1.9 aws_lambda_builders==0.5.0 diff --git a/samcli/commands/init/__init__.py b/samcli/commands/init/__init__.py index 79a0966a5f..db92f30765 100644 --- a/samcli/commands/init/__init__.py +++ b/samcli/commands/init/__init__.py @@ -74,7 +74,7 @@ help="Dependency manager of your Lambda runtime", required=False, ) -@click.option("-o", "--output-dir", type=click.Path(), help="Where to output the initialized app into") +@click.option("-o", "--output-dir", type=click.Path(), help="Where to output the initialized app into", default=".") @click.option("-n", "--name", help="Name of your project to be generated as a folder") @click.option( "--app-template", diff --git a/samcli/commands/init/init_generator.py b/samcli/commands/init/init_generator.py index 5c1111f3fc..dc6bbbea8b 100644 --- a/samcli/commands/init/init_generator.py +++ b/samcli/commands/init/init_generator.py @@ -9,47 +9,7 @@ def do_generate(location, runtime, dependency_manager, output_dir, name, no_input, extra_context): - no_build_msg = """ -Project generated: {output_dir}/{name} - -Steps you can take next within the project folder -=================================================== -[*] Invoke Function: sam local invoke HelloWorldFunction --event event.json -[*] Start API Gateway locally: sam local start-api -""".format( - output_dir=output_dir, name=name - ) - - build_msg = """ -Project generated: {output_dir}/{name} - -Steps you can take next within the project folder -=================================================== -[*] Install dependencies -[*] Invoke Function: sam local invoke HelloWorldFunction --event event.json -[*] Start API Gateway locally: sam local start-api -""".format( - output_dir=output_dir, name=name - ) - - no_build_step_required = ( - "python", - "python3.7", - "python3.6", - "python2.7", - "nodejs", - "nodejs4.3", - "nodejs6.10", - "nodejs8.10", - "nodejs10.x", - "ruby2.5", - ) - next_step_msg = no_build_msg if runtime in no_build_step_required else build_msg try: generate_project(location, runtime, dependency_manager, output_dir, name, no_input, extra_context) - if not location: - click.secho(next_step_msg, bold=True) - click.secho("Read {name}/README.md for further instructions\n".format(name=name), bold=True) - click.secho("[*] Project initialization is now complete", fg="green") except GenerateProjectFailedError as e: raise UserException(str(e)) diff --git a/samcli/commands/init/init_templates.py b/samcli/commands/init/init_templates.py index f6a5e9cb79..8f53bb4efd 100644 --- a/samcli/commands/init/init_templates.py +++ b/samcli/commands/init/init_templates.py @@ -32,28 +32,33 @@ def __init__(self, no_interactive=False, auto_clone=True): def prompt_for_location(self, runtime, dependency_manager): options = 
self.init_options(runtime, dependency_manager) - choices = map(str, range(1, len(options) + 1)) - choice_num = 1 - for o in options: - if o.get("displayName") is not None: - msg = str(choice_num) + " - " + o.get("displayName") - click.echo(msg) - else: - msg = ( - str(choice_num) - + " - Default Template for runtime " - + runtime - + " with dependency manager " - + dependency_manager - ) - click.echo(msg) - choice_num = choice_num + 1 - choice = click.prompt("Template Selection", type=click.Choice(choices), show_choices=False) - template_md = options[int(choice) - 1] # zero index + if len(options) == 1: + template_md = options[0] + else: + choices = list(map(str, range(1, len(options) + 1))) + choice_num = 1 + click.echo("\nAWS quick start application templates:") + for o in options: + if o.get("displayName") is not None: + msg = "\t" + str(choice_num) + " - " + o.get("displayName") + click.echo(msg) + else: + msg = ( + "\t" + + str(choice_num) + + " - Default Template for runtime " + + runtime + + " with dependency manager " + + dependency_manager + ) + click.echo(msg) + choice_num = choice_num + 1 + choice = click.prompt("Template selection", type=click.Choice(choices), show_choices=False) + template_md = options[int(choice) - 1] # zero index if template_md.get("init_location") is not None: - return template_md["init_location"] + return (template_md["init_location"], "hello-world") if template_md.get("directory") is not None: - return os.path.join(self.repo_path, template_md["directory"]) + return (os.path.join(self.repo_path, template_md["directory"]), template_md["appTemplate"]) raise UserException("Invalid template. This should not be possible, please raise an issue.") def location_from_app_template(self, runtime, dependency_manager, app_template): @@ -150,7 +155,9 @@ def _should_clone_repo(self, expected_path): path = Path(expected_path) if path.exists(): if not self._no_interactive: - overwrite = click.confirm("Init templates exist on disk. Do you wish to update?") + overwrite = click.confirm( + "\nQuick start templates may have been updated. Do you want to re-download the latest", default=True + ) if overwrite: shutil.rmtree(expected_path) # fail hard if there is an issue return True @@ -160,6 +167,6 @@ def _should_clone_repo(self, expected_path): if self._no_interactive: return self._auto_clone do_clone = click.confirm( - "This process will clone app templates from https://github.com/awslabs/aws-sam-cli-app-templates - is this ok?" 
+ "\nAllow SAM CLI to download AWS-provided quick start templates from Github", default=True ) return do_clone diff --git a/samcli/commands/init/interactive_init_flow.py b/samcli/commands/init/interactive_init_flow.py index dbb55a7b44..2e1784d3fe 100644 --- a/samcli/commands/init/interactive_init_flow.py +++ b/samcli/commands/init/interactive_init_flow.py @@ -3,7 +3,7 @@ """ import click -from samcli.local.common.runtime_template import RUNTIMES, RUNTIME_TO_DEPENDENCY_MANAGERS +from samcli.local.common.runtime_template import INIT_RUNTIMES, RUNTIME_TO_DEPENDENCY_MANAGERS from samcli.commands.init.init_generator import do_generate from samcli.commands.init.init_templates import InitTemplates @@ -12,8 +12,9 @@ def do_interactive(location, runtime, dependency_manager, output_dir, name, app_ if app_template: location_opt_choice = "1" else: - click.echo("1 - Use a Managed Application Template\n2 - Provide a Custom Location") - location_opt_choice = click.prompt("Location Choice", type=click.Choice(["1", "2"]), show_choices=False) + click.echo("Which template source would you like to use?") + click.echo("\t1 - AWS Quick Start Templates\n\t2 - Custom Template Location") + location_opt_choice = click.prompt("Choice", type=click.Choice(["1", "2"]), show_choices=False) if location_opt_choice == "2": _generate_from_location(location, runtime, dependency_manager, output_dir, name, app_template, no_input) else: @@ -21,34 +22,84 @@ def do_interactive(location, runtime, dependency_manager, output_dir, name, app_ def _generate_from_location(location, runtime, dependency_manager, output_dir, name, app_template, no_input): - location = click.prompt("Template location (git, mercurial, http(s), zip, path)", type=str) - if not output_dir: - output_dir = click.prompt("Output Directory", type=click.Path(), default=".") + location = click.prompt("\nTemplate location (git, mercurial, http(s), zip, path)", type=str) + summary_msg = """ +----------------------- +Generating application: +----------------------- +Location: {location} +Output Directory: {output_dir} + +To do this without interactive prompts, you can run: + + sam init --location {location} --output-dir {output_dir} + """.format( + location=location, output_dir=output_dir + ) + click.echo(summary_msg) do_generate(location, runtime, dependency_manager, output_dir, name, no_input, None) +# pylint: disable=too-many-statements def _generate_from_app_template(location, runtime, dependency_manager, output_dir, name, app_template): extra_context = None - if not name: - name = click.prompt("Project Name", type=str) if not runtime: - runtime = click.prompt("Runtime", type=click.Choice(RUNTIMES)) + choices = list(map(str, range(1, len(INIT_RUNTIMES) + 1))) + choice_num = 1 + click.echo("\nWhich runtime would you like to use?") + for r in INIT_RUNTIMES: + msg = "\t" + str(choice_num) + " - " + r + click.echo(msg) + choice_num = choice_num + 1 + choice = click.prompt("Runtime", type=click.Choice(choices), show_choices=False) + runtime = INIT_RUNTIMES[int(choice) - 1] # zero index if not dependency_manager: valid_dep_managers = RUNTIME_TO_DEPENDENCY_MANAGERS.get(runtime) if valid_dep_managers is None: dependency_manager = None + elif len(valid_dep_managers) == 1: + dependency_manager = valid_dep_managers[0] else: - dependency_manager = click.prompt( - "Dependency Manager", type=click.Choice(valid_dep_managers), default=valid_dep_managers[0] - ) + choices = list(map(str, range(1, len(valid_dep_managers) + 1))) + choice_num = 1 + click.echo("\nWhich dependency manager 
would you like to use?") + for dm in valid_dep_managers: + msg = "\t" + str(choice_num) + " - " + dm + click.echo(msg) + choice_num = choice_num + 1 + choice = click.prompt("Dependency manager", type=click.Choice(choices), show_choices=False) + dependency_manager = valid_dep_managers[int(choice) - 1] # zero index + if not name: + name = click.prompt("\nProject name", type=str, default="sam-app") templates = InitTemplates() if app_template is not None: location = templates.location_from_app_template(runtime, dependency_manager, app_template) extra_context = {"project_name": name, "runtime": runtime} else: - location = templates.prompt_for_location(runtime, dependency_manager) + location, app_template = templates.prompt_for_location(runtime, dependency_manager) extra_context = {"project_name": name, "runtime": runtime} no_input = True - if not output_dir: - output_dir = click.prompt("Output Directory", type=click.Path(), default=".") + summary_msg = """ +----------------------- +Generating application: +----------------------- +Name: {name} +Runtime: {runtime} +Dependency Manager: {dependency_manager} +Application Template: {app_template} +Output Directory: {output_dir} + +Non-interactive init command with parameters: + + sam init --name {name} --runtime {runtime} --dependency-manager {dependency_manager} --app-template {app_template} --output-dir {output_dir} + +Next steps can be found in the README file at {output_dir}/{name}/README.md + """.format( + name=name, + runtime=runtime, + dependency_manager=dependency_manager, + app_template=app_template, + output_dir=output_dir, + ) + click.echo(summary_msg) do_generate(location, runtime, dependency_manager, output_dir, name, no_input, extra_context) diff --git a/samcli/local/common/runtime_template.py b/samcli/local/common/runtime_template.py index eda4ae3802..7788e2cf53 100644 --- a/samcli/local/common/runtime_template.py +++ b/samcli/local/common/runtime_template.py @@ -99,4 +99,17 @@ itertools.chain(*[c["runtimes"] for c in list(itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values())))]) ) -INIT_RUNTIMES = RUNTIMES.union(RUNTIME_DEP_TEMPLATE_MAPPING.keys()) +INIT_RUNTIMES = [ + "nodejs10.x", + "python3.7", + "ruby2.5", + "go1.x", + "java8", + "dotnetcore2.1", + "nodejs8.10", + "nodejs6.10", + "python3.6", + "python2.7", + "dotnetcore2.0", + "dotnetcore1.0", +] diff --git a/tests/unit/commands/init/test_cli.py b/tests/unit/commands/init/test_cli.py index 98dc5e1b88..0e9ebc364e 100644 --- a/tests/unit/commands/init/test_cli.py +++ b/tests/unit/commands/init/test_cli.py @@ -95,19 +95,14 @@ def test_init_cli_interactive(self, generate_project_patch, sd_mock): # WHEN the user follows interactive init prompts # 1: selecting managed templates + # 3: ruby2.5 response to runtime # test-project: response to name - # ruby2.5: response to runtime - # bundler: response to dependency manager # N: Don't clone/update the source repo - # 1: First choice will always be the hello world example user_input = """ 1 +3 test-project -ruby2.5 -bundler N -1 -. 
""" runner = CliRunner() result = runner.invoke(init_cmd, input=user_input) @@ -125,22 +120,51 @@ def test_init_cli_interactive(self, generate_project_patch, sd_mock): {"project_name": "test-project", "runtime": "ruby2.5"}, ) + @patch("samcli.commands.init.init_templates.InitTemplates._shared_dir_check") + @patch("samcli.commands.init.init_generator.generate_project") + def test_init_cli_interactive_multiple_dep_mgrs(self, generate_project_patch, sd_mock): + # WHEN the user follows interactive init prompts + + # 1: selecting managed templates + # 5: java8 response to runtime + # 2: gradle as the dependency manager + # test-project: response to name + # N: Don't clone/update the source repo + user_input = """ +1 +5 +2 +test-project +N + """ + runner = CliRunner() + result = runner.invoke(init_cmd, input=user_input) + + # THEN we should receive no errors + self.assertFalse(result.exception) + generate_project_patch.assert_called_once_with( + # need to change the location validation check + ANY, + "java8", + "gradle", + ".", + "test-project", + True, + {"project_name": "test-project", "runtime": "java8"}, + ) + @patch("samcli.commands.init.init_templates.InitTemplates._shared_dir_check") @patch("samcli.commands.init.init_generator.generate_project") def test_init_cli_int_with_app_template(self, generate_project_patch, sd_mock): # WHEN the user follows interactive init prompts + # 3: ruby2.5 response to runtime # test-project: response to name - # ruby2.5: response to runtime - # bundler: response to dependency manager # N: Don't clone/update the source repo - # .: output dir user_input = """ +3 test-project -ruby2.5 -bundler N -. """ runner = CliRunner() result = runner.invoke(init_cmd, ["--app-template", "hello-world"], input=user_input) @@ -165,11 +189,9 @@ def test_init_cli_int_from_location(self, generate_project_patch, sd_mock): # 2: selecting custom location # foo: the "location" - # output/: the "output dir" user_input = """ 2 foo -output/ """ runner = CliRunner() @@ -182,7 +204,7 @@ def test_init_cli_int_from_location(self, generate_project_patch, sd_mock): "foo", None, None, - "output/", + ".", None, False, None, diff --git a/tests/unit/commands/init/test_templates.py b/tests/unit/commands/init/test_templates.py index 6f10ca61e1..0b91a5f00f 100644 --- a/tests/unit/commands/init/test_templates.py +++ b/tests/unit/commands/init/test_templates.py @@ -45,8 +45,9 @@ def test_fallback_options(self, git_exec_mock, prompt_mock, sd_mock): mock_sub.side_effect = OSError("Fail") mock_cfg.return_value = "/tmp/test-sam" it = InitTemplates(True) - location = it.prompt_for_location("ruby2.5", "bundler") + location, app_template = it.prompt_for_location("ruby2.5", "bundler") self.assertTrue(search("cookiecutter-aws-sam-hello-ruby", location)) + self.assertEqual("hello-world", app_template) @patch("samcli.commands.init.init_templates.InitTemplates._git_executable") @patch("click.prompt") @@ -58,8 +59,9 @@ def test_fallback_process_error(self, git_exec_mock, prompt_mock, sd_mock): mock_sub.side_effect = subprocess.CalledProcessError("fail", "fail", "not found".encode("utf-8")) mock_cfg.return_value = "/tmp/test-sam" it = InitTemplates(True) - location = it.prompt_for_location("ruby2.5", "bundler") + location, app_template = it.prompt_for_location("ruby2.5", "bundler") self.assertTrue(search("cookiecutter-aws-sam-hello-ruby", location)) + self.assertEqual("hello-world", app_template) def test_git_executable_windows(self): with patch("platform.system", new_callable=MagicMock) as mock_platform: From 
7e272efde7fd9571b0de8add889ea34a7ffab989 Mon Sep 17 00:00:00 2001 From: Alex Wood Date: Wed, 6 Nov 2019 12:46:01 -0800 Subject: [PATCH 6/7] Remove non-interactive command explainer (#1508) --- samcli/commands/init/interactive_init_flow.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/samcli/commands/init/interactive_init_flow.py b/samcli/commands/init/interactive_init_flow.py index 2e1784d3fe..d8ea204bf3 100644 --- a/samcli/commands/init/interactive_init_flow.py +++ b/samcli/commands/init/interactive_init_flow.py @@ -29,10 +29,6 @@ def _generate_from_location(location, runtime, dependency_manager, output_dir, n ----------------------- Location: {location} Output Directory: {output_dir} - -To do this without interactive prompts, you can run: - - sam init --location {location} --output-dir {output_dir} """.format( location=location, output_dir=output_dir ) @@ -89,10 +85,6 @@ def _generate_from_app_template(location, runtime, dependency_manager, output_di Application Template: {app_template} Output Directory: {output_dir} -Non-interactive init command with parameters: - - sam init --name {name} --runtime {runtime} --dependency-manager {dependency_manager} --app-template {app_template} --output-dir {output_dir} - Next steps can be found in the README file at {output_dir}/{name}/README.md """.format( name=name, From e2e20718d02aa29a6c27e9c3b898d9a65f5cd124 Mon Sep 17 00:00:00 2001 From: Sanath Kumar Ramesh Date: Wed, 6 Nov 2019 12:53:49 -0800 Subject: [PATCH 7/7] chore: Bump version to 0.31.0 (#1510) --- samcli/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samcli/__init__.py b/samcli/__init__.py index d126c85d2a..dc38712618 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = "0.30.0" +__version__ = "0.31.0"
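
For reference, the two numbered-menu prompts that the interactive init patch above adds to interactive_init_flow.py (runtime selection and dependency-manager selection) follow the same pattern: echo the options as "\t<n> - <option>", accept a number, and map it back with a zero-indexed list lookup. Below is a minimal sketch of that pattern, assuming only click; the helper name prompt_choice is hypothetical and is not part of these patches.

import click

def prompt_choice(question, options):
    """Print a numbered menu and return the selected option (illustrative sketch)."""
    click.echo("\n" + question)
    for num, option in enumerate(options, start=1):
        click.echo("\t{} - {}".format(num, option))
    # Valid answers are the menu numbers as strings, e.g. ["1", "2", ...]
    valid_choices = [str(n) for n in range(1, len(options) + 1)]
    choice = click.prompt("Choice", type=click.Choice(valid_choices), show_choices=False)
    return options[int(choice) - 1]  # zero-indexed lookup, as in the patch

Usage mirroring the runtime prompt would look like: runtime = prompt_choice("Which runtime would you like to use?", INIT_RUNTIMES). Keeping INIT_RUNTIMES an ordered list rather than a set is what makes the numeric menu stable across runs.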