diff --git a/.flake8 b/.flake8
deleted file mode 100644
index f4ae26a85e..0000000000
--- a/.flake8
+++ /dev/null
@@ -1,4 +0,0 @@
-[flake8]
-max-line-length = 120
-ignore = E126
-exclude = samcli/local/init/templates
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 77c7fe858c..0126ab5dee 100644
--- a/.gitignore
+++ b/.gitignore
@@ -245,6 +245,7 @@ wheels/
 *.egg-info/
 .installed.cfg
 *.egg
+pip-wheel-metadata/
 
 # PyInstaller
 # Usually these files are written by a python script from a template
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000000..5f16c0c830
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,7 @@
+repos:
+  - repo: https://github.com/python/black
+    rev: 19.3b0
+    hooks:
+      - id: black
+        language_version: python3.7
+        exclude_types: ['markdown', 'ini', 'toml']
diff --git a/DEVELOPMENT_GUIDE.md b/DEVELOPMENT_GUIDE.md
index f6e916f426..b9605ac0b3 100644
--- a/DEVELOPMENT_GUIDE.md
+++ b/DEVELOPMENT_GUIDE.md
@@ -36,7 +36,20 @@ easily setup multiple Python versions.
 5. Make Python versions available in the project:
    `pyenv local 3.6.8 2.7.14 3.7.2`
 
-### 2. Activate Virtualenv
+### 2. Install Additional Tooling
+#### Black
+We format our code using [Black](https://github.com/python/black) and verify that the source code is Black-compliant
+in AppVeyor during PRs. You can find installation instructions in [Black's docs](https://black.readthedocs.io/en/stable/installation_and_usage.html).
+
+After installing, you can run our formatting through our Makefile with `make black-format`, or integrate Black directly into your favorite IDE (instructions
+can be found [here](https://black.readthedocs.io/en/stable/editor_integration.html)).
+
+#### Pre-commit
+If you don't wish to run Black manually on each PR or install Black yourself, we have integrated Black into our git hooks through [pre-commit](https://pre-commit.com/).
+After installing pre-commit, run `pre-commit install` in the root of the project. This will install Black for you and run the Black formatting on
+each commit.
+
+### 3. Activate Virtualenv
 
 Virtualenv allows you to install required libraries outside of the Python
 installation. A good practice is to setup a different virtualenv
@@ -49,7 +62,7 @@ be the appropriate python version.
 1. `pyenv virtualenv 3.7.2 samcli37`
 2. `pyenv activate samcli37` for Python3.7
 
-### 3. Install dev version of SAM CLI
+### 4. Install dev version of SAM CLI
 
 We will install a development version of SAM CLI from source into the
 virtualenv for you to try out the CLI as you make changes. We will
@@ -60,7 +73,7 @@ SAM CLI installation, if any.
 2. Install dev CLI: `make init`
 3. Make sure installation succeeded: `which samdev`
 
-### 4. (Optional) Install development version of SAM Transformer
+### 5. (Optional) Install development version of SAM Transformer
 
 If you want to run the latest version of [SAM Transformer](https://github.com/awslabs/serverless-application-model/),
diff --git a/Makefile b/Makefile
index 12e68bc4db..b9084260bf 100644
--- a/Makefile
+++ b/Makefile
@@ -22,19 +22,25 @@ func-test:
 
 smoke-test:
 	# Smoke tests run in parallel
-	pytest -n 4 tests/functional
-
-flake:
-	# Make sure code conforms to PEP8 standards
-	flake8 samcli
-	flake8 tests/unit tests/integration
+	SAM_CLI_DEV=1 pytest -n 4 tests/smoke
 
 lint:
 	# Linter performs static analysis to catch latent bugs
 	pylint --rcfile .pylintrc samcli
 
 # Command to run everytime you make changes to verify everything works
-dev: flake lint test
+dev: lint test
+
+black:
+	black samcli/* tests/* scripts/*
+
+black-check:
+	black --check samcli/* tests/* scripts/*
 
 # Verifications to run before sending a pull request
-pr: init dev
+pr: init dev black-check
+
+update-isolated-req:
+	pipenv --three
+	pipenv run pip install -r requirements/base.txt
+	pipenv run pip freeze > requirements/isolated.txt
diff --git a/appveyor-windows.yml b/appveyor-windows.yml
new file mode 100644
index 0000000000..2aca76c3ff
--- /dev/null
+++ b/appveyor-windows.yml
@@ -0,0 +1,100 @@
+version: 1.0.{build}
+image: Windows Server 2019
+build: off
+
+# Change the clone folder to somewhere in "D:\" because this is shared by default with Docker. We need this to mount folders.
+clone_folder: D:\source
+
+environment:
+  AWS_DEFAULT_REGION: us-east-1
+  SAM_CLI_DEV: 1
+
+  # In Windows, tempdir is usually in C:\. But in AppVeyor only D:\ is shared with Docker.
+  # Therefore change TEMPDIR to D: for invoke tests to work. Python uses the $TMPDIR envvar
+  # to find the root of the tempdir.
+  TMPDIR: D:\tmp
+  TEMP: D:\tmp
+  TMP: D:\tmp
+
+  # MSI Installers only use Py3.6.6. It is sufficient to test with this version here.
+  PYTHON_HOME: "C:\\Python36-x64"
+  PYTHON_SCRIPTS: "C:\\Python36-x64\\Scripts"
+  PYTHON_EXE: "C:\\Python36-x64\\python.exe"
+  PYTHON_VERSION: '3.6.8'
+  PYTHON_ARCH: '64'
+  HOME: 'C:\Users\appveyor'
+  HOMEDRIVE: 'C:'
+  HOMEPATH: 'C:\Users\appveyor'
+
+init:
+  # Uncomment this for RDP
+  - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
+  - ps: gcim Win32_Processor | % { "$($_.NumberOfLogicalProcessors) logical CPUs" }
+  - ps: gcim Win32_OperatingSystem | % { "$([int]($_.TotalVisibleMemorySize/1mb)) Gb" }
+
+cache:
+  - C:\ProgramData\chocolatey\bin -> appveyor.yml
+  - C:\ProgramData\chocolatey\lib -> appveyor.yml
+
+install:
+
+  # Make sure the temp directory exists for Python to use.
+ - ps: "mkdir -Force D:\\tmp" + - "echo %PATH%" + - "python --version" + + # Upgrade setuptools, wheel and virtualenv + - "python -m pip install --upgrade setuptools wheel virtualenv" + + # Create new virtual environment with chosen python version and activate it + - "rm -rf venv" + - "dir C:\\Python36-x64" + - "virtualenv -p %PYTHON_EXE% venv" + - "venv\\Scripts\\activate" + - "python -c \"import sys; print(sys.executable)\"" + - "python --version" + + # Actually install SAM CLI's dependencies + - "pip install -e \".[dev]\"" + + + # To run Nodejs workflow integ tests + - "choco install nodejs-lts -y --force" + - "refreshenv" + # setup Ruby + - "choco install ruby --version 2.5.3.1 --force -y" + - "refreshenv" + - "ruby --version" + - "gem --version" + - "gem install bundler -v 2.0.2" + - "refreshenv" + - "bundler --version" + - "echo %PATH%" + + # setup Java, Maven and Gradle + - "refreshenv" + - "choco install jdk8 -y --force" + - "refreshenv" + - "choco install maven -y --force" + - "refreshenv" + - "choco install gradle -y --force" + - "refreshenv" + - "java -version" + - "gradle -v" + - "mvn --version" + + # Switch to Docker Linux containers + - ps: Switch-DockerLinux + + # Echo final Path + - "echo %PATH%" + +test_script: + # Reactivate virtualenv before running tests + - "venv\\Scripts\\activate" + - "docker system prune -a -f" + - "pytest -vv tests/integration" + +# Uncomment for RDP +# on_finish: +# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1')) diff --git a/appveyor.yml b/appveyor.yml index 3b8d76164f..39378826fb 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -3,9 +3,6 @@ image: - Ubuntu - Visual Studio 2017 -configuration: - - smoke - environment: AWS_DEFAULT_REGION: us-east-1 SAM_CLI_DEV: 1 @@ -56,6 +53,11 @@ for: - sh: "sudo unzip -d /opt/gradle /tmp/gradle-*.zip" - sh: "PATH=/opt/gradle/gradle-5.5/bin:$PATH" + # Install black + - sh: "wget -O /tmp/black https://github.com/python/black/releases/download/19.3b0/black" + - sh: "chmod +x /tmp/black" + - sh: "/tmp/black --version" + - matrix: only: @@ -72,12 +74,12 @@ build_script: test_script: - "pytest --cov samcli --cov-report term-missing --cov-fail-under 95 tests/unit" - - "flake8 samcli" - - "flake8 tests/unit tests/integration" - "pylint --rcfile .pylintrc samcli" # Runs only in Linux - sh: "pytest -vv tests/integration" + - sh: "/tmp/black --check setup.py tests samcli scripts" + - sh: "python scripts/check-isolated-needs-update.py" # Smoke tests run in parallel - it runs on both Linux & Windows # Presence of the RUN_SMOKE envvar will run the smoke tests diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..40711e3163 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,22 @@ +[build-system] +requires = ["setuptools", "wheel"] # PEP 508 specifications. 
+ + +[tool.black] +line-length = 120 +target_version = ['py37', 'py27', 'py36'] +exclude = ''' + +( + /( + \.eggs # exclude a few common directories in the + | \.git # root of the project + | \.tox + | \.venv + | build + | dist + | pip-wheel-metadata + | samcli/local/init/templates + )/ +) +''' diff --git a/pytest.ini b/pytest.ini index fd1b9f27dc..926a10d096 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,4 +1,5 @@ [pytest] env = AWS_DEFAULT_REGION = ap-southeast-1 - +filterwarnings = + error diff --git a/requirements/base.txt b/requirements/base.txt index 159d6ec4ed..a43a664dbe 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ Flask~=1.0.2 boto3~=1.9, >=1.9.56 PyYAML~=5.1 cookiecutter~=1.6.0 -aws-sam-translator==1.11.0 +aws-sam-translator==1.14.0 docker~=4.0 dateparser~=0.7 python-dateutil~=2.6 diff --git a/requirements/dev.txt b/requirements/dev.txt index edead4ceff..8f4e1fdc32 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,5 +1,4 @@ coverage==4.3.4 -flake8==3.3.0 tox==2.2.1 pytest-cov==2.4.0 # astroid > 2.0.4 is not compatible with pylint1.7 diff --git a/requirements/isolated.txt b/requirements/isolated.txt new file mode 100644 index 0000000000..1440a6db13 --- /dev/null +++ b/requirements/isolated.txt @@ -0,0 +1,39 @@ +arrow==0.15.2 +attrs==19.1.0 +aws-lambda-builders==0.4.0 +aws-sam-translator==1.14.0 +binaryornot==0.4.4 +boto3==1.9.228 +botocore==1.12.228 +certifi==2019.9.11 +chardet==3.0.4 +chevron==0.13.1 +Click==7.0 +cookiecutter==1.6.0 +dateparser==0.7.1 +docker==4.0.2 +docutils==0.15.2 +Flask==1.0.4 +future==0.17.1 +idna==2.8 +itsdangerous==1.1.0 +Jinja2==2.10.1 +jinja2-time==0.2.0 +jmespath==0.9.4 +jsonschema==3.0.2 +MarkupSafe==1.1.1 +poyo==0.5.0 +pyrsistent==0.15.4 +python-dateutil==2.8.0 +pytz==2019.2 +PyYAML==5.1.2 +regex==2019.8.19 +requests==2.22.0 +s3transfer==0.2.1 +serverlessrepo==0.1.9 +six==1.11.0 +tzlocal==2.0.0 +urllib3==1.25.3 +websocket-client==0.56.0 +Werkzeug==0.15.6 +whichcraft==0.6.1 diff --git a/samcli/__init__.py b/samcli/__init__.py index 508ad3417d..823cc30cb7 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = '0.21.0' +__version__ = "0.22.0" diff --git a/samcli/cli/command.py b/samcli/cli/command.py index 38331565ff..b09f0f5071 100644 --- a/samcli/cli/command.py +++ b/samcli/cli/command.py @@ -21,7 +21,7 @@ "samcli.commands.package", "samcli.commands.deploy", "samcli.commands.logs", - "samcli.commands.publish" + "samcli.commands.publish", ] DEPRECATION_NOTICE = ( @@ -85,7 +85,7 @@ def _set_commands(package_names): commands = OrderedDict() for pkg_name in package_names: - cmd_name = pkg_name.split('.')[-1] + cmd_name = pkg_name.split(".")[-1] commands[cmd_name] = pkg_name return commands diff --git a/samcli/cli/context.py b/samcli/cli/context.py index 39e641e57c..c50fefa8fe 100644 --- a/samcli/cli/context.py +++ b/samcli/cli/context.py @@ -45,8 +45,8 @@ def debug(self, value): if self._debug: # Turn on debug logging - logging.getLogger('samcli').setLevel(logging.DEBUG) - logging.getLogger('aws_lambda_builders').setLevel(logging.DEBUG) + logging.getLogger("samcli").setLevel(logging.DEBUG) + logging.getLogger("aws_lambda_builders").setLevel(logging.DEBUG) @property def region(self): @@ -135,5 +135,4 @@ def _refresh_session(self): the Boto3's session object are read-only. Therefore when Click parses new AWS session related properties (like region & profile), it will call this method to create a new session with latest values for these properties. 
""" - boto3.setup_default_session(region_name=self._aws_region, - profile_name=self._aws_profile) + boto3.setup_default_session(region_name=self._aws_region, profile_name=self._aws_profile) diff --git a/samcli/cli/global_config.py b/samcli/cli/global_config.py index d249ad4350..539aa2c1a0 100644 --- a/samcli/cli/global_config.py +++ b/samcli/cli/global_config.py @@ -46,7 +46,7 @@ def config_dir(self): if not self._config_dir: # Internal Environment variable to customize SAM CLI App Dir. Currently used only by integ tests. app_dir = os.getenv("__SAM_CLI_APP_DIR") - self._config_dir = Path(app_dir) if app_dir else Path(click.get_app_dir('AWS SAM', force_posix=True)) + self._config_dir = Path(app_dir) if app_dir else Path(click.get_app_dir("AWS SAM", force_posix=True)) return Path(self._config_dir) @property @@ -106,7 +106,7 @@ def telemetry_enabled(self): # If environment variable is set, its value takes precedence over the value from config file. env_name = "SAM_CLI_TELEMETRY" if env_name in os.environ: - return os.getenv(env_name) in ('1', 1) + return os.getenv(env_name) in ("1", 1) try: self._telemetry_enabled = self._get_value(TELEMETRY_ENABLED_KEY) @@ -200,7 +200,7 @@ def _set_json_cfg(self, filepath, key, value, json_body=None): json_body[key] = value file_body = json.dumps(json_body, indent=4) + "\n" try: - with open(str(filepath), 'w') as f: + with open(str(filepath), "w") as f: f.write(file_body) except IOError as ex: LOG.debug("Error writing to {filepath}", exc_info=ex) diff --git a/samcli/cli/main.py b/samcli/cli/main.py index f2ab6bd46f..c4e0279d6d 100644 --- a/samcli/cli/main.py +++ b/samcli/cli/main.py @@ -15,7 +15,7 @@ from .global_config import GlobalConfig LOG = logging.getLogger(__name__) -logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s', datefmt='%Y-%m-%d %H:%M:%S') +logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S") pass_context = click.make_pass_decorator(Context) @@ -47,9 +47,7 @@ def print_info(ctx, param, value): if not value or ctx.resilient_parsing: return - click.echo(json.dumps({ - "version": __version__ - }, indent=2)) + click.echo(json.dumps({"version": __version__}, indent=2)) ctx.exit() @@ -96,9 +94,9 @@ def cli(ctx): except (IOError, ValueError) as ex: LOG.debug("Unable to write telemetry flag", exc_info=ex) - sam_cli_logger = logging.getLogger('samcli') - sam_cli_formatter = logging.Formatter('%(message)s') - lambda_builders_logger = logging.getLogger('aws_lambda_builders') + sam_cli_logger = logging.getLogger("samcli") + sam_cli_formatter = logging.Formatter("%(message)s") + lambda_builders_logger = logging.getLogger("aws_lambda_builders") SamCliLogger.configure_logger(sam_cli_logger, sam_cli_formatter, logging.INFO) SamCliLogger.configure_logger(lambda_builders_logger, sam_cli_formatter, logging.INFO) diff --git a/samcli/cli/options.py b/samcli/cli/options.py index c5e7baaff5..38d718992a 100644 --- a/samcli/cli/options.py +++ b/samcli/cli/options.py @@ -14,17 +14,20 @@ def debug_option(f): :param f: Callback Function to be passed to Click """ + def callback(ctx, param, value): state = ctx.ensure_object(Context) state.debug = value return value - return click.option('--debug', - expose_value=False, - is_flag=True, - envvar="SAM_DEBUG", - help='Turn on debug logging to print debug message generated by SAM CLI.', - callback=callback)(f) + return click.option( + "--debug", + expose_value=False, + is_flag=True, + envvar="SAM_DEBUG", + help="Turn on debug logging to print debug message 
generated by SAM CLI.", + callback=callback, + )(f) def region_option(f): @@ -33,15 +36,15 @@ def region_option(f): :param f: Callback Function to be passed to Click """ + def callback(ctx, param, value): state = ctx.ensure_object(Context) state.region = value return value - return click.option('--region', - expose_value=False, - help='Set the AWS Region of the service (e.g. us-east-1).', - callback=callback)(f) + return click.option( + "--region", expose_value=False, help="Set the AWS Region of the service (e.g. us-east-1).", callback=callback + )(f) def profile_option(f): @@ -50,12 +53,15 @@ def profile_option(f): :param f: Callback Function to be passed to Click """ + def callback(ctx, param, value): state = ctx.ensure_object(Context) state.profile = value return value - return click.option('--profile', - expose_value=False, - help='Select a specific profile from your credential file to get AWS credentials.', - callback=callback)(f) + return click.option( + "--profile", + expose_value=False, + help="Select a specific profile from your credential file to get AWS credentials.", + callback=callback, + )(f) diff --git a/samcli/cli/types.py b/samcli/cli/types.py index 859e80d4b9..1a147fa4dc 100644 --- a/samcli/cli/types.py +++ b/samcli/cli/types.py @@ -15,9 +15,9 @@ class CfnParameterOverridesType(click.ParamType): __EXAMPLE = "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro" # Regex that parses CloudFormation parameter key-value pairs: https://regex101.com/r/xqfSjW/2 - _pattern = r'(?:ParameterKey=([A-Za-z0-9\"]+),ParameterValue=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))' + _pattern = r"(?:ParameterKey=([A-Za-z0-9\"]+),ParameterValue=(\"(?:\\.|[^\"\\]+)*\"|(?:\\.|[^ \"\\]+)+))" - name = '' + name = "" def convert(self, value, param, ctx): result = {} @@ -27,9 +27,7 @@ def convert(self, value, param, ctx): groups = re.findall(self._pattern, value) if not groups: return self.fail( - "{} is not in valid format. It must look something like '{}'".format(value, self.__EXAMPLE), - param, - ctx + "{} is not in valid format. 
It must look something like '{}'".format(value, self.__EXAMPLE), param, ctx ) # 'groups' variable is a list of tuples ex: [(key1, value1), (key2, value2)] diff --git a/samcli/commands/_utils/options.py b/samcli/commands/_utils/options.py index bea74343d7..0b3362d867 100644 --- a/samcli/commands/_utils/options.py +++ b/samcli/commands/_utils/options.py @@ -27,10 +27,7 @@ def get_or_default_template_file_name(ctx, param, provided_value, include_build) :return: Actual value to be used in the CLI """ - search_paths = [ - "template.yaml", - "template.yml", - ] + search_paths = ["template.yaml", "template.yml"] if include_build: search_paths.insert(0, os.path.join(".aws-sam", "build", "template.yaml")) @@ -74,13 +71,16 @@ def template_click_option(include_build=True): """ Click Option for template option """ - return click.option('--template', '-t', - default=_TEMPLATE_OPTION_DEFAULT_VALUE, - type=click.Path(), - envvar="SAM_TEMPLATE_FILE", - callback=partial(get_or_default_template_file_name, include_build=include_build), - show_default=True, - help="AWS SAM template file") + return click.option( + "--template", + "-t", + default=_TEMPLATE_OPTION_DEFAULT_VALUE, + type=click.Path(), + envvar="SAM_TEMPLATE_FILE", + callback=partial(get_or_default_template_file_name, include_build=include_build), + show_default=True, + help="AWS SAM template file", + ) def docker_common_options(f): @@ -93,26 +93,31 @@ def docker_common_options(f): def docker_click_options(): return [ - click.option('--skip-pull-image', - is_flag=True, - help="Specify whether CLI should skip pulling down the latest Docker image for Lambda runtime.", - envvar="SAM_SKIP_PULL_IMAGE", - default=False), - - click.option('--docker-network', - envvar="SAM_DOCKER_NETWORK", - help="Specifies the name or id of an existing docker network to lambda docker " - "containers should connect to, along with the default bridge network. If not specified, " - "the Lambda containers will only connect to the default bridge docker network."), + click.option( + "--skip-pull-image", + is_flag=True, + help="Specify whether CLI should skip pulling down the latest Docker image for Lambda runtime.", + envvar="SAM_SKIP_PULL_IMAGE", + default=False, + ), + click.option( + "--docker-network", + envvar="SAM_DOCKER_NETWORK", + help="Specifies the name or id of an existing docker network to lambda docker " + "containers should connect to, along with the default bridge network. If not specified, " + "the Lambda containers will only connect to the default bridge docker network.", + ), ] def parameter_override_click_option(): - return click.option("--parameter-overrides", - type=CfnParameterOverridesType(), - help="Optional. A string that contains CloudFormation parameter overrides encoded as key=value " - "pairs. Use the same format as the AWS CLI, e.g. 'ParameterKey=KeyPairName," - "ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro'") + return click.option( + "--parameter-overrides", + type=CfnParameterOverridesType(), + help="Optional. A string that contains CloudFormation parameter overrides encoded as key=value " + "pairs. Use the same format as the AWS CLI, e.g. 
'ParameterKey=KeyPairName," + "ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro'", + ) def parameter_override_option(f): diff --git a/samcli/commands/_utils/template.py b/samcli/commands/_utils/template.py index e268d44fcd..2b45e2e41e 100644 --- a/samcli/commands/_utils/template.py +++ b/samcli/commands/_utils/template.py @@ -14,9 +14,7 @@ from samcli.yamlhelper import yaml_parse, yaml_dump -_METADATA_WITH_LOCAL_PATHS = { - "AWS::ServerlessRepo::Application": ["LicenseUrl", "ReadmeUrl"] -} +_METADATA_WITH_LOCAL_PATHS = {"AWS::ServerlessRepo::Application": ["LicenseUrl", "ReadmeUrl"]} _RESOURCES_WITH_LOCAL_PATHS = { "AWS::Serverless::Function": ["CodeUri"], @@ -30,7 +28,7 @@ "AWS::CloudFormation::Stack": ["TemplateURL"], "AWS::Serverless::Application": ["Location"], "AWS::Lambda::LayerVersion": ["Content"], - "AWS::Serverless::LayerVersion": ["ContentUri"] + "AWS::Serverless::LayerVersion": ["ContentUri"], } @@ -51,16 +49,14 @@ def get_template_data(template_file): if not pathlib.Path(template_file).exists(): raise ValueError("Template file not found at {}".format(template_file)) - with open(template_file, 'r') as fp: + with open(template_file, "r") as fp: try: return yaml_parse(fp.read()) except (ValueError, yaml.YAMLError) as ex: raise ValueError("Failed to parse template: {}".format(str(ex))) -def move_template(src_template_path, - dest_template_path, - template_dict): +def move_template(src_template_path, dest_template_path, template_dict): """ Move the SAM/CloudFormation template from ``src_template_path`` to ``dest_template_path``. For convenience, this method accepts a dictionary of template data ``template_dict`` that will be written to the destination instead of @@ -93,17 +89,13 @@ def move_template(src_template_path, # Next up, we will be writing the template to a different location. Before doing so, we should # update any relative paths in the template to be relative to the new location. - modified_template = _update_relative_paths(template_dict, - original_root, - new_root) + modified_template = _update_relative_paths(template_dict, original_root, new_root) with open(dest_template_path, "w") as fp: fp.write(yaml_dump(modified_template)) -def _update_relative_paths(template_dict, - original_root, - new_root): +def _update_relative_paths(template_dict, original_root, new_root): """ SAM/CloudFormation template can contain certain properties whose value is a relative path to a local file/folder. This path is usually relative to the template's location. If the template is being moved from original location @@ -226,13 +218,11 @@ def _resolve_relative_to(path, original_root, new_root): Updated path if the given path is a relative path. None, if the path is not a relative path. """ - if not isinstance(path, six.string_types) \ - or path.startswith("s3://") \ - or os.path.isabs(path): + if not isinstance(path, six.string_types) or path.startswith("s3://") or os.path.isabs(path): # Value is definitely NOT a relative path. It is either a S3 URi or Absolute path or not a string at all return None # Value is definitely a relative path. 
Change it relative to the destination directory return os.path.relpath( - os.path.normpath(os.path.join(original_root, path)), # Absolute original path w.r.t ``original_root`` - new_root) # Resolve the original path with respect to ``new_root`` + os.path.normpath(os.path.join(original_root, path)), new_root # Absolute original path w.r.t ``original_root`` + ) # Resolve the original path with respect to ``new_root`` diff --git a/samcli/commands/build/build_context.py b/samcli/commands/build/build_context.py index 0cb547f0ca..5892b8a6f8 100644 --- a/samcli/commands/build/build_context.py +++ b/samcli/commands/build/build_context.py @@ -16,6 +16,7 @@ from samcli.commands._utils.template import get_template_data from samcli.commands.exceptions import UserException from samcli.local.lambdafn.exceptions import FunctionNotFound +from samcli.commands.build.exceptions import InvalidBuildDirException LOG = logging.getLogger(__name__) @@ -83,6 +84,10 @@ def __exit__(self, *args): def _setup_build_dir(build_dir, clean): build_path = pathlib.Path(build_dir) + if os.path.abspath(str(build_path)) == os.path.abspath(str(pathlib.Path.cwd())): + exception_message = "Failing build: Running a build with build-dir as current working directory is extremely dangerous since the build-dir contents is first removed. This is no longer supported, please remove the '--build-dir' option from the command to allow the build artifacts to be placed in the directory your template is in." + raise InvalidBuildDirException(exception_message) + if build_path.exists() and os.listdir(build_dir) and clean: # build folder contains something inside. Clear everything. shutil.rmtree(build_dir) diff --git a/samcli/commands/build/command.py b/samcli/commands/build/command.py index a84cf804de..d7549a0074 100644 --- a/samcli/commands/build/command.py +++ b/samcli/commands/build/command.py @@ -63,7 +63,7 @@ @click.option('--build-dir', '-b', default=DEFAULT_BUILD_DIR, type=click.Path(file_okay=False, dir_okay=True, writable=True), # Must be a directory - help="Path to a folder where the built artifacts will be stored") + help="Path to a folder where the built artifacts will be stored. This directory will be first removed before starting a build.") @click.option("--base-dir", "-s", default=None, type=click.Path(dir_okay=True, file_okay=False), # Must be a directory diff --git a/samcli/commands/build/exceptions.py b/samcli/commands/build/exceptions.py new file mode 100644 index 0000000000..783a928803 --- /dev/null +++ b/samcli/commands/build/exceptions.py @@ -0,0 +1,10 @@ +"""Build exceptions""" + +from samcli.commands.exceptions import UserException + + +class InvalidBuildDirException(UserException): + """ + Value provided to --build-dir is invalid + """ + pass diff --git a/samcli/commands/deploy/__init__.py b/samcli/commands/deploy/__init__.py index 5939b2a39e..7e3bd984ab 100644 --- a/samcli/commands/deploy/__init__.py +++ b/samcli/commands/deploy/__init__.py @@ -26,15 +26,16 @@ @click.command("deploy", short_help=SHORT_HELP, context_settings={"ignore_unknown_options": True}, help=HELP_TEXT) @click.argument("args", nargs=-1, type=click.UNPROCESSED) -@click.option('--template-file', - required=True, - type=click.Path(), - help="The path where your AWS SAM template is located") -@click.option('--stack-name', - required=True, - help="The name of the AWS CloudFormation stack you're deploying to. " - "If you specify an existing stack, the command updates the stack. 
" - "If you specify a new stack, the command creates it.") +@click.option( + "--template-file", required=True, type=click.Path(), help="The path where your AWS SAM template is located" +) +@click.option( + "--stack-name", + required=True, + help="The name of the AWS CloudFormation stack you're deploying to. " + "If you specify an existing stack, the command updates the stack. " + "If you specify a new stack, the command creates it.", +) @common_options @pass_context @track_command @@ -45,7 +46,7 @@ def cli(ctx, args, template_file, stack_name): def do_cli(args, template_file, stack_name): - args = args + ('--stack-name', stack_name) + args = args + ("--stack-name", stack_name) try: execute_command("deploy", args, template_file=template_file) diff --git a/samcli/commands/init/__init__.py b/samcli/commands/init/__init__.py index 1e548edc4a..9164ddeb90 100644 --- a/samcli/commands/init/__init__.py +++ b/samcli/commands/init/__init__.py @@ -17,18 +17,29 @@ LOG = logging.getLogger(__name__) -@click.command("init", - short_help="Init an AWS SAM application.", - context_settings=dict(help_option_names=[u'-h', u'--help'])) -@click.option('-l', '--location', help="Template location (git, mercurial, http(s), zip, path)") -@click.option('-r', '--runtime', type=click.Choice(INIT_RUNTIMES), default=DEFAULT_RUNTIME, - help="Lambda Runtime of your app") -@click.option('-d', '--dependency-manager', type=click.Choice(SUPPORTED_DEP_MANAGERS), default=None, - help="Dependency manager of your Lambda runtime", required=False) -@click.option('-o', '--output-dir', default='.', type=click.Path(), help="Where to output the initialized app into") -@click.option('-n', '--name', default="sam-app", help="Name of your project to be generated as a folder") -@click.option('--no-input', is_flag=True, default=False, - help="Disable prompting and accept default values defined template config") +@click.command( + "init", short_help="Init an AWS SAM application.", context_settings=dict(help_option_names=[u"-h", u"--help"]) +) +@click.option("-l", "--location", help="Template location (git, mercurial, http(s), zip, path)") +@click.option( + "-r", "--runtime", type=click.Choice(INIT_RUNTIMES), default=DEFAULT_RUNTIME, help="Lambda Runtime of your app" +) +@click.option( + "-d", + "--dependency-manager", + type=click.Choice(SUPPORTED_DEP_MANAGERS), + default=None, + help="Dependency manager of your Lambda runtime", + required=False, +) +@click.option("-o", "--output-dir", default=".", type=click.Path(), help="Where to output the initialized app into") +@click.option("-n", "--name", default="sam-app", help="Name of your project to be generated as a folder") +@click.option( + "--no-input", + is_flag=True, + default=False, + help="Disable prompting and accept default values defined template config", +) @common_options @pass_context @track_command @@ -77,8 +88,7 @@ def cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input): """ # All logic must be implemented in the `do_cli` method. 
This helps ease unit tests - do_cli(ctx, location, runtime, dependency_manager, output_dir, - name, no_input) # pragma: no cover + do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input) # pragma: no cover def do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input): @@ -95,7 +105,9 @@ def do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_inpu =================================================== [*] Invoke Function: sam local invoke HelloWorldFunction --event event.json [*] Start API Gateway locally: sam local start-api -""".format(output_dir=output_dir, name=name) +""".format( + output_dir=output_dir, name=name + ) build_msg = """ Project generated: {output_dir}/{name} @@ -105,11 +117,21 @@ def do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_inpu [*] Install dependencies [*] Invoke Function: sam local invoke HelloWorldFunction --event event.json [*] Start API Gateway locally: sam local start-api -""".format(output_dir=output_dir, name=name) +""".format( + output_dir=output_dir, name=name + ) no_build_step_required = ( - "python", "python3.7", "python3.6", "python2.7", "nodejs", - "nodejs4.3", "nodejs6.10", "nodejs8.10", "nodejs10.x", "ruby2.5" + "python", + "python3.7", + "python3.6", + "python2.7", + "nodejs", + "nodejs4.3", + "nodejs6.10", + "nodejs8.10", + "nodejs10.x", + "ruby2.5", ) next_step_msg = no_build_msg if runtime in no_build_step_required else build_msg diff --git a/samcli/commands/local/cli_common/invoke_context.py b/samcli/commands/local/cli_common/invoke_context.py index 342f9df179..8d870dbea0 100644 --- a/samcli/commands/local/cli_common/invoke_context.py +++ b/samcli/commands/local/cli_common/invoke_context.py @@ -43,23 +43,24 @@ class InvokeContext(object): This class sets up some resources that need to be cleaned up after the context object is used. """ - def __init__(self, # pylint: disable=R0914 - template_file, - function_identifier=None, - env_vars_file=None, - docker_volume_basedir=None, - docker_network=None, - log_file=None, - skip_pull_image=None, - debug_port=None, - debug_args=None, - debugger_path=None, - parameter_overrides=None, - layer_cache_basedir=None, - force_image_build=None, - aws_region=None, - aws_profile=None, - ): + def __init__( + self, # pylint: disable=R0914 + template_file, + function_identifier=None, + env_vars_file=None, + docker_volume_basedir=None, + docker_network=None, + log_file=None, + skip_pull_image=None, + debug_port=None, + debug_args=None, + debugger_path=None, + parameter_overrides=None, + layer_cache_basedir=None, + force_image_build=None, + aws_region=None, + aws_profile=None, + ): """ Initialize the context @@ -135,12 +136,9 @@ def __enter__(self): self._env_vars_value = self._get_env_vars_value(self._env_vars_file) self._log_file_handle = self._setup_log_file(self._log_file) - self._debug_context = self._get_debug_context(self._debug_port, - self._debug_args, - self._debugger_path) + self._debug_context = self._get_debug_context(self._debug_port, self._debug_args, self._debugger_path) - self._container_manager = self._get_container_manager(self._docker_network, - self._skip_pull_image) + self._container_manager = self._get_container_manager(self._docker_network, self._skip_pull_image) if not self._container_manager.is_docker_reachable: raise InvokeContextException("Running AWS SAM projects locally requires Docker. 
Have you got it installed?") @@ -179,8 +177,10 @@ def function_name(self): all_function_names = [f.name for f in all_functions] # There are more functions in the template, and function identifier is not provided, hence raise. - raise InvokeContextException("You must provide a function identifier (function's Logical ID in the template). " - "Possible options in your template: {}".format(all_function_names)) + raise InvokeContextException( + "You must provide a function identifier (function's Logical ID in the template). " + "Possible options in your template: {}".format(all_function_names) + ) @property def local_lambda_runner(self): @@ -192,18 +192,18 @@ def local_lambda_runner(self): """ layer_downloader = LayerDownloader(self._layer_cache_basedir, self.get_cwd()) - image_builder = LambdaImage(layer_downloader, - self._skip_pull_image, - self._force_image_build) + image_builder = LambdaImage(layer_downloader, self._skip_pull_image, self._force_image_build) lambda_runtime = LambdaRuntime(self._container_manager, image_builder) - return LocalLambdaRunner(local_runtime=lambda_runtime, - function_provider=self._function_provider, - cwd=self.get_cwd(), - aws_profile=self._aws_profile, - aws_region=self._aws_region, - env_vars_values=self._env_vars_value, - debug_context=self._debug_context) + return LocalLambdaRunner( + local_runtime=lambda_runtime, + function_provider=self._function_provider, + cwd=self.get_cwd(), + aws_profile=self._aws_profile, + aws_region=self._aws_region, + env_vars_values=self._env_vars_value, + debug_context=self._debug_context, + ) @property def stdout(self): @@ -299,13 +299,13 @@ def _get_env_vars_value(filename): # Try to read the file and parse it as JSON try: - with open(filename, 'r') as fp: + with open(filename, "r") as fp: return json.load(fp) except Exception as ex: - raise InvokeContextException("Could not read environment variables overrides from file {}: {}".format( - filename, - str(ex))) + raise InvokeContextException( + "Could not read environment variables overrides from file {}: {}".format(filename, str(ex)) + ) @staticmethod def _setup_log_file(log_file): @@ -318,7 +318,7 @@ def _setup_log_file(log_file): if not log_file: return None - return open(log_file, 'wb') + return open(log_file, "wb") @staticmethod def _get_debug_context(debug_port, debug_args, debugger_path): diff --git a/samcli/commands/local/cli_common/options.py b/samcli/commands/local/cli_common/options.py index 459251acf3..4d92c6dffc 100644 --- a/samcli/commands/local/cli_common/options.py +++ b/samcli/commands/local/cli_common/options.py @@ -20,7 +20,7 @@ def get_application_dir(): Path representing the application config directory """ # TODO: Get the config directory directly from `GlobalConfig` - return Path(click.get_app_dir('AWS SAM', force_posix=True)) + return Path(click.get_app_dir("AWS SAM", force_posix=True)) def get_default_layer_cache_dir(): @@ -32,7 +32,7 @@ def get_default_layer_cache_dir(): str String representing the layer cache directory """ - layer_cache_dir = get_application_dir().joinpath('layers-pkg') + layer_cache_dir = get_application_dir().joinpath("layers-pkg") return str(layer_cache_dir) @@ -55,12 +55,12 @@ def construct_options(f): The callback function """ service_options = [ - click.option('--host', - default="127.0.0.1", - help="Local hostname or IP address to bind to (default: '127.0.0.1')"), - click.option("--port", "-p", - default=port, - help="Local port number to listen on (default: '{}')".format(str(port))) + click.option( + "--host", 
default="127.0.0.1", help="Local hostname or IP address to bind to (default: '127.0.0.1')" + ), + click.option( + "--port", "-p", default=port, help="Local port number to listen on (default: '{}')".format(str(port)) + ), ] # Reverse the list to maintain ordering of options in help text printed with --help @@ -68,6 +68,7 @@ def construct_options(f): option(f) return f + return construct_options @@ -78,51 +79,57 @@ def invoke_common_options(f): :param f: Callback passed by Click """ - invoke_options = [ - template_click_option(), - - click.option('--env-vars', '-n', - type=click.Path(exists=True), - help="JSON file containing values for Lambda function's environment variables."), - - parameter_override_click_option(), - - click.option('--debug-port', '-d', - help="When specified, Lambda function container will start in debug mode and will expose this " - "port on localhost.", - envvar="SAM_DEBUG_PORT"), - - click.option('--debugger-path', - help="Host path to a debugger that will be mounted into the Lambda container."), - - click.option('--debug-args', - help="Additional arguments to be passed to the debugger.", - envvar="DEBUGGER_ARGS"), - - click.option('--docker-volume-basedir', '-v', - envvar="SAM_DOCKER_VOLUME_BASEDIR", - help="Specifies the location basedir where the SAM file exists. If the Docker is running on " - "a remote machine, you must mount the path where the SAM file exists on the docker machine " - "and modify this value to match the remote machine."), - - click.option('--log-file', '-l', - help="logfile to send runtime logs to."), - - click.option('--layer-cache-basedir', - type=click.Path(exists=False, file_okay=False), - envvar="SAM_LAYER_CACHE_BASEDIR", - help="Specifies the location basedir where the Layers your template uses will be downloaded to.", - default=get_default_layer_cache_dir()), - - ] + docker_click_options() + [ - - click.option('--force-image-build', - is_flag=True, - help='Specify whether CLI should rebuild the image used for invoking functions with layers.', - envvar='SAM_FORCE_IMAGE_BUILD', - default=False), - - ] + invoke_options = ( + [ + template_click_option(), + click.option( + "--env-vars", + "-n", + type=click.Path(exists=True), + help="JSON file containing values for Lambda function's environment variables.", + ), + parameter_override_click_option(), + click.option( + "--debug-port", + "-d", + help="When specified, Lambda function container will start in debug mode and will expose this " + "port on localhost.", + envvar="SAM_DEBUG_PORT", + ), + click.option( + "--debugger-path", help="Host path to a debugger that will be mounted into the Lambda container." + ), + click.option( + "--debug-args", help="Additional arguments to be passed to the debugger.", envvar="DEBUGGER_ARGS" + ), + click.option( + "--docker-volume-basedir", + "-v", + envvar="SAM_DOCKER_VOLUME_BASEDIR", + help="Specifies the location basedir where the SAM file exists. 
If the Docker is running on " + "a remote machine, you must mount the path where the SAM file exists on the docker machine " + "and modify this value to match the remote machine.", + ), + click.option("--log-file", "-l", help="logfile to send runtime logs to."), + click.option( + "--layer-cache-basedir", + type=click.Path(exists=False, file_okay=False), + envvar="SAM_LAYER_CACHE_BASEDIR", + help="Specifies the location basedir where the Layers your template uses will be downloaded to.", + default=get_default_layer_cache_dir(), + ), + ] + + docker_click_options() + + [ + click.option( + "--force-image-build", + is_flag=True, + help="Specify whether CLI should rebuild the image used for invoking functions with layers.", + envvar="SAM_FORCE_IMAGE_BUILD", + default=False, + ) + ] + ) # Reverse the list to maintain ordering of options in help text printed with --help for option in reversed(invoke_options): diff --git a/samcli/commands/local/cli_common/user_exceptions.py b/samcli/commands/local/cli_common/user_exceptions.py index 74be2baf48..7f474859a7 100644 --- a/samcli/commands/local/cli_common/user_exceptions.py +++ b/samcli/commands/local/cli_common/user_exceptions.py @@ -9,6 +9,7 @@ class InvokeContextException(UserException): """ Something went wrong invoking the function. """ + pass @@ -16,6 +17,7 @@ class InvalidSamTemplateException(UserException): """ The template provided was invalid and not able to transform into a Standard CloudFormation Template """ + pass @@ -23,6 +25,7 @@ class SamTemplateNotFoundException(UserException): """ The SAM Template provided could not be found """ + pass @@ -30,6 +33,7 @@ class DebugContextException(UserException): """ Something went wrong when creating the DebugContext """ + pass @@ -37,6 +41,7 @@ class ImageBuildException(UserException): """ Image failed to build """ + pass @@ -44,6 +49,7 @@ class CredentialsRequired(UserException): """ Credentials were not given when Required """ + pass @@ -51,6 +57,7 @@ class ResourceNotFound(UserException): """ The Resource requested was not found """ + pass @@ -58,6 +65,7 @@ class InvalidLayerVersionArn(UserException): """ The LayerVersion Arn given in the template is Invalid """ + pass @@ -65,4 +73,5 @@ class UnsupportedIntrinsic(UserException): """ Value from a template has an Intrinsic that is unsupported """ + pass diff --git a/samcli/commands/local/generate_event/event_generation.py b/samcli/commands/local/generate_event/event_generation.py index ca9180f9f0..12379c5892 100644 --- a/samcli/commands/local/generate_event/event_generation.py +++ b/samcli/commands/local/generate_event/event_generation.py @@ -92,7 +92,7 @@ class EventTypeSubCommand(click.MultiCommand): List all of the subcommands """ - TAGS = 'tags' + TAGS = "tags" def __init__(self, events_lib, top_level_cmd_name, subcmd_definition, *args, **kwargs): """ @@ -139,20 +139,23 @@ def get_command(self, ctx, cmd_name): parameters = [] for param_name in self.subcmd_definition[cmd_name][self.TAGS].keys(): default = self.subcmd_definition[cmd_name][self.TAGS][param_name]["default"] - parameters.append(click.Option( - ["--{}".format(param_name)], - default=default, - help="Specify the {} name you'd like, otherwise the default = {}".format(param_name, default) - )) - - command_callback = functools.partial(self.cmd_implementation, - self.events_lib, - self.top_level_cmd_name, - cmd_name) - cmd = click.Command(name=cmd_name, - short_help=self.subcmd_definition[cmd_name]["help"], - params=parameters, - callback=command_callback) + parameters.append( + 
click.Option( + ["--{}".format(param_name)], + default=default, + help="Specify the {} name you'd like, otherwise the default = {}".format(param_name, default), + ) + ) + + command_callback = functools.partial( + self.cmd_implementation, self.events_lib, self.top_level_cmd_name, cmd_name + ) + cmd = click.Command( + name=cmd_name, + short_help=self.subcmd_definition[cmd_name]["help"], + params=parameters, + callback=command_callback, + ) cmd = debug_option(cmd) return cmd diff --git a/samcli/commands/local/invoke/cli.py b/samcli/commands/local/invoke/cli.py index 9272c30f30..d059e66ee2 100644 --- a/samcli/commands/local/invoke/cli.py +++ b/samcli/commands/local/invoke/cli.py @@ -35,32 +35,80 @@ @click.command("invoke", help=HELP_TEXT, short_help="Invokes a local Lambda function once.") -@click.option("--event", '-e', - type=click.Path(), - default=STDIN_FILE_NAME, # Defaults to stdin - help="JSON file containing event data passed to the Lambda function during invoke. If this option " - "is not specified, we will default to reading JSON from stdin") +@click.option( + "--event", + "-e", + type=click.Path(), + default=STDIN_FILE_NAME, # Defaults to stdin + help="JSON file containing event data passed to the Lambda function during invoke. If this option " + "is not specified, we will default to reading JSON from stdin", +) @click.option("--no-event", is_flag=True, default=False, help="Invoke Function with an empty event") @invoke_common_options @cli_framework_options @aws_creds_options -@click.argument('function_identifier', required=False) +@click.argument("function_identifier", required=False) @pass_context @track_command # pylint: disable=R0914 -def cli(ctx, function_identifier, template, event, no_event, env_vars, debug_port, debug_args, debugger_path, - docker_volume_basedir, docker_network, log_file, layer_cache_basedir, skip_pull_image, force_image_build, - parameter_overrides): +def cli( + ctx, + function_identifier, + template, + event, + no_event, + env_vars, + debug_port, + debug_args, + debugger_path, + docker_volume_basedir, + docker_network, + log_file, + layer_cache_basedir, + skip_pull_image, + force_image_build, + parameter_overrides, +): # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(ctx, function_identifier, template, event, no_event, env_vars, debug_port, debug_args, debugger_path, - docker_volume_basedir, docker_network, log_file, layer_cache_basedir, skip_pull_image, force_image_build, - parameter_overrides) # pragma: no cover - - -def do_cli(ctx, function_identifier, template, event, no_event, env_vars, debug_port, # pylint: disable=R0914 - debug_args, debugger_path, docker_volume_basedir, docker_network, log_file, layer_cache_basedir, - skip_pull_image, force_image_build, parameter_overrides): + do_cli( + ctx, + function_identifier, + template, + event, + no_event, + env_vars, + debug_port, + debug_args, + debugger_path, + docker_volume_basedir, + docker_network, + log_file, + layer_cache_basedir, + skip_pull_image, + force_image_build, + parameter_overrides, + ) # pragma: no cover + + +def do_cli( # pylint: disable=R0914 + ctx, + function_identifier, + template, + event, + no_event, + env_vars, + debug_port, + debug_args, + debugger_path, + docker_volume_basedir, + docker_network, + log_file, + layer_cache_basedir, + skip_pull_image, + force_image_build, + parameter_overrides, +): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ @@ -79,34 +127,37 @@ def do_cli(ctx, function_identifier, template, event, no_event, env_vars, debug_ # Pass all inputs to setup necessary context to invoke function locally. # Handler exception raised by the processor for invalid args and print errors try: - with InvokeContext(template_file=template, - function_identifier=function_identifier, - env_vars_file=env_vars, - docker_volume_basedir=docker_volume_basedir, - docker_network=docker_network, - log_file=log_file, - skip_pull_image=skip_pull_image, - debug_port=debug_port, - debug_args=debug_args, - debugger_path=debugger_path, - parameter_overrides=parameter_overrides, - layer_cache_basedir=layer_cache_basedir, - force_image_build=force_image_build, - aws_region=ctx.region, - aws_profile=ctx.profile) as context: + with InvokeContext( + template_file=template, + function_identifier=function_identifier, + env_vars_file=env_vars, + docker_volume_basedir=docker_volume_basedir, + docker_network=docker_network, + log_file=log_file, + skip_pull_image=skip_pull_image, + debug_port=debug_port, + debug_args=debug_args, + debugger_path=debugger_path, + parameter_overrides=parameter_overrides, + layer_cache_basedir=layer_cache_basedir, + force_image_build=force_image_build, + aws_region=ctx.region, + aws_profile=ctx.profile, + ) as context: # Invoke the function - context.local_lambda_runner.invoke(context.function_name, - event=event_data, - stdout=context.stdout, - stderr=context.stderr) + context.local_lambda_runner.invoke( + context.function_name, event=event_data, stdout=context.stdout, stderr=context.stderr + ) except FunctionNotFound: raise UserException("Function {} not found in template".format(function_identifier)) - except (InvalidSamDocumentException, - OverridesNotWellDefinedError, - InvalidLayerReference, - DebuggingNotSupported) as ex: + except ( + InvalidSamDocumentException, + OverridesNotWellDefinedError, + InvalidLayerReference, + DebuggingNotSupported, + ) as ex: raise UserException(str(ex)) except DockerImagePullFailedException as ex: raise UserException(str(ex)) @@ -126,5 +177,5 @@ def _get_event(event_file_name): # click.open_file knows to open stdin when filename is '-'. 
This is safer than manually opening streams, and # accidentally closing a standard stream - with click.open_file(event_file_name, 'r') as fp: + with click.open_file(event_file_name, "r") as fp: return fp.read() diff --git a/samcli/commands/local/lib/api_collector.py b/samcli/commands/local/lib/api_collector.py index 4c4c1abe8c..98fd2861c7 100644 --- a/samcli/commands/local/lib/api_collector.py +++ b/samcli/commands/local/lib/api_collector.py @@ -15,7 +15,6 @@ class ApiCollector(object): - def __init__(self): # Route properties stored per resource. self._route_per_resource = defaultdict(list) diff --git a/samcli/commands/local/lib/api_provider.py b/samcli/commands/local/lib/api_provider.py index 7a95959e6d..6e074bda02 100644 --- a/samcli/commands/local/lib/api_provider.py +++ b/samcli/commands/local/lib/api_provider.py @@ -13,7 +13,6 @@ class ApiProvider(AbstractApiProvider): - def __init__(self, template_dict, parameter_overrides=None, cwd=None): """ Initialize the class with template data. The template_dict is assumed diff --git a/samcli/commands/local/lib/cfn_api_provider.py b/samcli/commands/local/lib/cfn_api_provider.py index fb13450290..e844464eee 100644 --- a/samcli/commands/local/lib/cfn_api_provider.py +++ b/samcli/commands/local/lib/cfn_api_provider.py @@ -17,12 +17,7 @@ class CfnApiProvider(CfnBaseApiProvider): APIGATEWAY_RESOURCE = "AWS::ApiGateway::Resource" APIGATEWAY_METHOD = "AWS::ApiGateway::Method" METHOD_BINARY_TYPE = "CONVERT_TO_BINARY" - TYPES = [ - APIGATEWAY_RESTAPI, - APIGATEWAY_STAGE, - APIGATEWAY_RESOURCE, - APIGATEWAY_METHOD - ] + TYPES = [APIGATEWAY_RESTAPI, APIGATEWAY_STAGE, APIGATEWAY_RESOURCE, APIGATEWAY_METHOD] def extract_resources(self, resources, collector, cwd=None): """ @@ -83,8 +78,7 @@ def _extract_cloud_formation_route(self, logical_id, api_resource, collector, cw if not body and not body_s3_location: # Swagger is not found anywhere. - LOG.debug("Skipping resource '%s'. Swagger document not found in Body and BodyS3Location", - logical_id) + LOG.debug("Skipping resource '%s'. Swagger document not found in Body and BodyS3Location", logical_id) return self.extract_swagger_route(logical_id, body, body_s3_location, binary_media, collector, cwd) @@ -114,7 +108,9 @@ def _extract_cloud_formation_stage(resources, stage_resource, collector): if rest_api_resource_type != CfnApiProvider.APIGATEWAY_RESTAPI: raise InvalidSamTemplateException( "The AWS::ApiGateway::Stage must have a valid RestApiId that points to RestApi resource {}".format( - logical_id)) + logical_id + ) + ) collector.stage_name = stage_name collector.stage_variables = stage_variables @@ -161,9 +157,9 @@ def _extract_cloud_formation_method(self, resources, logical_id, method_resource if content_handling == CfnApiProvider.METHOD_BINARY_TYPE and content_type: collector.add_binary_media_types(logical_id, [content_type]) - routes = Route(methods=[method], - function_name=self._get_integration_function_name(integration), - path=resource_path) + routes = Route( + methods=[method], function_name=self._get_integration_function_name(integration), path=resource_path + ) collector.add_routes(rest_api_id, [routes]) def resolve_resource_path(self, resources, resource, current_path): @@ -212,7 +208,6 @@ def _get_integration_function_name(integration): Lambda function name, if possible. None, if not. 
""" - if integration \ - and isinstance(integration, dict): + if integration and isinstance(integration, dict): # Integration must be "aws_proxy" otherwise we don't care about it return LambdaUri.get_function_name(integration.get("Uri")) diff --git a/samcli/commands/local/lib/cfn_base_api_provider.py b/samcli/commands/local/lib/cfn_base_api_provider.py index 8d0d4c3774..d0a769e703 100644 --- a/samcli/commands/local/lib/cfn_base_api_provider.py +++ b/samcli/commands/local/lib/cfn_base_api_provider.py @@ -55,9 +55,7 @@ def extract_swagger_route(self, logical_id, body, uri, binary_media, collector, cwd : str Optional working directory with respect to which we will resolve relative path to Swagger file """ - reader = SwaggerReader(definition_body=body, - definition_uri=uri, - working_dir=cwd) + reader = SwaggerReader(definition_body=body, definition_uri=uri, working_dir=cwd) swagger = reader.read() parser = SwaggerParser(swagger) routes = parser.get_routes() diff --git a/samcli/commands/local/lib/debug_context.py b/samcli/commands/local/lib/debug_context.py index 27056077dd..26bb692452 100644 --- a/samcli/commands/local/lib/debug_context.py +++ b/samcli/commands/local/lib/debug_context.py @@ -4,11 +4,7 @@ class DebugContext(object): - - def __init__(self, - debug_port=None, - debugger_path=None, - debug_args=None): + def __init__(self, debug_port=None, debugger_path=None, debug_args=None): self.debug_port = debug_port self.debugger_path = debugger_path diff --git a/samcli/commands/local/lib/exceptions.py b/samcli/commands/local/lib/exceptions.py index 32e8afe91d..ea5e9e9a31 100644 --- a/samcli/commands/local/lib/exceptions.py +++ b/samcli/commands/local/lib/exceptions.py @@ -7,6 +7,7 @@ class NoApisDefined(Exception): """ Raised when there are no APIs defined in the template """ + pass @@ -14,6 +15,7 @@ class OverridesNotWellDefinedError(Exception): """ Raised when the overrides file is invalid """ + pass @@ -21,6 +23,8 @@ class InvalidLayerReference(Exception): """ Raised when the LayerVersion LogicalId does not exist in the template """ + def __init__(self): - super(InvalidLayerReference, self).__init__("Layer References need to be of type " - "'AWS::Serverless::LayerVersion' or 'AWS::Lambda::LayerVersion'") + super(InvalidLayerReference, self).__init__( + "Layer References need to be of type " "'AWS::Serverless::LayerVersion' or 'AWS::Lambda::LayerVersion'" + ) diff --git a/samcli/commands/local/lib/generated_sample_events/events.py b/samcli/commands/local/lib/generated_sample_events/events.py index 9f159c70ab..a5d929401d 100644 --- a/samcli/commands/local/lib/generated_sample_events/events.py +++ b/samcli/commands/local/lib/generated_sample_events/events.py @@ -89,7 +89,7 @@ def base64_utf_encode(self, value): string: the base64_utf encoded value """ - return base64.b64encode(value.encode('utf8')).decode('utf-8') + return base64.b64encode(value.encode("utf8")).decode("utf-8") def generate_event(self, service_name, event_type, values_to_sub): """ @@ -111,12 +111,12 @@ def generate_event(self, service_name, event_type, values_to_sub): """ # set variables for easy calling - tags = self.event_mapping[service_name][event_type]['tags'] - values_to_sub = self.encode(tags, 'encoding', values_to_sub) + tags = self.event_mapping[service_name][event_type]["tags"] + values_to_sub = self.encode(tags, "encoding", values_to_sub) # construct the path to the Events json file this_folder = os.path.dirname(os.path.abspath(__file__)) - file_name = self.event_mapping[service_name][event_type]['filename'] + 
".json" + file_name = self.event_mapping[service_name][event_type]["filename"] + ".json" file_path = os.path.join(this_folder, "events", service_name, file_name) # open the file diff --git a/samcli/commands/local/lib/generated_sample_events/events/apigateway/AwsProxy.json b/samcli/commands/local/lib/generated_sample_events/events/apigateway/AwsProxy.json index 81fb8ad60a..90ff8ecfed 100644 --- a/samcli/commands/local/lib/generated_sample_events/events/apigateway/AwsProxy.json +++ b/samcli/commands/local/lib/generated_sample_events/events/apigateway/AwsProxy.json @@ -70,7 +70,7 @@ "US" ], "Host": [ - "0123456789.execute-api.{{dns_suffix}}" + "0123456789.execute-api.{{{dns_suffix}}}" ], "Upgrade-Insecure-Requests": [ "1" diff --git a/samcli/commands/local/lib/generated_sample_events/events/ses/SesEmailReceiving.json b/samcli/commands/local/lib/generated_sample_events/events/ses/SesEmailReceiving.json index 7b10760c26..ee4c214f96 100644 --- a/samcli/commands/local/lib/generated_sample_events/events/ses/SesEmailReceiving.json +++ b/samcli/commands/local/lib/generated_sample_events/events/ses/SesEmailReceiving.json @@ -27,7 +27,7 @@ }, { "name": "Received", - "value": "from mailer.example.com (mailer.example.com [203.0.113.1]) by inbound-smtp.{{dns_suffix}} with SMTP id o3vrnil0e2ic28trm7dfhrc2v0cnbeccl4nbp0g1 for johndoe@example.com; Wed, 07 Oct 2015 12:34:56 +0000 (UTC)" + "value": "from mailer.example.com (mailer.example.com [203.0.113.1]) by inbound-smtp.{{{dns_suffix}}} with SMTP id o3vrnil0e2ic28trm7dfhrc2v0cnbeccl4nbp0g1 for johndoe@example.com; Wed, 07 Oct 2015 12:34:56 +0000 (UTC)" }, { "name": "DKIM-Signature", diff --git a/samcli/commands/local/lib/local_api_service.py b/samcli/commands/local/lib/local_api_service.py index 441d6c3cbc..d1ed8f7bca 100644 --- a/samcli/commands/local/lib/local_api_service.py +++ b/samcli/commands/local/lib/local_api_service.py @@ -18,11 +18,7 @@ class LocalApiService(object): Lambda function. """ - def __init__(self, - lambda_invoke_context, - port, - host, - static_dir): + def __init__(self, lambda_invoke_context, port, host, static_dir): """ Initialize the local API service. @@ -38,9 +34,9 @@ def __init__(self, self.static_dir = static_dir self.cwd = lambda_invoke_context.get_cwd() - self.api_provider = ApiProvider(lambda_invoke_context.template, - parameter_overrides=lambda_invoke_context.parameter_overrides, - cwd=self.cwd) + self.api_provider = ApiProvider( + lambda_invoke_context.template, parameter_overrides=lambda_invoke_context.parameter_overrides, cwd=self.cwd + ) self.lambda_runner = lambda_invoke_context.local_lambda_runner self.stderr_stream = lambda_invoke_context.stderr @@ -62,21 +58,25 @@ def start(self): # contains the response to the API which is sent out as HTTP response. Only stderr needs to be printed # to the console or a log file. stderr from Docker container contains runtime logs and output of print # statements from the Lambda function - service = LocalApigwService(api=self.api_provider.api, - lambda_runner=self.lambda_runner, - static_dir=static_dir_path, - port=self.port, - host=self.host, - stderr=self.stderr_stream) + service = LocalApigwService( + api=self.api_provider.api, + lambda_runner=self.lambda_runner, + static_dir=static_dir_path, + port=self.port, + host=self.host, + stderr=self.stderr_stream, + ) service.create() # Print out the list of routes that will be mounted self._print_routes(self.api_provider.api.routes, self.host, self.port) - LOG.info("You can now browse to the above endpoints to invoke your functions. 
" - "You do not need to restart/reload SAM CLI while working on your functions, " - "changes will be reflected instantly/automatically. You only need to restart " - "SAM CLI if you update your AWS SAM template") + LOG.info( + "You can now browse to the above endpoints to invoke your functions. " + "You do not need to restart/reload SAM CLI while working on your functions, " + "changes will be reflected instantly/automatically. You only need to restart " + "SAM CLI if you update your AWS SAM template" + ) service.run() @@ -103,13 +103,8 @@ def _print_routes(routes, host, port): print_lines = [] for route in routes: - methods_str = "[{}]".format(', '.join(route.methods)) - output = "Mounting {} at http://{}:{}{} {}".format( - route.function_name, - host, - port, - route.path, - methods_str) + methods_str = "[{}]".format(", ".join(route.methods)) + output = "Mounting {} at http://{}:{}{} {}".format(route.function_name, host, port, route.path, methods_str) print_lines.append(output) LOG.info(output) diff --git a/samcli/commands/local/lib/local_lambda.py b/samcli/commands/local/lib/local_lambda.py index ac401fe6d5..58a1eebb98 100644 --- a/samcli/commands/local/lib/local_lambda.py +++ b/samcli/commands/local/lib/local_lambda.py @@ -20,16 +20,19 @@ class LocalLambdaRunner(object): Runs Lambda functions locally. This class is a wrapper around the `samcli.local` library which takes care of actually running the function on a Docker container. """ + MAX_DEBUG_TIMEOUT = 36000 # 10 hours in seconds - def __init__(self, - local_runtime, - function_provider, - cwd, - aws_profile=None, - aws_region=None, - env_vars_values=None, - debug_context=None): + def __init__( + self, + local_runtime, + function_provider, + cwd, + aws_profile=None, + aws_region=None, + env_vars_values=None, + debug_context=None, + ): """ Initializes the class @@ -79,8 +82,9 @@ def invoke(self, function_name, event, stdout=None, stderr=None): if not function: all_functions = [f.name for f in self.provider.get_all()] - available_function_message = "{} not found. Possible options in your template: {}"\ - .format(function_name, all_functions) + available_function_message = "{} not found. 
Possible options in your template: {}".format( + function_name, all_functions + ) LOG.info(available_function_message) raise FunctionNotFound("Unable to find a Function with name '%s'", function_name) @@ -125,14 +129,16 @@ def _get_invoke_config(self, function): if self.is_debugging(): function_timeout = self.MAX_DEBUG_TIMEOUT - return FunctionConfig(name=function.name, - runtime=function.runtime, - handler=function.handler, - code_abs_path=code_abs_path, - layers=function.layers, - memory=function.memory, - timeout=function_timeout, - env_vars=env_vars) + return FunctionConfig( + name=function.name, + runtime=function.runtime, + handler=function.handler, + code_abs_path=code_abs_path, + layers=function.layers, + memory=function.memory, + timeout=function_timeout, + env_vars=env_vars, + ) def _make_env_vars(self, function): """Returns the environment variables configuration for this function @@ -188,13 +194,15 @@ def _make_env_vars(self, function): shell_env = os.environ aws_creds = self.get_aws_creds() - return EnvironmentVariables(function.memory, - function.timeout, - function.handler, - variables=variables, - shell_env_values=shell_env, - override_values=overrides, - aws_creds=aws_creds) + return EnvironmentVariables( + function.memory, + function.timeout, + function.handler, + variables=variables, + shell_env_values=shell_env, + override_values=overrides, + aws_creds=aws_creds, + ) def get_aws_creds(self): """ @@ -223,17 +231,17 @@ def get_aws_creds(self): return result # After loading credentials, region name might be available here. - if hasattr(session, 'region_name') and session.region_name: + if hasattr(session, "region_name") and session.region_name: result["region"] = session.region_name # Only add the key, if its value is present - if hasattr(creds, 'access_key') and creds.access_key: + if hasattr(creds, "access_key") and creds.access_key: result["key"] = creds.access_key - if hasattr(creds, 'secret_key') and creds.secret_key: + if hasattr(creds, "secret_key") and creds.secret_key: result["secret"] = creds.secret_key - if hasattr(creds, 'token') and creds.token: + if hasattr(creds, "token") and creds.token: result["sessiontoken"] = creds.token return result diff --git a/samcli/commands/local/lib/local_lambda_service.py b/samcli/commands/local/lib/local_lambda_service.py index c56de5dd4b..45d3eb407b 100644 --- a/samcli/commands/local/lib/local_lambda_service.py +++ b/samcli/commands/local/lib/local_lambda_service.py @@ -14,10 +14,7 @@ class LocalLambdaService(object): that are defined in a SAM file. """ - def __init__(self, - lambda_invoke_context, - port, - host): + def __init__(self, lambda_invoke_context, port, host): """ Initialize the Local Lambda Invoke service. @@ -45,14 +42,15 @@ def start(self): # contains the response to the API which is sent out as HTTP response. Only stderr needs to be printed # to the console or a log file. stderr from Docker container contains runtime logs and output of print # statements from the Lambda function - service = LocalLambdaInvokeService(lambda_runner=self.lambda_runner, - port=self.port, - host=self.host, - stderr=self.stderr_stream) + service = LocalLambdaInvokeService( + lambda_runner=self.lambda_runner, port=self.port, host=self.host, stderr=self.stderr_stream + ) service.create() - LOG.info("Starting the Local Lambda Service. You can now invoke your Lambda Functions defined in your template" - " through the endpoint.") + LOG.info( + "Starting the Local Lambda Service. 
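
Reviewer note: the `get_aws_creds` hunk above only changes string quoting, but the `hasattr`-guarded pattern is worth seeing in isolation. A sketch that builds the same region/key/secret/sessiontoken dictionary from a fresh boto3 session (requires boto3; returns an empty dict when no credentials are configured):

```python
import boto3

def get_aws_creds():
    """Collect only the credential values boto3 actually resolved."""
    result = {}
    session = boto3.session.Session()
    creds = session.get_credentials()
    if not creds:
        return result

    if hasattr(session, "region_name") and session.region_name:
        result["region"] = session.region_name
    if hasattr(creds, "access_key") and creds.access_key:
        result["key"] = creds.access_key
    if hasattr(creds, "secret_key") and creds.secret_key:
        result["secret"] = creds.secret_key
    if hasattr(creds, "token") and creds.token:
        result["sessiontoken"] = creds.token
    return result

print(sorted(get_aws_creds().keys()))
```
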
You can now invoke your Lambda Functions defined in your template" + " through the endpoint." + ) service.run() diff --git a/samcli/commands/local/lib/provider.py b/samcli/commands/local/lib/provider.py index a61891dbfc..8aefb71107 100644 --- a/samcli/commands/local/lib/provider.py +++ b/samcli/commands/local/lib/provider.py @@ -10,36 +10,31 @@ from samcli.commands.local.cli_common.user_exceptions import InvalidLayerVersionArn, UnsupportedIntrinsic # Named Tuple to representing the properties of a Lambda Function -Function = namedtuple("Function", [ - # Function name or logical ID - "name", - - # Runtime/language - "runtime", - - # Memory in MBs - "memory", - - # Function Timeout in seconds - "timeout", - - # Name of the handler - "handler", - - # Path to the code. This could be a S3 URI or local path or a dictionary of S3 Bucket, Key, Version - "codeuri", - - # Environment variables. This is a dictionary with one key called Variables inside it. This contains the definition - # of environment variables - "environment", - - # Lambda Execution IAM Role ARN. In the future, this can be used by Local Lambda runtime to assume the IAM role - # to get credentials to run the container with. This gives a much higher fidelity simulation of cloud Lambda. - "rolearn", - - # List of Layers - "layers" -]) +Function = namedtuple( + "Function", + [ + # Function name or logical ID + "name", + # Runtime/language + "runtime", + # Memory in MBs + "memory", + # Function Timeout in seconds + "timeout", + # Name of the handler + "handler", + # Path to the code. This could be a S3 URI or local path or a dictionary of S3 Bucket, Key, Version + "codeuri", + # Environment variables. This is a dictionary with one key called Variables inside it. This contains the definition + # of environment variables + "environment", + # Lambda Execution IAM Role ARN. In the future, this can be used by Local Lambda runtime to assume the IAM role + # to get credentials to run the container with. This gives a much higher fidelity simulation of cloud Lambda. 
+ "rolearn", + # List of Layers + "layers", + ], +) class LayerVersion(object): @@ -91,7 +86,7 @@ def _compute_layer_version(is_defined_within_template, arn): return None try: - _, layer_version = arn.rsplit(':', 1) + _, layer_version = arn.rsplit(":", 1) layer_version = int(layer_version) except ValueError: raise InvalidLayerVersionArn(arn + " is an Invalid Layer Arn.") @@ -125,13 +120,13 @@ def _compute_layer_name(is_defined_within_template, arn): return arn try: - _, layer_name, layer_version = arn.rsplit(':', 2) + _, layer_name, layer_version = arn.rsplit(":", 2) except ValueError: raise InvalidLayerVersionArn(arn + " is an Invalid Layer Arn.") - return LayerVersion.LAYER_NAME_DELIMETER.join([layer_name, - layer_version, - hashlib.sha256(arn.encode('utf-8')).hexdigest()[0:10]]) + return LayerVersion.LAYER_NAME_DELIMETER.join( + [layer_name, layer_version, hashlib.sha256(arn.encode("utf-8")).hexdigest()[0:10]] + ) @property def arn(self): @@ -162,7 +157,7 @@ def version(self): @property def layer_arn(self): - layer_arn, _ = self.arn.rsplit(':', 1) + layer_arn, _ = self.arn.rsplit(":", 1) return layer_arn @codeuri.setter @@ -230,15 +225,15 @@ def binary_media_types(self): _CorsTuple = namedtuple("Cors", ["allow_origin", "allow_methods", "allow_headers", "max_age"]) -_CorsTuple.__new__.__defaults__ = (None, # Allow Origin defaults to None - None, # Allow Methods is optional and defaults to empty - None, # Allow Headers is optional and defaults to empty - None # MaxAge is optional and defaults to empty - ) +_CorsTuple.__new__.__defaults__ = ( + None, # Allow Origin defaults to None + None, # Allow Methods is optional and defaults to empty + None, # Allow Headers is optional and defaults to empty + None, # MaxAge is optional and defaults to empty +) class Cors(_CorsTuple): - @staticmethod def cors_to_headers(cors): """ @@ -254,10 +249,10 @@ def cors_to_headers(cors): if not cors: return {} headers = { - 'Access-Control-Allow-Origin': cors.allow_origin, - 'Access-Control-Allow-Methods': cors.allow_methods, - 'Access-Control-Allow-Headers': cors.allow_headers, - 'Access-Control-Max-Age': cors.max_age + "Access-Control-Allow-Origin": cors.allow_origin, + "Access-Control-Allow-Methods": cors.allow_methods, + "Access-Control-Allow-Headers": cors.allow_headers, + "Access-Control-Max-Age": cors.max_age, } # Filters out items in the headers dictionary that isn't empty. # This is required because the flask Headers dict will send an invalid 'None' string diff --git a/samcli/commands/local/lib/sam_api_provider.py b/samcli/commands/local/lib/sam_api_provider.py index 9fda35a934..01554e8828 100644 --- a/samcli/commands/local/lib/sam_api_provider.py +++ b/samcli/commands/local/lib/sam_api_provider.py @@ -15,10 +15,7 @@ class SamApiProvider(CfnBaseApiProvider): SERVERLESS_FUNCTION = "AWS::Serverless::Function" SERVERLESS_API = "AWS::Serverless::Api" - TYPES = [ - SERVERLESS_FUNCTION, - SERVERLESS_API - ] + TYPES = [SERVERLESS_FUNCTION, SERVERLESS_API] _FUNCTION_EVENT_TYPE_API = "Api" _FUNCTION_EVENT = "Events" _EVENT_PATH = "Path" @@ -85,8 +82,9 @@ def _extract_from_serverless_api(self, logical_id, api_resource, collector, cwd= stage_variables = properties.get("Variables") if not body and not uri: # Swagger is not found anywhere. - LOG.debug("Skipping resource '%s'. Swagger document not found in DefinitionBody and DefinitionUri", - logical_id) + LOG.debug( + "Skipping resource '%s'. 
Swagger document not found in DefinitionBody and DefinitionUri", logical_id + ) return self.extract_swagger_route(logical_id, body, uri, binary_media, collector, cwd=cwd) collector.stage_name = stage_name @@ -105,23 +103,58 @@ def extract_cors(self, cors_prop): """ cors = None if cors_prop and isinstance(cors_prop, dict): - allow_methods = cors_prop.get("AllowMethods", ','.join(sorted(Route.ANY_HTTP_METHODS))) - allow_methods = self.normalize_cors_allow_methods(allow_methods) + allow_methods = self._get_cors_prop(cors_prop, "AllowMethods") + if allow_methods: + allow_methods = self.normalize_cors_allow_methods(allow_methods) + else: + allow_methods = ",".join(sorted(Route.ANY_HTTP_METHODS)) + + allow_origin = self._get_cors_prop(cors_prop, "AllowOrigin") + allow_headers = self._get_cors_prop(cors_prop, "AllowHeaders") + max_age = self._get_cors_prop(cors_prop, "MaxAge") + cors = Cors( - allow_origin=cors_prop.get("AllowOrigin"), - allow_methods=allow_methods, - allow_headers=cors_prop.get("AllowHeaders"), - max_age=cors_prop.get("MaxAge") + allow_origin=allow_origin, allow_methods=allow_methods, allow_headers=allow_headers, max_age=max_age ) elif cors_prop and isinstance(cors_prop, string_types): + allow_origin = cors_prop + if not (allow_origin.startswith("'") and allow_origin.endswith("'")): + raise InvalidSamDocumentException( + "Cors Properties must be a quoted string " '(i.e. "\'*\'" is correct, but "*" is not).' + ) + allow_origin = allow_origin.strip("'") + cors = Cors( - allow_origin=cors_prop, - allow_methods=','.join(sorted(Route.ANY_HTTP_METHODS)), + allow_origin=allow_origin, + allow_methods=",".join(sorted(Route.ANY_HTTP_METHODS)), allow_headers=None, - max_age=None + max_age=None, ) return cors + @staticmethod + def _get_cors_prop(cors_dict, prop_name): + """ + Extract cors properties from dictionary and remove extra quotes. + + Parameters + ---------- + cors_dict : dict + Resource properties for Cors + + Return + ------ + A string with the extra quotes removed + """ + prop = cors_dict.get(prop_name) + if prop: + if (not isinstance(prop, string_types)) or (not (prop.startswith("'") and prop.endswith("'"))): + raise InvalidSamDocumentException( + "{} must be a quoted string " '(i.e. "\'value\'" is correct, but "value" is not).'.format(prop_name) + ) + prop = prop.strip("'") + return prop + @staticmethod def normalize_cors_allow_methods(allow_methods): """ @@ -137,7 +170,7 @@ def normalize_cors_allow_methods(allow_methods): A string with normalized route """ if allow_methods == "*": - return ','.join(sorted(Route.ANY_HTTP_METHODS)) + return ",".join(sorted(Route.ANY_HTTP_METHODS)) methods = allow_methods.split(",") normalized_methods = [] for method in methods: @@ -149,7 +182,7 @@ def normalize_cors_allow_methods(allow_methods): if "OPTIONS" not in normalized_methods: normalized_methods.append("OPTIONS") - return ','.join(sorted(normalized_methods)) + return ",".join(sorted(normalized_methods)) def _extract_routes_from_function(self, logical_id, function_resource, collector): """ @@ -219,9 +252,10 @@ def _convert_event_route(lambda_logical_id, event_properties): # This is still a dictionary. Something wrong with the template if isinstance(api_resource_id, dict): LOG.debug("Invalid RestApiId property of event %s", event_properties) - raise InvalidSamDocumentException("RestApiId property of resource with logicalId '{}' is invalid. 
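
Reviewer note: the `extract_cors` / `_get_cors_prop` / `normalize_cors_allow_methods` changes above are functional, not just formatting: CORS values must now be quoted strings, and `"*"` expands to every HTTP method. A simplified, self-contained sketch of both rules (the method list is assumed to mirror `Route.ANY_HTTP_METHODS`, and the real normalizer also validates each method):

```python
ANY_HTTP_METHODS = ["GET", "DELETE", "PUT", "POST", "HEAD", "OPTIONS", "PATCH"]  # assumption

def strip_quoted(prop_name, value):
    """Require the single-quoted form SAM expects (e.g. "'*'") and return the inner string."""
    if not (isinstance(value, str) and value.startswith("'") and value.endswith("'")):
        raise ValueError("{} must be a quoted string (i.e. \"'*'\" is correct, but \"*\" is not).".format(prop_name))
    return value.strip("'")

def normalize_allow_methods(allow_methods):
    """Expand "*", always include OPTIONS, and return a sorted comma-separated list."""
    if allow_methods == "*":
        return ",".join(sorted(ANY_HTTP_METHODS))
    methods = [m.strip().upper() for m in allow_methods.split(",")]
    if "OPTIONS" not in methods:
        methods.append("OPTIONS")
    return ",".join(sorted(methods))

print(strip_quoted("AllowOrigin", "'*'"))     # *
print(normalize_allow_methods("get, post"))   # GET,OPTIONS,POST
```
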
" - "It should either be a LogicalId string or a Ref of a Logical Id string" - .format(lambda_logical_id)) + raise InvalidSamDocumentException( + "RestApiId property of resource with logicalId '{}' is invalid. " + "It should either be a LogicalId string or a Ref of a Logical Id string".format(lambda_logical_id) + ) return api_resource_id, Route(path=path, methods=[method], function_name=lambda_logical_id) @@ -271,6 +305,10 @@ def merge_routes(collector): all_routes[key] = config result = set(all_routes.values()) # Assign to a set() to de-dupe - LOG.debug("Removed duplicates from '%d' Explicit APIs and '%d' Implicit APIs to produce '%d' APIs", - len(explicit_routes), len(implicit_routes), len(result)) + LOG.debug( + "Removed duplicates from '%d' Explicit APIs and '%d' Implicit APIs to produce '%d' APIs", + len(explicit_routes), + len(implicit_routes), + len(result), + ) return list(result) diff --git a/samcli/commands/local/lib/sam_base_provider.py b/samcli/commands/local/lib/sam_base_provider.py index 897fa06c56..293724c640 100644 --- a/samcli/commands/local/lib/sam_base_provider.py +++ b/samcli/commands/local/lib/sam_base_provider.py @@ -40,15 +40,11 @@ def get_template(template_dict, parameter_overrides=None): if template_dict: template_dict = SamTranslatorWrapper(template_dict).run_plugins() ResourceMetadataNormalizer.normalize(template_dict) - logical_id_translator = SamBaseProvider._get_parameter_values( - template_dict, parameter_overrides - ) + logical_id_translator = SamBaseProvider._get_parameter_values(template_dict, parameter_overrides) resolver = IntrinsicResolver( template=template_dict, - symbol_resolver=IntrinsicsSymbolTable( - logical_id_translator=logical_id_translator, template=template_dict - ), + symbol_resolver=IntrinsicsSymbolTable(logical_id_translator=logical_id_translator, template=template_dict), ) template_dict = resolver.resolve_template(ignore_errors=True) return template_dict diff --git a/samcli/commands/local/lib/sam_function_provider.py b/samcli/commands/local/lib/sam_function_provider.py index 7eed0c8a8b..4a0ef70f77 100644 --- a/samcli/commands/local/lib/sam_function_provider.py +++ b/samcli/commands/local/lib/sam_function_provider.py @@ -132,7 +132,7 @@ def _convert_sam_function_resource(name, resource_properties, layers): codeuri=codeuri, environment=resource_properties.get("Environment"), rolearn=resource_properties.get("Role"), - layers=layers + layers=layers, ) @staticmethod @@ -156,11 +156,14 @@ def _extract_sam_function_codeuri(name, resource_properties, code_property_key): """ codeuri = resource_properties.get(code_property_key, SamFunctionProvider._DEFAULT_CODEURI) # CodeUri can be a dictionary of S3 Bucket/Key or a S3 URI, neither of which are supported - if isinstance(codeuri, dict) or \ - (isinstance(codeuri, six.string_types) and codeuri.startswith("s3://")): + if isinstance(codeuri, dict) or (isinstance(codeuri, six.string_types) and codeuri.startswith("s3://")): codeuri = SamFunctionProvider._DEFAULT_CODEURI - LOG.warning("Lambda function '%s' has specified S3 location for CodeUri which is unsupported. " - "Using default value of '%s' instead", name, codeuri) + LOG.warning( + "Lambda function '%s' has specified S3 location for CodeUri which is unsupported. 
" + "Using default value of '%s' instead", + name, + codeuri, + ) return codeuri @staticmethod @@ -189,7 +192,7 @@ def _convert_lambda_function_resource(name, resource_properties, layers): # pyl codeuri=codeuri, environment=resource_properties.get("Environment"), rolearn=resource_properties.get("Role"), - layers=layers + layers=layers, ) @staticmethod @@ -239,13 +242,15 @@ def _parse_layer_info(list_of_layers, resources): """ layers = [] for layer in list_of_layers: - if layer == 'arn:aws:lambda:::awslayer:AmazonLinux1803': - LOG.debug('Skipped arn:aws:lambda:::awslayer:AmazonLinux1803 as the containers are AmazonLinux1803') + if layer == "arn:aws:lambda:::awslayer:AmazonLinux1803": + LOG.debug("Skipped arn:aws:lambda:::awslayer:AmazonLinux1803 as the containers are AmazonLinux1803") continue - if layer == 'arn:aws:lambda:::awslayer:AmazonLinux1703': - raise InvalidLayerVersionArn('Building and invoking locally only supports AmazonLinux1803. See ' - 'https://aws.amazon.com/blogs/compute/upcoming-updates-to-the-aws-lambda-execution-environment/ for more detials.') # noqa: E501 + if layer == "arn:aws:lambda:::awslayer:AmazonLinux1703": + raise InvalidLayerVersionArn( + "Building and invoking locally only supports AmazonLinux1803. See " + "https://aws.amazon.com/blogs/compute/upcoming-updates-to-the-aws-lambda-execution-environment/ for more detials." + ) # noqa: E501 # If the layer is a string, assume it is the arn if isinstance(layer, six.string_types): @@ -257,9 +262,10 @@ def _parse_layer_info(list_of_layers, resources): if isinstance(layer, dict) and layer.get("Ref"): layer_logical_id = layer.get("Ref") layer_resource = resources.get(layer_logical_id) - if not layer_resource or \ - layer_resource.get("Type", "") not in (SamFunctionProvider._SERVERLESS_LAYER, - SamFunctionProvider._LAMBDA_LAYER): + if not layer_resource or layer_resource.get("Type", "") not in ( + SamFunctionProvider._SERVERLESS_LAYER, + SamFunctionProvider._LAMBDA_LAYER, + ): raise InvalidLayerReference() layer_properties = layer_resource.get("Properties", {}) @@ -270,9 +276,9 @@ def _parse_layer_info(list_of_layers, resources): codeuri = SamFunctionProvider._extract_lambda_function_code(layer_properties, "Content") if resource_type == SamFunctionProvider._SERVERLESS_LAYER: - codeuri = SamFunctionProvider._extract_sam_function_codeuri(layer_logical_id, - layer_properties, - "ContentUri") + codeuri = SamFunctionProvider._extract_sam_function_codeuri( + layer_logical_id, layer_properties, "ContentUri" + ) layers.append(LayerVersion(layer_logical_id, codeuri)) diff --git a/samcli/commands/local/lib/swagger/integration_uri.py b/samcli/commands/local/lib/swagger/integration_uri.py index fd0c169b97..437e8d6ed7 100644 --- a/samcli/commands/local/lib/swagger/integration_uri.py +++ b/samcli/commands/local/lib/swagger/integration_uri.py @@ -21,22 +21,22 @@ class LambdaUri(object): # From an ARN like below, extract just the Lambda Function ARN # arn:aws:apigateway:us-west-2:lambda:path/2015-03-31/functions/arn:aws:lambda:us-west-2:123456789012:function:Calculator:ProdAlias/invocations # NOQA - _REGEX_GET_FUNCTION_ARN = r'.*/functions/(.*)/invocations' + _REGEX_GET_FUNCTION_ARN = r".*/functions/(.*)/invocations" # From Lamdba Function ARN like below, extract the function name. Note, the [^:] syntax is to capture only function # name and exclude the Alias name that can optionally follow it. 
# arn:aws:lambda:us-west-2:123456789012:function:Calculator:ProdAlias - _REGEX_GET_FUNCTION_NAME = r'.*:function:([^:]*)' + _REGEX_GET_FUNCTION_NAME = r".*:function:([^:]*)" # ${stageVariable.MyFunctionName} - _REGEX_STAGE_VARIABLE = r'\$\{stageVariables\..+\}' + _REGEX_STAGE_VARIABLE = r"\$\{stageVariables\..+\}" # Got this regex from Lambda's CreateFunction API docs - _REGEX_VALID_FUNCTION_NAME = r'([a-zA-Z0-9-_]+)' + _REGEX_VALID_FUNCTION_NAME = r"([a-zA-Z0-9-_]+)" # Get the function name from variables within Fn::Sub. Supports ${Resource.Arn} and ${Resource.Alias} # Ex: "arn:aws:apigateway:function/${LambdaFunction.Arn}/invocations => ${LambdaFunction.Arn} => LambdaFunction - _REGEX_SUB_FUNCTION_ARN = r'\$\{([A-Za-z0-9]+)\.(Arn|Alias)\}' + _REGEX_SUB_FUNCTION_ARN = r"\$\{([A-Za-z0-9]+)\.(Arn|Alias)\}" @staticmethod def get_function_name(integration_uri): @@ -166,8 +166,7 @@ def _get_function_name_from_arn(function_arn): return maybe_function_name # Some unknown format - LOG.debug("Ignoring integration ARN. Unable to parse Function Name from function arn %s", - function_arn) + LOG.debug("Ignoring integration ARN. Unable to parse Function Name from function arn %s", function_arn) @staticmethod def _resolve_fn_sub(uri_data): @@ -242,12 +241,14 @@ def _resolve_fn_sub(uri_data): # Now finally we got the ARN string. Let us try to resolve it. # We only support value of type ${XXX.Arn} or ${YYY.Alias}. The `.Alias` syntax is a SAM specific intrinsic # to get ARN of Lambda Alias when using DeploymentPreference - lambda_function_arn_template = r'arn:aws:lambda:${AWS::Region}:123456789012:function:\1' - - return re.sub(LambdaUri._REGEX_SUB_FUNCTION_ARN, # Find all ${blah} patterns - # Replace with Lambda Function ARN, where function name is from pattern - lambda_function_arn_template, - arn) + lambda_function_arn_template = r"arn:aws:lambda:${AWS::Region}:123456789012:function:\1" + + return re.sub( + LambdaUri._REGEX_SUB_FUNCTION_ARN, # Find all ${blah} patterns + # Replace with Lambda Function ARN, where function name is from pattern + lambda_function_arn_template, + arn, + ) @staticmethod def _is_sub_intrinsic(data): diff --git a/samcli/commands/local/lib/swagger/parser.py b/samcli/commands/local/lib/swagger/parser.py index 072e71c378..9f5363570f 100644 --- a/samcli/commands/local/lib/swagger/parser.py +++ b/samcli/commands/local/lib/swagger/parser.py @@ -74,8 +74,11 @@ def get_routes(self): function_name = self._get_integration_function_name(method_config) if not function_name: - LOG.debug("Lambda function integration not found in Swagger document at path='%s' method='%s'", - full_path, method) + LOG.debug( + "Lambda function integration not found in Swagger document at path='%s' method='%s'", + full_path, + method, + ) continue if method.lower() == self._ANY_METHOD_EXTENSION_KEY: @@ -108,8 +111,6 @@ def _get_integration_function_name(self, method_config): integration = method_config[self._INTEGRATION_KEY] - if integration \ - and isinstance(integration, dict) \ - and integration.get("type") == IntegrationType.aws_proxy.value: + if integration and isinstance(integration, dict) and integration.get("type") == IntegrationType.aws_proxy.value: # Integration must be "aws_proxy" otherwise we don't care about it return LambdaUri.get_function_name(integration.get("uri")) diff --git a/samcli/commands/local/lib/swagger/reader.py b/samcli/commands/local/lib/swagger/reader.py index 02c2c1edb7..3ef417e858 100644 --- a/samcli/commands/local/lib/swagger/reader.py +++ 
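
Reviewer note: the `LambdaUri` regex constants above only change quoting, but they read much more easily next to a worked example. A stdlib-only sketch that walks the documented two-step extraction, using the sample ARN from the comments:

```python
import re

# Same patterns as LambdaUri, reproduced here for a standalone demo
REGEX_GET_FUNCTION_ARN = r".*/functions/(.*)/invocations"
REGEX_GET_FUNCTION_NAME = r".*:function:([^:]*)"

uri = (
    "arn:aws:apigateway:us-west-2:lambda:path/2015-03-31/functions/"
    "arn:aws:lambda:us-west-2:123456789012:function:Calculator:ProdAlias/invocations"
)

# Step 1: pull the embedded Lambda function ARN out of the integration URI
function_arn = re.match(REGEX_GET_FUNCTION_ARN, uri).group(1)

# Step 2: pull the function name; [^:] stops the capture before the optional alias
print(re.match(REGEX_GET_FUNCTION_NAME, function_arn).group(1))  # Calculator
```
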
b/samcli/commands/local/lib/swagger/reader.py @@ -205,7 +205,7 @@ def _download_from_s3(bucket, key, version=None): botocore.exceptions.ClientError if we were unable to download the file from S3 """ - s3 = boto3.client('s3') + s3 = boto3.client("s3") extra_args = {} if version: @@ -213,9 +213,7 @@ def _download_from_s3(bucket, key, version=None): with tempfile.TemporaryFile() as fp: try: - s3.download_fileobj( - bucket, key, fp, - ExtraArgs=extra_args) + s3.download_fileobj(bucket, key, fp, ExtraArgs=extra_args) # go to start of file fp.seek(0) @@ -224,8 +222,9 @@ def _download_from_s3(bucket, key, version=None): return fp.read() except botocore.exceptions.ClientError: - LOG.error("Unable to download Swagger document from S3 Bucket=%s Key=%s Version=%s", - bucket, key, version) + LOG.error( + "Unable to download Swagger document from S3 Bucket=%s Key=%s Version=%s", bucket, key, version + ) raise @staticmethod @@ -259,11 +258,7 @@ def _parse_s3_location(location): if isinstance(location, dict): # This is a S3 Location dictionary. Just grab the fields. It is very well possible that # this dictionary has none of the fields we expect. Return None if the fields don't exist. - bucket, key, version = ( - location.get("Bucket"), - location.get("Key"), - location.get("Version") - ) + bucket, key, version = (location.get("Bucket"), location.get("Key"), location.get("Version")) elif isinstance(location, string_types) and location.startswith("s3://"): # This is a S3 URI. Parse it using a standard URI parser to extract the components @@ -272,11 +267,11 @@ def _parse_s3_location(location): query = parse_qs(parsed.query) bucket = parsed.netloc - key = parsed.path.lstrip('/') # Leading '/' messes with S3 APIs. Remove it. + key = parsed.path.lstrip("/") # Leading '/' messes with S3 APIs. Remove it. # If there is a query string that has a single versionId field, # set the object version and return - if query and 'versionId' in query and len(query['versionId']) == 1: - version = query['versionId'][0] + if query and "versionId" in query and len(query["versionId"]) == 1: + version = query["versionId"][0] return bucket, key, version diff --git a/samcli/commands/local/start_api/cli.py b/samcli/commands/local/start_api/cli.py index a2eaac6003..a967ec1a3b 100644 --- a/samcli/commands/local/start_api/cli.py +++ b/samcli/commands/local/start_api/cli.py @@ -32,37 +32,84 @@ """ -@click.command("start-api", - help=HELP_TEXT, - short_help="Sets up a local endpoint you can use to test your API. Supports hot-reloading " - "so you don't need to restart this service when you make changes to your function.") +@click.command( + "start-api", + help=HELP_TEXT, + short_help="Sets up a local endpoint you can use to test your API. Supports hot-reloading " + "so you don't need to restart this service when you make changes to your function.", +) @service_common_options(3000) -@click.option("--static-dir", "-s", - default="public", - help="Any static assets (e.g. CSS/Javascript/HTML) files located in this directory " - "will be presented at /") +@click.option( + "--static-dir", + "-s", + default="public", + help="Any static assets (e.g. 
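
Reviewer note: `_parse_s3_location` above accepts either a Bucket/Key dictionary or an `s3://` URI with an optional `versionId` query string. A stdlib-only sketch of the URI branch (the real code goes through `six.moves.urllib.parse` for Python 2 compatibility):

```python
from urllib.parse import urlparse, parse_qs

def parse_s3_uri(location):
    """Return (bucket, key, version) for an s3:// URI, mirroring the reader's parsing."""
    parsed = urlparse(location)
    query = parse_qs(parsed.query)

    bucket = parsed.netloc
    key = parsed.path.lstrip("/")  # a leading '/' confuses the S3 APIs

    version = None
    if query and "versionId" in query and len(query["versionId"]) == 1:
        version = query["versionId"][0]
    return bucket, key, version

print(parse_s3_uri("s3://my-bucket/swagger/api.yaml?versionId=abc123"))
# ('my-bucket', 'swagger/api.yaml', 'abc123')
```
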
CSS/Javascript/HTML) files located in this directory " "will be presented at /", +) @invoke_common_options @cli_framework_options @aws_creds_options # pylint: disable=R0914 @pass_context @track_command -def cli(ctx, - # start-api Specific Options - host, port, static_dir, - - # Common Options for Lambda Invoke - template, env_vars, debug_port, debug_args, debugger_path, docker_volume_basedir, - docker_network, log_file, layer_cache_basedir, skip_pull_image, force_image_build, parameter_overrides): +def cli( + ctx, + # start-api Specific Options + host, + port, + static_dir, + # Common Options for Lambda Invoke + template, + env_vars, + debug_port, + debug_args, + debugger_path, + docker_volume_basedir, + docker_network, + log_file, + layer_cache_basedir, + skip_pull_image, + force_image_build, + parameter_overrides, +): # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing - do_cli(ctx, host, port, static_dir, template, env_vars, debug_port, debug_args, debugger_path, - docker_volume_basedir, docker_network, log_file, layer_cache_basedir, skip_pull_image, force_image_build, - parameter_overrides) # pragma: no cover - - -def do_cli(ctx, host, port, static_dir, template, env_vars, debug_port, debug_args, # pylint: disable=R0914 - debugger_path, docker_volume_basedir, docker_network, log_file, layer_cache_basedir, skip_pull_image, - force_image_build, parameter_overrides): + do_cli( + ctx, + host, + port, + static_dir, + template, + env_vars, + debug_port, + debug_args, + debugger_path, + docker_volume_basedir, + docker_network, + log_file, + layer_cache_basedir, + skip_pull_image, + force_image_build, + parameter_overrides, + ) # pragma: no cover + + +def do_cli( # pylint: disable=R0914 + ctx, + host, + port, + static_dir, + template, + env_vars, + debug_port, + debug_args, + debugger_path, + docker_volume_basedir, + docker_network, + log_file, + layer_cache_basedir, + skip_pull_image, + force_image_build, + parameter_overrides, +): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ @@ -73,32 +120,33 @@ def do_cli(ctx, host, port, static_dir, template, env_vars, debug_port, debug_ar # Handler exception raised by the processor for invalid args and print errors try: - with InvokeContext(template_file=template, - function_identifier=None, # Don't scope to one particular function - env_vars_file=env_vars, - docker_volume_basedir=docker_volume_basedir, - docker_network=docker_network, - log_file=log_file, - skip_pull_image=skip_pull_image, - debug_port=debug_port, - debug_args=debug_args, - debugger_path=debugger_path, - parameter_overrides=parameter_overrides, - layer_cache_basedir=layer_cache_basedir, - force_image_build=force_image_build, - aws_region=ctx.region, - aws_profile=ctx.profile) as invoke_context: - - service = LocalApiService(lambda_invoke_context=invoke_context, - port=port, - host=host, - static_dir=static_dir) + with InvokeContext( + template_file=template, + function_identifier=None, # Don't scope to one particular function + env_vars_file=env_vars, + docker_volume_basedir=docker_volume_basedir, + docker_network=docker_network, + log_file=log_file, + skip_pull_image=skip_pull_image, + debug_port=debug_port, + debug_args=debug_args, + debugger_path=debugger_path, + parameter_overrides=parameter_overrides, + layer_cache_basedir=layer_cache_basedir, + force_image_build=force_image_build, + aws_region=ctx.region, + aws_profile=ctx.profile, + ) as invoke_context: + + service = 
LocalApiService(lambda_invoke_context=invoke_context, port=port, host=host, static_dir=static_dir) service.start() except NoApisDefined: raise UserException("Template does not have any APIs connected to Lambda functions") - except (InvalidSamDocumentException, - OverridesNotWellDefinedError, - InvalidLayerReference, - DebuggingNotSupported) as ex: + except ( + InvalidSamDocumentException, + OverridesNotWellDefinedError, + InvalidLayerReference, + DebuggingNotSupported, + ) as ex: raise UserException(str(ex)) diff --git a/samcli/commands/local/start_lambda/cli.py b/samcli/commands/local/start_lambda/cli.py index a5807a6b50..96dc14c6e2 100644 --- a/samcli/commands/local/start_lambda/cli.py +++ b/samcli/commands/local/start_lambda/cli.py @@ -51,33 +51,74 @@ """ -@click.command("start-lambda", - help=HELP_TEXT, - short_help="Starts a local endpoint you can use to invoke your local Lambda functions.") +@click.command( + "start-lambda", + help=HELP_TEXT, + short_help="Starts a local endpoint you can use to invoke your local Lambda functions.", +) @service_common_options(3001) @invoke_common_options @cli_framework_options @aws_creds_options @pass_context @track_command -def cli(ctx, # pylint: disable=R0914 - # start-lambda Specific Options - host, port, - - # Common Options for Lambda Invoke - template, env_vars, debug_port, debug_args, debugger_path, docker_volume_basedir, - docker_network, log_file, layer_cache_basedir, skip_pull_image, force_image_build, - parameter_overrides): # pylint: disable=R0914 +def cli( + ctx, # pylint: disable=R0914 + # start-lambda Specific Options + host, + port, + # Common Options for Lambda Invoke + template, + env_vars, + debug_port, + debug_args, + debugger_path, + docker_volume_basedir, + docker_network, + log_file, + layer_cache_basedir, + skip_pull_image, + force_image_build, + parameter_overrides, +): # pylint: disable=R0914 # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing - do_cli(ctx, host, port, template, env_vars, debug_port, debug_args, debugger_path, docker_volume_basedir, - docker_network, log_file, layer_cache_basedir, skip_pull_image, force_image_build, - parameter_overrides) # pragma: no cover - - -def do_cli(ctx, host, port, template, env_vars, debug_port, debug_args, # pylint: disable=R0914 - debugger_path, docker_volume_basedir, docker_network, log_file, layer_cache_basedir, skip_pull_image, - force_image_build, parameter_overrides): + do_cli( + ctx, + host, + port, + template, + env_vars, + debug_port, + debug_args, + debugger_path, + docker_volume_basedir, + docker_network, + log_file, + layer_cache_basedir, + skip_pull_image, + force_image_build, + parameter_overrides, + ) # pragma: no cover + + +def do_cli( # pylint: disable=R0914 + ctx, + host, + port, + template, + env_vars, + debug_port, + debug_args, + debugger_path, + docker_volume_basedir, + docker_network, + log_file, + layer_cache_basedir, + skip_pull_image, + force_image_build, + parameter_overrides, +): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ @@ -88,29 +129,31 @@ def do_cli(ctx, host, port, template, env_vars, debug_port, debug_args, # pylin # Handler exception raised by the processor for invalid args and print errors try: - with InvokeContext(template_file=template, - function_identifier=None, # Don't scope to one particular function - env_vars_file=env_vars, - docker_volume_basedir=docker_volume_basedir, - docker_network=docker_network, - log_file=log_file, - skip_pull_image=skip_pull_image, - debug_port=debug_port, - debug_args=debug_args, - debugger_path=debugger_path, - parameter_overrides=parameter_overrides, - layer_cache_basedir=layer_cache_basedir, - force_image_build=force_image_build, - aws_region=ctx.region, - aws_profile=ctx.profile) as invoke_context: - - service = LocalLambdaService(lambda_invoke_context=invoke_context, - port=port, - host=host) + with InvokeContext( + template_file=template, + function_identifier=None, # Don't scope to one particular function + env_vars_file=env_vars, + docker_volume_basedir=docker_volume_basedir, + docker_network=docker_network, + log_file=log_file, + skip_pull_image=skip_pull_image, + debug_port=debug_port, + debug_args=debug_args, + debugger_path=debugger_path, + parameter_overrides=parameter_overrides, + layer_cache_basedir=layer_cache_basedir, + force_image_build=force_image_build, + aws_region=ctx.region, + aws_profile=ctx.profile, + ) as invoke_context: + + service = LocalLambdaService(lambda_invoke_context=invoke_context, port=port, host=host) service.start() - except (InvalidSamDocumentException, - OverridesNotWellDefinedError, - InvalidLayerReference, - DebuggingNotSupported) as ex: + except ( + InvalidSamDocumentException, + OverridesNotWellDefinedError, + InvalidLayerReference, + DebuggingNotSupported, + ) as ex: raise UserException(str(ex)) diff --git a/samcli/commands/logs/command.py b/samcli/commands/logs/command.py index 6342fa0365..3bdb90b18d 100644 --- a/samcli/commands/logs/command.py +++ b/samcli/commands/logs/command.py @@ -33,43 +33,48 @@ @click.command("logs", help=HELP_TEXT, short_help="Fetch logs for a function") -@click.option("--name", "-n", - required=True, - help="Name of your AWS Lambda function. 
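
Reviewer note: the start-lambda wiring above ultimately exposes a Lambda-compatible HTTP endpoint. A small sketch of calling it with boto3, assuming the service is running on the default port 3001 and that `HelloWorldFunction` is a hypothetical logical ID from the template:

```python
import boto3

# Point the regular Lambda client at the locally running endpoint (sam local start-lambda).
lambda_client = boto3.client(
    "lambda",
    endpoint_url="http://127.0.0.1:3001",
    region_name="us-east-1",
    aws_access_key_id="dummy",       # the local endpoint does not validate credentials
    aws_secret_access_key="dummy",
)

response = lambda_client.invoke(
    FunctionName="HelloWorldFunction",  # hypothetical logical ID
    Payload=b'{"key": "value"}',
)
print(response["Payload"].read().decode("utf-8"))
```
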
If this function is a part of a CloudFormation stack, " - "this can be the LogicalID of function resource in the CloudFormation/SAM template.") -@click.option("--stack-name", - default=None, - help="Name of the AWS CloudFormation stack that the function is a part of.") -@click.option("--filter", - default=None, - help="You can specify an expression to quickly find logs that match terms, phrases or values in " - "your log events. This could be a simple keyword (e.g. \"error\") or a pattern " - "supported by AWS CloudWatch Logs. See the AWS CloudWatch Logs documentation for the syntax " - "https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html") -@click.option("--start-time", "-s", - default='10m ago', - help="Fetch logs starting at this time. Time can be relative values like '5mins ago', 'yesterday' or " - "formatted timestamp like '2018-01-01 10:10:10'. Defaults to '10mins ago'.") -@click.option("--end-time", "-e", - default=None, - help="Fetch logs up to this time. Time can be relative values like '5mins ago', 'tomorrow' or " - "formatted timestamp like '2018-01-01 10:10:10'") -@click.option("--tail", "-t", - is_flag=True, - help="Tail the log output. This will ignore the end time argument and continue to fetch logs as they " - "become available.") +@click.option( + "--name", + "-n", + required=True, + help="Name of your AWS Lambda function. If this function is a part of a CloudFormation stack, " + "this can be the LogicalID of function resource in the CloudFormation/SAM template.", +) +@click.option("--stack-name", default=None, help="Name of the AWS CloudFormation stack that the function is a part of.") +@click.option( + "--filter", + default=None, + help="You can specify an expression to quickly find logs that match terms, phrases or values in " + 'your log events. This could be a simple keyword (e.g. "error") or a pattern ' + "supported by AWS CloudWatch Logs. See the AWS CloudWatch Logs documentation for the syntax " + "https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html", +) +@click.option( + "--start-time", + "-s", + default="10m ago", + help="Fetch logs starting at this time. Time can be relative values like '5mins ago', 'yesterday' or " + "formatted timestamp like '2018-01-01 10:10:10'. Defaults to '10mins ago'.", +) +@click.option( + "--end-time", + "-e", + default=None, + help="Fetch logs up to this time. Time can be relative values like '5mins ago', 'tomorrow' or " + "formatted timestamp like '2018-01-01 10:10:10'", +) +@click.option( + "--tail", + "-t", + is_flag=True, + help="Tail the log output. This will ignore the end time argument and continue to fetch logs as they " + "become available.", +) @cli_framework_options @aws_creds_options @pass_context @track_command -def cli(ctx, - name, - stack_name, - filter, # pylint: disable=redefined-builtin - tail, - start_time, - end_time - ): +def cli(ctx, name, stack_name, filter, tail, start_time, end_time): # pylint: disable=redefined-builtin # All logic must be implemented in the ``do_cli`` method. 
This helps with easy unit testing do_cli(name, stack_name, filter, tail, start_time, end_time) # pragma: no cover @@ -82,23 +87,27 @@ def do_cli(function_name, stack_name, filter_pattern, tailing, start_time, end_t LOG.debug("'logs' command is called") - with LogsCommandContext(function_name, - stack_name=stack_name, - filter_pattern=filter_pattern, - start_time=start_time, - end_time=end_time, - # output_file is not yet supported by CLI - output_file=None) as context: + with LogsCommandContext( + function_name, + stack_name=stack_name, + filter_pattern=filter_pattern, + start_time=start_time, + end_time=end_time, + # output_file is not yet supported by CLI + output_file=None, + ) as context: if tailing: - events_iterable = context.fetcher.tail(context.log_group_name, - filter_pattern=context.filter_pattern, - start=context.start_time) + events_iterable = context.fetcher.tail( + context.log_group_name, filter_pattern=context.filter_pattern, start=context.start_time + ) else: - events_iterable = context.fetcher.fetch(context.log_group_name, - filter_pattern=context.filter_pattern, - start=context.start_time, - end=context.end_time) + events_iterable = context.fetcher.fetch( + context.log_group_name, + filter_pattern=context.filter_pattern, + start=context.start_time, + end=context.end_time, + ) formatted_events = context.formatter.do_format(events_iterable) diff --git a/samcli/commands/logs/logs_context.py b/samcli/commands/logs/logs_context.py index 31bad78511..b2cd8d5138 100644 --- a/samcli/commands/logs/logs_context.py +++ b/samcli/commands/logs/logs_context.py @@ -27,13 +27,9 @@ class LogsCommandContext(object): context.fetcher.fetch(...) """ - def __init__(self, - function_name, - stack_name=None, - filter_pattern=None, - start_time=None, - end_time=None, - output_file=None): + def __init__( + self, function_name, stack_name=None, filter_pattern=None, start_time=None, end_time=None, output_file=None + ): """ Initializes the context @@ -69,8 +65,8 @@ def __init__(self, # No colors when we write to a file. Otherwise use colors self._must_print_colors = not self._output_file - self._logs_client = boto3.client('logs') - self._cfn_client = boto3.client('cloudformation') + self._logs_client = boto3.client("logs") + self._cfn_client = boto3.client("cloudformation") def __enter__(self): """ @@ -110,11 +106,9 @@ def formatter(self): """ formatter_chain = [ LambdaLogMsgFormatters.colorize_errors, - # Format JSON "before" highlighting the keywords. 
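
Reviewer note: the `formatter` property above builds a chain of formatters (colorize errors, pretty-print JSON, highlight keywords) that is applied to each log event in order; JSON is formatted before highlighting so escape codes never break the parse. A much-simplified, stdlib-only sketch of that chaining idea with hypothetical stand-in formatters:

```python
import json

def format_json(message):
    """Pretty-print the message if it happens to be JSON; otherwise return it unchanged."""
    try:
        return json.dumps(json.loads(message), indent=2)
    except ValueError:
        return message

def highlight_keyword(keyword):
    def _highlight(message):
        # Stand-in for the ANSI highlighting the real KeywordHighlighter does.
        return message.replace(keyword, "**{}**".format(keyword)) if keyword else message
    return _highlight

formatter_chain = [format_json, highlight_keyword("error")]

event = '{"level": "error", "msg": "boom"}'
for formatter in formatter_chain:
    event = formatter(event)
print(event)
```
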
Otherwise, JSON will be invalid from all the # ANSI color codes and fail to pretty print JSONMsgFormatter.format_json, - KeywordHighlighter(self._filter_pattern).highlight_keywords, ] @@ -122,11 +116,11 @@ def formatter(self): @property def start_time(self): - return self._parse_time(self._start_time, 'start-time') + return self._parse_time(self._start_time, "start-time") @property def end_time(self): - return self._parse_time(self._end_time, 'end-time') + return self._parse_time(self._end_time, "end-time") @property def log_group_name(self): @@ -143,8 +137,12 @@ def log_group_name(self): function_id = self._function_name if self._stack_name: function_id = self._get_resource_id_from_stack(self._cfn_client, self._stack_name, self._function_name) - LOG.debug("Function with LogicalId '%s' in stack '%s' resolves to actual physical ID '%s'", - self._function_name, self._stack_name, function_id) + LOG.debug( + "Function with LogicalId '%s' in stack '%s' resolves to actual physical ID '%s'", + self._function_name, + self._stack_name, + function_id, + ) return LogGroupProvider.for_lambda_function(function_id) @@ -185,7 +183,7 @@ def _setup_output_file(output_file): if not output_file: return None - return open(output_file, 'wb') + return open(output_file, "wb") @staticmethod def _parse_time(time_str, property_name): @@ -247,8 +245,11 @@ def _get_resource_id_from_stack(cfn_client, stack_name, logical_id): If the stack or resource does not exist """ - LOG.debug("Getting resource's PhysicalId from AWS CloudFormation stack. StackName=%s, LogicalId=%s", - stack_name, logical_id) + LOG.debug( + "Getting resource's PhysicalId from AWS CloudFormation stack. StackName=%s, LogicalId=%s", + stack_name, + logical_id, + ) try: response = cfn_client.describe_stack_resource(StackName=stack_name, LogicalResourceId=logical_id) @@ -257,8 +258,13 @@ def _get_resource_id_from_stack(cfn_client, stack_name, logical_id): return response["StackResourceDetail"]["PhysicalResourceId"] except botocore.exceptions.ClientError as ex: - LOG.debug("Unable to fetch resource name from CloudFormation Stack: " - "StackName=%s, ResourceLogicalId=%s, Response=%s", stack_name, logical_id, ex.response) + LOG.debug( + "Unable to fetch resource name from CloudFormation Stack: " + "StackName=%s, ResourceLogicalId=%s, Response=%s", + stack_name, + logical_id, + ex.response, + ) # The exception message already has a well formatted error message that we can surface to user raise UserException(str(ex)) diff --git a/samcli/commands/package/__init__.py b/samcli/commands/package/__init__.py index 110ae5c16c..03864917fa 100644 --- a/samcli/commands/package/__init__.py +++ b/samcli/commands/package/__init__.py @@ -30,16 +30,19 @@ @click.command("package", short_help=SHORT_HELP, context_settings={"ignore_unknown_options": True}, help=HELP_TEXT) -@click.option('--template-file', - default=_TEMPLATE_OPTION_DEFAULT_VALUE, - type=click.Path(), - callback=partial(get_or_default_template_file_name, include_build=True), - show_default=False, - help="The path where your AWS SAM template is located") -@click.option('--s3-bucket', - required=True, - help="The name of the S3 bucket where this command uploads the artifacts that " - "are referenced in your template.") +@click.option( + "--template-file", + default=_TEMPLATE_OPTION_DEFAULT_VALUE, + type=click.Path(), + callback=partial(get_or_default_template_file_name, include_build=True), + show_default=False, + help="The path where your AWS SAM template is located", +) +@click.option( + "--s3-bucket", + 
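
Reviewer note: `_get_resource_id_from_stack` above resolves the template LogicalId to the deployed function's physical ID before the log group name is built. A sketch of the same lookup with boto3, assuming AWS credentials and a deployed stack (the final call is left commented out so the snippet stays runnable offline); the `/aws/lambda/<name>` convention is what `LogGroupProvider.for_lambda_function` produces:

```python
import boto3
import botocore.exceptions

def resolve_log_group(stack_name, logical_id):
    """Translate a template LogicalId into the Lambda function's CloudWatch log group name."""
    cfn = boto3.client("cloudformation")
    try:
        response = cfn.describe_stack_resource(StackName=stack_name, LogicalResourceId=logical_id)
    except botocore.exceptions.ClientError as ex:
        raise RuntimeError(str(ex))

    physical_id = response["StackResourceDetail"]["PhysicalResourceId"]
    return "/aws/lambda/{}".format(physical_id)

# print(resolve_log_group("my-sam-stack", "HelloWorldFunction"))  # hypothetical stack/resource
```
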
required=True, + help="The name of the S3 bucket where this command uploads the artifacts that " "are referenced in your template.", +) @click.argument("args", nargs=-1, type=click.UNPROCESSED) @common_options @pass_context @@ -52,7 +55,7 @@ def cli(ctx, args, template_file, s3_bucket): def do_cli(args, template_file, s3_bucket): - args = args + ('--s3-bucket', s3_bucket) + args = args + ("--s3-bucket", s3_bucket) try: execute_command("package", args, template_file) diff --git a/samcli/commands/publish/command.py b/samcli/commands/publish/command.py index 9020b750b4..5a42c2ce5d 100644 --- a/samcli/commands/publish/command.py +++ b/samcli/commands/publish/command.py @@ -18,8 +18,8 @@ LOG = logging.getLogger(__name__) -SAM_PUBLISH_DOC = "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-template-publishing-applications.html" # noqa -SAM_PACKAGE_DOC = "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-cli-command-reference-sam-package.html" # noqa +SAM_PUBLISH_DOC = "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-template-publishing-applications.html" # noqa +SAM_PACKAGE_DOC = "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-cli-command-reference-sam-package.html" # noqa HELP_TEXT = """ Use this command to publish a packaged AWS SAM template to the AWS Serverless Application Repository to share within your team, @@ -34,16 +34,18 @@ -------- To publish an application $ sam publish -t packaged.yaml --region -""".format(SAM_PUBLISH_DOC) +""".format( + SAM_PUBLISH_DOC +) SHORT_HELP = "Publish a packaged AWS SAM template to the AWS Serverless Application Repository." SERVERLESSREPO_CONSOLE_URL = "https://console.aws.amazon.com/serverlessrepo/home?region={}#/published-applications/{}" SEMANTIC_VERSION_HELP = "Optional. The value provided here overrides SemanticVersion in the template metadata." -SEMANTIC_VERSION = 'SemanticVersion' +SEMANTIC_VERSION = "SemanticVersion" @click.command("publish", help=HELP_TEXT, short_help=SHORT_HELP) @template_common_option -@click.option('--semantic-version', help=SEMANTIC_VERSION_HELP) +@click.option("--semantic-version", help=SEMANTIC_VERSION_HELP) @aws_creds_options @cli_framework_options @pass_context @@ -59,7 +61,7 @@ def do_cli(ctx, template, semantic_version): try: template_data = get_template_data(template) except ValueError as ex: - click.secho("Publish Failed", fg='red') + click.secho("Publish Failed", fg="red") raise UserException(str(ex)) # Override SemanticVersion in template metadata when provided in command input @@ -71,17 +73,18 @@ def do_cli(ctx, template, semantic_version): click.secho("Publish Succeeded", fg="green") click.secho(_gen_success_message(publish_output)) except InvalidS3UriError: - click.secho("Publish Failed", fg='red') + click.secho("Publish Failed", fg="red") raise UserException( "Your SAM template contains invalid S3 URIs. Please make sure that you have uploaded application " - "artifacts to S3 by packaging the template. See more details in {}".format(SAM_PACKAGE_DOC)) + "artifacts to S3 by packaging the template. 
See more details in {}".format(SAM_PACKAGE_DOC) + ) except ServerlessRepoError as ex: - click.secho("Publish Failed", fg='red') + click.secho("Publish Failed", fg="red") LOG.debug("Failed to publish application to serverlessrepo", exc_info=True) - error_msg = '{}\nPlease follow the instructions in {}'.format(str(ex), SAM_PUBLISH_DOC) + error_msg = "{}\nPlease follow the instructions in {}".format(str(ex), SAM_PUBLISH_DOC) raise UserException(error_msg) - application_id = publish_output.get('application_id') + application_id = publish_output.get("application_id") _print_console_link(ctx.region, application_id) @@ -99,10 +102,10 @@ def _gen_success_message(publish_output): str Detailed success message """ - application_id = publish_output.get('application_id') - details = json.dumps(publish_output.get('details'), indent=2) + application_id = publish_output.get("application_id") + details = json.dumps(publish_output.get("details"), indent=2) - if CREATE_APPLICATION in publish_output.get('actions'): + if CREATE_APPLICATION in publish_output.get("actions"): return "Created new application with the following metadata:\n{}".format(details) return 'The following metadata of application "{}" has been updated:\n{}'.format(application_id, details) @@ -123,6 +126,6 @@ def _print_console_link(region, application_id): if not region: region = boto3.Session().region_name - console_link = SERVERLESSREPO_CONSOLE_URL.format(region, application_id.replace('/', '~')) + console_link = SERVERLESSREPO_CONSOLE_URL.format(region, application_id.replace("/", "~")) msg = "Click the link below to view your application in AWS console:\n{}".format(console_link) click.secho(msg, fg="yellow") diff --git a/samcli/commands/validate/lib/exceptions.py b/samcli/commands/validate/lib/exceptions.py index 84f6210e3b..b6afd4b0f4 100644 --- a/samcli/commands/validate/lib/exceptions.py +++ b/samcli/commands/validate/lib/exceptions.py @@ -7,4 +7,5 @@ class InvalidSamDocumentException(Exception): """ Exception for Invalid Sam Documents """ + pass diff --git a/samcli/commands/validate/lib/sam_template_validator.py b/samcli/commands/validate/lib/sam_template_validator.py index 0c81c52e50..6117273083 100644 --- a/samcli/commands/validate/lib/sam_template_validator.py +++ b/samcli/commands/validate/lib/sam_template_validator.py @@ -15,7 +15,6 @@ class SamTemplateValidator(object): - def __init__(self, sam_template, managed_policy_loader): """ Construct a SamTemplateValidator @@ -53,19 +52,17 @@ def is_valid(self): """ managed_policy_map = self.managed_policy_loader.load() - sam_translator = Translator(managed_policy_map=managed_policy_map, - sam_parser=self.sam_parser, - plugins=[]) + sam_translator = Translator(managed_policy_map=managed_policy_map, sam_parser=self.sam_parser, plugins=[]) self._replace_local_codeuri() try: - template = sam_translator.translate(sam_template=self.sam_template, - parameter_values={}) + template = sam_translator.translate(sam_template=self.sam_template, parameter_values={}) LOG.debug("Translated template is:\n%s", yaml_dump(template)) except InvalidDocumentException as e: raise InvalidSamDocumentException( - functools.reduce(lambda message, error: message + ' ' + str(error), e.causes, str(e))) + functools.reduce(lambda message, error: message + " " + str(error), e.causes, str(e)) + ) def _replace_local_codeuri(self): """ diff --git a/samcli/commands/validate/validate.py b/samcli/commands/validate/validate.py index 7461ae5692..ffbb8d946b 100644 --- a/samcli/commands/validate/validate.py +++ 
b/samcli/commands/validate/validate.py @@ -18,8 +18,7 @@ from .lib.sam_template_validator import SamTemplateValidator -@click.command("validate", - short_help="Validate an AWS SAM template.") +@click.command("validate", short_help="Validate an AWS SAM template.") @template_option_without_build @aws_creds_options @cli_framework_options @@ -39,18 +38,18 @@ def do_cli(ctx, template): sam_template = _read_sam_file(template) - iam_client = boto3.client('iam') + iam_client = boto3.client("iam") validator = SamTemplateValidator(sam_template, ManagedPolicyLoader(iam_client)) try: validator.is_valid() except InvalidSamDocumentException as e: - click.secho("Template provided at '{}' was invalid SAM Template.".format(template), bg='red') + click.secho("Template provided at '{}' was invalid SAM Template.".format(template), bg="red") raise InvalidSamTemplateException(str(e)) except NoCredentialsError as e: raise UserException("AWS Credentials are required. Please configure your credentials.") - click.secho("{} is a valid SAM Template".format(template), fg='green') + click.secho("{} is a valid SAM Template".format(template), fg="green") def _read_sam_file(template): @@ -62,10 +61,10 @@ def _read_sam_file(template): :raises: SamTemplateNotFoundException when the template file does not exist """ if not os.path.exists(template): - click.secho("SAM Template Not Found", bg='red') + click.secho("SAM Template Not Found", bg="red") raise SamTemplateNotFoundException("Template at {} is not found".format(template)) - with click.open_file(template, 'r') as sam_template: + with click.open_file(template, "r") as sam_template: sam_template = yaml_parse(sam_template.read()) return sam_template diff --git a/samcli/lib/intrinsic_resolver/intrinsic_property_resolver.py b/samcli/lib/intrinsic_resolver/intrinsic_property_resolver.py index b5af71a564..f9a7720ca4 100644 --- a/samcli/lib/intrinsic_resolver/intrinsic_property_resolver.py +++ b/samcli/lib/intrinsic_resolver/intrinsic_property_resolver.py @@ -21,10 +21,7 @@ verify_intrinsic_type_bool, verify_all_list_intrinsic_type, ) -from samcli.lib.intrinsic_resolver.invalid_intrinsic_exception import ( - InvalidIntrinsicException, - InvalidSymbolException -) +from samcli.lib.intrinsic_resolver.invalid_intrinsic_exception import InvalidIntrinsicException, InvalidSymbolException LOG = logging.getLogger(__name__) @@ -167,7 +164,7 @@ def set_conditional_function_map(self, function_map): """ self.conditional_key_function_map = function_map - def intrinsic_property_resolver(self, intrinsic, parent_function="template"): + def intrinsic_property_resolver(self, intrinsic, ignore_errors, parent_function="template"): """ This resolves the intrinsic of the format { @@ -196,7 +193,7 @@ def intrinsic_property_resolver(self, intrinsic, parent_function="template"): if intrinsic is None: raise InvalidIntrinsicException("Missing Intrinsic property in {}".format(parent_function)) if isinstance(intrinsic, list): - return [self.intrinsic_property_resolver(item) for item in intrinsic] + return [self.intrinsic_property_resolver(item, ignore_errors) for item in intrinsic] if not isinstance(intrinsic, dict) or intrinsic == {}: return intrinsic @@ -207,22 +204,33 @@ def intrinsic_property_resolver(self, intrinsic, parent_function="template"): if key in self.intrinsic_key_function_map: intrinsic_value = intrinsic.get(key) - return self.intrinsic_key_function_map.get(key)(intrinsic_value) + return self.intrinsic_key_function_map.get(key)(intrinsic_value, ignore_errors) elif key in 
self.conditional_key_function_map: intrinsic_value = intrinsic.get(key) - return self.conditional_key_function_map.get(key)(intrinsic_value) + return self.conditional_key_function_map.get(key)(intrinsic_value, ignore_errors) # In this case, it is a dictionary that doesn't directly contain an intrinsic resolver, we must recursively # resolve each of it's sub properties. sanitized_dict = {} for key, val in intrinsic.items(): - sanitized_key = self.intrinsic_property_resolver(key, parent_function=parent_function) - sanitized_val = self.intrinsic_property_resolver(val, parent_function=parent_function) - verify_intrinsic_type_str(sanitized_key, - message="The keys of the dictionary {} in {} must all resolve to a string".format( - sanitized_key, parent_function - )) - sanitized_dict[sanitized_key] = sanitized_val + try: + sanitized_key = self.intrinsic_property_resolver(key, ignore_errors, parent_function=parent_function) + sanitized_val = self.intrinsic_property_resolver(val, ignore_errors, parent_function=parent_function) + verify_intrinsic_type_str( + sanitized_key, + message="The keys of the dictionary {} in {} must all resolve to a string".format( + sanitized_key, parent_function + ), + ) + sanitized_dict[sanitized_key] = sanitized_val + # On any exception, leave the key:val of the orginal intact and continue on. https://github.com/awslabs/aws-sam-cli/issues/1386 + except Exception: + if ignore_errors: + LOG.debug("Unable to resolve property %s: %s. Leaving as is.", key, val) + sanitized_dict[key] = val + else: + raise + return sanitized_dict def resolve_template(self, ignore_errors=False): @@ -262,14 +270,12 @@ def resolve_attribute(self, cloud_formation_property, ignore_errors=False): for key, val in cloud_formation_property.items(): processed_key = self._symbol_resolver.get_translation(key) or key try: - processed_resource = self.intrinsic_property_resolver(val, parent_function=processed_key) + processed_resource = self.intrinsic_property_resolver(val, ignore_errors, parent_function=processed_key) processed_dict[processed_key] = processed_resource except (InvalidIntrinsicException, InvalidSymbolException) as e: resource_type = val.get("Type", "") if ignore_errors: - LOG.error( - "Unable to process properties of %s.%s", key, resource_type - ) + LOG.error("Unable to process properties of %s.%s", key, resource_type) processed_dict[key] = val else: raise InvalidIntrinsicException( @@ -277,7 +283,7 @@ def resolve_attribute(self, cloud_formation_property, ignore_errors=False): ) return processed_dict - def handle_fn_join(self, intrinsic_value): + def handle_fn_join(self, intrinsic_value, ignore_errors): """ { "Fn::Join" : [ "delimiter", [ comma-delimited list of values ] ] } This function will join the items in the list together based on the string using the python join. 
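
Reviewer note: the `try`/`except` added above changes behavior, not just formatting: with `ignore_errors` set, a key/value pair that fails to resolve is kept as-is instead of failing the whole template (see the referenced issue). A reduced sketch of that "resolve what you can, keep the rest" pattern, using a hypothetical resolver that only understands `Ref`:

```python
import logging

LOG = logging.getLogger(__name__)

def resolve_dict(node, resolve_value, ignore_errors=True):
    """Resolve each value, but keep the original pair when resolution fails and ignore_errors is set."""
    resolved = {}
    for key, val in node.items():
        try:
            resolved[key] = resolve_value(val)
        except Exception:  # mirrors the broad catch in the hunk above
            if ignore_errors:
                LOG.debug("Unable to resolve property %s: %s. Leaving as is.", key, val)
                resolved[key] = val
            else:
                raise
    return resolved

def ref_resolver(value):
    """Hypothetical resolver that only understands {"Ref": <known name>}."""
    table = {"MyBucket": "my-bucket-name"}
    if isinstance(value, dict) and "Ref" in value:
        return table[value["Ref"]]  # raises KeyError for unknown refs
    return value

print(resolve_dict({"Bucket": {"Ref": "MyBucket"}, "Weird": {"Ref": "Unknown"}}, ref_resolver))
# {'Bucket': 'my-bucket-name', 'Weird': {'Ref': 'Unknown'}}
```
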
@@ -293,7 +299,9 @@ def handle_fn_join(self, intrinsic_value): ------- A string with the resolved attributes """ - arguments = self.intrinsic_property_resolver(intrinsic_value, parent_function=IntrinsicResolver.FN_JOIN) + arguments = self.intrinsic_property_resolver( + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_JOIN + ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_JOIN) @@ -301,30 +309,27 @@ def handle_fn_join(self, intrinsic_value): verify_intrinsic_type_str(delimiter, IntrinsicResolver.FN_JOIN, position_in_list="first") - value_list = self.intrinsic_property_resolver(arguments[1], parent_function=IntrinsicResolver.FN_JOIN) + value_list = self.intrinsic_property_resolver( + arguments[1], ignore_errors, parent_function=IntrinsicResolver.FN_JOIN + ) verify_intrinsic_type_list( value_list, IntrinsicResolver.FN_JOIN, - message="The list of values in {} after the " - "delimiter must be a list".format(IntrinsicResolver.FN_JOIN), + message="The list of values in {} after the " "delimiter must be a list".format(IntrinsicResolver.FN_JOIN), ) sanitized_value_list = [ - self.intrinsic_property_resolver( - item, parent_function=IntrinsicResolver.FN_JOIN - ) + self.intrinsic_property_resolver(item, ignore_errors, parent_function=IntrinsicResolver.FN_JOIN) for item in value_list ] verify_all_list_intrinsic_type( - sanitized_value_list, - verification_func=verify_intrinsic_type_str, - property_type=IntrinsicResolver.FN_JOIN, + sanitized_value_list, verification_func=verify_intrinsic_type_str, property_type=IntrinsicResolver.FN_JOIN ) return delimiter.join(sanitized_value_list) - def handle_fn_split(self, intrinsic_value): + def handle_fn_split(self, intrinsic_value, ignore_errors): """ { "Fn::Split" : [ "delimiter", "source string" ] } This function will then split the source_string based on the delimiter @@ -340,28 +345,24 @@ def handle_fn_split(self, intrinsic_value): A string with the resolved attributes """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_SPLIT + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_SPLIT ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_SPLIT) delimiter = arguments[0] - verify_intrinsic_type_str( - delimiter, IntrinsicResolver.FN_SPLIT, position_in_list="first" - ) + verify_intrinsic_type_str(delimiter, IntrinsicResolver.FN_SPLIT, position_in_list="first") source_string = self.intrinsic_property_resolver( - arguments[1], parent_function=IntrinsicResolver.FN_SPLIT + arguments[1], ignore_errors, parent_function=IntrinsicResolver.FN_SPLIT ) - verify_intrinsic_type_str( - source_string, IntrinsicResolver.FN_SPLIT, position_in_list="second" - ) + verify_intrinsic_type_str(source_string, IntrinsicResolver.FN_SPLIT, position_in_list="second") return source_string.split(delimiter) - def handle_fn_base64(self, intrinsic_value): + def handle_fn_base64(self, intrinsic_value, ignore_errors): """ { "Fn::Base64" : valueToEncode } This intrinsic function will then base64 encode the string using python's base64. 
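For reference, a minimal standalone sketch of the same encode-then-decode round trip (hypothetical input string):

import base64

# b64encode returns bytes, so decode() is needed to hand back a str, as the handler does.
value_to_encode = "hello world"
encoded = base64.b64encode(value_to_encode.encode()).decode()
assert encoded == "aGVsbG8gd29ybGQ="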
@@ -377,14 +378,14 @@ def handle_fn_base64(self, intrinsic_value): A string with the resolved attributes """ data = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_BASE64 + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_BASE64 ) verify_intrinsic_type_str(data, IntrinsicResolver.FN_BASE64) # Encoding then decoding is required to return a string of the data return base64.b64encode(data.encode()).decode() - def handle_fn_select(self, intrinsic_value): + def handle_fn_select(self, intrinsic_value, ignore_errors): """ { "Fn::Select" : [ index, listOfObjects ] } It will select the item in the listOfObjects using python's base64. @@ -399,38 +400,32 @@ def handle_fn_select(self, intrinsic_value): A string with the resolved attributes """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_SELECT + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_SELECT ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_SELECT) index = self.intrinsic_property_resolver( - arguments[0], parent_function=IntrinsicResolver.FN_SELECT + arguments[0], ignore_errors, parent_function=IntrinsicResolver.FN_SELECT ) verify_intrinsic_type_int(index, IntrinsicResolver.FN_SELECT) list_of_objects = self.intrinsic_property_resolver( - arguments[1], parent_function=IntrinsicResolver.FN_SELECT + arguments[1], ignore_errors, parent_function=IntrinsicResolver.FN_SELECT ) verify_intrinsic_type_list(list_of_objects, IntrinsicResolver.FN_SELECT) sanitized_objects = [ - self.intrinsic_property_resolver( - item, parent_function=IntrinsicResolver.FN_SELECT - ) + self.intrinsic_property_resolver(item, ignore_errors, parent_function=IntrinsicResolver.FN_SELECT) for item in list_of_objects ] - verify_in_bounds( - index=index, - objects=sanitized_objects, - property_type=IntrinsicResolver.FN_SELECT, - ) + verify_in_bounds(index=index, objects=sanitized_objects, property_type=IntrinsicResolver.FN_SELECT) return sanitized_objects[index] - def handle_find_in_map(self, intrinsic_value): + def handle_find_in_map(self, intrinsic_value, ignore_errors): """ { "Fn::FindInMap" : [ "MapName", "TopLevelKey", "SecondLevelKey"] } This function will then lookup the specified dictionary in the Mappings dictionary as mappings[map_name][top_level_key][second_level_key]. 
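A minimal standalone sketch of that two-level lookup, assuming a hypothetical Mappings section:

# Hypothetical Mappings section; Fn::FindInMap resolves to mappings[map_name][top_level_key][second_level_key].
mappings = {"RegionMap": {"us-east-1": {"HVM64": "ami-0ff8a91507f77f867"}}}
map_name, top_level_key, second_level_key = "RegionMap", "us-east-1", "HVM64"
found = mappings[map_name][top_level_key][second_level_key]
assert found == "ami-0ff8a91507f77f867"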
@@ -456,43 +451,33 @@ def handle_find_in_map(self, intrinsic_value): A string with the resolved attributes """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_FIND_IN_MAP + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_FIND_IN_MAP ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_FIND_IN_MAP) - verify_number_arguments( - arguments, num=3, property_type=IntrinsicResolver.FN_FIND_IN_MAP - ) + verify_number_arguments(arguments, num=3, property_type=IntrinsicResolver.FN_FIND_IN_MAP) map_name = self.intrinsic_property_resolver( - arguments[0], parent_function=IntrinsicResolver.FN_FIND_IN_MAP + arguments[0], ignore_errors, parent_function=IntrinsicResolver.FN_FIND_IN_MAP ) top_level_key = self.intrinsic_property_resolver( - arguments[1], parent_function=IntrinsicResolver.FN_FIND_IN_MAP + arguments[1], ignore_errors, parent_function=IntrinsicResolver.FN_FIND_IN_MAP ) second_level_key = self.intrinsic_property_resolver( - arguments[2], parent_function=IntrinsicResolver.FN_FIND_IN_MAP + arguments[2], ignore_errors, parent_function=IntrinsicResolver.FN_FIND_IN_MAP ) - verify_intrinsic_type_str( - map_name, IntrinsicResolver.FN_FIND_IN_MAP, position_in_list="first" - ) - verify_intrinsic_type_str( - top_level_key, IntrinsicResolver.FN_FIND_IN_MAP, position_in_list="second" - ) - verify_intrinsic_type_str( - second_level_key, IntrinsicResolver.FN_FIND_IN_MAP, position_in_list="third" - ) + verify_intrinsic_type_str(map_name, IntrinsicResolver.FN_FIND_IN_MAP, position_in_list="first") + verify_intrinsic_type_str(top_level_key, IntrinsicResolver.FN_FIND_IN_MAP, position_in_list="second") + verify_intrinsic_type_str(second_level_key, IntrinsicResolver.FN_FIND_IN_MAP, position_in_list="third") map_value = self._mapping.get(map_name) verify_intrinsic_type_dict( map_value, IntrinsicResolver.FN_FIND_IN_MAP, position_in_list="first", - message="The MapName is missing in the Mappings dictionary in Fn::FindInMap for {}".format( - map_name - ), + message="The MapName is missing in the Mappings dictionary in Fn::FindInMap for {}".format(map_name), ) top_level_value = map_value.get(top_level_key) @@ -500,7 +485,7 @@ def handle_find_in_map(self, intrinsic_value): top_level_value, IntrinsicResolver.FN_FIND_IN_MAP, message="The TopLevelKey is missing in the Mappings dictionary in Fn::FindInMap " - "for {}".format(top_level_key), + "for {}".format(top_level_key), ) second_level_value = top_level_value.get(second_level_key) @@ -508,12 +493,12 @@ def handle_find_in_map(self, intrinsic_value): second_level_value, IntrinsicResolver.FN_FIND_IN_MAP, message="The SecondLevelKey is missing in the Mappings dictionary in Fn::FindInMap " - "for {}".format(second_level_key), + "for {}".format(second_level_key), ) return second_level_value - def handle_fn_get_azs(self, intrinsic_value): + def handle_fn_get_azs(self, intrinsic_value, ignore_errors): """ { "Fn::GetAZs" : "" } { "Fn::GetAZs" : { "Ref" : "AWS::Region" } } @@ -532,7 +517,7 @@ def handle_fn_get_azs(self, intrinsic_value): A string with the resolved attributes """ intrinsic_value = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_GET_AZS + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_GET_AZS ) verify_intrinsic_type_str(intrinsic_value, IntrinsicResolver.FN_GET_AZS) @@ -541,14 +526,12 @@ def handle_fn_get_azs(self, intrinsic_value): if intrinsic_value not in self._symbol_resolver.REGIONS: raise InvalidIntrinsicException( 
- "Invalid region string passed in to {}".format( - IntrinsicResolver.FN_GET_AZS - ) + "Invalid region string passed in to {}".format(IntrinsicResolver.FN_GET_AZS) ) return self._symbol_resolver.REGIONS.get(intrinsic_value) - def handle_fn_transform(self, intrinsic_value): + def handle_fn_transform(self, intrinsic_value, ignore_errors): """ { "Fn::Transform" : { "Name" : macro name, "Parameters" : {key : value, ... } } } This intrinsic function will transform the data with the body provided @@ -565,27 +548,23 @@ def handle_fn_transform(self, intrinsic_value): """ macro_name = intrinsic_value.get("Name") name = self.intrinsic_property_resolver( - macro_name, parent_function=IntrinsicResolver.FN_TRANSFORM + macro_name, ignore_errors, parent_function=IntrinsicResolver.FN_TRANSFORM ) if name not in IntrinsicResolver.SUPPORTED_MACRO_TRANSFORMATIONS: raise InvalidIntrinsicException( - "The type {} is not currently supported in {}".format( - name, IntrinsicResolver.FN_TRANSFORM - ) + "The type {} is not currently supported in {}".format(name, IntrinsicResolver.FN_TRANSFORM) ) parameters = intrinsic_value.get("Parameters") verify_intrinsic_type_dict( - parameters, - IntrinsicResolver.FN_TRANSFORM, - message=" Fn::Transform requires parameters section", + parameters, IntrinsicResolver.FN_TRANSFORM, message=" Fn::Transform requires parameters section" ) - location = self.intrinsic_property_resolver(parameters.get("Location")) + location = self.intrinsic_property_resolver(parameters.get("Location"), ignore_errors) return location - def handle_fn_import_value(self, intrinsic_value): + def handle_fn_import_value(self, intrinsic_value, ignore_errors): """ { "Fn::ImportValue" : sharedValueToImport } This intrinsic function requires handling multiple stacks, which is not currently supported by SAM-CLI. @@ -595,11 +574,9 @@ def handle_fn_import_value(self, intrinsic_value): ------- An InvalidIntrinsicException """ - raise InvalidIntrinsicException( - "Fn::ImportValue is currently not supported by IntrinsicResolver" - ) + raise InvalidIntrinsicException("Fn::ImportValue is currently not supported by IntrinsicResolver") - def handle_fn_getatt(self, intrinsic_value): + def handle_fn_getatt(self, intrinsic_value, ignore_errors): """ { "Fn::GetAtt" : [ "logicalNameOfResource", "attributeName" ] } This intrinsic function gets the attribute for logical_resource specified. 
Each attribute might have a different @@ -617,16 +594,16 @@ def handle_fn_getatt(self, intrinsic_value): A string with the resolved attributes """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_GET_ATT + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_GET_ATT ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_GET_ATT) verify_number_arguments(arguments, IntrinsicResolver.FN_GET_ATT, num=2) logical_id = self.intrinsic_property_resolver( - arguments[0], parent_function=IntrinsicResolver.FN_GET_ATT + arguments[0], ignore_errors, parent_function=IntrinsicResolver.FN_GET_ATT ) resource_type = self.intrinsic_property_resolver( - arguments[1], parent_function=IntrinsicResolver.FN_GET_ATT + arguments[1], ignore_errors, parent_function=IntrinsicResolver.FN_GET_ATT ) verify_intrinsic_type_str(logical_id, IntrinsicResolver.FN_GET_ATT) @@ -634,7 +611,7 @@ def handle_fn_getatt(self, intrinsic_value): return self._symbol_resolver.resolve_symbols(logical_id, resource_type) - def handle_fn_ref(self, intrinsic_value): + def handle_fn_ref(self, intrinsic_value, ignore_errors): """ {"Ref": "Logical ID"} This intrinsic function gets the reference to a certain attribute. Some Ref's have different functionality with @@ -652,13 +629,13 @@ def handle_fn_ref(self, intrinsic_value): A string with the resolved attributes """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.REF + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.REF ) verify_intrinsic_type_str(arguments, IntrinsicResolver.REF) return self._symbol_resolver.resolve_symbols(arguments, IntrinsicResolver.REF) - def handle_fn_sub(self, intrinsic_value): + def handle_fn_sub(self, intrinsic_value, ignore_errors): """ { "Fn::Sub" : [ String, { Var1Name: Var1Value, Var2Name: Var2Value } ] } or { "Fn::Sub" : String } This intrinsic function will substitute the variables specified in the list into the string provided. 
The string @@ -680,53 +657,37 @@ def resolve_sub_attribute(intrinsic_item, symbol_resolver): (logical_id, attribute_type) = intrinsic_item.rsplit(".", 1) else: (logical_id, attribute_type) = intrinsic_item, IntrinsicResolver.REF - return symbol_resolver.resolve_symbols( - logical_id, attribute_type, ignore_errors=True - ) + return symbol_resolver.resolve_symbols(logical_id, attribute_type, ignore_errors=True) if isinstance(intrinsic_value, string_types): intrinsic_value = [intrinsic_value, {}] verify_intrinsic_type_list( - intrinsic_value, - IntrinsicResolver.FN_SUB, - message="The arguments to a Fn::Sub must be a list or a string", + intrinsic_value, IntrinsicResolver.FN_SUB, message="The arguments to a Fn::Sub must be a list or a string" ) verify_number_arguments(intrinsic_value, IntrinsicResolver.FN_SUB, num=2) sub_str = self.intrinsic_property_resolver( - intrinsic_value[0], parent_function=IntrinsicResolver.FN_SUB - ) - verify_intrinsic_type_str( - sub_str, IntrinsicResolver.FN_SUB, position_in_list="first" + intrinsic_value[0], ignore_errors, parent_function=IntrinsicResolver.FN_SUB ) + verify_intrinsic_type_str(sub_str, IntrinsicResolver.FN_SUB, position_in_list="first") variables = intrinsic_value[1] - verify_intrinsic_type_dict( - variables, IntrinsicResolver.FN_SUB, position_in_list="second" - ) + verify_intrinsic_type_dict(variables, IntrinsicResolver.FN_SUB, position_in_list="second") sanitized_variables = self.intrinsic_property_resolver( - variables, parent_function=IntrinsicResolver.FN_SUB + variables, ignore_errors, parent_function=IntrinsicResolver.FN_SUB ) - subable_props = re.findall( - string=sub_str, pattern=IntrinsicResolver._REGEX_SUB_FUNCTION - ) + subable_props = re.findall(string=sub_str, pattern=IntrinsicResolver._REGEX_SUB_FUNCTION) for sub_item in subable_props: - sanitized_item = ( - sanitized_variables[sub_item] - if sub_item in sanitized_variables - else sub_item - ) + sanitized_item = sanitized_variables[sub_item] if sub_item in sanitized_variables else sub_item result = resolve_sub_attribute(sanitized_item, self._symbol_resolver) - sub_str = re.sub( - pattern=r"\$\{" + sub_item + r"\}", string=sub_str, repl=str(result) - ) + sub_str = re.sub(pattern=r"\$\{" + sub_item + r"\}", string=sub_str, repl=str(result)) return sub_str - def handle_fn_if(self, intrinsic_value): + def handle_fn_if(self, intrinsic_value, ignore_errors): """ {"Fn::If": [condition_name, value_if_true, value_if_false]} This intrinsic function will evaluate the condition from the Conditions dictionary and then return value_if_true @@ -750,46 +711,42 @@ def handle_fn_if(self, intrinsic_value): This will return value_if_true and value_if_false depending on how the condition is evaluated """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_IF + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_IF ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_IF) verify_number_arguments(arguments, IntrinsicResolver.FN_IF, num=3) condition_name = self.intrinsic_property_resolver( - arguments[0], parent_function=IntrinsicResolver.FN_IF + arguments[0], ignore_errors, parent_function=IntrinsicResolver.FN_IF ) verify_intrinsic_type_str(condition_name, IntrinsicResolver.FN_IF) value_if_true = self.intrinsic_property_resolver( - arguments[1], parent_function=IntrinsicResolver.FN_IF + arguments[1], ignore_errors, parent_function=IntrinsicResolver.FN_IF ) value_if_false = self.intrinsic_property_resolver( - arguments[2], 
parent_function=IntrinsicResolver.FN_IF + arguments[2], ignore_errors, parent_function=IntrinsicResolver.FN_IF ) condition = self._conditions.get(condition_name) verify_intrinsic_type_dict( condition, IntrinsicResolver.FN_IF, - message="The condition is missing in the Conditions dictionary for {}".format( - IntrinsicResolver.FN_IF - ), + message="The condition is missing in the Conditions dictionary for {}".format(IntrinsicResolver.FN_IF), ) condition_evaluated = self.intrinsic_property_resolver( - condition, parent_function=IntrinsicResolver.FN_IF + condition, ignore_errors, parent_function=IntrinsicResolver.FN_IF ) verify_intrinsic_type_bool( condition_evaluated, IntrinsicResolver.FN_IF, - message="The result of {} must evaluate to bool".format( - IntrinsicResolver.FN_IF - ), + message="The result of {} must evaluate to bool".format(IntrinsicResolver.FN_IF), ) return value_if_true if condition_evaluated else value_if_false - def handle_fn_equals(self, intrinsic_value): + def handle_fn_equals(self, intrinsic_value, ignore_errors): """ {"Fn::Equals" : ["value_1", "value_2"]} This intrinsic function will verify that both items in the intrinsic function are equal after resolving them. @@ -805,20 +762,20 @@ def handle_fn_equals(self, intrinsic_value): A boolean depending on if both arguments is equal """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_EQUALS + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_EQUALS ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_EQUALS) verify_number_arguments(arguments, IntrinsicResolver.FN_EQUALS, num=2) value_1 = self.intrinsic_property_resolver( - arguments[0], parent_function=IntrinsicResolver.FN_EQUALS + arguments[0], ignore_errors, parent_function=IntrinsicResolver.FN_EQUALS ) value_2 = self.intrinsic_property_resolver( - arguments[1], parent_function=IntrinsicResolver.FN_EQUALS + arguments[1], ignore_errors, parent_function=IntrinsicResolver.FN_EQUALS ) return value_1 == value_2 - def handle_fn_not(self, intrinsic_value): + def handle_fn_not(self, intrinsic_value, ignore_errors): """ {"Fn::Not": [{condition}]} This intrinsic function will negate the evaluation of the condition specified. 
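A minimal standalone sketch of the negation, assuming a hypothetical Conditions section whose entry has already been evaluated to a boolean:

# Hypothetical Conditions section; {"Fn::Not": [{"Condition": "IsProd"}]} negates the named condition.
conditions = {"IsProd": True}
argument = {"Condition": "IsProd"}
condition_value = conditions[argument["Condition"]]
assert (not condition_value) is False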
@@ -834,32 +791,28 @@ def handle_fn_not(self, intrinsic_value): A boolean that is the opposite of the condition evaluated """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_NOT + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_NOT ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_NOT) verify_number_arguments(arguments, IntrinsicResolver.FN_NOT, num=1) argument_sanitised = self.intrinsic_property_resolver( - arguments[0], parent_function=IntrinsicResolver.FN_NOT + arguments[0], ignore_errors, parent_function=IntrinsicResolver.FN_NOT ) if isinstance(argument_sanitised, dict) and "Condition" in arguments[0]: condition_name = argument_sanitised.get("Condition") verify_intrinsic_type_str(condition_name, IntrinsicResolver.FN_NOT) condition = self._conditions.get(condition_name) - verify_non_null( - condition, IntrinsicResolver.FN_NOT, position_in_list="first" - ) + verify_non_null(condition, IntrinsicResolver.FN_NOT, position_in_list="first") argument_sanitised = self.intrinsic_property_resolver( - condition, parent_function=IntrinsicResolver.FN_NOT + condition, ignore_errors, parent_function=IntrinsicResolver.FN_NOT ) verify_intrinsic_type_bool( argument_sanitised, IntrinsicResolver.FN_NOT, - message="The result of {} must evaluate to bool".format( - IntrinsicResolver.FN_NOT - ), + message="The result of {} must evaluate to bool".format(IntrinsicResolver.FN_NOT), ) return not argument_sanitised @@ -879,7 +832,7 @@ def get_prefix_position_in_list(i): prefix = "third " return prefix - def handle_fn_and(self, intrinsic_value): + def handle_fn_and(self, intrinsic_value, ignore_errors): """ {"Fn::And": [{condition}, {...}]} This intrinsic checks that every item in the list evaluates to a boolean. The items in the list can either @@ -904,7 +857,7 @@ def handle_fn_and(self, intrinsic_value): A boolean depending on if all of the properties in Fn::And evaluate to True """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_AND + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_AND ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_AND) @@ -915,23 +868,19 @@ def handle_fn_and(self, intrinsic_value): condition = self._conditions.get(condition_name) verify_non_null( - condition, - IntrinsicResolver.FN_AND, - position_in_list=self.get_prefix_position_in_list(i), + condition, IntrinsicResolver.FN_AND, position_in_list=self.get_prefix_position_in_list(i) ) condition_evaluated = self.intrinsic_property_resolver( - condition, parent_function=IntrinsicResolver.FN_AND - ) - verify_intrinsic_type_bool( - condition_evaluated, IntrinsicResolver.FN_AND + condition, ignore_errors, parent_function=IntrinsicResolver.FN_AND ) + verify_intrinsic_type_bool(condition_evaluated, IntrinsicResolver.FN_AND) if not condition_evaluated: return False else: condition = self.intrinsic_property_resolver( - argument, parent_function=IntrinsicResolver.FN_AND + argument, ignore_errors, parent_function=IntrinsicResolver.FN_AND ) verify_intrinsic_type_bool(condition, IntrinsicResolver.FN_AND) @@ -940,7 +889,7 @@ def handle_fn_and(self, intrinsic_value): return True - def handle_fn_or(self, intrinsic_value): + def handle_fn_or(self, intrinsic_value, ignore_errors): """ {"Fn::Or": [{condition}, {...}]} This intrinsic checks that a single item in the list evaluates to a boolean. 
The items in the list can either @@ -965,7 +914,7 @@ def handle_fn_or(self, intrinsic_value): A boolean depending on if any of the properties in Fn::And evaluate to True """ arguments = self.intrinsic_property_resolver( - intrinsic_value, parent_function=IntrinsicResolver.FN_OR + intrinsic_value, ignore_errors, parent_function=IntrinsicResolver.FN_OR ) verify_intrinsic_type_list(arguments, IntrinsicResolver.FN_OR) for i, argument in enumerate(arguments): @@ -975,20 +924,18 @@ def handle_fn_or(self, intrinsic_value): condition = self._conditions.get(condition_name) verify_non_null( - condition, - IntrinsicResolver.FN_OR, - position_in_list=self.get_prefix_position_in_list(i), + condition, IntrinsicResolver.FN_OR, position_in_list=self.get_prefix_position_in_list(i) ) condition_evaluated = self.intrinsic_property_resolver( - condition, parent_function=IntrinsicResolver.FN_OR + condition, ignore_errors, parent_function=IntrinsicResolver.FN_OR ) verify_intrinsic_type_bool(condition_evaluated, IntrinsicResolver.FN_OR) if condition_evaluated: return True else: condition = self.intrinsic_property_resolver( - argument, parent_function=IntrinsicResolver.FN_OR + argument, ignore_errors, parent_function=IntrinsicResolver.FN_OR ) verify_intrinsic_type_bool(condition, IntrinsicResolver.FN_OR) if condition: diff --git a/samcli/lib/intrinsic_resolver/intrinsics_symbol_table.py b/samcli/lib/intrinsic_resolver/intrinsics_symbol_table.py index 922ed92577..036deef72c 100644 --- a/samcli/lib/intrinsic_resolver/intrinsics_symbol_table.py +++ b/samcli/lib/intrinsic_resolver/intrinsics_symbol_table.py @@ -7,9 +7,7 @@ from six import string_types from samcli.lib.intrinsic_resolver.intrinsic_property_resolver import IntrinsicResolver -from samcli.lib.intrinsic_resolver.invalid_intrinsic_exception import ( - InvalidSymbolException, -) +from samcli.lib.intrinsic_resolver.invalid_intrinsic_exception import InvalidSymbolException LOG = logging.getLogger(__name__) @@ -43,19 +41,12 @@ class IntrinsicsSymbolTable(object): "AWS::Region": "us-east-1", "AWS::StackName": "local", "AWS::StackId": "arn:aws:cloudformation:us-east-1:123456789012:stack/" - "local/51af3dc0-da77-11e4-872e-1234567db123", + "local/51af3dc0-da77-11e4-872e-1234567db123", "AWS::URLSuffix": "localhost", } REGIONS = { - "us-east-1": [ - "us-east-1a", - "us-east-1b", - "us-east-1c", - "us-east-1d", - "us-east-1e", - "us-east-1f", - ], + "us-east-1": ["us-east-1a", "us-east-1b", "us-east-1c", "us-east-1d", "us-east-1e", "us-east-1f"], "us-west-1": ["us-west-1b", "us-west-1c"], "eu-north-1": ["eu-north-1a", "eu-north-1b", "eu-north-1c"], "ap-northeast-3": ["ap-northeast-3a"], @@ -91,11 +82,7 @@ class IntrinsicsSymbolTable(object): CFN_RESOURCE_TYPE = "Type" def __init__( - self, - template=None, - logical_id_translator=None, - default_type_resolver=None, - common_attribute_resolver=None, + self, template=None, logical_id_translator=None, default_type_resolver=None, common_attribute_resolver=None ): """ Initializes the Intrinsic Symbol Table so that runtime attributes can be resolved. 
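As an illustration of how such runtime attributes fall back to defaults, a standalone sketch of the lookup chain used for AWS::Region (mirroring handle_pseudo_region further down; the default value is the one shown in DEFAULT_PSEUDO_PARAM_VALUES above, and the empty translator is hypothetical):

import os

# Fallback chain: user-provided logical_id_translator entry -> AWS_REGION env var -> built-in default.
logical_id_translator = {}  # hypothetical: no user override supplied
default_pseudo_params = {"AWS::Region": "us-east-1"}
region = (
    logical_id_translator.get("AWS::Region")
    or os.getenv("AWS_REGION")
    or default_pseudo_params.get("AWS::Region")
)
assert region == (os.getenv("AWS_REGION") or "us-east-1")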
@@ -150,12 +137,8 @@ def __init__( self._parameters = self._template.get("Parameters", {}) self._resources = self._template.get("Resources", {}) - self.default_type_resolver = ( - default_type_resolver or self.get_default_type_resolver() - ) - self.common_attribute_resolver = ( - common_attribute_resolver or self.get_default_attribute_resolver() - ) + self.default_type_resolver = default_type_resolver or self.get_default_type_resolver() + self.common_attribute_resolver = common_attribute_resolver or self.get_default_attribute_resolver() self.default_pseudo_resolver = self.get_default_pseudo_resolver() def get_default_pseudo_resolver(self): @@ -183,7 +166,7 @@ def get_default_type_resolver(): }, "AWS::Serverless::LayerVersion": { IntrinsicResolver.REF: lambda logical_id: {IntrinsicResolver.REF: logical_id} - } + }, } def resolve_symbols(self, logical_id, resource_attribute, ignore_errors=False): @@ -228,15 +211,9 @@ def resolve_symbols(self, logical_id, resource_attribute, ignore_errors=False): if translated: return translated # Handle Default Property Type Resolution - resource_type = self._resources.get(logical_id, {}).get( - IntrinsicsSymbolTable.CFN_RESOURCE_TYPE - ) + resource_type = self._resources.get(logical_id, {}).get(IntrinsicsSymbolTable.CFN_RESOURCE_TYPE) - resolver = ( - self.default_type_resolver.get(resource_type, {}).get(resource_attribute) - if resource_type - else {} - ) + resolver = self.default_type_resolver.get(resource_type, {}).get(resource_attribute) if resource_type else {} if resolver: if callable(resolver): return resolver(logical_id) @@ -253,9 +230,7 @@ def resolve_symbols(self, logical_id, resource_attribute, ignore_errors=False): return "${}".format(logical_id + "." + resource_attribute) raise InvalidSymbolException( "The {} is not supported in the logical_id_translator, default_type_resolver, or the attribute_resolver." - " It is also not a supported pseudo function".format( - logical_id + "." + resource_attribute - ) + " It is also not a supported pseudo function".format(logical_id + "." 
+ resource_attribute) ) def arn_resolver(self, logical_id, service_name="lambda"): @@ -276,7 +251,7 @@ def arn_resolver(self, logical_id, service_name="lambda"): """ aws_region = self.handle_pseudo_region() account_id = ( - self.logical_id_translator.get(IntrinsicsSymbolTable.AWS_ACCOUNT_ID) or self.handle_pseudo_account_id() + self.logical_id_translator.get(IntrinsicsSymbolTable.AWS_ACCOUNT_ID) or self.handle_pseudo_account_id() ) partition_name = self.handle_pseudo_partition() resource_name = logical_id @@ -310,13 +285,8 @@ def get_translation(self, logical_id, resource_attributes=IntrinsicResolver.REF) """ logical_id_item = self.logical_id_translator.get(logical_id, {}) - if any( - isinstance(logical_id_item, object_type) - for object_type in [string_types, list, bool, int] - ): - if ( - resource_attributes != IntrinsicResolver.REF and resource_attributes != "" - ): + if any(isinstance(logical_id_item, object_type) for object_type in [string_types, list, bool, int]): + if resource_attributes != IntrinsicResolver.REF and resource_attributes != "": return None return logical_id_item @@ -346,9 +316,7 @@ def handle_pseudo_account_id(): ------- A pseudo account id """ - return IntrinsicsSymbolTable.DEFAULT_PSEUDO_PARAM_VALUES.get( - IntrinsicsSymbolTable.AWS_ACCOUNT_ID - ) + return IntrinsicsSymbolTable.DEFAULT_PSEUDO_PARAM_VALUES.get(IntrinsicsSymbolTable.AWS_ACCOUNT_ID) def handle_pseudo_region(self): """ @@ -361,10 +329,9 @@ def handle_pseudo_region(self): The region from the environment or a default one """ return ( - self.logical_id_translator.get(IntrinsicsSymbolTable.AWS_REGION) or os.getenv("AWS_REGION") or - IntrinsicsSymbolTable.DEFAULT_PSEUDO_PARAM_VALUES.get( - IntrinsicsSymbolTable.AWS_REGION - ) + self.logical_id_translator.get(IntrinsicsSymbolTable.AWS_REGION) + or os.getenv("AWS_REGION") + or IntrinsicsSymbolTable.DEFAULT_PSEUDO_PARAM_VALUES.get(IntrinsicsSymbolTable.AWS_REGION) ) def handle_pseudo_url_prefix(self): @@ -409,9 +376,7 @@ def handle_pseudo_stack_id(): ------- A randomized string """ - return IntrinsicsSymbolTable.DEFAULT_PSEUDO_PARAM_VALUES.get( - IntrinsicsSymbolTable.AWS_STACK_ID - ) + return IntrinsicsSymbolTable.DEFAULT_PSEUDO_PARAM_VALUES.get(IntrinsicsSymbolTable.AWS_STACK_ID) @staticmethod def handle_pseudo_stack_name(): @@ -424,9 +389,7 @@ def handle_pseudo_stack_name(): ------- A randomized string """ - return IntrinsicsSymbolTable.DEFAULT_PSEUDO_PARAM_VALUES.get( - IntrinsicsSymbolTable.AWS_STACK_NAME - ) + return IntrinsicsSymbolTable.DEFAULT_PSEUDO_PARAM_VALUES.get(IntrinsicsSymbolTable.AWS_STACK_NAME) @staticmethod def handle_pseudo_no_value(): diff --git a/samcli/lib/intrinsic_resolver/invalid_intrinsic_validation.py b/samcli/lib/intrinsic_resolver/invalid_intrinsic_validation.py index 9c82ced267..ebdc91d5e0 100644 --- a/samcli/lib/intrinsic_resolver/invalid_intrinsic_validation.py +++ b/samcli/lib/intrinsic_resolver/invalid_intrinsic_validation.py @@ -6,84 +6,52 @@ from samcli.lib.intrinsic_resolver.invalid_intrinsic_exception import InvalidIntrinsicException -def verify_intrinsic_type_bool( - argument, property_type="", message="", position_in_list="" -): - verify_intrinsic_type( - argument, property_type, message, position_in_list, primitive_type=bool - ) +def verify_intrinsic_type_bool(argument, property_type="", message="", position_in_list=""): + verify_intrinsic_type(argument, property_type, message, position_in_list, primitive_type=bool) -def verify_intrinsic_type_list( - argument, property_type="", message="", position_in_list="" -): - 
verify_intrinsic_type( - argument, property_type, message, position_in_list, primitive_type=list - ) +def verify_intrinsic_type_list(argument, property_type="", message="", position_in_list=""): + verify_intrinsic_type(argument, property_type, message, position_in_list, primitive_type=list) -def verify_intrinsic_type_dict( - argument, property_type="", message="", position_in_list="" -): - verify_intrinsic_type( - argument, property_type, message, position_in_list, primitive_type=dict - ) +def verify_intrinsic_type_dict(argument, property_type="", message="", position_in_list=""): + verify_intrinsic_type(argument, property_type, message, position_in_list, primitive_type=dict) -def verify_intrinsic_type_int( - argument, property_type="", message="", position_in_list="" -): +def verify_intrinsic_type_int(argument, property_type="", message="", position_in_list=""): # Special case since bool is a subclass of int in python if isinstance(argument, bool): raise InvalidIntrinsicException( - message or "The {} argument to {} must resolve to a {} type".format( - position_in_list, property_type, int - ) + message or "The {} argument to {} must resolve to a {} type".format(position_in_list, property_type, int) ) - verify_intrinsic_type( - argument, property_type, message, position_in_list, primitive_type=int - ) + verify_intrinsic_type(argument, property_type, message, position_in_list, primitive_type=int) -def verify_intrinsic_type_str( - argument, property_type="", message="", position_in_list="" -): - verify_intrinsic_type( - argument, property_type, message, position_in_list, primitive_type=string_types - ) +def verify_intrinsic_type_str(argument, property_type="", message="", position_in_list=""): + verify_intrinsic_type(argument, property_type, message, position_in_list, primitive_type=string_types) def verify_non_null(argument, property_type="", message="", position_in_list=""): if argument is None: raise InvalidIntrinsicException( - message or "The {} argument to {} is missing from the intrinsic function".format( - position_in_list, property_type - ) + message + or "The {} argument to {} is missing from the intrinsic function".format(position_in_list, property_type) ) -def verify_intrinsic_type( - argument, - property_type="", - message="", - position_in_list="", - primitive_type=string_types, -): +def verify_intrinsic_type(argument, property_type="", message="", position_in_list="", primitive_type=string_types): verify_non_null(argument, property_type, message, position_in_list) if not isinstance(argument, primitive_type): raise InvalidIntrinsicException( - message or "The {} argument to {} must resolve to a {} type".format( - position_in_list, property_type, primitive_type - ) + message + or "The {} argument to {} must resolve to a {} type".format(position_in_list, property_type, primitive_type) ) def verify_in_bounds(objects, index, property_type=""): if index < 0 or index >= len(objects): raise InvalidIntrinsicException( - "The index of {} resolved properties must be within the range".format( - property_type - ) + "The index of {} resolved properties must be within the range".format(property_type) ) @@ -96,8 +64,6 @@ def verify_number_arguments(arguments, property_type="", num=0): ) -def verify_all_list_intrinsic_type( - arguments, verification_func, property_type="", message="", position_in_list="" -): +def verify_all_list_intrinsic_type(arguments, verification_func, property_type="", message="", position_in_list=""): for argument in arguments: verification_func(argument, property_type, 
message, position_in_list) diff --git a/samcli/lib/logs/event.py b/samcli/lib/logs/event.py index a9886e44a7..ea95757252 100644 --- a/samcli/lib/logs/event.py +++ b/samcli/lib/logs/event.py @@ -37,10 +37,10 @@ def __init__(self, log_group_name, event_dict): # this class is a data wrapper to the `events_dict`. It doesn't try to be smart. return - self.log_stream_name = event_dict.get('logStreamName') - self.message = event_dict.get('message', '') + self.log_stream_name = event_dict.get("logStreamName") + self.message = event_dict.get("message", "") - self.timestamp_millis = event_dict.get('timestamp') + self.timestamp_millis = event_dict.get("timestamp") # Convert the timestamp from epoch to readable ISO timestamp, easier for formatting. if self.timestamp_millis: @@ -51,16 +51,20 @@ def __eq__(self, other): if not isinstance(other, LogEvent): return False - return self.log_group_name == other.log_group_name \ - and self.log_stream_name == other.log_stream_name \ - and self.timestamp == other.timestamp \ + return ( + self.log_group_name == other.log_group_name + and self.log_stream_name == other.log_stream_name + and self.timestamp == other.timestamp and self.message == other.message + ) def __repr__(self): # pragma: no cover # Used to print pretty diff when testing - return str({ - "log_group_name": self.log_group_name, - "log_stream_name": self.log_stream_name, - "message": self.message, - "timestamp": self.timestamp - }) + return str( + { + "log_group_name": self.log_group_name, + "log_stream_name": self.log_stream_name, + "message": self.message, + "timestamp": self.timestamp, + } + ) diff --git a/samcli/lib/logs/fetcher.py b/samcli/lib/logs/fetcher.py index d4c7597dac..239fc01a65 100644 --- a/samcli/lib/logs/fetcher.py +++ b/samcli/lib/logs/fetcher.py @@ -56,10 +56,7 @@ def fetch(self, log_group_name, start=None, end=None, filter_pattern=None): Object containing the information from each log event returned by CloudWatch Logs """ - kwargs = { - "logGroupName": log_group_name, - "interleaved": True - } + kwargs = {"logGroupName": log_group_name, "interleaved": True} if start: kwargs["startTime"] = to_timestamp(start) @@ -75,7 +72,7 @@ def fetch(self, log_group_name, start=None, end=None, filter_pattern=None): result = self.cw_client.filter_log_events(**kwargs) # Several events will be returned. Yield one at a time - for event in result.get('events', []): + for event in result.get("events", []): yield LogEvent(log_group_name, event) # Keep iterating until there are no more logs left to query. @@ -133,9 +130,7 @@ def tail(self, log_group_name, start=None, filter_pattern=None, max_retries=1000 has_data = False counter -= 1 - events_itr = self.fetch(log_group_name, - start=to_datetime(latest_event_time), - filter_pattern=filter_pattern) + events_itr = self.fetch(log_group_name, start=to_datetime(latest_event_time), filter_pattern=filter_pattern) # Find the timestamp of the most recent log event. 
for event in events_itr: diff --git a/samcli/lib/logs/formatter.py b/samcli/lib/logs/formatter.py index 0c8cd2f9d3..09f15dc9a0 100644 --- a/samcli/lib/logs/formatter.py +++ b/samcli/lib/logs/formatter.py @@ -118,7 +118,7 @@ def _pretty_print_event(event, colored): event.timestamp = colored.yellow(event.timestamp) event.log_stream_name = colored.cyan(event.log_stream_name) - return ' '.join([event.log_stream_name, event.timestamp, event.message]) + return " ".join([event.log_stream_name, event.timestamp, event.message]) class LambdaLogMsgFormatters(object): @@ -139,8 +139,7 @@ def colorize_errors(event, colored): nodejs_crash_msg = "Process exited before completing request" timeout_msg = "Task timed out" - if nodejs_crash_msg in event.message \ - or timeout_msg in event.message: + if nodejs_crash_msg in event.message or timeout_msg in event.message: event.message = colored.red(event.message) return event diff --git a/samcli/lib/samlib/cloudformation_command.py b/samcli/lib/samlib/cloudformation_command.py index 83cd155004..e9bdbb4304 100644 --- a/samcli/lib/samlib/cloudformation_command.py +++ b/samcli/lib/samlib/cloudformation_command.py @@ -29,7 +29,7 @@ def execute_command(command, args, template_file): # Since --template-file was parsed separately, add it here manually args.extend(["--template-file", template_file]) - subprocess.check_call([aws_cmd, 'cloudformation', command] + args, env=env) + subprocess.check_call([aws_cmd, "cloudformation", command] + args, env=env) LOG.debug("%s command successful", command) except subprocess.CalledProcessError as e: # Underlying aws command will print the exception to the user @@ -39,12 +39,8 @@ def execute_command(command, args, template_file): def find_executable(execname): - if platform.system().lower() == 'windows': - options = [ - "{}.cmd".format(execname), - "{}.exe".format(execname), - execname - ] + if platform.system().lower() == "windows": + options = ["{}.cmd".format(execname), "{}.exe".format(execname), execname] else: options = [execname] diff --git a/samcli/lib/samlib/resource_metadata_normalizer.py b/samcli/lib/samlib/resource_metadata_normalizer.py index 945246bd81..541652cddf 100644 --- a/samcli/lib/samlib/resource_metadata_normalizer.py +++ b/samcli/lib/samlib/resource_metadata_normalizer.py @@ -14,7 +14,6 @@ class ResourceMetadataNormalizer(object): - @staticmethod def normalize(template_dict): """ @@ -59,5 +58,8 @@ def _replace_property(property_key, property_value, resource, logical_id): if property_key and property_value: resource.get(PROPERTIES_KEY, {})[property_key] = property_value elif property_key or property_value: - LOG.info("WARNING: Ignoring Metadata for Resource %s. Metadata contains only aws:asset:path or " - "aws:assert:property but not both", logical_id) + LOG.info( + "WARNING: Ignoring Metadata for Resource %s. 
Metadata contains only aws:asset:path or " + "aws:assert:property but not both", + logical_id, + ) diff --git a/samcli/lib/samlib/wrapper.py b/samcli/lib/samlib/wrapper.py index ccf68981c6..3a31e2c16e 100644 --- a/samcli/lib/samlib/wrapper.py +++ b/samcli/lib/samlib/wrapper.py @@ -15,8 +15,12 @@ import boto3 # SAM Translator Library Internal module imports # -from samtranslator.model.exceptions import \ - InvalidDocumentException, InvalidTemplateException, InvalidResourceException, InvalidEventException +from samtranslator.model.exceptions import ( + InvalidDocumentException, + InvalidTemplateException, + InvalidResourceException, + InvalidEventException, +) from samtranslator.validator.validator import SamTemplateValidator from samtranslator.model import ResourceTypeResolver, sam_resources from samtranslator.plugins import LifeCycleEvents @@ -67,7 +71,8 @@ def run_plugins(self, convert_local_uris=True): parser.parse(template_copy, all_plugins) # parse() will run all configured plugins except InvalidDocumentException as e: raise InvalidSamDocumentException( - functools.reduce(lambda message, error: message + ' ' + str(error), e.causes, str(e))) + functools.reduce(lambda message, error: message + " " + str(error), e.causes, str(e)) + ) return template_copy @@ -79,13 +84,14 @@ def __translate(self, parameter_values): template_copy = self.template sam_parser = Parser() - sam_translator = Translator(managed_policy_map=self.__managed_policy_map(), - sam_parser=sam_parser, - # Default plugins are already initialized within the Translator - plugins=self.extra_plugins) + sam_translator = Translator( + managed_policy_map=self.__managed_policy_map(), + sam_parser=sam_parser, + # Default plugins are already initialized within the Translator + plugins=self.extra_plugins, + ) - return sam_translator.translate(sam_template=template_copy, - parameter_values=parameter_values) + return sam_translator.translate(sam_template=template_copy, parameter_values=parameter_values) @property def template(self): @@ -96,14 +102,14 @@ def __managed_policy_map(self): This method is unused and a Work In Progress """ try: - iam_client = boto3.client('iam') + iam_client = boto3.client("iam") return ManagedPolicyLoader(iam_client).load() except Exception as ex: if self._offline_fallback: # If offline flag is set, then fall back to the list of default managed policies # This should be sufficient for most cases - with open(self._DEFAULT_MANAGED_POLICIES_FILE, 'r') as fp: + with open(self._DEFAULT_MANAGED_POLICIES_FILE, "r") as fp: return json.load(fp) # Offline is not enabled. 
So just raise the exception @@ -124,9 +130,9 @@ def parse(self, sam_template, sam_plugins): for logical_id, resource in sam_template["Resources"].items(): try: if macro_resolver.can_resolve(resource): - macro_resolver \ - .resolve_resource_type(resource) \ - .from_dict(logical_id, resource, sam_plugins=sam_plugins) + macro_resolver.resolve_resource_type(resource).from_dict( + logical_id, resource, sam_plugins=sam_plugins + ) except (InvalidResourceException, InvalidEventException) as e: document_errors.append(e) @@ -139,9 +145,11 @@ def _validate(self, sam_template): :param dict sam_template: SAM template """ - if "Resources" not in sam_template or not isinstance(sam_template["Resources"], dict) \ - or not sam_template["Resources"]: - raise InvalidDocumentException( - [InvalidTemplateException("'Resources' section is required")]) + if ( + "Resources" not in sam_template + or not isinstance(sam_template["Resources"], dict) + or not sam_template["Resources"] + ): + raise InvalidDocumentException([InvalidTemplateException("'Resources' section is required")]) SamTemplateValidator.validate(sam_template) diff --git a/samcli/lib/telemetry/metrics.py b/samcli/lib/telemetry/metrics.py index ba2cc2271f..603642a6d9 100644 --- a/samcli/lib/telemetry/metrics.py +++ b/samcli/lib/telemetry/metrics.py @@ -22,10 +22,7 @@ def send_installed_metric(): LOG.debug("Sending Installed Metric") telemetry = Telemetry() - telemetry.emit("installed", { - "osPlatform": platform.system(), - "telemetryEnabled": _telemetry_enabled(), - }) + telemetry.emit("installed", {"osPlatform": platform.system(), "telemetryEnabled": _telemetry_enabled()}) def track_command(func): @@ -77,18 +74,20 @@ def wrapped(*args, **kwargs): exit_reason = type(ex).__name__ ctx = Context.get_current_context() - telemetry.emit("commandRun", { - # Metric about command's general environment - "awsProfileProvided": bool(ctx.profile), - "debugFlagProvided": bool(ctx.debug), - "region": ctx.region or "", - "commandName": ctx.command_path, # Full command path. ex: sam local start-api - - # Metric about command's execution characteristics - "duration": duration_fn(), - "exitReason": exit_reason, - "exitCode": exit_code - }) + telemetry.emit( + "commandRun", + { + # Metric about command's general environment + "awsProfileProvided": bool(ctx.profile), + "debugFlagProvided": bool(ctx.debug), + "region": ctx.region or "", + "commandName": ctx.command_path, # Full command path. ex: sam local start-api + # Metric about command's execution characteristics + "duration": duration_fn(), + "exitReason": exit_reason, + "exitCode": exit_code, + }, + ) if exception: raise exception # pylint: disable=raising-bad-type diff --git a/samcli/lib/telemetry/telemetry.py b/samcli/lib/telemetry/telemetry.py index 95a0f70663..9d25d08a5a 100644 --- a/samcli/lib/telemetry/telemetry.py +++ b/samcli/lib/telemetry/telemetry.py @@ -18,7 +18,6 @@ class Telemetry(object): - def __init__(self, url=None): """ Initialize the Telemetry object. @@ -80,9 +79,10 @@ def _send(self, metric, wait_for_response=False): timeout_ms = 2000 if wait_for_response else 100 # 2 seconds to wait for response or 100ms - timeout = (2, # connection timeout. Always set to 2 seconds - timeout_ms / 1000.0 # Read timeout. Tweaked based on input. - ) + timeout = ( + 2, # connection timeout. Always set to 2 seconds + timeout_ms / 1000.0, # Read timeout. Tweaked based on input. 
+ ) try: r = requests.post(self._url, json=payload, timeout=timeout) LOG.debug("Telemetry response: %d", r.status_code) diff --git a/samcli/lib/utils/colors.py b/samcli/lib/utils/colors.py index 69ca887b0d..19c7550494 100644 --- a/samcli/lib/utils/colors.py +++ b/samcli/lib/utils/colors.py @@ -36,23 +36,23 @@ def __init__(self, colorize=True): def red(self, msg): """Color the input red""" - return self._color(msg, 'red') + return self._color(msg, "red") def green(self, msg): """Color the input green""" - return self._color(msg, 'green') + return self._color(msg, "green") def cyan(self, msg): """Color the input cyan""" - return self._color(msg, 'cyan') + return self._color(msg, "cyan") def white(self, msg): """Color the input white""" - return self._color(msg, 'white') + return self._color(msg, "white") def yellow(self, msg): """Color the input yellow""" - return self._color(msg, 'yellow') + return self._color(msg, "yellow") def underline(self, msg): """Underline the input""" @@ -60,5 +60,5 @@ def underline(self, msg): def _color(self, msg, color): """Internal helper method to add colors to input""" - kwargs = {'fg': color} + kwargs = {"fg": color} return click.style(msg, **kwargs) if self.colorize else msg diff --git a/samcli/lib/utils/sam_logging.py b/samcli/lib/utils/sam_logging.py index 47274c479c..069933f10a 100644 --- a/samcli/lib/utils/sam_logging.py +++ b/samcli/lib/utils/sam_logging.py @@ -5,7 +5,6 @@ class SamCliLogger(object): - @staticmethod def configure_logger(logger, formatter, level): """ diff --git a/samcli/lib/utils/stream_writer.py b/samcli/lib/utils/stream_writer.py index 223e4d90b8..a931452e1e 100644 --- a/samcli/lib/utils/stream_writer.py +++ b/samcli/lib/utils/stream_writer.py @@ -4,7 +4,6 @@ class StreamWriter(object): - def __init__(self, stream, auto_flush=False): """ Instatiates new StreamWriter to the specified stream diff --git a/samcli/lib/utils/tar.py b/samcli/lib/utils/tar.py index 38417d6e75..65323ca4ad 100644 --- a/samcli/lib/utils/tar.py +++ b/samcli/lib/utils/tar.py @@ -23,7 +23,7 @@ def create_tarball(tar_paths): """ tarballfile = TemporaryFile() - with tarfile.open(fileobj=tarballfile, mode='w') as archive: + with tarfile.open(fileobj=tarballfile, mode="w") as archive: for path_on_system, path_in_tarball in tar_paths.items(): archive.add(path_on_system, arcname=path_in_tarball) diff --git a/samcli/local/apigw/local_apigw_service.py b/samcli/local/apigw/local_apigw_service.py index 37425e2520..db647ba6ff 100644 --- a/samcli/local/apigw/local_apigw_service.py +++ b/samcli/local/apigw/local_apigw_service.py @@ -19,13 +19,7 @@ class Route(object): - ANY_HTTP_METHODS = ["GET", - "DELETE", - "PUT", - "POST", - "HEAD", - "OPTIONS", - "PATCH"] + ANY_HTTP_METHODS = ["GET", "DELETE", "PUT", "POST", "HEAD", "OPTIONS", "PATCH"] def __init__(self, function_name, path, methods): """ @@ -40,9 +34,12 @@ def __init__(self, function_name, path, methods): self.path = path def __eq__(self, other): - return isinstance(other, Route) and \ - sorted(self.methods) == sorted( - other.methods) and self.function_name == other.function_name and self.path == other.path + return ( + isinstance(other, Route) + and sorted(self.methods) == sorted(other.methods) + and self.function_name == other.function_name + and self.path == other.path + ) def __hash__(self): route_hash = hash(self.function_name) * hash(self.path) @@ -66,7 +63,7 @@ def normalize_method(self, methods): class LocalApigwService(BaseLocalService): _DEFAULT_PORT = 3000 - _DEFAULT_HOST = '127.0.0.1' + _DEFAULT_HOST = 
"127.0.0.1" def __init__(self, api, lambda_runner, static_dir=None, port=None, host=None, stderr=None): """ @@ -101,21 +98,23 @@ def create(self): Creates a Flask Application that can be started. """ - self._app = Flask(__name__, - static_url_path="", # Mount static files at root '/' - static_folder=self.static_dir # Serve static files from this directory - ) + self._app = Flask( + __name__, + static_url_path="", # Mount static files at root '/' + static_folder=self.static_dir, # Serve static files from this directory + ) for api_gateway_route in self.api.routes: path = PathConverter.convert_path_to_flask(api_gateway_route.path) - for route_key in self._generate_route_keys(api_gateway_route.methods, - path): + for route_key in self._generate_route_keys(api_gateway_route.methods, path): self._dict_of_routes[route_key] = api_gateway_route - self._app.add_url_rule(path, - endpoint=path, - view_func=self._request_handler, - methods=api_gateway_route.methods, - provide_automatic_options=False) + self._app.add_url_rule( + path, + endpoint=path, + view_func=self._request_handler, + methods=api_gateway_route.methods, + provide_automatic_options=False, + ) self._construct_error_handling() @@ -133,7 +132,7 @@ def _generate_route_keys(self, methods, path): @staticmethod def _route_key(method, path): - return '{}:{}'.format(path, method) + return "{}:{}".format(path, method) def _construct_error_handling(self): """ @@ -174,13 +173,14 @@ def _request_handler(self, **kwargs): cors_headers = Cors.cors_to_headers(self.api.cors) method, _ = self.get_request_methods_endpoints(request) - if method == 'OPTIONS': + if method == "OPTIONS": headers = Headers(cors_headers) - return self.service_response('', headers, 200) + return self.service_response("", headers, 200) try: - event = self._construct_event(request, self.port, self.api.binary_media_types, self.api.stage_name, - self.api.stage_variables) + event = self._construct_event( + request, self.port, self.api.binary_media_types, self.api.stage_name, self.api.stage_variables + ) except UnicodeDecodeError: return ServiceErrorResponses.lambda_failure_response() @@ -199,12 +199,15 @@ def _request_handler(self, **kwargs): self.stderr.write(lambda_logs) try: - (status_code, headers, body) = self._parse_lambda_output(lambda_response, - self.api.binary_media_types, - request) + (status_code, headers, body) = self._parse_lambda_output( + lambda_response, self.api.binary_media_types, request + ) except (KeyError, TypeError, ValueError): - LOG.error("Function returned an invalid response (must include one of: body, headers, multiValueHeaders or " - "statusCode in the response object). Response received: %s", lambda_response) + LOG.error( + "Function returned an invalid response (must include one of: body, headers, multiValueHeaders or " + "statusCode in the response object). Response received: %s", + lambda_response, + ) return ServiceErrorResponses.lambda_failure_response() return self.service_response(body, headers, status_code) @@ -222,9 +225,14 @@ def _get_current_route(self, flask_request): route = self._dict_of_routes.get(route_key, None) if not route: - LOG.debug("Lambda function for the route not found. This should not happen because Flask is " - "already configured to serve all path/methods given to the service. " - "Path=%s Method=%s RouteKey=%s", endpoint, method, route_key) + LOG.debug( + "Lambda function for the route not found. This should not happen because Flask is " + "already configured to serve all path/methods given to the service. 
" + "Path=%s Method=%s RouteKey=%s", + endpoint, + method, + route_key, + ) raise KeyError("Lambda function for the route not found") return route @@ -255,8 +263,9 @@ def _parse_lambda_output(lambda_output, binary_types, flask_request): raise TypeError("Lambda returned %{s} instead of dict", type(json_output)) status_code = json_output.get("statusCode") or 200 - headers = LocalApigwService._merge_response_headers(json_output.get("headers") or {}, - json_output.get("multiValueHeaders") or {}) + headers = LocalApigwService._merge_response_headers( + json_output.get("headers") or {}, json_output.get("multiValueHeaders") or {} + ) body = json_output.get("body") or "no data" is_base_64_encoded = json_output.get("isBase64Encoded") or False @@ -297,13 +306,7 @@ def _parse_lambda_output(lambda_output, binary_types, flask_request): @staticmethod def _invalid_apig_response_keys(output): - allowable = { - "statusCode", - "body", - "headers", - "multiValueHeaders", - "isBase64Encoded" - } + allowable = {"statusCode", "body", "headers", "multiValueHeaders", "isBase64Encoded"} # In Python 2.7, need to explicitly make the Dictionary keys into a set invalid_keys = set(output.keys()) - allowable return invalid_keys @@ -330,7 +333,7 @@ def _should_base64_decode_body(binary_types, flask_request, lamba_response_heade """ best_match_mimetype = flask_request.accept_mimetypes.best_match(lamba_response_headers.get_all("Content-Type")) - is_best_match_in_binary_types = best_match_mimetype in binary_types or '*/*' in binary_types + is_best_match_in_binary_types = best_match_mimetype in binary_types or "*/*" in binary_types return best_match_mimetype and is_best_match_in_binary_types and is_base_64_encoded @@ -394,30 +397,30 @@ def _construct_event(flask_request, port, binary_types, stage_name=None, stage_v if request_data: # Flask does not parse/decode the request data. We should do it ourselves - request_data = request_data.decode('utf-8') + request_data = request_data.decode("utf-8") - context = RequestContext(resource_path=endpoint, - http_method=method, - stage=stage_name, - identity=identity, - path=endpoint) + context = RequestContext( + resource_path=endpoint, http_method=method, stage=stage_name, identity=identity, path=endpoint + ) headers_dict, multi_value_headers_dict = LocalApigwService._event_headers(flask_request, port) query_string_dict, multi_value_query_string_dict = LocalApigwService._query_string_params(flask_request) - event = ApiGatewayLambdaEvent(http_method=method, - body=request_data, - resource=endpoint, - request_context=context, - query_string_params=query_string_dict, - multi_value_query_string_params=multi_value_query_string_dict, - headers=headers_dict, - multi_value_headers=multi_value_headers_dict, - path_parameters=flask_request.view_args, - path=flask_request.path, - is_base_64_encoded=is_base_64, - stage_variables=stage_variables) + event = ApiGatewayLambdaEvent( + http_method=method, + body=request_data, + resource=endpoint, + request_context=context, + query_string_params=query_string_dict, + multi_value_query_string_params=multi_value_query_string_dict, + headers=headers_dict, + multi_value_headers=multi_value_headers_dict, + path_parameters=flask_request.view_args, + path=flask_request.path, + is_base_64_encoded=is_base_64, + stage_variables=stage_variables, + ) event_str = json.dumps(event.to_dict()) LOG.debug("Constructed String representation of Event to invoke Lambda. 
Event: %s", event_str) diff --git a/samcli/local/apigw/path_converter.py b/samcli/local/apigw/path_converter.py index f4baf840fc..274f4a9907 100644 --- a/samcli/local/apigw/path_converter.py +++ b/samcli/local/apigw/path_converter.py @@ -32,7 +32,6 @@ class PathConverter(object): - @staticmethod def convert_path_to_flask(path): """ diff --git a/samcli/local/common/runtime_template.py b/samcli/local/common/runtime_template.py index 3535553160..b76c0ae9a9 100644 --- a/samcli/local/common/runtime_template.py +++ b/samcli/local/common/runtime_template.py @@ -11,7 +11,7 @@ import pathlib2 as pathlib _init_path = str(pathlib.Path(os.path.dirname(__file__)).parent) -_templates = os.path.join(_init_path, 'init', 'templates') +_templates = os.path.join(_init_path, "init", "templates") # Note(TheSriram): The ordering of the runtimes list per language is based on the latest to oldest. @@ -21,7 +21,7 @@ "runtimes": ["python3.7", "python3.6", "python2.7"], "dependency_manager": "pip", "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-python"), - "build": True + "build": True, } ], "ruby": [ @@ -29,7 +29,7 @@ "runtimes": ["ruby2.5"], "dependency_manager": "bundler", "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-ruby"), - "build": True + "build": True, } ], "nodejs": [ @@ -37,13 +37,13 @@ "runtimes": ["nodejs10.x", "nodejs8.10"], "dependency_manager": "npm", "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-nodejs"), - "build": True + "build": True, }, { "runtimes": ["nodejs6.10"], "dependency_manager": "npm", "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-nodejs6"), - "build": True + "build": True, }, ], "dotnet": [ @@ -51,15 +51,15 @@ "runtimes": ["dotnetcore2.1", "dotnetcore2.0", "dotnetcore1.0", "dotnetcore"], "dependency_manager": "cli-package", "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-dotnet"), - "build": True - }, + "build": True, + } ], "go": [ { "runtimes": ["go1.x"], "dependency_manager": None, "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-golang"), - "build": False + "build": False, } ], "java": [ @@ -67,22 +67,28 @@ "runtimes": ["java8"], "dependency_manager": "maven", "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-java-maven"), - "build": True + "build": True, }, { "runtimes": ["java8"], "dependency_manager": "gradle", "init_location": os.path.join(_templates, "cookiecutter-aws-sam-hello-java-gradle"), - "build": True - } - ] + "build": True, + }, + ], } -SUPPORTED_DEP_MANAGERS = set([c['dependency_manager'] for c in list( - itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values()))) if c['dependency_manager']]) -RUNTIMES = set(itertools.chain(*[c['runtimes'] for c in list( - itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values())))])) +SUPPORTED_DEP_MANAGERS = set( + [ + c["dependency_manager"] + for c in list(itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values()))) + if c["dependency_manager"] + ] +) +RUNTIMES = set( + itertools.chain(*[c["runtimes"] for c in list(itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values())))]) +) INIT_RUNTIMES = RUNTIMES.union(RUNTIME_DEP_TEMPLATE_MAPPING.keys()) # NOTE(TheSriram): Default Runtime Choice when runtime is not chosen -DEFAULT_RUNTIME = RUNTIME_DEP_TEMPLATE_MAPPING['nodejs'][0]['runtimes'][0] +DEFAULT_RUNTIME = RUNTIME_DEP_TEMPLATE_MAPPING["nodejs"][0]["runtimes"][0] diff --git a/samcli/local/docker/attach_api.py b/samcli/local/docker/attach_api.py index 4b9d4d8c49..8a46a5268e 100644 
--- a/samcli/local/docker/attach_api.py +++ b/samcli/local/docker/attach_api.py @@ -38,10 +38,7 @@ def attach(docker_client, container, stdout=True, stderr=True, logs=False): Do you want to include the container's previous output? """ - headers = { - "Connection": "Upgrade", - "Upgrade": "tcp" - } + headers = {"Connection": "Upgrade", "Upgrade": "tcp"} query_params = { "stdout": 1 if stdout else 0, @@ -178,4 +175,4 @@ def _read_header(socket): data = read_exactly(socket, 8) # >BxxxL is the struct notation to unpack data in correct header format in big-endian - return struct.unpack('>BxxxL', data) + return struct.unpack(">BxxxL", data) diff --git a/samcli/local/docker/container.py b/samcli/local/docker/container.py index 75174d58cc..d2910a936c 100644 --- a/samcli/local/docker/container.py +++ b/samcli/local/docker/container.py @@ -28,18 +28,20 @@ class Container(object): _STDOUT_FRAME_TYPE = 1 _STDERR_FRAME_TYPE = 2 - def __init__(self, - image, - cmd, - working_dir, - host_dir, - memory_limit_mb=None, - exposed_ports=None, - entrypoint=None, - env_vars=None, - docker_client=None, - container_opts=None, - additional_volumes=None): + def __init__( + self, + image, + cmd, + working_dir, + host_dir, + memory_limit_mb=None, + exposed_ports=None, + entrypoint=None, + env_vars=None, + docker_client=None, + container_opts=None, + additional_volumes=None, + ): """ Initializes the class with given configuration. This does not automatically create or run the container. @@ -95,13 +97,13 @@ def create(self): # https://docs.docker.com/storage/bind-mounts # Mount the host directory as "read only" inside container "bind": self._working_dir, - "mode": "ro,delegated" + "mode": "ro,delegated", } }, # We are not running an interactive shell here. "tty": False, # Set proxy configuration from global Docker config file - "use_config_proxy": True + "use_config_proxy": True, } if self._container_opts: @@ -126,13 +128,13 @@ def create(self): # Ex: 128m => 128MB kwargs["mem_limit"] = "{}m".format(self._memory_limit_mb) - if self.network_id == 'host': + if self.network_id == "host": kwargs["network_mode"] = self.network_id real_container = self.docker_client.containers.create(self._image, **kwargs) self.id = real_container.id - if self.network_id and self.network_id != 'host': + if self.network_id and self.network_id != "host": network = self.docker_client.networks.get(self.network_id) network.connect(self.id) @@ -147,9 +149,7 @@ def delete(self): return try: - self.docker_client.containers\ - .get(self.id)\ - .remove(force=True) # Remove a container, even if it is running + self.docker_client.containers.get(self.id).remove(force=True) # Remove a container, even if it is running except docker.errors.NotFound: # Container is already not there LOG.debug("Container with ID %s does not exist. Skipping deletion", self.id) @@ -200,11 +200,7 @@ def wait_for_logs(self, stdout=None, stderr=None): real_container = self.docker_client.containers.get(self.id) # Fetch both stdout and stderr streams from Docker as a single iterator. 
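A note on the ">BxxxL" unpack earlier in this file: Docker's attach API multiplexes stdout and stderr into one stream, and every frame starts with an 8-byte header of one stream-type byte, three padding bytes, and a big-endian 32-bit payload length. A minimal standalone sketch of decoding such a header (the sample bytes below are invented for illustration):

import struct

# Hypothetical frame header: stream type 1 (stdout), three pad bytes,
# then a big-endian uint32 stating the payload is 13 bytes long.
sample_header = b"\x01\x00\x00\x00\x00\x00\x00\x0d"

frame_type, payload_length = struct.unpack(">BxxxL", sample_header)
assert (frame_type, payload_length) == (1, 13)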
- logs_itr = attach(self.docker_client, - container=real_container, - stdout=True, - stderr=True, - logs=True) + logs_itr = attach(self.docker_client, container=real_container, stdout=True, stderr=True, logs=True) self._write_container_output(logs_itr, stdout=stdout, stderr=stderr) @@ -224,7 +220,7 @@ def copy(self, from_container_path, to_host_path): # Seek the handle back to start of file for tarfile to use fp.seek(0) - with tarfile.open(fileobj=fp, mode='r') as tar: + with tarfile.open(fileobj=fp, mode="r") as tar: tar.extractall(path=to_host_path) @staticmethod @@ -256,8 +252,11 @@ def _write_container_output(output_itr, stdout=None, stderr=None): else: # Either an unsupported frame type or stream for this frame type is not configured - LOG.debug("Dropping Docker container output because of unconfigured frame type. " - "Frame Type: %s. Data: %s", frame_type, data) + LOG.debug( + "Dropping Docker container output because of unconfigured frame type. " "Frame Type: %s. Data: %s", + frame_type, + data, + ) @property def network_id(self): diff --git a/samcli/local/docker/lambda_build_container.py b/samcli/local/docker/lambda_build_container.py index 8952328182..ed86f4fb87 100644 --- a/samcli/local/docker/lambda_build_container.py +++ b/samcli/local/docker/lambda_build_container.py @@ -25,19 +25,21 @@ class LambdaBuildContainer(Container): _LAMBCI_IMAGE_REPO_NAME = "lambci/lambda" _BUILDERS_EXECUTABLE = "lambda-builders" - def __init__(self, # pylint: disable=too-many-locals - protocol_version, - language, - dependency_manager, - application_framework, - source_dir, - manifest_path, - runtime, - optimizations=None, - options=None, - executable_search_paths=None, - log_level=None, - mode=None): + def __init__( # pylint: disable=too-many-locals + self, + protocol_version, + language, + dependency_manager, + application_framework, + source_dir, + manifest_path, + runtime, + optimizations=None, + options=None, + executable_search_paths=None, + log_level=None, + mode=None, + ): abs_manifest_path = pathlib.Path(manifest_path).resolve() manifest_file_name = abs_manifest_path.name @@ -56,20 +58,23 @@ def __init__(self, # pylint: disable=too-many-locals host_paths_to_convert=executable_search_paths, host_to_container_path_mapping={ source_dir: container_dirs["source_dir"], - manifest_dir: container_dirs["manifest_dir"] - }) - - request_json = self._make_request(protocol_version, - language, - dependency_manager, - application_framework, - container_dirs, - manifest_file_name, - runtime, - optimizations, - options, - executable_search_paths, - mode) + manifest_dir: container_dirs["manifest_dir"], + }, + ) + + request_json = self._make_request( + protocol_version, + language, + dependency_manager, + application_framework, + container_dirs, + manifest_file_name, + runtime, + optimizations, + options, + executable_search_paths, + mode, + ) image = LambdaBuildContainer._get_image(runtime) entry = LambdaBuildContainer._get_entrypoint(request_json) @@ -78,17 +83,12 @@ def __init__(self, # pylint: disable=too-many-locals additional_volumes = { # Manifest is mounted separately in order to support the case where manifest # is outside of source directory - manifest_dir: { - "bind": container_dirs["manifest_dir"], - "mode": "ro" - } + manifest_dir: {"bind": container_dirs["manifest_dir"], "mode": "ro"} } env_vars = None if log_level: - env_vars = { - "LAMBDA_BUILDERS_LOG_LEVEL": log_level - } + env_vars = {"LAMBDA_BUILDERS_LOG_LEVEL": log_level} super(LambdaBuildContainer, self).__init__( image, @@ -97,50 +97,53 
@@ def __init__(self, # pylint: disable=too-many-locals source_dir, additional_volumes=additional_volumes, entrypoint=entry, - env_vars=env_vars) + env_vars=env_vars, + ) @property def executable_name(self): return LambdaBuildContainer._BUILDERS_EXECUTABLE @staticmethod - def _make_request(protocol_version, - language, - dependency_manager, - application_framework, - container_dirs, - manifest_file_name, - runtime, - optimizations, - options, - executable_search_paths, - mode): - - return json.dumps({ - "jsonschema": "2.0", - "id": 1, - "method": "LambdaBuilder.build", - "params": { - "__protocol_version": protocol_version, - "capability": { - "language": language, - "dependency_manager": dependency_manager, - "application_framework": application_framework + def _make_request( + protocol_version, + language, + dependency_manager, + application_framework, + container_dirs, + manifest_file_name, + runtime, + optimizations, + options, + executable_search_paths, + mode, + ): + + return json.dumps( + { + "jsonschema": "2.0", + "id": 1, + "method": "LambdaBuilder.build", + "params": { + "__protocol_version": protocol_version, + "capability": { + "language": language, + "dependency_manager": dependency_manager, + "application_framework": application_framework, + }, + "source_dir": container_dirs["source_dir"], + "artifacts_dir": container_dirs["artifacts_dir"], + "scratch_dir": container_dirs["scratch_dir"], + # Path is always inside a Linux container. So '/' is valid + "manifest_path": "{}/{}".format(container_dirs["manifest_dir"], manifest_file_name), + "runtime": runtime, + "optimizations": optimizations, + "options": options, + "executable_search_paths": executable_search_paths, + "mode": mode, }, - "source_dir": container_dirs["source_dir"], - "artifacts_dir": container_dirs["artifacts_dir"], - "scratch_dir": container_dirs["scratch_dir"], - - # Path is always inside a Linux container. So '/' is valid - "manifest_path": "{}/{}".format(container_dirs["manifest_dir"], manifest_file_name), - - "runtime": runtime, - "optimizations": optimizations, - "options": options, - "executable_search_paths": executable_search_paths, - "mode": mode } - }) + ) @staticmethod def _get_entrypoint(request_json): @@ -169,7 +172,7 @@ def _get_container_dirs(source_dir, manifest_dir): "source_dir": "{}/source".format(base), "artifacts_dir": "{}/artifacts".format(base), "scratch_dir": "{}/scratch".format(base), - "manifest_dir": "{}/manifest".format(base) + "manifest_dir": "{}/manifest".format(base), } if pathlib.PurePath(source_dir) == pathlib.PurePath(manifest_dir): @@ -222,8 +225,11 @@ def _convert_to_container_dirs(host_paths_to_convert, host_to_container_path_map result.append(mapping[abspath]) else: result.append(original_path) - LOG.debug("Cannot convert host path '%s' to its equivalent path within the container. " - "Host path is not mounted within the container", abspath) + LOG.debug( + "Cannot convert host path '%s' to its equivalent path within the container. 
" + "Host path is not mounted within the container", + abspath, + ) return result @@ -231,5 +237,6 @@ def _convert_to_container_dirs(host_paths_to_convert, host_to_container_path_map def _get_image(runtime): runtime_to_images = {"nodejs10.x": "amazon/lambda-build-node10.x"} - return runtime_to_images.get(runtime, - "{}:build-{}".format(LambdaBuildContainer._LAMBCI_IMAGE_REPO_NAME, runtime)) + return runtime_to_images.get( + runtime, "{}:build-{}".format(LambdaBuildContainer._LAMBCI_IMAGE_REPO_NAME, runtime) + ) diff --git a/samcli/local/docker/lambda_container.py b/samcli/local/docker/lambda_container.py index a3bffa7f55..8fa711d93d 100644 --- a/samcli/local/docker/lambda_container.py +++ b/samcli/local/docker/lambda_container.py @@ -31,15 +31,17 @@ class LambdaContainer(Container): # This is the dictionary that represents where the debugger_path arg is mounted in docker to as readonly. _DEBUGGER_VOLUME_MOUNT = {"bind": _DEBUGGER_VOLUME_MOUNT_PATH, "mode": "ro"} - def __init__(self, # pylint: disable=R0914 - runtime, - handler, - code_dir, - layers, - image_builder, - memory_mb=128, - env_vars=None, - debug_options=None): + def __init__( + self, # pylint: disable=R0914 + runtime, + handler, + code_dir, + layers, + image_builder, + memory_mb=128, + env_vars=None, + debug_options=None, + ): """ Initializes the class @@ -74,16 +76,18 @@ def __init__(self, # pylint: disable=R0914 additional_volumes = LambdaContainer._get_additional_volumes(debug_options) cmd = [handler] - super(LambdaContainer, self).__init__(image, - cmd, - self._WORKING_DIR, - code_dir, - memory_limit_mb=memory_mb, - exposed_ports=ports, - entrypoint=entry, - env_vars=env_vars, - container_opts=additional_options, - additional_volumes=additional_volumes) + super(LambdaContainer, self).__init__( + image, + cmd, + self._WORKING_DIR, + code_dir, + memory_limit_mb=memory_mb, + exposed_ports=ports, + entrypoint=entry, + env_vars=env_vars, + container_opts=additional_options, + additional_volumes=additional_volumes, + ) @staticmethod def _get_exposed_ports(debug_options): @@ -135,9 +139,7 @@ def _get_additional_volumes(debug_options): if not debug_options or not debug_options.debugger_path: return None - return { - debug_options.debugger_path: LambdaContainer._DEBUGGER_VOLUME_MOUNT - } + return {debug_options.debugger_path: LambdaContainer._DEBUGGER_VOLUME_MOUNT} @staticmethod def _get_image(image_builder, runtime, layers): @@ -185,7 +187,9 @@ def _get_entry_point(runtime, debug_options=None): # pylint: disable=too-many-b # configs from: https://github.com/lambci/docker-lambda # to which we add the extra debug mode options - return LambdaDebugEntryPoint.get_entry_point(debug_port=debug_port, - debug_args_list=debug_args_list, - runtime=runtime, - options=LambdaContainer._DEBUG_ENTRYPOINT_OPTIONS) + return LambdaDebugEntryPoint.get_entry_point( + debug_port=debug_port, + debug_args_list=debug_args_list, + runtime=runtime, + options=LambdaContainer._DEBUG_ENTRYPOINT_OPTIONS, + ) diff --git a/samcli/local/docker/lambda_debug_entrypoint.py b/samcli/local/docker/lambda_debug_entrypoint.py index f382adfa7c..17154e5394 100644 --- a/samcli/local/docker/lambda_debug_entrypoint.py +++ b/samcli/local/docker/lambda_debug_entrypoint.py @@ -12,137 +12,108 @@ class DebuggingNotSupported(Exception): class LambdaDebugEntryPoint(object): - @staticmethod def get_entry_point(debug_port, debug_args_list, runtime, options): entrypoint_mapping = { - Runtime.java8.value: - ["/usr/bin/java"] + - debug_args_list + - [ - 
"-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,quiet=y,address=" + str(debug_port), - "-XX:MaxHeapSize=2834432k", - "-XX:MaxMetaspaceSize=163840k", - "-XX:ReservedCodeCacheSize=81920k", - "-XX:+UseSerialGC", - # "-Xshare:on", doesn't work in conjunction with the debug options - "-XX:-TieredCompilation", - "-Djava.net.preferIPv4Stack=true", - "-jar", - "/var/runtime/lib/LambdaJavaRTEntry-1.0.jar" - ], - - Runtime.dotnetcore20.value: - ["/var/lang/bin/dotnet"] + \ - debug_args_list + \ - [ - "/var/runtime/MockBootstraps.dll", - "--debugger-spin-wait" - ], - - Runtime.dotnetcore21.value: - ["/var/lang/bin/dotnet"] + \ - debug_args_list + \ - [ - "/var/runtime/MockBootstraps.dll", - "--debugger-spin-wait" - ], - Runtime.go1x.value: - ["/var/runtime/aws-lambda-go"] + \ - debug_args_list + \ - [ - "-debug=true", - "-delvePort=" + str(debug_port), - "-delvePath=" + options.get("delvePath"), - ], - Runtime.nodejs.value: - ["/usr/bin/node"] + \ - debug_args_list + \ - [ - "--debug-brk=" + str(debug_port), - "--nolazy", - "--max-old-space-size=1229", - "--max-new-space-size=153", - "--max-executable-size=153", - "--expose-gc", - "/var/runtime/node_modules/awslambda/bin/awslambda", - ], - Runtime.nodejs43.value: - ["/usr/local/lib64/node-v4.3.x/bin/node"] + \ - debug_args_list + \ - [ - "--debug-brk=" + str(debug_port), - "--nolazy", - "--max-old-space-size=2547", - "--max-semi-space-size=150", - "--max-executable-size=160", - "--expose-gc", - "/var/runtime/node_modules/awslambda/index.js", - ], - Runtime.nodejs610.value: - ["/var/lang/bin/node"] + \ - debug_args_list + \ - [ - "--debug-brk=" + str(debug_port), - "--nolazy", - "--max-old-space-size=2547", - "--max-semi-space-size=150", - "--max-executable-size=160", - "--expose-gc", - "/var/runtime/node_modules/awslambda/index.js", - ], - Runtime.nodejs810.value: - ["/var/lang/bin/node"] + \ - debug_args_list + \ - [ - # Node8 requires the host to be explicitly set in order to bind to localhost - # instead of 127.0.0.1. 
https://github.com/nodejs/node/issues/11591#issuecomment-283110138 - "--inspect-brk=0.0.0.0:" + str(debug_port), - "--nolazy", - "--expose-gc", - "--max-semi-space-size=150", - "--max-old-space-size=2707", - "/var/runtime/node_modules/awslambda/index.js", - ], - Runtime.nodejs10x.value: - ["/var/rapid/init", - "--bootstrap", - "/var/lang/bin/node", - "--bootstrap-args", - json.dumps(debug_args_list + - [ - "--inspect-brk=0.0.0.0:" + str(debug_port), - "--nolazy", - "--expose-gc", - "--max-http-header-size", - "81920", - "/var/runtime/index.js" - ] - ) - ], - Runtime.python27.value: - ["/usr/bin/python2.7"] + \ - debug_args_list + \ - [ - "/var/runtime/awslambda/bootstrap.py" - ], - Runtime.python36.value: - ["/var/lang/bin/python3.6"] + - debug_args_list + \ - [ - "/var/runtime/awslambda/bootstrap.py" - ], - Runtime.python37.value: - ["/var/rapid/init", - "--bootstrap", - "/var/lang/bin/python3.7", - "--bootstrap-args", - json.dumps(debug_args_list + ["/var/runtime/bootstrap"]) - ] + Runtime.java8.value: ["/usr/bin/java"] + + debug_args_list + + [ + "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,quiet=y,address=" + str(debug_port), + "-XX:MaxHeapSize=2834432k", + "-XX:MaxMetaspaceSize=163840k", + "-XX:ReservedCodeCacheSize=81920k", + "-XX:+UseSerialGC", + # "-Xshare:on", doesn't work in conjunction with the debug options + "-XX:-TieredCompilation", + "-Djava.net.preferIPv4Stack=true", + "-jar", + "/var/runtime/lib/LambdaJavaRTEntry-1.0.jar", + ], + Runtime.dotnetcore20.value: ["/var/lang/bin/dotnet"] + + debug_args_list + + ["/var/runtime/MockBootstraps.dll", "--debugger-spin-wait"], + Runtime.dotnetcore21.value: ["/var/lang/bin/dotnet"] + + debug_args_list + + ["/var/runtime/MockBootstraps.dll", "--debugger-spin-wait"], + Runtime.go1x.value: ["/var/runtime/aws-lambda-go"] + + debug_args_list + + ["-debug=true", "-delvePort=" + str(debug_port), "-delvePath=" + options.get("delvePath")], + Runtime.nodejs.value: ["/usr/bin/node"] + + debug_args_list + + [ + "--debug-brk=" + str(debug_port), + "--nolazy", + "--max-old-space-size=1229", + "--max-new-space-size=153", + "--max-executable-size=153", + "--expose-gc", + "/var/runtime/node_modules/awslambda/bin/awslambda", + ], + Runtime.nodejs43.value: ["/usr/local/lib64/node-v4.3.x/bin/node"] + + debug_args_list + + [ + "--debug-brk=" + str(debug_port), + "--nolazy", + "--max-old-space-size=2547", + "--max-semi-space-size=150", + "--max-executable-size=160", + "--expose-gc", + "/var/runtime/node_modules/awslambda/index.js", + ], + Runtime.nodejs610.value: ["/var/lang/bin/node"] + + debug_args_list + + [ + "--debug-brk=" + str(debug_port), + "--nolazy", + "--max-old-space-size=2547", + "--max-semi-space-size=150", + "--max-executable-size=160", + "--expose-gc", + "/var/runtime/node_modules/awslambda/index.js", + ], + Runtime.nodejs810.value: ["/var/lang/bin/node"] + + debug_args_list + + [ + # Node8 requires the host to be explicitly set in order to bind to localhost + # instead of 127.0.0.1. 
https://github.com/nodejs/node/issues/11591#issuecomment-283110138 + "--inspect-brk=0.0.0.0:" + str(debug_port), + "--nolazy", + "--expose-gc", + "--max-semi-space-size=150", + "--max-old-space-size=2707", + "/var/runtime/node_modules/awslambda/index.js", + ], + Runtime.nodejs10x.value: [ + "/var/rapid/init", + "--bootstrap", + "/var/lang/bin/node", + "--bootstrap-args", + json.dumps( + debug_args_list + + [ + "--inspect-brk=0.0.0.0:" + str(debug_port), + "--nolazy", + "--expose-gc", + "--max-http-header-size", + "81920", + "/var/runtime/index.js", + ] + ), + ], + Runtime.python27.value: ["/usr/bin/python2.7"] + debug_args_list + ["/var/runtime/awslambda/bootstrap.py"], + Runtime.python36.value: ["/var/lang/bin/python3.6"] + + debug_args_list + + ["/var/runtime/awslambda/bootstrap.py"], + Runtime.python37.value: [ + "/var/rapid/init", + "--bootstrap", + "/var/lang/bin/python3.7", + "--bootstrap-args", + json.dumps(debug_args_list + ["/var/runtime/bootstrap"]), + ], } try: return entrypoint_mapping[runtime] except KeyError: - raise DebuggingNotSupported( - "Debugging is not currently supported for {}".format(runtime)) + raise DebuggingNotSupported("Debugging is not currently supported for {}".format(runtime)) diff --git a/samcli/local/docker/lambda_image.py b/samcli/local/docker/lambda_image.py index 9afc9d341f..5a991f85fe 100644 --- a/samcli/local/docker/lambda_image.py +++ b/samcli/local/docker/lambda_image.py @@ -107,9 +107,11 @@ def build(self, runtime, layers): LOG.info("Image was not found.") image_not_found = True - if self.force_image_build or \ - image_not_found or \ - any(layer.is_defined_within_template for layer in downloaded_layers): + if ( + self.force_image_build + or image_not_found + or any(layer.is_defined_within_template for layer in downloaded_layers) + ): LOG.info("Building image...") self._build_image(base_image, image_tag, downloaded_layers) @@ -139,8 +141,9 @@ def _generate_docker_image_version(layers, runtime): # specified in the template. This will allow reuse of the runtime and layers across different # functions that are defined. If two functions use the same runtime with the same layers (in the # same order), SAM CLI will only produce one image and use this image across both functions for invoke. 
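In other words, the tag produced by the reformatted return below is the runtime name plus the first 25 hex characters of a SHA-256 over the ordered layer names. A standalone sketch of the same computation (the layer names are invented for illustration):

import hashlib

runtime = "python3.7"
layer_names = ["my-shared-layer", "another-layer"]  # hypothetical layer names, in template order

digest = hashlib.sha256("-".join(layer_names).encode("utf-8")).hexdigest()[0:25]
image_tag = runtime + "-" + digest  # e.g. "python3.7-" followed by 25 hex characters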
- return runtime + '-' + hashlib.sha256( - "-".join([layer.name for layer in layers]).encode('utf-8')).hexdigest()[0:25] + return ( + runtime + "-" + hashlib.sha256("-".join([layer.name for layer in layers]).encode("utf-8")).hexdigest()[0:25] + ) def _build_image(self, base_image, docker_tag, layers): """ @@ -176,15 +179,13 @@ def _build_image(self, base_image, docker_tag, layers): tar_paths = {str(full_dockerfile_path): "Dockerfile"} for layer in layers: - tar_paths[layer.codeuri] = '/' + layer.name + tar_paths[layer.codeuri] = "/" + layer.name with create_tarball(tar_paths) as tarballfile: try: - self.docker_client.images.build(fileobj=tarballfile, - custom_context=True, - rm=True, - tag=docker_tag, - pull=not self.skip_pull_image) + self.docker_client.images.build( + fileobj=tarballfile, custom_context=True, rm=True, tag=docker_tag, pull=not self.skip_pull_image + ) except (docker.errors.BuildError, docker.errors.APIError): LOG.exception("Failed to build Docker Image") raise ImageBuildException("Building Image failed.") @@ -221,6 +222,7 @@ def _generate_dockerfile(base_image, layers): dockerfile_content = "FROM {}\n".format(base_image) for layer in layers: - dockerfile_content = dockerfile_content + \ - "ADD --chown=sbx_user1051:495 {} {}\n".format(layer.name, LambdaImage._LAYERS_DIR) + dockerfile_content = dockerfile_content + "ADD --chown=sbx_user1051:495 {} {}\n".format( + layer.name, LambdaImage._LAYERS_DIR + ) return dockerfile_content diff --git a/samcli/local/docker/manager.py b/samcli/local/docker/manager.py index 2080acbb08..0fb07c0391 100644 --- a/samcli/local/docker/manager.py +++ b/samcli/local/docker/manager.py @@ -20,10 +20,7 @@ class ContainerManager(object): serve requests faster. It is also thread-safe. """ - def __init__(self, - docker_network_id=None, - docker_client=None, - skip_pull_image=False): + def __init__(self, docker_network_id=None, docker_client=None, skip_pull_image=False): """ Instantiate the container manager @@ -76,7 +73,7 @@ def run(self, container, input_data=None, warm=False): # Skip Pulling a new image if: a) Image name is samcli/lambda OR b) Image is available AND # c) We are asked to skip pulling the image - if (is_image_local and self.skip_pull_image) or image_name.startswith('samcli/lambda'): + if (is_image_local and self.skip_pull_image) or image_name.startswith("samcli/lambda"): LOG.info("Requested to skip pulling images ...\n") else: try: @@ -84,10 +81,10 @@ def run(self, container, input_data=None, warm=False): except DockerImagePullFailedException: if not is_image_local: raise DockerImagePullFailedException( - "Could not find {} image locally and failed to pull it from docker.".format(image_name)) + "Could not find {} image locally and failed to pull it from docker.".format(image_name) + ) - LOG.info( - "Failed to download a new %s image. Invoking with the already downloaded image.", image_name) + LOG.info("Failed to download a new %s image. Invoking with the already downloaded image.", image_name) if not container.is_created(): # Create the container first before running. @@ -135,7 +132,7 @@ def pull_image(self, image_name, stream=None): # Each line contains information on progress of the pull. Each line is a JSON string for _ in result_itr: # For every line, print a dot to show progress - stream_writer.write(u'.') + stream_writer.write(u".") stream_writer.flush() # We are done. 
Go to the next line diff --git a/samcli/local/docker/utils.py b/samcli/local/docker/utils.py index 3e1379fd80..2fadc38aa6 100644 --- a/samcli/local/docker/utils.py +++ b/samcli/local/docker/utils.py @@ -5,6 +5,7 @@ import os import re import posixpath + try: import pathlib except ImportError: @@ -31,6 +32,12 @@ def to_posix_path(code_path): /c/Users/UserName/AppData/Local/Temp/mydir """ - return re.sub("^([A-Za-z])+:", - lambda match: posixpath.sep + match.group().replace(":", "").lower(), - pathlib.PureWindowsPath(code_path).as_posix()) if os.name == "nt" else code_path + return ( + re.sub( + "^([A-Za-z])+:", + lambda match: posixpath.sep + match.group().replace(":", "").lower(), + pathlib.PureWindowsPath(code_path).as_posix(), + ) + if os.name == "nt" + else code_path + ) diff --git a/samcli/local/events/api_event.py b/samcli/local/events/api_event.py index c542e6e75e..e17f125780 100644 --- a/samcli/local/events/api_event.py +++ b/samcli/local/events/api_event.py @@ -2,18 +2,19 @@ class ContextIdentity(object): - - def __init__(self, - api_key=None, - user_arn=None, - cognito_authentication_type=None, - caller=None, - user_agent="Custom User Agent String", - user=None, - cognito_identity_pool_id=None, - cognito_authentication_provider=None, - source_ip="127.0.0.1", - account_id=None): + def __init__( + self, + api_key=None, + user_arn=None, + cognito_authentication_type=None, + caller=None, + user_agent="Custom User Agent String", + user=None, + cognito_identity_pool_id=None, + cognito_authentication_provider=None, + source_ip="127.0.0.1", + account_id=None, + ): """ Constructs a ContextIdentity @@ -45,34 +46,36 @@ def to_dict(self): :return: dict representing the object """ - json_dict = {"apiKey": self.api_key, - "userArn": self.user_arn, - "cognitoAuthenticationType": self.cognito_authentication_type, - "caller": self.caller, - "userAgent": self.user_agent, - "user": self.user, - "cognitoIdentityPoolId": self.cognito_identity_pool_id, - "cognitoAuthenticationProvider": self.cognito_authentication_provider, - "sourceIp": self.source_ip, - "accountId": self.account_id - } + json_dict = { + "apiKey": self.api_key, + "userArn": self.user_arn, + "cognitoAuthenticationType": self.cognito_authentication_type, + "caller": self.caller, + "userAgent": self.user_agent, + "user": self.user, + "cognitoIdentityPoolId": self.cognito_identity_pool_id, + "cognitoAuthenticationProvider": self.cognito_authentication_provider, + "sourceIp": self.source_ip, + "accountId": self.account_id, + } return json_dict class RequestContext(object): - - def __init__(self, - resource_id="123456", - api_id="1234567890", - resource_path=None, - http_method=None, - request_id="c6af9ac6-7b61-11e6-9a41-93e8deadbeef", - account_id="123456789012", - stage=None, - identity=None, - extended_request_id=None, - path=None): + def __init__( + self, + resource_id="123456", + api_id="1234567890", + resource_path=None, + http_method=None, + request_id="c6af9ac6-7b61-11e6-9a41-93e8deadbeef", + account_id="123456789012", + stage=None, + identity=None, + extended_request_id=None, + path=None, + ): """ Constructs a RequestContext @@ -109,36 +112,38 @@ def to_dict(self): if self.identity: identity_dict = self.identity.to_dict() - json_dict = {"resourceId": self.resource_id, - "apiId": self.api_id, - "resourcePath": self.resource_path, - "httpMethod": self.http_method, - "requestId": self.request_id, - "accountId": self.account_id, - "stage": self.stage, - "identity": identity_dict, - "extendedRequestId": self.extended_request_id, - 
"path": self.path - } + json_dict = { + "resourceId": self.resource_id, + "apiId": self.api_id, + "resourcePath": self.resource_path, + "httpMethod": self.http_method, + "requestId": self.request_id, + "accountId": self.account_id, + "stage": self.stage, + "identity": identity_dict, + "extendedRequestId": self.extended_request_id, + "path": self.path, + } return json_dict class ApiGatewayLambdaEvent(object): - - def __init__(self, - http_method=None, - body=None, - resource=None, - request_context=None, - query_string_params=None, - multi_value_query_string_params=None, - headers=None, - multi_value_headers=None, - path_parameters=None, - stage_variables=None, - path=None, - is_base_64_encoded=False): + def __init__( + self, + http_method=None, + body=None, + resource=None, + request_context=None, + query_string_params=None, + multi_value_query_string_params=None, + headers=None, + multi_value_headers=None, + path_parameters=None, + stage_variables=None, + path=None, + is_base_64_encoded=False, + ): """ Constructs an ApiGatewayLambdaEvent @@ -156,12 +161,10 @@ def __init__(self, :param bool is_base_64_encoded: True if the data is base64 encoded. """ - if not isinstance(query_string_params, dict) and \ - query_string_params is not None: + if not isinstance(query_string_params, dict) and query_string_params is not None: raise TypeError("'query_string_params' must be of type dict or None") - if not isinstance(multi_value_query_string_params, dict) and \ - multi_value_query_string_params is not None: + if not isinstance(multi_value_query_string_params, dict) and multi_value_query_string_params is not None: raise TypeError("'multi_value_query_string_params' must be of type dict or None") if not isinstance(headers, dict) and headers is not None: @@ -199,19 +202,21 @@ def to_dict(self): if self.request_context: request_context_dict = self.request_context.to_dict() - json_dict = {"httpMethod": self.http_method, - "body": self.body if self.body else None, - "resource": self.resource, - "requestContext": request_context_dict, - "queryStringParameters": dict(self.query_string_params) if self.query_string_params else None, - "multiValueQueryStringParameters": dict(self.multi_value_query_string_params) - if self.multi_value_query_string_params else None, - "headers": dict(self.headers) if self.headers else None, - "multiValueHeaders": dict(self.multi_value_headers) if self.multi_value_headers else None, - "pathParameters": dict(self.path_parameters) if self.path_parameters else None, - "stageVariables": dict(self.stage_variables) if self.stage_variables else None, - "path": self.path, - "isBase64Encoded": self.is_base_64_encoded - } + json_dict = { + "httpMethod": self.http_method, + "body": self.body if self.body else None, + "resource": self.resource, + "requestContext": request_context_dict, + "queryStringParameters": dict(self.query_string_params) if self.query_string_params else None, + "multiValueQueryStringParameters": dict(self.multi_value_query_string_params) + if self.multi_value_query_string_params + else None, + "headers": dict(self.headers) if self.headers else None, + "multiValueHeaders": dict(self.multi_value_headers) if self.multi_value_headers else None, + "pathParameters": dict(self.path_parameters) if self.path_parameters else None, + "stageVariables": dict(self.stage_variables) if self.stage_variables else None, + "path": self.path, + "isBase64Encoded": self.is_base_64_encoded, + } return json_dict diff --git a/samcli/local/init/__init__.py b/samcli/local/init/__init__.py index 
84ec28c6b9..6a497b6280 100644 --- a/samcli/local/init/__init__.py +++ b/samcli/local/init/__init__.py @@ -14,8 +14,8 @@ def generate_project( - location=None, runtime="nodejs10.x", dependency_manager=None, - output_dir=".", name='sam-sample-app', no_input=False): + location=None, runtime="nodejs10.x", dependency_manager=None, output_dir=".", name="sam-sample-app", no_input=False +): """Generates project using cookiecutter and options given Generate project scaffolds a project using default templates if user @@ -50,27 +50,23 @@ def generate_project( template = None for mapping in list(itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values()))): - if runtime in mapping['runtimes'] or any([r.startswith(runtime) for r in mapping['runtimes']]): - if not dependency_manager or dependency_manager == mapping['dependency_manager']: - template = mapping['init_location'] + if runtime in mapping["runtimes"] or any([r.startswith(runtime) for r in mapping["runtimes"]]): + if not dependency_manager or dependency_manager == mapping["dependency_manager"]: + template = mapping["init_location"] break if not template: msg = "Lambda Runtime {} does not support dependency manager: {}".format(runtime, dependency_manager) raise GenerateProjectFailedError(project=name, provider_error=msg) - params = { - "template": location if location else template, - "output_dir": output_dir, - "no_input": no_input - } + params = {"template": location if location else template, "output_dir": output_dir, "no_input": no_input} LOG.debug("Parameters dict created with input given") LOG.debug("%s", params) if not location and name is not None: - params['extra_context'] = {'project_name': name, 'runtime': runtime} - params['no_input'] = True + params["extra_context"] = {"project_name": name, "runtime": runtime} + params["no_input"] = True LOG.debug("Parameters dict updated with project name as extra_context") LOG.debug("%s", params) diff --git a/samcli/local/init/exceptions.py b/samcli/local/init/exceptions.py index 63c5c089b1..7452f0021a 100644 --- a/samcli/local/init/exceptions.py +++ b/samcli/local/init/exceptions.py @@ -4,7 +4,7 @@ class InitErrorException(Exception): - fmt = 'An unspecified error occurred' + fmt = "An unspecified error occurred" def __init__(self, **kwargs): msg = self.fmt.format(**kwargs) @@ -13,5 +13,4 @@ def __init__(self, **kwargs): class GenerateProjectFailedError(InitErrorException): - fmt = \ - ("An error occurred while generating this {project}: {provider_error}") + fmt = "An error occurred while generating this {project}: {provider_error}" diff --git a/samcli/local/lambda_service/lambda_error_responses.py b/samcli/local/lambda_service/lambda_error_responses.py index c9cb3eedf2..d003980042 100644 --- a/samcli/local/lambda_service/lambda_error_responses.py +++ b/samcli/local/lambda_service/lambda_error_responses.py @@ -9,22 +9,22 @@ class LambdaErrorResponses(object): # The content type of the Invoke request body is not JSON. - UnsupportedMediaTypeException = ('UnsupportedMediaType', 415) + UnsupportedMediaTypeException = ("UnsupportedMediaType", 415) # The AWS Lambda service encountered an internal error. - ServiceException = ('Service', 500) + ServiceException = ("Service", 500) # The resource (for example, a Lambda function or access policy statement) specified in the request does not exist. - ResourceNotFoundException = ('ResourceNotFound', 404) + ResourceNotFoundException = ("ResourceNotFound", 404) # The request body could not be parsed as JSON. 
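The constants in this hunk pair each Lambda error with an (error name, HTTP status) tuple, and the helpers further down wrap a JSON body plus an x-amzn-errortype header around them. A rough standalone sketch of that shape; the function name and body layout here are invented stand-ins, not the module's API:

import json

INVALID_REQUEST_CONTENT = ("InvalidRequestContent", 400)  # mirrors the tuples in this hunk


def build_error_response(exception_tuple, error_type, message):
    error_name, status_code = exception_tuple
    body = json.dumps({"Type": error_type, "Message": message})  # assumed body layout
    headers = {"x-amzn-errortype": error_name, "Content-Type": "application/json"}
    return body, headers, status_code


body, headers, status = build_error_response(INVALID_REQUEST_CONTENT, "User", "Could not parse request body")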
- InvalidRequestContentException = ('InvalidRequestContent', 400) + InvalidRequestContentException = ("InvalidRequestContent", 400) - NotImplementedException = ('NotImplemented', 501) + NotImplementedException = ("NotImplemented", 501) - PathNotFoundException = ('PathNotFoundLocally', 404) + PathNotFoundException = ("PathNotFoundLocally", 404) - MethodNotAllowedException = ('MethodNotAllowedLocally', 405) + MethodNotAllowedException = ("MethodNotAllowedLocally", 405) # Error Types USER_ERROR = "User" @@ -32,8 +32,8 @@ class LambdaErrorResponses(object): LOCAL_SERVICE_ERROR = "LocalService" # Header Information - CONTENT_TYPE = 'application/json' - CONTENT_TYPE_HEADER_KEY = 'Content-Type' + CONTENT_TYPE = "application/json" + CONTENT_TYPE_HEADER_KEY = "Content-Type" @staticmethod def resource_not_found(function_name): @@ -55,10 +55,10 @@ def resource_not_found(function_name): return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( LambdaErrorResponses.USER_ERROR, - "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format(function_name) + "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format(function_name), ), LambdaErrorResponses._construct_headers(exception_tuple[0]), - exception_tuple[1] + exception_tuple[1], ) @staticmethod @@ -81,7 +81,7 @@ def invalid_request_content(message): return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, message), LambdaErrorResponses._construct_headers(exception_tuple[0]), - exception_tuple[1] + exception_tuple[1], ) @staticmethod @@ -102,10 +102,11 @@ def unsupported_media_type(content_type): exception_tuple = LambdaErrorResponses.UnsupportedMediaTypeException return BaseLocalService.service_response( - LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, - "Unsupported content type: {}".format(content_type)), + LambdaErrorResponses._construct_error_response_body( + LambdaErrorResponses.USER_ERROR, "Unsupported content type: {}".format(content_type) + ), LambdaErrorResponses._construct_headers(exception_tuple[0]), - exception_tuple[1] + exception_tuple[1], ) @staticmethod @@ -128,7 +129,7 @@ def generic_service_exception(*args): return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.SERVICE_ERROR, "ServiceException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), - exception_tuple[1] + exception_tuple[1], ) @staticmethod @@ -151,7 +152,7 @@ def not_implemented_locally(message): return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.LOCAL_SERVICE_ERROR, message), LambdaErrorResponses._construct_headers(exception_tuple[0]), - exception_tuple[1] + exception_tuple[1], ) @staticmethod @@ -173,9 +174,10 @@ def generic_path_not_found(*args): return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( - LambdaErrorResponses.LOCAL_SERVICE_ERROR, "PathNotFoundException"), + LambdaErrorResponses.LOCAL_SERVICE_ERROR, "PathNotFoundException" + ), LambdaErrorResponses._construct_headers(exception_tuple[0]), - exception_tuple[1] + exception_tuple[1], ) @staticmethod @@ -196,10 +198,11 @@ def generic_method_not_allowed(*args): exception_tuple = LambdaErrorResponses.MethodNotAllowedException return BaseLocalService.service_response( - 
LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.LOCAL_SERVICE_ERROR, - "MethodNotAllowedException"), + LambdaErrorResponses._construct_error_response_body( + LambdaErrorResponses.LOCAL_SERVICE_ERROR, "MethodNotAllowedException" + ), LambdaErrorResponses._construct_headers(exception_tuple[0]), - exception_tuple[1] + exception_tuple[1], ) @staticmethod @@ -238,5 +241,4 @@ def _construct_headers(error_type): dict Dict representing the Lambda Error Response Headers """ - return {'x-amzn-errortype': error_type, - 'Content-Type': 'application/json'} + return {"x-amzn-errortype": error_type, "Content-Type": "application/json"} diff --git a/samcli/local/lambda_service/local_lambda_invoke_service.py b/samcli/local/lambda_service/local_lambda_invoke_service.py index 976a468da8..b41072e606 100644 --- a/samcli/local/lambda_service/local_lambda_invoke_service.py +++ b/samcli/local/lambda_service/local_lambda_invoke_service.py @@ -15,7 +15,6 @@ class LocalLambdaInvokeService(BaseLocalService): - def __init__(self, lambda_runner, port, host, stderr=None): """ Creates a Local Lambda Service that will only response to invoking a function @@ -41,12 +40,14 @@ def create(self): """ self._app = Flask(__name__) - path = '/2015-03-31/functions//invocations' - self._app.add_url_rule(path, - endpoint=path, - view_func=self._invoke_request_handler, - methods=['POST'], - provide_automatic_options=False) + path = "/2015-03-31/functions//invocations" + self._app.add_url_rule( + path, + endpoint=path, + view_func=self._invoke_request_handler, + methods=["POST"], + provide_automatic_options=False, + ) # setup request validation before Flask calls the view_func self._app.before_request(LocalLambdaInvokeService.validate_request) @@ -77,16 +78,17 @@ def validate_request(): request_data = flask_request.get_data() if not request_data: - request_data = b'{}' + request_data = b"{}" - request_data = request_data.decode('utf-8') + request_data = request_data.decode("utf-8") try: json.loads(request_data) except ValueError as json_error: LOG.debug("Request body was not json. Exception: %s", str(json_error)) return LambdaErrorResponses.invalid_request_content( - "Could not parse request body into json: No JSON object could be decoded") + "Could not parse request body into json: No JSON object could be decoded" + ) if flask_request.args: LOG.debug("Query parameters are in the request but not supported") @@ -94,17 +96,19 @@ def validate_request(): request_headers = flask_request.headers - log_type = request_headers.get('X-Amz-Log-Type', 'None') - if log_type != 'None': + log_type = request_headers.get("X-Amz-Log-Type", "None") + if log_type != "None": LOG.debug("log-type: %s is not supported. None is only supported.", log_type) return LambdaErrorResponses.not_implemented_locally( - "log-type: {} is not supported. None is only supported.".format(log_type)) + "log-type: {} is not supported. None is only supported.".format(log_type) + ) - invocation_type = request_headers.get('X-Amz-Invocation-Type', 'RequestResponse') - if invocation_type != 'RequestResponse': + invocation_type = request_headers.get("X-Amz-Invocation-Type", "RequestResponse") + if invocation_type != "RequestResponse": LOG.warning("invocation-type: %s is not supported. RequestResponse is only supported.", invocation_type) return LambdaErrorResponses.not_implemented_locally( - "invocation-type: {} is not supported. RequestResponse is only supported.".format(invocation_type)) + "invocation-type: {} is not supported. 
RequestResponse is only supported.".format(invocation_type) + ) def _construct_error_handling(self): """ @@ -134,9 +138,9 @@ def _invoke_request_handler(self, function_name): request_data = flask_request.get_data() if not request_data: - request_data = b'{}' + request_data = b"{}" - request_data = request_data.decode('utf-8') + request_data = request_data.decode("utf-8") stdout_stream = io.BytesIO() stdout_stream_writer = StreamWriter(stdout_stream, self.is_debugging) @@ -144,19 +148,20 @@ def _invoke_request_handler(self, function_name): try: self.lambda_runner.invoke(function_name, request_data, stdout=stdout_stream_writer, stderr=self.stderr) except FunctionNotFound: - LOG.debug('%s was not found to invoke.', function_name) + LOG.debug("%s was not found to invoke.", function_name) return LambdaErrorResponses.resource_not_found(function_name) - lambda_response, lambda_logs, is_lambda_user_error_response = \ - LambdaOutputParser.get_lambda_output(stdout_stream) + lambda_response, lambda_logs, is_lambda_user_error_response = LambdaOutputParser.get_lambda_output( + stdout_stream + ) if self.stderr and lambda_logs: # Write the logs to stderr if available. self.stderr.write(lambda_logs) if is_lambda_user_error_response: - return self.service_response(lambda_response, - {'Content-Type': 'application/json', 'x-amz-function-error': 'Unhandled'}, - 200) + return self.service_response( + lambda_response, {"Content-Type": "application/json", "x-amz-function-error": "Unhandled"}, 200 + ) - return self.service_response(lambda_response, {'Content-Type': 'application/json'}, 200) + return self.service_response(lambda_response, {"Content-Type": "application/json"}, 200) diff --git a/samcli/local/lambdafn/config.py b/samcli/local/lambdafn/config.py index 40174ce935..b7df4b5466 100644 --- a/samcli/local/lambdafn/config.py +++ b/samcli/local/lambdafn/config.py @@ -14,15 +14,7 @@ class FunctionConfig(object): _DEFAULT_TIMEOUT_SECONDS = 3 _DEFAULT_MEMORY = 128 - def __init__(self, - name, - runtime, - handler, - code_abs_path, - layers, - memory=None, - timeout=None, - env_vars=None): + def __init__(self, name, runtime, handler, code_abs_path, layers, memory=None, timeout=None, env_vars=None): """ Initialize the class. diff --git a/samcli/local/lambdafn/env_vars.py b/samcli/local/lambdafn/env_vars.py index 33790db10e..cf63bc35aa 100644 --- a/samcli/local/lambdafn/env_vars.py +++ b/samcli/local/lambdafn/env_vars.py @@ -31,20 +31,18 @@ class EnvironmentVariables(object): """ _BLANK_VALUE = "" - _DEFAULT_AWS_CREDS = { - "region": "us-east-1", - "key": "defaultkey", - "secret": "defaultsecret" - } - - def __init__(self, - function_memory=None, - function_timeout=None, - function_handler=None, - variables=None, - shell_env_values=None, - override_values=None, - aws_creds=None): + _DEFAULT_AWS_CREDS = {"region": "us-east-1", "key": "defaultkey", "secret": "defaultsecret"} + + def __init__( + self, + function_memory=None, + function_timeout=None, + function_handler=None, + variables=None, + shell_env_values=None, + override_values=None, + aws_creds=None, + ): """ Initializes this class. It takes in two sets of properties: a) (Required) Function information @@ -63,11 +61,7 @@ def __init__(self, environment variables. 
It should contain "key", "secret", "region" and optional "sessiontoken" keys """ - self._function = { - "memory": function_memory, - "timeout": function_timeout, - "handler": function_handler - } + self._function = {"memory": function_memory, "timeout": function_timeout, "handler": function_handler} self.variables = variables or {} self.shell_env_values = shell_env_values or {} @@ -144,21 +138,15 @@ def _get_aws_variables(self): result = { # Variable that says this function is running in Local Lambda "AWS_SAM_LOCAL": "true", - # Function configuration "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": str(self.memory), "AWS_LAMBDA_FUNCTION_TIMEOUT": str(self.timeout), "AWS_LAMBDA_FUNCTION_HANDLER": str(self._function["handler"]), - # AWS Credentials - Use the input credentials or use the defaults "AWS_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]), - "AWS_DEFAULT_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]), - "AWS_ACCESS_KEY_ID": self.aws_creds.get("key", self._DEFAULT_AWS_CREDS["key"]), - "AWS_SECRET_ACCESS_KEY": self.aws_creds.get("secret", self._DEFAULT_AWS_CREDS["secret"]) - # Additional variables we don't fill in # "AWS_ACCOUNT_ID=" # "AWS_LAMBDA_EVENT_BODY=", diff --git a/samcli/local/lambdafn/exceptions.py b/samcli/local/lambdafn/exceptions.py index f99ff10404..0fee680f43 100644 --- a/samcli/local/lambdafn/exceptions.py +++ b/samcli/local/lambdafn/exceptions.py @@ -7,4 +7,5 @@ class FunctionNotFound(Exception): """ Raised when the requested Lambda function is not found """ + pass diff --git a/samcli/local/lambdafn/runtime.py b/samcli/local/lambdafn/runtime.py index ed0e564933..0557fa781d 100644 --- a/samcli/local/lambdafn/runtime.py +++ b/samcli/local/lambdafn/runtime.py @@ -39,12 +39,7 @@ def __init__(self, container_manager, image_builder): self._container_manager = container_manager self._image_builder = image_builder - def invoke(self, - function_config, - event, - debug_context=None, - stdout=None, - stderr=None): + def invoke(self, function_config, event, debug_context=None, stdout=None, stderr=None): """ Invoke the given Lambda function locally. @@ -71,14 +66,16 @@ def invoke(self, env_vars = environ.resolve() with self._get_code_dir(function_config.code_abs_path) as code_dir: - container = LambdaContainer(function_config.runtime, - function_config.handler, - code_dir, - function_config.layers, - self._image_builder, - memory_mb=function_config.memory, - env_vars=env_vars, - debug_options=debug_context) + container = LambdaContainer( + function_config.runtime, + function_config.handler, + code_dir, + function_config.layers, + self._image_builder, + memory_mb=function_config.memory, + env_vars=env_vars, + debug_options=debug_context, + ) try: @@ -90,10 +87,9 @@ def invoke(self, # Start the timer **after** container starts. Container startup takes several seconds, only after which, # our Lambda function code will run. Starting the timer is a reasonable approximation that function has # started running. - timer = self._configure_interrupt(function_config.name, - function_config.timeout, - container, - bool(debug_context)) + timer = self._configure_interrupt( + function_config.name, function_config.timeout, container, bool(debug_context) + ) # NOTE: BLOCKING METHOD # Block the thread waiting to fetch logs from the container. 
This method will return after container @@ -191,7 +187,7 @@ def _unzip_file(filepath): temp_dir = tempfile.mkdtemp() - if os.name == 'posix': + if os.name == "posix": os.chmod(temp_dir, 0o755) LOG.info("Decompressing %s", filepath) diff --git a/samcli/local/lambdafn/zip.py b/samcli/local/lambdafn/zip.py index cf4f07a8d0..130e9569f7 100644 --- a/samcli/local/lambdafn/zip.py +++ b/samcli/local/lambdafn/zip.py @@ -36,7 +36,7 @@ def unzip(zip_file_path, output_dir, permission=None): Permission to set """ - with zipfile.ZipFile(zip_file_path, 'r') as zip_ref: + with zipfile.ZipFile(zip_file_path, "r") as zip_ref: # For each item in the zip file, extract the file and set permissions if available for file_info in zip_ref.infolist(): @@ -107,10 +107,10 @@ def unzip_from_uri(uri, layer_zip_path, unzip_output_dir, progressbar_label): Label to use in the Progressbar """ try: - get_request = requests.get(uri, stream=True, verify=os.environ.get('AWS_CA_BUNDLE', True)) + get_request = requests.get(uri, stream=True, verify=os.environ.get("AWS_CA_BUNDLE", True)) - with open(layer_zip_path, 'wb') as local_layer_file: - file_length = int(get_request.headers['Content-length']) + with open(layer_zip_path, "wb") as local_layer_file: + file_length = int(get_request.headers["Content-length"]) with progressbar(file_length, progressbar_label) as p_bar: # Set the chunk size to None. Since we are streaming the request, None will allow the data to be diff --git a/samcli/local/layers/layer_downloader.py b/samcli/local/layers/layer_downloader.py index da20d0a2a2..2744582c5b 100644 --- a/samcli/local/layers/layer_downloader.py +++ b/samcli/local/layers/layer_downloader.py @@ -21,7 +21,6 @@ class LayerDownloader(object): - def __init__(self, layer_cache, cwd, lambda_client=None): """ @@ -40,7 +39,7 @@ def __init__(self, layer_cache, cwd, lambda_client=None): @property def lambda_client(self): - self._lambda_client = self._lambda_client or boto3.client('lambda') + self._lambda_client = self._lambda_client or boto3.client("lambda") return self._lambda_client @property @@ -100,7 +99,7 @@ def download(self, layer, force=False): return layer # disabling no-member due to https://github.com/PyCQA/pylint/issues/1660 - layer_path = Path(self.layer_cache).joinpath(layer.name).resolve() # pylint: disable=no-member + layer_path = Path(self.layer_cache).resolve().joinpath(layer.name) # pylint: disable=no-member is_layer_downloaded = self._is_layer_cached(layer_path) layer.codeuri = str(layer_path) @@ -108,12 +107,14 @@ def download(self, layer, force=False): LOG.info("%s is already cached. 
Skipping download", layer.arn) return layer - layer_zip_path = layer.codeuri + '.zip' + layer_zip_path = layer.codeuri + ".zip" layer_zip_uri = self._fetch_layer_uri(layer) - unzip_from_uri(layer_zip_uri, - layer_zip_path, - unzip_output_dir=layer.codeuri, - progressbar_label='Downloading {}'.format(layer.layer_arn)) + unzip_from_uri( + layer_zip_uri, + layer_zip_path, + unzip_output_dir=layer.codeuri, + progressbar_label="Downloading {}".format(layer.layer_arn), + ) return layer @@ -137,17 +138,19 @@ def _fetch_layer_uri(self, layer): When the Credentials given are not sufficient to call AWS Lambda """ try: - layer_version_response = self.lambda_client.get_layer_version(LayerName=layer.layer_arn, - VersionNumber=layer.version) + layer_version_response = self.lambda_client.get_layer_version( + LayerName=layer.layer_arn, VersionNumber=layer.version + ) except NoCredentialsError: raise CredentialsRequired("Layers require credentials to download the layers locally.") except ClientError as e: - error_code = e.response.get('Error').get('Code') + error_code = e.response.get("Error").get("Code") error_exc = { - 'AccessDeniedException': CredentialsRequired( + "AccessDeniedException": CredentialsRequired( "Credentials provided are missing lambda:Getlayerversion policy that is needed to download the " - "layer or you do not have permission to download the layer"), - 'ResourceNotFoundException': ResourceNotFound("{} was not found.".format(layer.arn)) + "layer or you do not have permission to download the layer" + ), + "ResourceNotFoundException": ResourceNotFound("{} was not found.".format(layer.arn)), } if error_code in error_exc: diff --git a/samcli/local/services/base_local_service.py b/samcli/local/services/base_local_service.py index 0d54357e86..c0cea55a7d 100644 --- a/samcli/local/services/base_local_service.py +++ b/samcli/local/services/base_local_service.py @@ -10,7 +10,6 @@ class BaseLocalService(object): - def __init__(self, is_debugging, port, host): """ Creates a BaseLocalService class @@ -59,7 +58,7 @@ def run(self): # This environ signifies we are running a main function for Flask. This is true, since we are using it within # our cli and not on a production server. - os.environ['WERKZEUG_RUN_MAIN'] = 'true' + os.environ["WERKZEUG_RUN_MAIN"] = "true" self._app.run(threaded=multi_threaded, host=self.host, port=self.port) @@ -80,7 +79,6 @@ def service_response(body, headers, status_code): class LambdaOutputParser(object): - @staticmethod def get_lambda_output(stdout_stream): """ @@ -105,7 +103,7 @@ def get_lambda_output(stdout_stream): # We only want the last line of stdout, because it's possible that # the function may have written directly to stdout using # System.out.println or similar, before docker-lambda output the result - stdout_data = stdout_stream.getvalue().rstrip(b'\n') + stdout_data = stdout_stream.getvalue().rstrip(b"\n") # Usually the output is just one line and contains response as JSON string, but if the Lambda function # wrote anything directly to stdout, there will be additional lines. So just extract the last line as @@ -113,7 +111,7 @@ def get_lambda_output(stdout_stream): lambda_response = stdout_data lambda_logs = None - last_line_position = stdout_data.rfind(b'\n') + last_line_position = stdout_data.rfind(b"\n") if last_line_position >= 0: # So there are multiple lines. Separate them out. # Everything but the last line are logs @@ -121,7 +119,7 @@ def get_lambda_output(stdout_stream): # Last line is Lambda response. 
Make sure to strip() so we get rid of extra whitespaces & newlines around lambda_response = stdout_data[last_line_position:].strip() - lambda_response = lambda_response.decode('utf-8') + lambda_response = lambda_response.decode("utf-8") # When the Lambda Function returns an Error/Exception, the output is added to the stdout of the container. From # our perspective, the container returned some value, which is not always true. Since the output is the only @@ -155,11 +153,13 @@ def is_lambda_error_response(lambda_response): # this checking, we check for all three keys that can occur in Lambda raised/thrown/returned an # Error/Exception. This still risks false positives when the data returned matches exactly a dictionary with # the keys 'errorMessage', 'errorType' and 'stackTrace'. - if isinstance(lambda_response_dict, dict) and \ - len(lambda_response_dict) == 3 and \ - 'errorMessage' in lambda_response_dict and \ - 'errorType' in lambda_response_dict and \ - 'stackTrace' in lambda_response_dict: + if ( + isinstance(lambda_response_dict, dict) + and len(lambda_response_dict) == 3 + and "errorMessage" in lambda_response_dict + and "errorType" in lambda_response_dict + and "stackTrace" in lambda_response_dict + ): is_lambda_user_error_response = True except ValueError: # If you can't serialize the output into a dict, then do nothing diff --git a/samcli/yamlhelper.py b/samcli/yamlhelper.py index c4d3365361..658bb8183c 100644 --- a/samcli/yamlhelper.py +++ b/samcli/yamlhelper.py @@ -1,13 +1,30 @@ +# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. """ -Helper to be able to parse/dump YAML files +YAML helper, sourced from the AWS CLI + +https://github.com/aws/aws-cli/blob/develop/awscli/customizations/cloudformation/yamlhelper.py """ +# pylint: disable=too-many-ancestors import json -import six +from botocore.compat import OrderedDict import yaml from yaml.resolver import ScalarNode, SequenceNode +import six + def intrinsics_multi_constructor(loader, tag_prefix, node): """ @@ -46,13 +63,24 @@ def intrinsics_multi_constructor(loader, tag_prefix, node): return {cfntag: value} +def _dict_representer(dumper, data): + return dumper.represent_dict(data.items()) + + def yaml_dump(dict_to_dump): """ Dumps the dictionary as a YAML document :param dict_to_dump: :return: """ - return yaml.safe_dump(dict_to_dump, default_flow_style=False) + FlattenAliasDumper.add_representer(OrderedDict, _dict_representer) + return yaml.dump(dict_to_dump, default_flow_style=False, Dumper=FlattenAliasDumper) + + +def _dict_constructor(loader, node): + # Necessary in order to make yaml merge tags work + loader.flatten_mapping(node) + return OrderedDict(loader.construct_pairs(node)) def yaml_parse(yamlstr): @@ -61,7 +89,13 @@ def yaml_parse(yamlstr): # PyYAML doesn't support json as well as it should, so if the input # is actually just json it is better to parse it with the standard # json parser. 
- return json.loads(yamlstr) + return json.loads(yamlstr, object_pairs_hook=OrderedDict) except ValueError: + yaml.SafeLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _dict_constructor) yaml.SafeLoader.add_multi_constructor("!", intrinsics_multi_constructor) return yaml.safe_load(yamlstr) + + +class FlattenAliasDumper(yaml.SafeDumper): + def ignore_aliases(self, data): + return True diff --git a/scripts/check-isolated-needs-update.py b/scripts/check-isolated-needs-update.py new file mode 100644 index 0000000000..c4325f2e59 --- /dev/null +++ b/scripts/check-isolated-needs-update.py @@ -0,0 +1,58 @@ +import io +import sys +import os +from subprocess import Popen, PIPE + + +def read(*filenames, **kwargs): + encoding = kwargs.get("encoding", "utf-8") + sep = kwargs.get("sep", os.linesep) + buf = [] + for filename in filenames: + with io.open(filename, encoding=encoding) as f: + buf.append(f.read()) + return sep.join(buf) + + +def get_requirements_list(content): + pkgs_versions = [] + for line in content.split(os.linesep): + if line: + # remove markers from the line, which are seperated by ';' + pkgs_versions.append(line.split(";")[0]) + + return pkgs_versions + + +# Don't try and compare the isolated list with the Python2 version. SAM CLI installers +# all use Python3.6+ and Python2.7 is going EOL +if sys.version_info[0] < 3: + sys.exit(0) + +isolated_req_content = read(os.path.join("requirements", "isolated.txt")) +base_req_content = read(os.path.join("requirements", "base.txt")) + +isolated_req_list = get_requirements_list(isolated_req_content) +base_req_list = get_requirements_list(base_req_content) + +process = Popen(["pip", "freeze"], stdout=PIPE) + +all_installed_pkgs_list = [] +for package in process.stdout.readlines(): + package = package.decode("utf-8").strip(os.linesep) + all_installed_pkgs_list.append(package) + +for installed_pkg_version in all_installed_pkgs_list: + for base_req in base_req_list: + # a base requirement can be defined with different specifiers (>, <, ==, etc.). Instead of doing tons of string parsing, + # brute force the check by assuming the installed_pkgs will have == as a specifier. This is true due to how pip freeze + # works. So check to make sure the installed pakcage we are looking at is in the base.txt file, if so make sure the + # full requirement==version is within the isolated list. + installed_pkg = installed_pkg_version.split("==")[0] + # There is a py library we use but due to how we are comparing requirements, we need to handle this as a special case. 
:( + if installed_pkg not in ("py", "boto3") and base_req.startswith(installed_pkg): + assert installed_pkg_version in isolated_req_list, "{} is in base.txt but not in isolated.txt".format( + installed_pkg_version + ) + print ("{} is in the isolated.txt file".format(installed_pkg_version)) + break diff --git a/scripts/check-requirements.py b/scripts/check-requirements.py new file mode 100755 index 0000000000..07dd425e4d --- /dev/null +++ b/scripts/check-requirements.py @@ -0,0 +1,43 @@ +import io +import os +from subprocess import Popen, PIPE + + +def read(*filenames, **kwargs): + encoding = kwargs.get("encoding", "utf-8") + sep = kwargs.get("sep", os.linesep) + buf = [] + for filename in filenames: + with io.open(filename, encoding=encoding) as f: + buf.append(f.read()) + return sep.join(buf) + + +exclude_packages = ("setuptools", "wheel", "pip", "aws-sam-cli") + +all_pkgs_list = [] +process = Popen(["pip", "freeze"], stdout=PIPE) + +for package in process.stdout.readlines(): + package = package.decode("utf-8").strip(os.linesep) + if package.split("==")[0] not in exclude_packages: + all_pkgs_list.append(package) +all_pkgs_list = sorted(all_pkgs_list) +print ("installed package/versions" + os.linesep) +print (",".join(all_pkgs_list)) +print (os.linesep) + +content = read(os.path.join("requirements", "isolated.txt")) + +locked_pkgs = [] +for line in content.split(os.linesep): + if line: + locked_pkgs.append(line) + +locked_pkgs = sorted(locked_pkgs) +print ("locked package/versions" + os.linesep) +print (",".join(locked_pkgs)) +print (os.linesep) + +assert len(locked_pkgs) == len(all_pkgs_list), "Number of expected dependencies do not match the number installed" +assert locked_pkgs == all_pkgs_list, "The list of expected dependencies do not match what is installed" diff --git a/setup.py b/setup.py index 30cb2e7b6d..77265e87df 100644 --- a/setup.py +++ b/setup.py @@ -7,8 +7,8 @@ def read(*filenames, **kwargs): - encoding = kwargs.get('encoding', 'utf-8') - sep = kwargs.get('sep', os.linesep) + encoding = kwargs.get("encoding", "utf-8") + sep = kwargs.get("sep", os.linesep) buf = [] for filename in filenames: with io.open(filename, encoding=encoding) as f: @@ -16,16 +16,14 @@ def read(*filenames, **kwargs): return sep.join(buf) -def read_requirements(req='base.txt'): - content = read(os.path.join('requirements', req)) - return [line for line in content.split(os.linesep) - if not line.strip().startswith('#')] +def read_requirements(req="base.txt"): + content = read(os.path.join("requirements", req)) + return [line for line in content.split(os.linesep) if not line.strip().startswith("#")] def read_version(): - content = read(os.path.join( - os.path.dirname(__file__), 'samcli', '__init__.py')) - return re.search(r"__version__ = '([^']+)'", content).group(1) + content = read(os.path.join(os.path.dirname(__file__), "samcli", "__init__.py")) + return re.search(r"__version__ = \"([^']+)\"", content).group(1) cmd_name = "sam" @@ -34,43 +32,37 @@ def read_version(): cmd_name = "samdev" setup( - name='aws-sam-cli', + name="aws-sam-cli", version=read_version(), - description='AWS SAM CLI is a CLI tool for local development and testing of Serverless applications', - long_description=read('README.md'), - long_description_content_type='text/markdown', - author='Amazon Web Services', - author_email='aws-sam-developers@amazon.com', - url='https://github.com/awslabs/aws-sam-cli', - license='Apache License 2.0', - packages=find_packages(exclude=['tests.*', 'tests']), + description="AWS SAM CLI is a CLI tool 
for local development and testing of Serverless applications", + long_description=read("README.md"), + long_description_content_type="text/markdown", + author="Amazon Web Services", + author_email="aws-sam-developers@amazon.com", + url="https://github.com/awslabs/aws-sam-cli", + license="Apache License 2.0", + packages=find_packages(exclude=["tests.*", "tests"]), keywords="AWS SAM CLI", # Support Python 2.7 and 3.6 or greater - python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*', - entry_points={ - 'console_scripts': [ - '{}=samcli.cli.main:cli'.format(cmd_name) - ] - }, - install_requires=read_requirements('base.txt'), - extras_require={ - 'dev': read_requirements('dev.txt') - }, + python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*", + entry_points={"console_scripts": ["{}=samcli.cli.main:cli".format(cmd_name)]}, + install_requires=read_requirements("base.txt"), + extras_require={"dev": read_requirements("dev.txt")}, include_package_data=True, classifiers=[ - 'Development Status :: 4 - Beta', - 'Environment :: Console', - 'Environment :: Other Environment', - 'Intended Audience :: Developers', - 'Intended Audience :: Information Technology', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Topic :: Internet', - 'Topic :: Software Development :: Build Tools', - 'Topic :: Utilities', - ] + "Development Status :: 4 - Beta", + "Environment :: Console", + "Environment :: Other Environment", + "Intended Audience :: Developers", + "Intended Audience :: Information Technology", + "License :: OSI Approved :: Apache Software License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Internet", + "Topic :: Software Development :: Build Tools", + "Topic :: Utilities", + ], ) diff --git a/tests/conftest.py b/tests/conftest.py index 9c4c78ad8d..032c3dd802 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,3 @@ - import os diff --git a/tests/functional/commands/cli/test_global_config.py b/tests/functional/commands/cli/test_global_config.py index 78b84c73b1..c4c92bcf58 100644 --- a/tests/functional/commands/cli/test_global_config.py +++ b/tests/functional/commands/cli/test_global_config.py @@ -14,7 +14,6 @@ class TestGlobalConfig(TestCase): - def setUp(self): self._cfg_dir = tempfile.mkdtemp() @@ -28,38 +27,38 @@ def test_installation_id_with_side_effect(self): json_body = json.loads(expected_path.read_text()) self.assertIsNotNone(installation_id) self.assertTrue(expected_path.exists()) - self.assertEquals(installation_id, json_body["installationId"]) + self.assertEqual(installation_id, json_body["installationId"]) installation_id_refetch = gc.installation_id - self.assertEquals(installation_id, installation_id_refetch) + self.assertEqual(installation_id, installation_id_refetch) def test_installation_id_on_existing_file(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: cfg = {"foo": "bar"} f.write(json.dumps(cfg, indent=4) + "\n") gc = GlobalConfig(config_dir=self._cfg_dir) installation_id = gc.installation_id json_body = json.loads(path.read_text()) - self.assertEquals(installation_id, 
json_body["installationId"]) - self.assertEquals("bar", json_body["foo"]) + self.assertEqual(installation_id, json_body["installationId"]) + self.assertEqual("bar", json_body["foo"]) def test_installation_id_exists(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: cfg = {"installationId": "stub-uuid"} f.write(json.dumps(cfg, indent=4) + "\n") gc = GlobalConfig(config_dir=self._cfg_dir) installation_id = gc.installation_id - self.assertEquals("stub-uuid", installation_id) + self.assertEqual("stub-uuid", installation_id) def test_init_override(self): gc = GlobalConfig(installation_id="foo") installation_id = gc.installation_id - self.assertEquals("foo", installation_id) + self.assertEqual("foo", installation_id) def test_invalid_json(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: f.write("NOT JSON, PROBABLY VALID YAML AM I RIGHT!?") gc = GlobalConfig(config_dir=self._cfg_dir) self.assertIsNone(gc.installation_id) @@ -71,7 +70,7 @@ def test_telemetry_flag_provided(self): def test_telemetry_flag_from_cfg(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: cfg = {"telemetryEnabled": True} f.write(json.dumps(cfg, indent=4) + "\n") gc = GlobalConfig(config_dir=self._cfg_dir) @@ -83,7 +82,7 @@ def test_telemetry_flag_no_file(self): def test_telemetry_flag_not_in_cfg(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: cfg = {"installationId": "stub-uuid"} f.write(json.dumps(cfg, indent=4) + "\n") gc = GlobalConfig(config_dir=self._cfg_dir) @@ -102,7 +101,7 @@ def test_set_telemetry_flag_no_file(self): def test_set_telemetry_flag_no_key(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: cfg = {"installationId": "stub-uuid"} f.write(json.dumps(cfg, indent=4) + "\n") gc = GlobalConfig(config_dir=self._cfg_dir) @@ -113,7 +112,7 @@ def test_set_telemetry_flag_no_key(self): def test_set_telemetry_flag_overwrite(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: cfg = {"telemetryEnabled": True} f.write(json.dumps(cfg, indent=4) + "\n") gc = GlobalConfig(config_dir=self._cfg_dir) @@ -125,7 +124,7 @@ def test_set_telemetry_flag_overwrite(self): def test_telemetry_flag_explicit_false(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: cfg = {"telemetryEnabled": True} f.write(json.dumps(cfg, indent=4) + "\n") gc = GlobalConfig(config_dir=self._cfg_dir, telemetry_enabled=False) @@ -133,7 +132,7 @@ def test_telemetry_flag_explicit_false(self): def test_setter_raises_on_invalid_json(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: f.write("NOT JSON, PROBABLY VALID YAML AM I RIGHT!?") gc = GlobalConfig(config_dir=self._cfg_dir) with self.assertRaises(JSONDecodeError): @@ -141,12 +140,12 @@ def test_setter_raises_on_invalid_json(self): def test_setter_cannot_open_file(self): path = Path(self._cfg_dir, "metadata.json") - with open(str(path), 'w') as f: + with open(str(path), "w") as f: cfg = {"telemetryEnabled": True} f.write(json.dumps(cfg, indent=4) + "\n") m = mock_open() m.side_effect = IOError("fail") gc = GlobalConfig(config_dir=self._cfg_dir) - with 
patch('samcli.cli.global_config.open', m): + with patch("samcli.cli.global_config.open", m): with self.assertRaises(IOError): gc.telemetry_enabled = True diff --git a/tests/functional/commands/cli/test_main.py b/tests/functional/commands/cli/test_main.py index 3abbd25f75..978b71f1db 100644 --- a/tests/functional/commands/cli/test_main.py +++ b/tests/functional/commands/cli/test_main.py @@ -9,7 +9,6 @@ class TestTelemetryPrompt(TestCase): - def setUp(self): self._cfg_dir = tempfile.mkdtemp() @@ -18,7 +17,7 @@ def tearDown(self): def test_cli_prompt(self): gc = GlobalConfig(config_dir=self._cfg_dir) - with mock.patch('samcli.cli.main.global_cfg', gc): + with mock.patch("samcli.cli.main.global_cfg", gc): self.assertIsNone(gc.telemetry_enabled) # pre-state test runner = CliRunner() runner.invoke(cli, ["local", "generate-event", "s3"]) @@ -27,7 +26,7 @@ def test_cli_prompt(self): def test_cli_prompt_false(self): gc = GlobalConfig(config_dir=self._cfg_dir) - with mock.patch('samcli.cli.main.global_cfg', gc): + with mock.patch("samcli.cli.main.global_cfg", gc): self.assertIsNone(gc.telemetry_enabled) # pre-state test runner = CliRunner() runner.invoke(cli, ["local", "generate-event", "s3"], input="Y") diff --git a/tests/functional/commands/local/lib/test_local_api_service.py b/tests/functional/commands/local/lib/test_local_api_service.py index 23df3e9025..50131b2f1c 100644 --- a/tests/functional/commands/local/lib/test_local_api_service.py +++ b/tests/functional/commands/local/lib/test_local_api_service.py @@ -28,7 +28,6 @@ class TestFunctionalLocalLambda(TestCase): - def setUp(self): self.host = "127.0.0.1" self.port = random.randint(30000, 40000) # get a random port @@ -44,16 +43,25 @@ def setUp(self): self.static_dir = "mystaticdir" self.static_file_name = "myfile.txt" self.static_file_content = "This is a static file" - self._setup_static_file(os.path.join(self.cwd, self.static_dir), # Create static directory with in cwd - self.static_file_name, - self.static_file_content) + self._setup_static_file( + os.path.join(self.cwd, self.static_dir), # Create static directory with in cwd + self.static_file_name, + self.static_file_content, + ) # Create one Lambda function self.function_name = "name" - self.function = provider.Function(name=self.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=self.code_uri, - environment={}, - rolearn=None, layers=[]) + self.function = provider.Function( + name=self.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=self.code_uri, + environment={}, + rolearn=None, + layers=[], + ) self.mock_function_provider = Mock() self.mock_function_provider.get.return_value = self.function @@ -73,10 +81,7 @@ def setUp(self): layer_downloader = LayerDownloader("./", "./") lambda_image = LambdaImage(layer_downloader, False, False) local_runtime = LambdaRuntime(manager, lambda_image) - lambda_runner = LocalLambdaRunner(local_runtime, - self.mock_function_provider, - self.cwd, - debug_context=None) + lambda_runner = LocalLambdaRunner(local_runtime, self.mock_function_provider, self.cwd, debug_context=None) self.lambda_invoke_context_mock.local_lambda_runner = lambda_runner self.lambda_invoke_context_mock.get_cwd.return_value = self.cwd @@ -87,42 +92,40 @@ def tearDown(self): def test_must_start_service_and_serve_endpoints(self, sam_api_provider_mock): sam_api_provider_mock.return_value = self.api_provider_mock - local_service = LocalApiService(self.lambda_invoke_context_mock, - self.port, - 
self.host, - None) # No static directory + local_service = LocalApiService( + self.lambda_invoke_context_mock, self.port, self.host, None + ) # No static directory self._start_service_thread(local_service) - response = requests.get(self.url + '/get') - self.assertEquals(response.status_code, 200) + response = requests.get(self.url + "/get") + self.assertEqual(response.status_code, 200) - response = requests.post(self.url + '/post', {}) - self.assertEquals(response.status_code, 200) + response = requests.post(self.url + "/post", {}) + self.assertEqual(response.status_code, 200) - response = requests.get(self.url + '/post') - self.assertEquals(response.status_code, 403) # "HTTP GET /post" must not exist + response = requests.get(self.url + "/post") + self.assertEqual(response.status_code, 403) # "HTTP GET /post" must not exist @patch("samcli.commands.local.lib.sam_api_provider.SamApiProvider") def test_must_serve_static_files(self, sam_api_provider_mock): sam_api_provider_mock.return_value = self.api_provider_mock - local_service = LocalApiService(self.lambda_invoke_context_mock, - self.port, - self.host, - self.static_dir) # Mount the static directory + local_service = LocalApiService( + self.lambda_invoke_context_mock, self.port, self.host, self.static_dir + ) # Mount the static directory self._start_service_thread(local_service) # NOTE: The URL does not contain the static_dir because this directory is mounted directly at / response = requests.get("{}/{}".format(self.url, self.static_file_name)) - self.assertEquals(response.status_code, 200) - self.assertEquals(self.static_file_content, response.text) + self.assertEqual(response.status_code, 200) + self.assertEqual(self.static_file_content, response.text) @staticmethod def _start_service_thread(service): - t = threading.Thread(name='thread', target=service.start, args=()) + t = threading.Thread(name="thread", target=service.start, args=()) t.setDaemon(True) t.start() time.sleep(1) # Wait for the Web server to spin up diff --git a/tests/functional/commands/local/lib/test_local_lambda.py b/tests/functional/commands/local/lib/test_local_lambda.py index bf68b3c7ea..19d1c1569a 100644 --- a/tests/functional/commands/local/lib/test_local_lambda.py +++ b/tests/functional/commands/local/lib/test_local_lambda.py @@ -24,7 +24,6 @@ class TestFunctionalLocalLambda(TestCase): - def setUp(self): self.code_abs_path = nodejs_lambda(GET_ENV_VAR) @@ -33,24 +32,24 @@ def setUp(self): self.code_uri = os.path.relpath(self.code_abs_path, self.cwd) # Get relative path with respect to CWD self.function_name = "name" - self.variables = { - "var1": "defaultvalue1", - "var2": "defaultvalue2" - } + self.variables = {"var1": "defaultvalue1", "var2": "defaultvalue2"} - self.env_var_overrides = { - self.function_name: { - "var1": "override_value1" - } - } + self.env_var_overrides = {self.function_name: {"var1": "override_value1"}} # Override "var2" through the Shell environment os.environ["var2"] = "shell_env_value2" - self.function = provider.Function(name=self.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=self.code_uri, - environment={"Variables": self.variables}, - rolearn=None, layers=[]) + self.function = provider.Function( + name=self.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=self.code_uri, + environment={"Variables": self.variables}, + rolearn=None, + layers=[], + ) self.mock_function_provider = Mock() self.mock_function_provider.get.return_value = 
self.function @@ -62,17 +61,15 @@ def tearDown(self): def test_must_invoke(self): input_event = '"some data"' - expected_env_vars = { - "var1": "override_value1", - "var2": "shell_env_value2" - } + expected_env_vars = {"var1": "override_value1", "var2": "shell_env_value2"} manager = ContainerManager() layer_downloader = LayerDownloader("./", "./") lambda_image = LambdaImage(layer_downloader, False, False) local_runtime = LambdaRuntime(manager, lambda_image) - runner = LocalLambdaRunner(local_runtime, self.mock_function_provider, self.cwd, self.env_var_overrides, - debug_context=None) + runner = LocalLambdaRunner( + local_runtime, self.mock_function_provider, self.cwd, self.env_var_overrides, debug_context=None + ) # Append the real AWS credentials to the expected values. creds = runner.get_aws_creds() @@ -93,7 +90,7 @@ def test_must_invoke(self): self.assertGreater(len(stderr_stream.getvalue().strip()), 0, "stderr stream must contain data") # This should contain all the environment variables passed to the function - actual_output = json.loads(stdout_stream.getvalue().strip().decode('utf-8')) + actual_output = json.loads(stdout_stream.getvalue().strip().decode("utf-8")) for key, value in expected_env_vars.items(): self.assertTrue(key in actual_output, "Key '{}' must be in function output".format(key)) diff --git a/tests/functional/commands/validate/lib/test_sam_template_validator.py b/tests/functional/commands/validate/lib/test_sam_template_validator.py index fb8b42f284..f24cd6f7e9 100644 --- a/tests/functional/commands/validate/lib/test_sam_template_validator.py +++ b/tests/functional/commands/validate/lib/test_sam_template_validator.py @@ -73,12 +73,16 @@ class TestValidate(TestCase): ("tests/functional/commands/validate/lib/models/function_with_alias_intrinsics.yaml"), ("tests/functional/commands/validate/lib/models/function_with_condition.yaml"), ("tests/functional/commands/validate/lib/models/function_with_custom_codedeploy_deployment_preference.yaml"), - ("tests/functional/commands/validate/lib/models/function_with_custom_conditional_codedeploy_deployment_preference.yaml"), + ( + "tests/functional/commands/validate/lib/models/function_with_custom_conditional_codedeploy_deployment_preference.yaml" + ), ("tests/functional/commands/validate/lib/models/function_with_deployment_and_custom_role.yaml"), ("tests/functional/commands/validate/lib/models/function_with_deployment_no_service_role.yaml"), ("tests/functional/commands/validate/lib/models/function_with_deployment_preference.yaml"), ("tests/functional/commands/validate/lib/models/function_with_deployment_preference_all_parameters.yaml"), - ("tests/functional/commands/validate/lib/models/function_with_deployment_preference_multiple_combinations.yaml"), + ( + "tests/functional/commands/validate/lib/models/function_with_deployment_preference_multiple_combinations.yaml" + ), ("tests/functional/commands/validate/lib/models/function_with_disabled_deployment_preference.yaml"), ("tests/functional/commands/validate/lib/models/function_with_dlq.yaml"), ("tests/functional/commands/validate/lib/models/function_with_global_layers.yaml"), @@ -123,7 +127,7 @@ class TestValidate(TestCase): ("tests/functional/commands/validate/lib/models/sns_topic_outside_template.yaml"), ("tests/functional/commands/validate/lib/models/sqs.yaml"), ("tests/functional/commands/validate/lib/models/streams.yaml"), - ("tests/functional/commands/validate/lib/models/unsupported_resources.yaml") + ("tests/functional/commands/validate/lib/models/unsupported_resources.yaml"), ] 
def test_valid_template(self): @@ -137,10 +141,10 @@ def test_valid_template(self): "Handler": "index.handler", "CodeUri": "s3://fake-bucket/lambda-code.zip", "Runtime": "nodejs6.10", - "Timeout": 60 - } + "Timeout": 60, + }, } - } + }, } managed_policy_mock = Mock() @@ -158,13 +162,9 @@ def test_invalid_template(self): "Resources": { "ServerlessFunction": { "Type": "AWS::Serverless::Function", - "Properties": { - "Handler": "index.handler", - "CodeUri": "s3://lambda-code.zip", - "Timeout": 60 - } + "Properties": {"Handler": "index.handler", "CodeUri": "s3://lambda-code.zip", "Timeout": 60}, } - } + }, } managed_policy_mock = Mock() @@ -182,15 +182,10 @@ def test_valid_template_with_local_code_for_function(self): "Resources": { "ServerlessFunction": { "Type": "AWS::Serverless::Function", - "Properties": { - "Handler": "index.handler", - "CodeUri": "./", - "Runtime": "nodejs6.10", - "Timeout": 60 - } - } + "Properties": {"Handler": "index.handler", "CodeUri": "./", "Runtime": "nodejs6.10", "Timeout": 60}, } - } + }, + } managed_policy_mock = Mock() managed_policy_mock.load.return_value = {"PolicyName": "FakePolicy"} @@ -205,14 +200,9 @@ def test_valid_template_with_local_code_for_layer_version(self): "AWSTemplateFormatVersion": "2010-09-09", "Transform": "AWS::Serverless-2016-10-31", "Resources": { - "ServerlessLayerVersion": { - "Type": "AWS::Serverless::LayerVersion", - "Properties": { - "ContentUri": "./" - } - } - } - } + "ServerlessLayerVersion": {"Type": "AWS::Serverless::LayerVersion", "Properties": {"ContentUri": "./"}} + }, + } managed_policy_mock = Mock() managed_policy_mock.load.return_value = {"PolicyName": "FakePolicy"} @@ -229,13 +219,10 @@ def test_valid_template_with_local_code_for_api(self): "Resources": { "ServerlessApi": { "Type": "AWS::Serverless::Api", - "Properties": { - "StageName": "Prod", - "DefinitionUri": "./" - } - } + "Properties": {"StageName": "Prod", "DefinitionUri": "./"}, } - } + }, + } managed_policy_mock = Mock() managed_policy_mock.load.return_value = {"PolicyName": "FakePolicy"} @@ -252,13 +239,10 @@ def test_valid_template_with_DefinitionBody_for_api(self): "Resources": { "ServerlessApi": { "Type": "AWS::Serverless::Api", - "Properties": { - "StageName": "Prod", - "DefinitionBody": {"swagger": "2.0"} - } - } + "Properties": {"StageName": "Prod", "DefinitionBody": {"swagger": "2.0"}}, } - } + }, + } managed_policy_mock = Mock() managed_policy_mock.load.return_value = {"PolicyName": "FakePolicy"} @@ -277,28 +261,20 @@ def test_valid_template_with_s3_object_passed(self): "Type": "AWS::Serverless::Api", "Properties": { "StageName": "Prod", - "DefinitionUri": { - "Bucket": "mybucket-name", - "Key": "swagger", - "Version": 121212 - } - } + "DefinitionUri": {"Bucket": "mybucket-name", "Key": "swagger", "Version": 121212}, }, + }, "ServerlessFunction": { "Type": "AWS::Serverless::Function", "Properties": { "Handler": "index.handler", - "CodeUri": { - "Bucket": "mybucket-name", - "Key": "code.zip", - "Version": 121212 - }, + "CodeUri": {"Bucket": "mybucket-name", "Key": "code.zip", "Version": 121212}, "Runtime": "nodejs6.10", - "Timeout": 60 - } - } - } - } + "Timeout": 60, + }, + }, + }, + } managed_policy_mock = Mock() managed_policy_mock.load.return_value = {"PolicyName": "FakePolicy"} @@ -309,8 +285,14 @@ def test_valid_template_with_s3_object_passed(self): validator.is_valid() # validate the CodeUri was not changed - self.assertEquals(validator.sam_template.get("Resources").get("ServerlessApi").get("Properties").get("DefinitionUri"), {"Bucket": 
"mybucket-name", "Key": "swagger", "Version": 121212}) - self.assertEquals(validator.sam_template.get("Resources").get("ServerlessFunction").get("Properties").get("CodeUri"), {"Bucket": "mybucket-name", "Key": "code.zip", "Version": 121212}) + self.assertEqual( + validator.sam_template.get("Resources").get("ServerlessApi").get("Properties").get("DefinitionUri"), + {"Bucket": "mybucket-name", "Key": "swagger", "Version": 121212}, + ) + self.assertEqual( + validator.sam_template.get("Resources").get("ServerlessFunction").get("Properties").get("CodeUri"), + {"Bucket": "mybucket-name", "Key": "code.zip", "Version": 121212}, + ) @parameterized.expand(VALID_TEST_TEMPLATES) def test_valid_api_request_model_template(self, template_path): @@ -323,4 +305,3 @@ def test_valid_api_request_model_template(self, template_path): # Should not throw an exception validator.is_valid() - diff --git a/tests/functional/function_code.py b/tests/functional/function_code.py index bff31b4641..660580ccad 100644 --- a/tests/functional/function_code.py +++ b/tests/functional/function_code.py @@ -130,6 +130,7 @@ def nodejs_lambda(code): # symlinks. Hence get the real path before passing to Docker. return os.path.realpath(directory) + @contextmanager def make_zip(directory, extension="zip"): """ @@ -146,7 +147,7 @@ def make_zip(directory, extension="zip"): tmpdir = tempfile.mkdtemp() path_prefix = os.path.join(tmpdir, "code") - zipfile_current_path = shutil.make_archive(path_prefix, 'zip', directory) + zipfile_current_path = shutil.make_archive(path_prefix, "zip", directory) # shutil always sets the file with .zip extension. Hence rename/move the file to be with right extension expected_path = path_prefix + "." + extension diff --git a/tests/functional/init/test_generate_project.py b/tests/functional/init/test_generate_project.py index f7be76f731..96e03ac574 100644 --- a/tests/functional/init/test_generate_project.py +++ b/tests/functional/init/test_generate_project.py @@ -8,7 +8,6 @@ class TestCli(TestCase): - def setUp(self): self.location = None self.runtime = "python3.6" @@ -17,10 +16,8 @@ def setUp(self): self.name = "testing project {}".format(random.randint(1, 10)) self.no_input = False self.cookiecutter_dir = tempfile.mkdtemp() - self.project_folder = os.path.abspath( - os.path.join(self.output_dir, self.name)) - self.custom_location_folder = os.path.abspath( - os.path.join(self.output_dir, 'Name of the project')) + self.project_folder = os.path.abspath(os.path.join(self.output_dir, self.name)) + self.custom_location_folder = os.path.abspath(os.path.join(self.output_dir, "Name of the project")) def tearDown(self): leftover_folders = (self.output_dir, self.cookiecutter_dir) @@ -33,9 +30,14 @@ def test_generate_project(self): # GIVEN generate_project successfully created a project # WHEN a project name has been passed init_cli( - ctx=None, location=self.location, runtime=self.runtime, - dependency_manager=self.dependency_manager, output_dir=self.output_dir, - name=self.name, no_input=self.no_input) + ctx=None, + location=self.location, + runtime=self.runtime, + dependency_manager=self.dependency_manager, + output_dir=self.output_dir, + name=self.name, + no_input=self.no_input, + ) # THEN we should see a new project created and a successful return self.assertTrue(os.path.isdir(self.project_folder)) @@ -47,9 +49,14 @@ def test_custom_location(self): self.location = "https://github.com/aws-samples/cookiecutter-aws-sam-python" init_cli( - ctx=None, location=self.location, runtime=self.runtime, - 
dependency_manager=self.dependency_manager, output_dir=self.output_dir, - name=self.name, no_input=True) + ctx=None, + location=self.location, + runtime=self.runtime, + dependency_manager=self.dependency_manager, + output_dir=self.output_dir, + name=self.name, + no_input=True, + ) # THEN we should see a new project created and a successful return # and this new folder should be named 'name-of-the-project' diff --git a/tests/functional/local/apigw/test_local_apigw_service.py b/tests/functional/local/apigw/test_local_apigw_service.py index 067092af3d..78b9c6b47a 100644 --- a/tests/functional/local/apigw/test_local_apigw_service.py +++ b/tests/functional/local/apigw/test_local_apigw_service.py @@ -10,7 +10,13 @@ from mock import Mock from samcli.local.apigw.local_apigw_service import Route, LocalApigwService -from tests.functional.function_code import nodejs_lambda, API_GATEWAY_ECHO_EVENT, API_GATEWAY_BAD_PROXY_RESPONSE, API_GATEWAY_ECHO_BASE64_EVENT, API_GATEWAY_CONTENT_TYPE_LOWER +from tests.functional.function_code import ( + nodejs_lambda, + API_GATEWAY_ECHO_EVENT, + API_GATEWAY_BAD_PROXY_RESPONSE, + API_GATEWAY_ECHO_BASE64_EVENT, + API_GATEWAY_CONTENT_TYPE_LOWER, +) from samcli.commands.local.lib import provider from samcli.local.lambdafn.runtime import LambdaRuntime from samcli.commands.local.lib.local_lambda import LocalLambdaRunner @@ -30,21 +36,30 @@ def setUpClass(cls): cls.function_name = "name" - cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) + cls.function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.return_value = cls.function - list_of_routes = [Route(['POST', 'GET'], cls.function_name, '/something'), - Route(['GET'], cls.function_name, '/'), - Route(['GET', 'PUT'], cls.function_name, '/something/{event}'), - ] + list_of_routes = [ + Route(["POST", "GET"], cls.function_name, "/something"), + Route(["GET"], cls.function_name, "/"), + Route(["GET", "PUT"], cls.function_name, "/something/{event}"), + ] cls.service, cls.port, cls.url, cls.scheme = make_service(list_of_routes, cls.mock_function_provider, cls.cwd) cls.service.create() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -60,9 +75,9 @@ def test_non_proxy_response(self): actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 502) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 502) + self.assertEqual(response.headers.get("Content-Type"), "application/json") class TestService_ContentType(TestCase): @@ -76,24 +91,38 @@ def setUpClass(cls): cls.function_name = "name" - cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) - - cls.base64_response_function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) 
+ cls.function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) + + cls.base64_response_function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.return_value = cls.function - list_of_routes = [ - Route(['GET'], cls.function_name, '/'), - ] + list_of_routes = [Route(["GET"], cls.function_name, "/")] cls.service, cls.port, cls.url, cls.scheme = make_service(list_of_routes, cls.mock_function_provider, cls.cwd) cls.service.create() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -113,9 +142,9 @@ def test_calling_service_root(self): actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('content-type'), "text/plain") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("content-type"), "text/plain") class TestService_EventSerialization(TestCase): @@ -129,27 +158,44 @@ def setUpClass(cls): cls.function_name = "name" - cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) - - cls.base64_response_function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) + cls.function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) + + cls.base64_response_function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.return_value = cls.function - list_of_routes = [Route(['POST', 'GET'], cls.function_name, '/something'), - Route(['GET'], cls.function_name, '/'), - Route(['GET', 'PUT'], cls.function_name, '/something/{event}'), - Route(['GET'], cls.function_name, '/proxypath/{proxy+}'), - Route(['GET'], cls.function_name, '/resourceproxypath/{resource+}') - ] + list_of_routes = [ + Route(["POST", "GET"], cls.function_name, "/something"), + Route(["GET"], cls.function_name, "/"), + Route(["GET", "PUT"], cls.function_name, "/something/{event}"), + Route(["GET"], cls.function_name, "/proxypath/{proxy+}"), + Route(["GET"], cls.function_name, "/resourceproxypath/{resource+}"), + ] cls.service, cls.port, cls.url, cls.scheme = make_service(list_of_routes, cls.mock_function_provider, cls.cwd) cls.service.create() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -163,23 +209,25 @@ def setUp(self): self.maxDiff = None def test_calling_service_root(self): - expected = 
make_service_response(self.port, - scheme=self.scheme, - method="GET", - resourcePath="/", - resolvedResourcePath="/", - pathParameters=None, - body=None, - queryParams=None, - headers=None) + expected = make_service_response( + self.port, + scheme=self.scheme, + method="GET", + resourcePath="/", + resolvedResourcePath="/", + pathParameters=None, + body=None, + queryParams=None, + headers=None, + ) response = requests.get(self.url) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_calling_service_with_nonexistent_endpoint(self): expected = {"message": "Missing Authentication Token"} @@ -188,9 +236,9 @@ def test_calling_service_with_nonexistent_endpoint(self): actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 403) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 403) + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_calling_service_with_valid_path_invalid_method(self): expected = {"message": "Missing Authentication Token"} @@ -199,144 +247,149 @@ def test_calling_service_with_valid_path_invalid_method(self): actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 403) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 403) + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_calling_service_with_data_on_path(self): path = "/something" body = {"json": "data"} - expected = make_service_response(self.port, - scheme=self.scheme, - method="POST", - resourcePath=path, - resolvedResourcePath=path, - pathParameters=None, - body=json.dumps(body), - queryParams=None, - headers={"Content-Length": "16", - "Content-Type": "application/json"}) + expected = make_service_response( + self.port, + scheme=self.scheme, + method="POST", + resourcePath=path, + resolvedResourcePath=path, + pathParameters=None, + body=json.dumps(body), + queryParams=None, + headers={"Content-Length": "16", "Content-Type": "application/json"}, + ) response = requests.post(self.url + path, json=body) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_calling_service_with_form_data_on_path(self): path = "/something" body = {"key1": "value1"} - expected = make_service_response(self.port, - scheme=self.scheme, - method="POST", - resourcePath=path, - resolvedResourcePath=path, - pathParameters=None, - body='key1=value1', - queryParams=None, - headers={"Content-Length": "11", - "Content-Type": "application/x-www-form-urlencoded"}) + expected = make_service_response( + self.port, + scheme=self.scheme, + method="POST", + resourcePath=path, + resolvedResourcePath=path, + pathParameters=None, + body="key1=value1", + queryParams=None, + headers={"Content-Length": 
"11", "Content-Type": "application/x-www-form-urlencoded"}, + ) response = requests.post(self.url + path, data=body) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_calling_service_with_path_params(self): - path = '/something/event1' - expected = make_service_response(self.port, - scheme=self.scheme, - method="GET", - resourcePath="/something/{event}", - resolvedResourcePath=path, - pathParameters={"event": "event1"}, - body=None, - queryParams=None, - headers=None) + path = "/something/event1" + expected = make_service_response( + self.port, + scheme=self.scheme, + method="GET", + resourcePath="/something/{event}", + resolvedResourcePath=path, + pathParameters={"event": "event1"}, + body=None, + queryParams=None, + headers=None, + ) response = requests.get(self.url + path) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_calling_proxy_path(self): - path = '/proxypath/thisisproxypath/thatshouldbecaught' - expected = make_service_response(self.port, - scheme=self.scheme, - method="GET", - resourcePath="/proxypath/{proxy+}", - resolvedResourcePath=path, - pathParameters={"proxy": "thisisproxypath/thatshouldbecaught"}, - body=None, - queryParams=None, - headers=None) + path = "/proxypath/thisisproxypath/thatshouldbecaught" + expected = make_service_response( + self.port, + scheme=self.scheme, + method="GET", + resourcePath="/proxypath/{proxy+}", + resolvedResourcePath=path, + pathParameters={"proxy": "thisisproxypath/thatshouldbecaught"}, + body=None, + queryParams=None, + headers=None, + ) response = requests.get(self.url + path) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_calling_proxy_path_that_has_name_resource(self): - path = '/resourceproxypath/resourcepath/thatshouldbecaught' - expected = make_service_response(self.port, - scheme=self.scheme, - method="GET", - resourcePath="/resourceproxypath/{resource+}", - resolvedResourcePath=path, - pathParameters={"resource": "resourcepath/thatshouldbecaught"}, - body=None, - queryParams=None, - headers=None) + path = "/resourceproxypath/resourcepath/thatshouldbecaught" + expected = make_service_response( + self.port, + scheme=self.scheme, + method="GET", + resourcePath="/resourceproxypath/{resource+}", + resolvedResourcePath=path, + pathParameters={"resource": "resourcepath/thatshouldbecaught"}, + body=None, + queryParams=None, + headers=None, + ) response = requests.get(self.url + path) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + 
self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_calling_service_with_body_and_query_and_headers(self): path = "/something/event1" body = {"json": "data"} headers = {"X-Test": "TestValue"} - expected = make_service_response(self.port, - scheme=self.scheme, - method="PUT", - resourcePath="/something/{event}", - resolvedResourcePath=path, - pathParameters={"event": "event1"}, - body=json.dumps(body), - queryParams={"key": "value"}, - headers={"X-Test": "TestValue", "Content-Length": "16", - "Content-Type": "application/json"}) - - response = requests.put(self.url + path, - json=body, - params={"key": "value"}, - headers=headers) + expected = make_service_response( + self.port, + scheme=self.scheme, + method="PUT", + resourcePath="/something/{event}", + resolvedResourcePath=path, + pathParameters={"event": "event1"}, + body=json.dumps(body), + queryParams={"key": "value"}, + headers={"X-Test": "TestValue", "Content-Length": "16", "Content-Type": "application/json"}, + ) + + response = requests.put(self.url + path, json=body, params={"key": "value"}, headers=headers) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "application/json") class TestService_ProxyAtBasePath(TestCase): - @classmethod def setUpClass(cls): cls.code_abs_path = nodejs_lambda(API_GATEWAY_ECHO_EVENT) @@ -347,20 +400,26 @@ def setUpClass(cls): cls.function_name = "name" - cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) + cls.function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.return_value = cls.function - list_of_routes = [ - Route(['GET'], cls.function_name, '/{proxy+}') - ] + list_of_routes = [Route(["GET"], cls.function_name, "/{proxy+}")] cls.service, cls.port, cls.url, cls.scheme = make_service(list_of_routes, cls.mock_function_provider, cls.cwd) cls.service.create() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -374,28 +433,29 @@ def setUp(self): self.maxDiff = None def test_calling_proxy_path(self): - path = '/proxypath/thisisproxypath/thatshouldbecaught' - expected = make_service_response(self.port, - scheme=self.scheme, - method="GET", - resourcePath="/{proxy+}", - resolvedResourcePath=path, - pathParameters={"proxy": "proxypath/thisisproxypath/thatshouldbecaught"}, - body=None, - queryParams=None, - headers=None) + path = "/proxypath/thisisproxypath/thatshouldbecaught" + expected = make_service_response( + self.port, + scheme=self.scheme, + method="GET", + resourcePath="/{proxy+}", + resolvedResourcePath=path, + pathParameters={"proxy": "proxypath/thisisproxypath/thatshouldbecaught"}, + body=None, + queryParams=None, + headers=None, + ) response = requests.get(self.url + path) actual = response.json() - self.assertEquals(actual, expected) - 
self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "application/json") class TestService_Binary(TestCase): - @classmethod def setUpClass(cls): cls.code_abs_path = nodejs_lambda(API_GATEWAY_ECHO_BASE64_EVENT) @@ -406,23 +466,30 @@ def setUpClass(cls): cls.function_name = "name" - cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.echoimagehandler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) + cls.function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.echoimagehandler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.return_value = cls.function list_of_routes = [ - Route(['GET'], cls.function_name, '/getimagegifbinarydata', binary_types=['image/gif']), - Route(['GET'], cls.function_name, '/getanygifbinarydata', binary_types=['*/*']), - Route(['POST'], cls.function_name, '/postbinarygif', binary_types=['image/gif']) - - ] + Route(["GET"], cls.function_name, "/getimagegifbinarydata", binary_types=["image/gif"]), + Route(["GET"], cls.function_name, "/getanygifbinarydata", binary_types=["*/*"]), + Route(["POST"], cls.function_name, "/postbinarygif", binary_types=["image/gif"]), + ] cls.service, cls.port, cls.url, cls.scheme = make_service(list_of_routes, cls.mock_function_provider, cls.cwd) cls.service.create() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -436,43 +503,51 @@ def setUp(self): self.maxDiff = None def test_echo_with_defined_binary_types(self): - path = '/getimagegifbinarydata' + path = "/getimagegifbinarydata" response = requests.get(self.url + path) actual = response.content - self.assertEquals(actual, b'GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& >\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff 
},(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;') # NOQA - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "image/gif") + self.assertEqual( + actual, + b'GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& 
>\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;', + ) # NOQA + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") def test_echo_with_any_binary_types(self): - path = '/getanygifbinarydata' + path = "/getanygifbinarydata" response = requests.get(self.url + path) actual = response.content - self.assertEquals(actual, b'GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& 
\xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& >\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;') # NOQA - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "image/gif") + self.assertEqual( + actual, + b'GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& 
>\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;', + ) # NOQA + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") def test_accept_json_should_return_base64(self): # This test is asserting the behavior we currently have. 
I find it strange that we will return a Content-Type # that does not match the Accept headers - path = '/getimagegifbinarydata' + path = "/getimagegifbinarydata" response = requests.get(self.url + path, headers={"Accept": "application/json"}) actual = response.content - self.assertEquals(actual.decode('utf-8'), "R0lGODlhPQBEAPeoAJosM//AwO/AwHVYZ/z595kzAP/s7P+goOXMv8+fhw/v739/f+8PD98fH/8mJl+fn/9ZWb8/PzWlwv///6wWGbImAPgTEMImIN9gUFCEm/gDALULDN8PAD6atYdCTX9gUNKlj8wZAKUsAOzZz+UMAOsJAP/Z2ccMDA8PD/95eX5NWvsJCOVNQPtfX/8zM8+QePLl38MGBr8JCP+zs9myn/8GBqwpAP/GxgwJCPny78lzYLgjAJ8vAP9fX/+MjMUcAN8zM/9wcM8ZGcATEL+QePdZWf/29uc/P9cmJu9MTDImIN+/r7+/vz8/P8VNQGNugV8AAF9fX8swMNgTAFlDOICAgPNSUnNWSMQ5MBAQEJE3QPIGAM9AQMqGcG9vb6MhJsEdGM8vLx8fH98AANIWAMuQeL8fABkTEPPQ0OM5OSYdGFl5jo+Pj/+pqcsTE78wMFNGQLYmID4dGPvd3UBAQJmTkP+8vH9QUK+vr8ZWSHpzcJMmILdwcLOGcHRQUHxwcK9PT9DQ0O/v70w5MLypoG8wKOuwsP/g4P/Q0IcwKEswKMl8aJ9fX2xjdOtGRs/Pz+Dg4GImIP8gIH0sKEAwKKmTiKZ8aB/f39Wsl+LFt8dgUE9PT5x5aHBwcP+AgP+WltdgYMyZfyywz78AAAAAAAD///8AAP9mZv///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAKgALAAAAAA9AEQAAAj/AFEJHEiwoMGDCBMqXMiwocAbBww4nEhxoYkUpzJGrMixogkfGUNqlNixJEIDB0SqHGmyJSojM1bKZOmyop0gM3Oe2liTISKMOoPy7GnwY9CjIYcSRYm0aVKSLmE6nfq05QycVLPuhDrxBlCtYJUqNAq2bNWEBj6ZXRuyxZyDRtqwnXvkhACDV+euTeJm1Ki7A73qNWtFiF+/gA95Gly2CJLDhwEHMOUAAuOpLYDEgBxZ4GRTlC1fDnpkM+fOqD6DDj1aZpITp0dtGCDhr+fVuCu3zlg49ijaokTZTo27uG7Gjn2P+hI8+PDPERoUB318bWbfAJ5sUNFcuGRTYUqV/3ogfXp1rWlMc6awJjiAAd2fm4ogXjz56aypOoIde4OE5u/F9x199dlXnnGiHZWEYbGpsAEA3QXYnHwEFliKAgswgJ8LPeiUXGwedCAKABACCN+EA1pYIIYaFlcDhytd51sGAJbo3onOpajiihlO92KHGaUXGwWjUBChjSPiWJuOO/LYIm4v1tXfE6J4gCSJEZ7YgRYUNrkji9P55sF/ogxw5ZkSqIDaZBV6aSGYq/lGZplndkckZ98xoICbTcIJGQAZcNmdmUc210hs35nCyJ58fgmIKX5RQGOZowxaZwYA+JaoKQwswGijBV4C6SiTUmpphMspJx9unX4KaimjDv9aaXOEBteBqmuuxgEHoLX6Kqx+yXqqBANsgCtit4FWQAEkrNbpq7HSOmtwag5w57GrmlJBASEU18ADjUYb3ADTinIttsgSB1oJFfA63bduimuqKB1keqwUhoCSK374wbujvOSu4QG6UvxBRydcpKsav++Ca6G8A6Pr1x2kVMyHwsVxUALDq/krnrhPSOzXG1lUTIoffqGR7Goi2MAxbv6O2kEG56I7CSlRsEFKFVyovDJoIRTg7sugNRDGqCJzJgcKE0ywc0ELm6KBCCJo8DIPFeCWNGcyqNFE06ToAfV0HBRgxsvLThHn1oddQMrXj5DyAQgjEHSAJMWZwS3HPxT/QMbabI/iBCliMLEJKX2EEkomBAUCxRi42VDADxyTYDVogV+wSChqmKxEKCDAYFDFj4OmwbY7bDGdBhtrnTQYOigeChUmc1K3QTnAUfEgGFgAWt88hKA6aCRIXhxnQ1yg3BCayK44EWdkUQcBByEQChFXfCB776aQsG0BIlQgQgE8qO26X1h8cEUep8ngRBnOy74E9QgRgEAC8SvOfQkh7FDBDmS43PmGoIiKUUEGkMEC/PJHgxw0xH74yx/3XnaYRJgMB8obxQW6kL9QYEJ0FIFgByfIL7/IQAlvQwEpnAC7DtLNJCKUoO/w45c44GwCXiAFB/OXAATQryUxdN4LfFiwgjCNYg+kYMIEFkCKDs6PKAIJouyGWMS1FSKJOMRB/BoIxYJIUXFUxNwoIkEKPAgCBZSQHQ1A2EWDfDEUVLyADj5AChSIQW6gu10bE/JG2VnCZGfo4R4d0sdQoBAHhPjhIB94v/wRoRKQWGRHgrhGSQJxCS+0pCZbEhAAOw==") # NOQA - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "image/gif") + self.assertEqual( + actual.decode("utf-8"), + 
"R0lGODlhPQBEAPeoAJosM//AwO/AwHVYZ/z595kzAP/s7P+goOXMv8+fhw/v739/f+8PD98fH/8mJl+fn/9ZWb8/PzWlwv///6wWGbImAPgTEMImIN9gUFCEm/gDALULDN8PAD6atYdCTX9gUNKlj8wZAKUsAOzZz+UMAOsJAP/Z2ccMDA8PD/95eX5NWvsJCOVNQPtfX/8zM8+QePLl38MGBr8JCP+zs9myn/8GBqwpAP/GxgwJCPny78lzYLgjAJ8vAP9fX/+MjMUcAN8zM/9wcM8ZGcATEL+QePdZWf/29uc/P9cmJu9MTDImIN+/r7+/vz8/P8VNQGNugV8AAF9fX8swMNgTAFlDOICAgPNSUnNWSMQ5MBAQEJE3QPIGAM9AQMqGcG9vb6MhJsEdGM8vLx8fH98AANIWAMuQeL8fABkTEPPQ0OM5OSYdGFl5jo+Pj/+pqcsTE78wMFNGQLYmID4dGPvd3UBAQJmTkP+8vH9QUK+vr8ZWSHpzcJMmILdwcLOGcHRQUHxwcK9PT9DQ0O/v70w5MLypoG8wKOuwsP/g4P/Q0IcwKEswKMl8aJ9fX2xjdOtGRs/Pz+Dg4GImIP8gIH0sKEAwKKmTiKZ8aB/f39Wsl+LFt8dgUE9PT5x5aHBwcP+AgP+WltdgYMyZfyywz78AAAAAAAD///8AAP9mZv///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAKgALAAAAAA9AEQAAAj/AFEJHEiwoMGDCBMqXMiwocAbBww4nEhxoYkUpzJGrMixogkfGUNqlNixJEIDB0SqHGmyJSojM1bKZOmyop0gM3Oe2liTISKMOoPy7GnwY9CjIYcSRYm0aVKSLmE6nfq05QycVLPuhDrxBlCtYJUqNAq2bNWEBj6ZXRuyxZyDRtqwnXvkhACDV+euTeJm1Ki7A73qNWtFiF+/gA95Gly2CJLDhwEHMOUAAuOpLYDEgBxZ4GRTlC1fDnpkM+fOqD6DDj1aZpITp0dtGCDhr+fVuCu3zlg49ijaokTZTo27uG7Gjn2P+hI8+PDPERoUB318bWbfAJ5sUNFcuGRTYUqV/3ogfXp1rWlMc6awJjiAAd2fm4ogXjz56aypOoIde4OE5u/F9x199dlXnnGiHZWEYbGpsAEA3QXYnHwEFliKAgswgJ8LPeiUXGwedCAKABACCN+EA1pYIIYaFlcDhytd51sGAJbo3onOpajiihlO92KHGaUXGwWjUBChjSPiWJuOO/LYIm4v1tXfE6J4gCSJEZ7YgRYUNrkji9P55sF/ogxw5ZkSqIDaZBV6aSGYq/lGZplndkckZ98xoICbTcIJGQAZcNmdmUc210hs35nCyJ58fgmIKX5RQGOZowxaZwYA+JaoKQwswGijBV4C6SiTUmpphMspJx9unX4KaimjDv9aaXOEBteBqmuuxgEHoLX6Kqx+yXqqBANsgCtit4FWQAEkrNbpq7HSOmtwag5w57GrmlJBASEU18ADjUYb3ADTinIttsgSB1oJFfA63bduimuqKB1keqwUhoCSK374wbujvOSu4QG6UvxBRydcpKsav++Ca6G8A6Pr1x2kVMyHwsVxUALDq/krnrhPSOzXG1lUTIoffqGR7Goi2MAxbv6O2kEG56I7CSlRsEFKFVyovDJoIRTg7sugNRDGqCJzJgcKE0ywc0ELm6KBCCJo8DIPFeCWNGcyqNFE06ToAfV0HBRgxsvLThHn1oddQMrXj5DyAQgjEHSAJMWZwS3HPxT/QMbabI/iBCliMLEJKX2EEkomBAUCxRi42VDADxyTYDVogV+wSChqmKxEKCDAYFDFj4OmwbY7bDGdBhtrnTQYOigeChUmc1K3QTnAUfEgGFgAWt88hKA6aCRIXhxnQ1yg3BCayK44EWdkUQcBByEQChFXfCB776aQsG0BIlQgQgE8qO26X1h8cEUep8ngRBnOy74E9QgRgEAC8SvOfQkh7FDBDmS43PmGoIiKUUEGkMEC/PJHgxw0xH74yx/3XnaYRJgMB8obxQW6kL9QYEJ0FIFgByfIL7/IQAlvQwEpnAC7DtLNJCKUoO/w45c44GwCXiAFB/OXAATQryUxdN4LfFiwgjCNYg+kYMIEFkCKDs6PKAIJouyGWMS1FSKJOMRB/BoIxYJIUXFUxNwoIkEKPAgCBZSQHQ1A2EWDfDEUVLyADj5AChSIQW6gu10bE/JG2VnCZGfo4R4d0sdQoBAHhPjhIB94v/wRoRKQWGRHgrhGSQJxCS+0pCZbEhAAOw==", + ) # NOQA + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") class TestService_PostingBinary(TestCase): - @classmethod def setUpClass(cls): cls.code_abs_path = nodejs_lambda(API_GATEWAY_ECHO_BASE64_EVENT) @@ -483,21 +558,29 @@ def setUpClass(cls): cls.function_name = "name" - cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.base54request", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) + cls.function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.base54request", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.return_value = cls.function list_of_routes = [ - Route(['POST'], cls.function_name, '/postbinarygif', 
binary_types=['image/gif']), - Route(['POST'], cls.function_name, '/postanybinary', binary_types=['*/*']) - ] + Route(["POST"], cls.function_name, "/postbinarygif", binary_types=["image/gif"]), + Route(["POST"], cls.function_name, "/postanybinary", binary_types=["*/*"]), + ] cls.service, cls.port, cls.url, cls.scheme = make_service(list_of_routes, cls.mock_function_provider, cls.cwd) cls.service.create() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -511,45 +594,57 @@ def setUp(self): self.maxDiff = None def test_post_binary_image_gif(self): - path = '/postbinarygif' + path = "/postbinarygif" - response = requests.post(self.url + path, - headers={"Content-Type": "image/gif"}, - data='GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& >\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff 
},(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;') # NOQA + response = requests.post( + self.url + path, + headers={"Content-Type": "image/gif"}, + data='GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& 
>\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;', + ) # NOQA actual = response.content - self.assertEquals(actual, b'GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& >\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& 
\xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;') # NOQA - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "image/gif") + self.assertEqual( + actual, + b'GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& 
>\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;', + ) # NOQA + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") def test_post_binary_and_accept_any(self): - path = '/postanybinary' + path = "/postanybinary" - response = requests.post(self.url + path, - headers={"Content-Type": "image/gif"}, - data='GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& 
\xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& >\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;') # NOQA + response = requests.post( + self.url + path, + headers={"Content-Type": "image/gif"}, + data='GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& 
>\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;', + ) # NOQA actual = response.content - self.assertEquals(actual, b'GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& >\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& 
\xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;') # NOQA - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get('Content-Type'), "image/gif") + self.assertEqual( + actual, + b'GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& 
>\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;', + ) # NOQA + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") def test_post_binary_with_incorrect_content_type(self): expected = {"message": "Internal server error"} - path = '/postbinarygif' + path = "/postbinarygif" - response = requests.post(self.url + path, - headers={"Content-Type": "application/json"}, - data='GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& 
\xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& >\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;') # NOQA + response = requests.post( + self.url + path, + headers={"Content-Type": "application/json"}, + data='GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& 
>\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff },(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 \xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX 
\x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;', + ) # NOQA actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 502) - self.assertEquals(response.headers.get('Content-Type'), "application/json") + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 502) + self.assertEqual(response.headers.get("Content-Type"), "application/json") class TestService_FlaskDefaultOptionsDisabled(TestCase): @@ -563,24 +658,41 @@ def setUpClass(cls): cls.function_name = "name" - cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) - - cls.base64_response_function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) + cls.function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) + + cls.base64_response_function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, 
+ timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.return_value = cls.function - list_of_routes = [Route(['GET'], cls.function_name, '/something'), - Route(['OPTIONS'], cls.function_name, '/something') - ] + list_of_routes = [ + Route(["GET"], cls.function_name, "/something"), + Route(["OPTIONS"], cls.function_name, "/something"), + ] cls.service, cls.port, cls.url, cls.scheme = make_service(list_of_routes, cls.mock_function_provider, cls.cwd) cls.service.create() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -594,22 +706,25 @@ def setUp(self): self.maxDiff = None def test_flask_default_options_is_disabled(self): - expected = make_service_response(self.port, - scheme=self.scheme, - method="OPTIONS", - resourcePath="/something", - resolvedResourcePath="/something", - pathParameters=None, - body=None, - queryParams=None, - headers={"Content-Length": "0"}) - - response = requests.options(self.url + '/something') + expected = make_service_response( + self.port, + scheme=self.scheme, + method="OPTIONS", + resourcePath="/something", + resolvedResourcePath="/something", + pathParameters=None, + body=None, + queryParams=None, + headers={"Content-Length": "0"}, + ) + + response = requests.options(self.url + "/something") actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) + def make_service(list_of_routes, function_provider, cwd): port = random_port() @@ -617,19 +732,27 @@ def make_service(list_of_routes, function_provider, cwd): layer_downloader = LayerDownloader("./", "./") lambda_image = LambdaImage(layer_downloader, False, False) local_runtime = LambdaRuntime(manager, lambda_image) - lambda_runner = LocalLambdaRunner(local_runtime=local_runtime, - function_provider=function_provider, - cwd=cwd) + lambda_runner = LocalLambdaRunner(local_runtime=local_runtime, function_provider=function_provider, cwd=cwd) service = LocalApigwService(list_of_routes, lambda_runner, port=port) scheme = "http" - url = '{}://127.0.0.1:{}'.format(scheme, port) + url = "{}://127.0.0.1:{}".format(scheme, port) return service, port, url, scheme -def make_service_response(port, method, scheme, resourcePath, resolvedResourcePath, pathParameters=None, - body=None, headers=None, queryParams=None, isBase64Encoded=False): +def make_service_response( + port, + method, + scheme, + resourcePath, + resolvedResourcePath, + pathParameters=None, + body=None, + headers=None, + queryParams=None, + isBase64Encoded=False, +): response = { "httpMethod": "GET", "body": None, @@ -652,10 +775,10 @@ def make_service_response(port, method, scheme, resourcePath, resolvedResourcePa "cognitoIdentityPoolId": None, "cognitoAuthenticationProvider": None, "sourceIp": "127.0.0.1", - "accountId": None + "accountId": None, }, "extendedRequestId": None, - "path": "/something/{event}" + "path": "/something/{event}", }, "queryStringParameters": None, "headers": { @@ -663,19 +786,19 @@ def make_service_response(port, method, scheme, resourcePath, resolvedResourcePa "User-Agent": "python-requests/{}".format(requests.__version__), "Accept-Encoding": "gzip, deflate", "Accept": "*/*", - "Connection": "keep-alive" + "Connection": "keep-alive", }, 
"pathParameters": {"event": "event1"}, "stageVariables": None, "path": "/something/event1", - "isBase64Encoded": False + "isBase64Encoded": False, } if body: response["body"] = body response["httpMethod"] = method - response['requestContext']["httpMethod"] = method + response["requestContext"]["httpMethod"] = method response["requestContext"]["resourcePath"] = resourcePath response["requestContext"]["path"] = resourcePath response["resource"] = resourcePath @@ -684,7 +807,6 @@ def make_service_response(port, method, scheme, resourcePath, resolvedResourcePa response["queryStringParameters"] = queryParams response["isBase64Encoded"] = isBase64Encoded - headers = headers or {} for header, value in headers.items(): response["headers"][header] = value @@ -698,4 +820,3 @@ def make_service_response(port, method, scheme, resourcePath, resolvedResourcePa def random_port(): return random.randint(30000, 40000) - diff --git a/tests/functional/local/docker/test_container_manager.py b/tests/functional/local/docker/test_container_manager.py index 1a23f871e3..1dbbc2c051 100644 --- a/tests/functional/local/docker/test_container_manager.py +++ b/tests/functional/local/docker/test_container_manager.py @@ -8,6 +8,7 @@ class TestContainerManager(TestCase): """ Verifies functionality of ContainerManager by calling Docker APIs """ + IMAGE = "busybox" # small sized Linux container @classmethod diff --git a/tests/functional/local/docker/test_lambda_container.py b/tests/functional/local/docker/test_lambda_container.py index 28813f6e8b..28326f6a1c 100644 --- a/tests/functional/local/docker/test_lambda_container.py +++ b/tests/functional/local/docker/test_lambda_container.py @@ -26,6 +26,7 @@ class TestLambdaContainer(TestCase): setting up debug port forwarding. These operations might also exhibit differences across Operating Systems, hence necessary to tests them here. 
""" + IMAGE_NAME = "lambci/lambda:nodejs4.3" HELLO_WORLD_CODE = """ @@ -51,9 +52,7 @@ def setUp(self): self.handler = "index.handler" self.layers = [] self.debug_port = _rand_port() - self.debug_context = DebugContext(debug_port=self.debug_port, - debugger_path=None, - debug_args=None) + self.debug_context = DebugContext(debug_port=self.debug_port, debugger_path=None, debug_args=None) self.code_dir = nodejs_lambda(self.HELLO_WORLD_CODE) self.network_prefix = "sam_cli_test_network" @@ -81,15 +80,19 @@ def test_basic_creation(self): # Call Docker API to make sure container indeed exists actual_container = self.docker_client.containers.get(container.id) - self.assertEquals(actual_container.status, "created") - self.assertTrue(self.expected_docker_image in actual_container.image.tags, - "Image name of the container must be " + self.expected_docker_image) + self.assertEqual(actual_container.status, "created") + self.assertTrue( + self.expected_docker_image in actual_container.image.tags, + "Image name of the container must be " + self.expected_docker_image, + ) def test_debug_port_is_created_on_host(self): layer_downloader = LayerDownloader("./", "./") image_builder = LambdaImage(layer_downloader, False, False) - container = LambdaContainer(self.runtime, self.handler, self.code_dir, self.layers, image_builder, debug_options=self.debug_context) + container = LambdaContainer( + self.runtime, self.handler, self.code_dir, self.layers, image_builder, debug_options=self.debug_context + ) with self._create(container): @@ -98,8 +101,8 @@ def test_debug_port_is_created_on_host(self): # After container is started, query the container to make sure it is bound to the right ports port_binding = self.docker_client.api.port(container.id, self.debug_port) self.assertIsNotNone(port_binding, "Container must be bound to a port on host machine") - self.assertEquals(1, len(port_binding), "Only one port must be bound to the container") - self.assertEquals(port_binding[0]["HostPort"], str(self.debug_port)) + self.assertEqual(1, len(port_binding), "Only one port must be bound to the container") + self.assertEqual(port_binding[0]["HostPort"], str(self.debug_port)) def test_container_is_attached_to_network(self): layer_downloader = LayerDownloader("./", "./") @@ -118,8 +121,8 @@ def test_container_is_attached_to_network(self): # Fetch the latest information about this network from server network.reload() - self.assertEquals(1, len(network.containers)) - self.assertEquals(container.id, network.containers[0].id) + self.assertEqual(1, len(network.containers)) + self.assertEqual(container.id, network.containers[0].id) def test_function_result_is_available_in_stdout_and_logs_in_stderr(self): @@ -146,7 +149,7 @@ def test_function_result_is_available_in_stdout_and_logs_in_stderr(self): function_output = stdout_stream.getvalue() function_stderr = stderr_stream.getvalue() - self.assertEquals(function_output.strip(), expected_output) + self.assertEqual(function_output.strip(), expected_output) self.assertIn(expected_stderr, function_stderr) @contextmanager @@ -179,6 +182,6 @@ def _network_create(self): if network: network.remove() + def _rand_port(): return random.randint(30000, 40000) - diff --git a/tests/functional/local/lambda_service/test_local_lambda_invoke.py b/tests/functional/local/lambda_service/test_local_lambda_invoke.py index 78519fd45c..4c4489f1df 100644 --- a/tests/functional/local/lambda_service/test_local_lambda_invoke.py +++ b/tests/functional/local/lambda_service/test_local_lambda_invoke.py @@ -20,7 +20,6 @@ 
class TestLocalLambdaService(TestCase): - @classmethod def mocked_function_provider(cls, function_name): if function_name == "HelloWorld": @@ -36,7 +35,9 @@ def setUpClass(cls): # Let's convert this absolute path to relative path. Let the parent be the CWD, and codeuri be the folder cls.cwd_for_throw_error = os.path.dirname(cls.code_abs_path_for_throw_error) - cls.code_uri_for_throw_error = os.path.relpath(cls.code_abs_path_for_throw_error, cls.cwd_for_throw_error) # Get relative path with respect to CWD + cls.code_uri_for_throw_error = os.path.relpath( + cls.code_abs_path_for_throw_error, cls.cwd_for_throw_error + ) # Get relative path with respect to CWD cls.code_abs_path = nodejs_lambda(HELLO_FROM_LAMBDA) @@ -46,23 +47,38 @@ def setUpClass(cls): cls.hello_world_function_name = "HelloWorld" - cls.hello_world_function = provider.Function(name=cls.hello_world_function_name, runtime="nodejs4.3", - memory=256, timeout=5, handler="index.handler", - codeuri=cls.code_uri, environment=None, rolearn=None, layers=[]) + cls.hello_world_function = provider.Function( + name=cls.hello_world_function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.throw_error_function_name = "ThrowError" - cls.throw_error_function = provider.Function(name=cls.throw_error_function_name, runtime="nodejs4.3", - memory=256, timeout=5, handler="index.handler", - codeuri=cls.code_uri_for_throw_error, environment=None, - rolearn=None, layers=[]) + cls.throw_error_function = provider.Function( + name=cls.throw_error_function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri_for_throw_error, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.side_effect = cls.mocked_function_provider cls.service, cls.port, cls.url, cls.scheme = make_service(cls.mock_function_provider, cls.cwd) cls.service.create() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -77,39 +93,45 @@ def setUp(self): self.maxDiff = None def test_lambda_str_response_is_returned(self): - expected = 'Hello from Lambda' + expected = "Hello from Lambda" - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations') + response = requests.post(self.url + "/2015-03-31/functions/HelloWorld/invocations") actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) def test_request_with_non_existing_function(self): - expected_data = {"Message": "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format('IDoNotExist'), - "Type": "User"} + expected_data = { + "Message": "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format("IDoNotExist"), + "Type": "User", + } - response = requests.post(self.url + '/2015-03-31/functions/IDoNotExist/invocations') + response = requests.post(self.url + "/2015-03-31/functions/IDoNotExist/invocations") actual_data = response.json() - acutal_error_type_header = response.headers.get('x-amzn-errortype') + acutal_error_type_header = response.headers.get("x-amzn-errortype") - self.assertEquals(actual_data, expected_data) - self.assertEquals(acutal_error_type_header, 'ResourceNotFound') - 
self.assertEquals(response.status_code, 404) + self.assertEqual(actual_data, expected_data) + self.assertEqual(acutal_error_type_header, "ResourceNotFound") + self.assertEqual(response.status_code, 404) def test_request_a_function_that_throws_an_error(self): - expected_data = {'errorMessage': 'something is wrong', 'errorType': 'Error','stackTrace': ['exports.handler (/var/task/index.js:3:17)']} + expected_data = { + "errorMessage": "something is wrong", + "errorType": "Error", + "stackTrace": ["exports.handler (/var/task/index.js:3:17)"], + } - response = requests.post(self.url + '/2015-03-31/functions/ThrowError/invocations') + response = requests.post(self.url + "/2015-03-31/functions/ThrowError/invocations") actual_data = response.json() - acutal_error_type_header = response.headers.get('x-amz-function-error') + acutal_error_type_header = response.headers.get("x-amz-function-error") - self.assertEquals(actual_data, expected_data) - self.assertEquals(acutal_error_type_header, 'Unhandled') - self.assertEquals(response.status_code, 200) + self.assertEqual(actual_data, expected_data) + self.assertEqual(acutal_error_type_header, "Unhandled") + self.assertEqual(response.status_code, 200) class TestLocalEchoLambdaService(TestCase): @@ -123,16 +145,24 @@ def setUpClass(cls): cls.function_name = "HelloWorld" - cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) + cls.function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.return_value = cls.function cls.service, cls.port, cls.url, cls.scheme = make_service(cls.mock_function_provider, cls.cwd) cls.service.create() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -148,32 +178,36 @@ def setUp(self): def test_mock_response_is_returned(self): expected = {"key1": "value1"} - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', json={"key1": "value1"}) + response = requests.post(self.url + "/2015-03-31/functions/HelloWorld/invocations", json={"key1": "value1"}) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) def test_binary_octet_stream_format(self): expected = {"key1": "value1"} - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', json={"key1": "value1"}, headers={"Content-Type":"binary/octet-stream"}) + response = requests.post( + self.url + "/2015-03-31/functions/HelloWorld/invocations", + json={"key1": "value1"}, + headers={"Content-Type": "binary/octet-stream"}, + ) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 200) + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) def test_function_executed_when_no_data_provided(self): expected = {} - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations') + response = requests.post(self.url + "/2015-03-31/functions/HelloWorld/invocations") actual = response.json() - self.assertEquals(actual, 
expected) - self.assertEquals(response.status_code, 200) + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 200) class TestLocalLambdaService_NotSupportedRequests(TestCase): @@ -187,9 +221,17 @@ def setUpClass(cls): cls.function_name = "HelloWorld" - cls.function = provider.Function(name=cls.function_name, runtime="nodejs4.3", memory=256, timeout=5, - handler="index.handler", codeuri=cls.code_uri, environment=None, - rolearn=None, layers=[]) + cls.function = provider.Function( + name=cls.function_name, + runtime="nodejs4.3", + memory=256, + timeout=5, + handler="index.handler", + codeuri=cls.code_uri, + environment=None, + rolearn=None, + layers=[], + ) cls.mock_function_provider = Mock() cls.mock_function_provider.get.return_value = cls.function @@ -197,7 +239,7 @@ def setUpClass(cls): cls.service, cls.port, cls.url, cls.scheme = make_service(cls.mock_function_provider, cls.cwd) cls.service.create() # import pdb; pdb.set_trace() - t = threading.Thread(name='thread', target=cls.service.run, args=()) + t = threading.Thread(name="thread", target=cls.service.run, args=()) t.setDaemon(True) t.start() time.sleep(1) @@ -211,103 +253,117 @@ def setUp(self): self.maxDiff = None def test_query_string_parameters_in_request(self): - expected = {"Type": "User", - "Message": "Query Parameters are not supported"} + expected = {"Type": "User", "Message": "Query Parameters are not supported"} - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', json={"key1": "value1"}, params={"key": "value"}) + response = requests.post( + self.url + "/2015-03-31/functions/HelloWorld/invocations", json={"key1": "value1"}, params={"key": "value"} + ) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 400) - self.assertEquals(response.headers.get('x-amzn-errortype'), 'InvalidRequestContent') - self.assertEquals(response.headers.get('Content-Type'),'application/json') + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.headers.get("x-amzn-errortype"), "InvalidRequestContent") + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_payload_is_not_json_serializable(self): - expected = {"Type": "User", - "Message": "Could not parse request body into json: No JSON object could be decoded"} + expected = { + "Type": "User", + "Message": "Could not parse request body into json: No JSON object could be decoded", + } - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', data='notat:asdfasdf') + response = requests.post(self.url + "/2015-03-31/functions/HelloWorld/invocations", data="notat:asdfasdf") actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 400) - self.assertEquals(response.headers.get('x-amzn-errortype'), 'InvalidRequestContent') - self.assertEquals(response.headers.get('Content-Type'), 'application/json') + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.headers.get("x-amzn-errortype"), "InvalidRequestContent") + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_log_type_tail_in_request(self): expected = {"Type": "LocalService", "Message": "log-type: Tail is not supported. 
None is only supported."} - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', headers={'X-Amz-Log-Type': 'Tail'}) + response = requests.post( + self.url + "/2015-03-31/functions/HelloWorld/invocations", headers={"X-Amz-Log-Type": "Tail"} + ) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 501) - self.assertEquals(response.headers.get('Content-Type'), 'application/json') - self.assertEquals(response.headers.get('x-amzn-errortype'), 'NotImplemented') + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 501) + self.assertEqual(response.headers.get("Content-Type"), "application/json") + self.assertEqual(response.headers.get("x-amzn-errortype"), "NotImplemented") def test_log_type_tail_in_request_with_lowercase_header(self): expected = {"Type": "LocalService", "Message": "log-type: Tail is not supported. None is only supported."} - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', headers={'x-amz-log-type': 'Tail'}) + response = requests.post( + self.url + "/2015-03-31/functions/HelloWorld/invocations", headers={"x-amz-log-type": "Tail"} + ) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 501) - self.assertEquals(response.headers.get('Content-Type'), 'application/json') - self.assertEquals(response.headers.get('x-amzn-errortype'), 'NotImplemented') + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 501) + self.assertEqual(response.headers.get("Content-Type"), "application/json") + self.assertEqual(response.headers.get("x-amzn-errortype"), "NotImplemented") def test_invocation_type_event_in_request(self): - expected = {"Type": "LocalService", "Message": "invocation-type: Event is not supported. RequestResponse is only supported."} + expected = { + "Type": "LocalService", + "Message": "invocation-type: Event is not supported. RequestResponse is only supported.", + } - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', - headers={'X-Amz-Invocation-Type': 'Event'}) + response = requests.post( + self.url + "/2015-03-31/functions/HelloWorld/invocations", headers={"X-Amz-Invocation-Type": "Event"} + ) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 501) - self.assertEquals(response.headers.get('Content-Type'), 'application/json') - self.assertEquals(response.headers.get('x-amzn-errortype'), 'NotImplemented') + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 501) + self.assertEqual(response.headers.get("Content-Type"), "application/json") + self.assertEqual(response.headers.get("x-amzn-errortype"), "NotImplemented") def test_invocation_type_dry_run_in_request(self): - expected = {"Type": "LocalService", "Message": "invocation-type: DryRun is not supported. RequestResponse is only supported."} + expected = { + "Type": "LocalService", + "Message": "invocation-type: DryRun is not supported. 
RequestResponse is only supported.", + } - response = requests.post(self.url + '/2015-03-31/functions/HelloWorld/invocations', - headers={'X-Amz-Invocation-Type': 'DryRun'}) + response = requests.post( + self.url + "/2015-03-31/functions/HelloWorld/invocations", headers={"X-Amz-Invocation-Type": "DryRun"} + ) actual = response.json() - self.assertEquals(actual, expected) - self.assertEquals(response.status_code, 501) - self.assertEquals(response.headers.get('Content-Type'), 'application/json') - self.assertEquals(response.headers.get('x-amzn-errortype'), 'NotImplemented') + self.assertEqual(actual, expected) + self.assertEqual(response.status_code, 501) + self.assertEqual(response.headers.get("Content-Type"), "application/json") + self.assertEqual(response.headers.get("x-amzn-errortype"), "NotImplemented") def test_generic_404_error_when_request_to_nonexisting_endpoint(self): - expected_data = {'Type': 'LocalService', 'Message': 'PathNotFoundException'} + expected_data = {"Type": "LocalService", "Message": "PathNotFoundException"} - response = requests.post(self.url + '/some/random/path/that/does/not/exist') + response = requests.post(self.url + "/some/random/path/that/does/not/exist") actual_data = response.json() - self.assertEquals(actual_data, expected_data) - self.assertEquals(response.status_code, 404) - self.assertEquals(response.headers.get('x-amzn-errortype'), 'PathNotFoundLocally') - + self.assertEqual(actual_data, expected_data) + self.assertEqual(response.status_code, 404) + self.assertEqual(response.headers.get("x-amzn-errortype"), "PathNotFoundLocally") def test_generic_405_error_when_request_path_with_invalid_method(self): - expected_data = {'Type': 'LocalService', 'Message': 'MethodNotAllowedException'} + expected_data = {"Type": "LocalService", "Message": "MethodNotAllowedException"} - response = requests.get(self.url + '/2015-03-31/functions/HelloWorld/invocations') + response = requests.get(self.url + "/2015-03-31/functions/HelloWorld/invocations") actual_data = response.json() - self.assertEquals(actual_data, expected_data) - self.assertEquals(response.status_code, 405) - self.assertEquals(response.headers.get('x-amzn-errortype'), 'MethodNotAllowedLocally') + self.assertEqual(actual_data, expected_data) + self.assertEqual(response.status_code, 405) + self.assertEqual(response.headers.get("x-amzn-errortype"), "MethodNotAllowedLocally") def make_service(function_provider, cwd): @@ -316,15 +372,14 @@ def make_service(function_provider, cwd): layer_downloader = LayerDownloader("./", "./") image_builder = LambdaImage(layer_downloader, False, False) local_runtime = LambdaRuntime(manager, image_builder) - lambda_runner = LocalLambdaRunner(local_runtime=local_runtime, - function_provider=function_provider, - cwd=cwd) + lambda_runner = LocalLambdaRunner(local_runtime=local_runtime, function_provider=function_provider, cwd=cwd) - service = LocalLambdaInvokeService(lambda_runner, port=port, host='127.0.0.1') + service = LocalLambdaInvokeService(lambda_runner, port=port, host="127.0.0.1") scheme = "http" - url = '{}://127.0.0.1:{}'.format(scheme, port) + url = "{}://127.0.0.1:{}".format(scheme, port) return service, port, url, scheme + def random_port(): - return random.randint(30000, 40000) \ No newline at end of file + return random.randint(30000, 40000) diff --git a/tests/functional/local/lambdafn/test_runtime.py b/tests/functional/local/lambdafn/test_runtime.py index ae767cd27c..57f4dd4a4f 100644 --- a/tests/functional/local/lambdafn/test_runtime.py +++ 
b/tests/functional/local/lambdafn/test_runtime.py @@ -35,7 +35,7 @@ def setUp(self): self.code_dir = { "echo": nodejs_lambda(ECHO_CODE), "sleep": nodejs_lambda(SLEEP_CODE), - "envvar": nodejs_lambda(GET_ENV_VAR) + "envvar": nodejs_lambda(GET_ENV_VAR), } self.container_manager = ContainerManager() @@ -52,12 +52,14 @@ def test_echo_function(self): input_event = '{"a":"b"}' expected_output = b'{"a":"b"}' - config = FunctionConfig(name="helloworld", - runtime=RUNTIME, - handler=HANDLER, - code_abs_path=self.code_dir["echo"], - layers=[], - timeout=timeout) + config = FunctionConfig( + name="helloworld", + runtime=RUNTIME, + handler=HANDLER, + code_abs_path=self.code_dir["echo"], + layers=[], + timeout=timeout, + ) stdout_stream = io.BytesIO() stdout_stream_writer = StreamWriter(stdout_stream) @@ -65,7 +67,7 @@ def test_echo_function(self): self.runtime.invoke(config, input_event, stdout=stdout_stream_writer) actual_output = stdout_stream.getvalue() - self.assertEquals(actual_output.strip(), expected_output) + self.assertEqual(actual_output.strip(), expected_output) def test_function_timeout(self): """ @@ -77,12 +79,14 @@ def test_function_timeout(self): timeout = 1 # 1 second timeout sleep_seconds = 20 # Ask the function to sleep for 20 seconds - config = FunctionConfig(name="sleep_timeout", - runtime=RUNTIME, - handler=HANDLER, - code_abs_path=self.code_dir["sleep"], - layers=[], - timeout=timeout) + config = FunctionConfig( + name="sleep_timeout", + runtime=RUNTIME, + handler=HANDLER, + code_abs_path=self.code_dir["sleep"], + layers=[], + timeout=timeout, + ) # Measure the actual duration of execution start = timer() @@ -91,7 +95,7 @@ def test_function_timeout(self): # Make sure that the wall clock duration is around the ballpark of timeout value wall_clock_func_duration = end - start - print("Function completed in {} seconds".format(wall_clock_func_duration)) + print ("Function completed in {} seconds".format(wall_clock_func_duration)) # The function should *not* preemptively stop self.assertGreater(wall_clock_func_duration, timeout - 1) # The function should not run for much longer than timeout. 
@@ -99,14 +103,9 @@ def test_function_timeout(self): # There should be no output from the function because timer was interrupted actual_output = stdout_stream.getvalue() - self.assertEquals(actual_output.strip(), b"") - - @parameterized.expand([ - ("zip"), - ("jar"), - ("ZIP"), - ("JAR") - ]) + self.assertEqual(actual_output.strip(), b"") + + @parameterized.expand([("zip"), ("jar"), ("ZIP"), ("JAR")]) def test_echo_function_with_zip_file(self, file_name_extension): timeout = 3 input_event = '"this input should be echoed"' @@ -115,12 +114,14 @@ def test_echo_function_with_zip_file(self, file_name_extension): code_dir = self.code_dir["echo"] with make_zip(code_dir, file_name_extension) as code_zip_path: - config = FunctionConfig(name="helloworld", - runtime=RUNTIME, - handler=HANDLER, - code_abs_path=code_zip_path, - layers=[], - timeout=timeout) + config = FunctionConfig( + name="helloworld", + runtime=RUNTIME, + handler=HANDLER, + code_abs_path=code_zip_path, + layers=[], + timeout=timeout, + ) stdout_stream = io.BytesIO() stdout_stream_writer = StreamWriter(stdout_stream) @@ -128,7 +129,7 @@ def test_echo_function_with_zip_file(self, file_name_extension): self.runtime.invoke(config, input_event, stdout=stdout_stream_writer) actual_output = stdout_stream.getvalue() - self.assertEquals(actual_output.strip(), expected_output) + self.assertEqual(actual_output.strip(), expected_output) def test_check_environment_variables(self): variables = {"var1": "value1", "var2": "value2"} @@ -145,25 +146,25 @@ def test_check_environment_variables(self): "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "1024", "AWS_LAMBDA_FUNCTION_TIMEOUT": "30", "AWS_LAMBDA_FUNCTION_HANDLER": "index.handler", - # Values coming from AWS Credentials "AWS_REGION": "ap-south-1", "AWS_DEFAULT_REGION": "ap-south-1", "AWS_ACCESS_KEY_ID": "mykey", "AWS_SECRET_ACCESS_KEY": "mysecret", - # Custom environment variables "var1": "value1", - "var2": "value2" + "var2": "value2", } - config = FunctionConfig(name="helloworld", - runtime=RUNTIME, - handler=HANDLER, - code_abs_path=self.code_dir["envvar"], - layers=[], - memory=MEMORY, - timeout=timeout) + config = FunctionConfig( + name="helloworld", + runtime=RUNTIME, + handler=HANDLER, + code_abs_path=self.code_dir["envvar"], + layers=[], + memory=MEMORY, + timeout=timeout, + ) # Set the appropriate environment variables config.env_vars.variables = variables @@ -171,22 +172,26 @@ def test_check_environment_variables(self): self.runtime.invoke(config, input_event, stdout=stdout_stream_writer) - actual_output = json.loads(stdout_stream.getvalue().strip().decode('utf-8')) # Output is a JSON String. Deserialize. + actual_output = json.loads( + stdout_stream.getvalue().strip().decode("utf-8") + ) # Output is a JSON String. Deserialize. 
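# (Note: the loop below is a subset check -- only the keys listed in expected_output are asserted; any additional variables present in the function's environment output are ignored.)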
# Make sure all key/value from expected_output is present in actual_output for key, value in expected_output.items(): # Do the key check first to print a nice error message when it fails self.assertTrue(key in actual_output, "'{}' should be in environment variable output".format(key)) - self.assertEquals(actual_output[key], expected_output[key], - "Value of environment variable '{}' differs fromm expectation".format(key)) + self.assertEqual( + actual_output[key], + expected_output[key], + "Value of environment variable '{}' differs from expectation".format(key), + ) class TestLambdaRuntime_MultipleInvokes(TestCase): - def setUp(self): self.code_dir = nodejs_lambda(SLEEP_CODE) - Input = namedtuple('Input', ["timeout", "sleep", "check_stdout"]) + Input = namedtuple("Input", ["timeout", "sleep", "check_stdout"]) self.inputs = [ Input(sleep=1, timeout=10, check_stdout=True), Input(sleep=2, timeout=10, check_stdout=True), @@ -209,23 +214,25 @@ def tearDown(self): def _invoke_sleep(self, timeout, sleep_duration, check_stdout, exceptions=None): name = "sleepfunction_timeout_{}_sleep_{}".format(timeout, sleep_duration) - print("Invoking function " + name) + print ("Invoking function " + name) try: stdout_stream = io.BytesIO() stdout_stream_writer = StreamWriter(stdout_stream) - config = FunctionConfig(name=name, - runtime=RUNTIME, - handler=HANDLER, - code_abs_path=self.code_dir, - layers=[], - memory=1024, - timeout=timeout) + config = FunctionConfig( + name=name, + runtime=RUNTIME, + handler=HANDLER, + code_abs_path=self.code_dir, + layers=[], + memory=1024, + timeout=timeout, + ) self.runtime.invoke(config, sleep_duration, stdout=stdout_stream_writer) actual_output = stdout_stream.getvalue().strip() # Must output the sleep duration if check_stdout: - self.assertEquals(actual_output.decode('utf-8'), str(sleep_duration)) + self.assertEqual(actual_output.decode("utf-8"), str(sleep_duration)) except Exception as ex: if exceptions is not None: exceptions.append({"name": name, "error": ex}) @@ -255,8 +262,11 @@ def test_parallel(self): for input in self.inputs: - t = threading.Thread(name='thread', target=self._invoke_sleep, - args=(input.timeout, input.sleep, input.check_stdout, exceptions)) + t = threading.Thread( + name="thread", + target=self._invoke_sleep, + args=(input.timeout, input.sleep, input.check_stdout, exceptions), + ) t.setDaemon(True) t.start() threads.append(t) @@ -266,10 +276,10 @@ def test_parallel(self): t.join() for e in exceptions: - print("-------------") - print("ERROR in function " + e["name"]) - print(e["error"]) - print("-------------") + print ("-------------") + print ("ERROR in function " + e["name"]) + print (e["error"]) + print ("-------------") if len(exceptions) > 0: raise AssertionError("Test failed. 
See print outputs above for details on the thread that failed") diff --git a/tests/integration/buildcmd/build_integ_base.py b/tests/integration/buildcmd/build_integ_base.py index 703e7ff95d..8b69f94726 100644 --- a/tests/integration/buildcmd/build_integ_base.py +++ b/tests/integration/buildcmd/build_integ_base.py @@ -1,6 +1,8 @@ import os +import uuid import shutil import tempfile +import time import logging import subprocess import json @@ -14,7 +16,7 @@ from pathlib2 import Path from samcli.yamlhelper import yaml_parse - +from tests.testing_utils import IS_WINDOWS LOG = logging.getLogger(__name__) @@ -25,20 +27,16 @@ class BuildIntegBase(TestCase): @classmethod def setUpClass(cls): cls.cmd = cls.base_command() - integration_dir = Path(__file__).resolve().parents[1] + cls.test_data_path = str(Path(integration_dir, "testdata", "buildcmd")) + cls.template_path = str(Path(cls.test_data_path, cls.template)) + def setUp(self): # To invoke a function created by the build command, we need the built artifacts to be in a # location that is shared in Docker. Most temp directories are not shared. Therefore we are # using a scratch space within the test folder that is .gitignored. Contents of this folder # is also deleted after every test run - cls.scratch_dir = str(Path(__file__).resolve().parent.joinpath("scratch")) - - cls.test_data_path = str(Path(integration_dir, "testdata", "buildcmd")) - cls.template_path = str(Path(cls.test_data_path, cls.template)) - - def setUp(self): - + self.scratch_dir = str(Path(__file__).resolve().parent.joinpath(str(uuid.uuid4()).replace("-", "")[:10])) shutil.rmtree(self.scratch_dir, ignore_errors=True) os.mkdir(self.scratch_dir) @@ -49,9 +47,9 @@ def setUp(self): self.built_template = self.default_build_dir.joinpath("template.yaml") def tearDown(self): - self.custom_build_dir and shutil.rmtree(self.custom_build_dir) - self.working_dir and shutil.rmtree(self.working_dir) - self.scratch_dir and shutil.rmtree(self.scratch_dir) + self.custom_build_dir and shutil.rmtree(self.custom_build_dir, ignore_errors=True) + self.working_dir and shutil.rmtree(self.working_dir, ignore_errors=True) + self.scratch_dir and shutil.rmtree(self.scratch_dir, ignore_errors=True) @classmethod def base_command(cls): @@ -61,8 +59,16 @@ def base_command(cls): return command - def get_command_list(self, build_dir=None, base_dir=None, manifest_path=None, use_container=None, - parameter_overrides=None, mode=None, function_identifier=None): + def get_command_list( + self, + build_dir=None, + base_dir=None, + manifest_path=None, + use_container=None, + parameter_overrides=None, + mode=None, + function_identifier=None, + ): command_list = [self.cmd, "build"] @@ -89,30 +95,40 @@ def get_command_list(self, build_dir=None, base_dir=None, manifest_path=None, us return command_list def verify_docker_container_cleanedup(self, runtime): + if IS_WINDOWS: + time.sleep(1) docker_client = docker.from_env() - samcli_containers = \ docker_client.containers.list(all=True, filters={"ancestor": "lambci/lambda:build-{}".format(runtime)}) + samcli_containers = docker_client.containers.list( + all=True, filters={"ancestor": "lambci/lambda:build-{}".format(runtime)} + ) self.assertFalse(bool(samcli_containers), "Build containers have not been removed") def _make_parameter_override_arg(self, overrides): - return " ".join([ - "ParameterKey={},ParameterValue={}".format(key, value) for key, value in overrides.items() - ]) + return " ".join(["ParameterKey={},ParameterValue={}".format(key, value) for key, value in 
overrides.items()]) def _verify_resource_property(self, template_path, logical_id, property, expected_value): - with open(template_path, 'r') as fp: + with open(template_path, "r") as fp: template_dict = yaml_parse(fp.read()) - self.assertEquals(expected_value, template_dict["Resources"][logical_id]["Properties"][property]) + self.assertEqual(expected_value, template_dict["Resources"][logical_id]["Properties"][property]) def _verify_invoke_built_function(self, template_path, function_logical_id, overrides, expected_result): LOG.info("Invoking built function '{}'".format(function_logical_id)) - cmdlist = [self.cmd, "local", "invoke", function_logical_id, "-t", str(template_path), "--no-event", - "--parameter-overrides", overrides] + cmdlist = [ + self.cmd, + "local", + "invoke", + function_logical_id, + "-t", + str(template_path), + "--no-event", + "--parameter-overrides", + overrides, + ] process = subprocess.Popen(cmdlist, stdout=subprocess.PIPE) process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip().decode('utf-8') - self.assertEquals(json.loads(process_stdout), expected_result) + process_stdout = b"".join(process.stdout.readlines()).strip().decode("utf-8") + self.assertEqual(json.loads(process_stdout), expected_result) diff --git a/tests/integration/buildcmd/test_build_cmd.py b/tests/integration/buildcmd/test_build_cmd.py index 3f34cdb540..cf075b96af 100644 --- a/tests/integration/buildcmd/test_build_cmd.py +++ b/tests/integration/buildcmd/test_build_cmd.py @@ -2,6 +2,7 @@ import os import subprocess import logging +from unittest import skipIf try: from pathlib import Path @@ -10,64 +11,71 @@ from parameterized import parameterized from .build_integ_base import BuildIntegBase +from tests.testing_utils import IS_WINDOWS, RUNNING_ON_CI LOG = logging.getLogger(__name__) +@skipIf(IS_WINDOWS and RUNNING_ON_CI, "Skip build tests on windows when running in CI") class TestBuildCommand_PythonFunctions(BuildIntegBase): EXPECTED_FILES_GLOBAL_MANIFEST = set() - EXPECTED_FILES_PROJECT_MANIFEST = {'__init__.py', 'main.py', 'numpy', - # 'cryptography', - "jinja2", - 'requirements.txt'} + EXPECTED_FILES_PROJECT_MANIFEST = { + "__init__.py", + "main.py", + "numpy", + # 'cryptography', + "jinja2", + "requirements.txt", + } FUNCTION_LOGICAL_ID = "Function" - @parameterized.expand([ - ("python2.7", False), - ("python3.6", False), - ("python3.7", False), - ("python2.7", "use_container"), - ("python3.6", "use_container"), - ("python3.7", "use_container"), - ]) + @parameterized.expand( + [ + ("python2.7", False), + ("python3.6", False), + ("python3.7", False), + ("python2.7", "use_container"), + ("python3.6", "use_container"), + ("python3.7", "use_container"), + ] + ) def test_with_default_requirements(self, runtime, use_container): # Don't run test on wrong Python versions py_version = self._get_python_version() if py_version != runtime: - self.skipTest("Current Python version '{}' does not match Lambda runtime version '{}'".format(py_version, - runtime)) + self.skipTest( + "Current Python version '{}' does not match Lambda runtime version '{}'".format(py_version, runtime) + ) overrides = {"Runtime": runtime, "CodeUri": "Python", "Handler": "main.handler"} - cmdlist = self.get_command_list(use_container=use_container, - parameter_overrides=overrides) + cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}", cmdlist) process = subprocess.Popen(cmdlist, cwd=self.working_dir) process.wait() - 
self._verify_built_artifact(self.default_build_dir, self.FUNCTION_LOGICAL_ID, - self.EXPECTED_FILES_PROJECT_MANIFEST) - - self._verify_resource_property(str(self.built_template), - "OtherRelativePathResource", - "BodyS3Location", - os.path.relpath( - os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), - str(self.default_build_dir)) - ) - - expected = { - "pi": "3.14", - "jinja": "Hello World" - } - self._verify_invoke_built_function(self.built_template, - self.FUNCTION_LOGICAL_ID, - self._make_parameter_override_arg(overrides), - expected) + self._verify_built_artifact( + self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST + ) + + self._verify_resource_property( + str(self.built_template), + "OtherRelativePathResource", + "BodyS3Location", + os.path.relpath( + os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), + str(self.default_build_dir), + ), + ) + + expected = {"pi": "3.14", "jinja": "Hello World"} + self._verify_invoke_built_function( + self.built_template, self.FUNCTION_LOGICAL_ID, self._make_parameter_override_arg(overrides), expected + ) self.verify_docker_container_cleanedup(runtime) def _verify_built_artifact(self, build_dir, function_logical_id, expected_files): @@ -82,70 +90,75 @@ def _verify_built_artifact(self, build_dir, function_logical_id, expected_files) resource_artifact_dir = build_dir.joinpath(function_logical_id) # Make sure the template has correct CodeUri for resource - self._verify_resource_property(str(template_path), - function_logical_id, - "CodeUri", - function_logical_id) + self._verify_resource_property(str(template_path), function_logical_id, "CodeUri", function_logical_id) all_artifacts = set(os.listdir(str(resource_artifact_dir))) actual_files = all_artifacts.intersection(expected_files) - self.assertEquals(actual_files, expected_files) + self.assertEqual(actual_files, expected_files) def _get_python_version(self): return "python{}.{}".format(sys.version_info.major, sys.version_info.minor) +@skipIf(IS_WINDOWS and RUNNING_ON_CI, "Skip build tests on windows when running in CI") class TestBuildCommand_ErrorCases(BuildIntegBase): - def test_unsupported_runtime(self): - overrides = {"Runtime": "unsupportedpython", "CodeUri": "NoThere"} + overrides = {"Runtime": "unsupportedpython", "CodeUri": "Python"} cmdlist = self.get_command_list(parameter_overrides=overrides) LOG.info("Running Command: {}", cmdlist) process = subprocess.Popen(cmdlist, cwd=self.working_dir, stdout=subprocess.PIPE) process.wait() - process_stdout = b"".join(process.stdout.readlines()).strip().decode('utf-8') - self.assertEquals(1, process.returncode) + process_stdout = b"".join(process.stdout.readlines()).strip().decode("utf-8") + self.assertEqual(1, process.returncode) self.assertIn("Build Failed", process_stdout) +@skipIf(IS_WINDOWS and RUNNING_ON_CI, "Skip build tests on windows when running in CI") class TestBuildCommand_NodeFunctions(BuildIntegBase): EXPECTED_FILES_GLOBAL_MANIFEST = set() - EXPECTED_FILES_PROJECT_MANIFEST = {'node_modules', 'main.js'} - EXPECTED_NODE_MODULES = {'minimal-request-promise'} + EXPECTED_FILES_PROJECT_MANIFEST = {"node_modules", "main.js"} + EXPECTED_NODE_MODULES = {"minimal-request-promise"} FUNCTION_LOGICAL_ID = "Function" - @parameterized.expand([ - ("nodejs6.10", False), - ("nodejs8.10", False), - ("nodejs10.x", False), - ("nodejs6.10", "use_container"), - ("nodejs8.10", "use_container"), - ("nodejs10.x", "use_container") - ]) + @parameterized.expand( + [ + 
("nodejs6.10", False), + ("nodejs8.10", False), + ("nodejs10.x", False), + ("nodejs6.10", "use_container"), + ("nodejs8.10", "use_container"), + ("nodejs10.x", "use_container"), + ] + ) def test_with_default_package_json(self, runtime, use_container): overrides = {"Runtime": runtime, "CodeUri": "Node", "Handler": "ignored"} - cmdlist = self.get_command_list(use_container=use_container, - parameter_overrides=overrides) + cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}", cmdlist) process = subprocess.Popen(cmdlist, cwd=self.working_dir) process.wait() - self._verify_built_artifact(self.default_build_dir, self.FUNCTION_LOGICAL_ID, - self.EXPECTED_FILES_PROJECT_MANIFEST, self.EXPECTED_NODE_MODULES) - - self._verify_resource_property(str(self.built_template), - "OtherRelativePathResource", - "BodyS3Location", - os.path.relpath( - os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), - str(self.default_build_dir)) - ) + self._verify_built_artifact( + self.default_build_dir, + self.FUNCTION_LOGICAL_ID, + self.EXPECTED_FILES_PROJECT_MANIFEST, + self.EXPECTED_NODE_MODULES, + ) + + self._verify_resource_property( + str(self.built_template), + "OtherRelativePathResource", + "BodyS3Location", + os.path.relpath( + os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), + str(self.default_build_dir), + ), + ) self.verify_docker_container_cleanedup(runtime) def _verify_built_artifact(self, build_dir, function_logical_id, expected_files, expected_modules): @@ -160,51 +173,51 @@ def _verify_built_artifact(self, build_dir, function_logical_id, expected_files, resource_artifact_dir = build_dir.joinpath(function_logical_id) # Make sure the template has correct CodeUri for resource - self._verify_resource_property(str(template_path), - function_logical_id, - "CodeUri", - function_logical_id) + self._verify_resource_property(str(template_path), function_logical_id, "CodeUri", function_logical_id) all_artifacts = set(os.listdir(str(resource_artifact_dir))) actual_files = all_artifacts.intersection(expected_files) - self.assertEquals(actual_files, expected_files) + self.assertEqual(actual_files, expected_files) - all_modules = set(os.listdir(str(resource_artifact_dir.joinpath('node_modules')))) + all_modules = set(os.listdir(str(resource_artifact_dir.joinpath("node_modules")))) actual_files = all_modules.intersection(expected_modules) - self.assertEquals(actual_files, expected_modules) + self.assertEqual(actual_files, expected_modules) +@skipIf(IS_WINDOWS and RUNNING_ON_CI, "Skip build tests on windows when running in CI") class TestBuildCommand_RubyFunctions(BuildIntegBase): EXPECTED_FILES_GLOBAL_MANIFEST = set() - EXPECTED_FILES_PROJECT_MANIFEST = {'app.rb'} - EXPECTED_RUBY_GEM = 'httparty' + EXPECTED_FILES_PROJECT_MANIFEST = {"app.rb"} + EXPECTED_RUBY_GEM = "httparty" FUNCTION_LOGICAL_ID = "Function" - @parameterized.expand([ - ("ruby2.5", False), - ("ruby2.5", "use_container") - ]) + @parameterized.expand([("ruby2.5", False), ("ruby2.5", "use_container")]) def test_with_default_gemfile(self, runtime, use_container): overrides = {"Runtime": runtime, "CodeUri": "Ruby", "Handler": "ignored"} - cmdlist = self.get_command_list(use_container=use_container, - parameter_overrides=overrides) + cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}".format(cmdlist)) process = subprocess.Popen(cmdlist, cwd=self.working_dir) 
process.wait() - self._verify_built_artifact(self.default_build_dir, self.FUNCTION_LOGICAL_ID, - self.EXPECTED_FILES_PROJECT_MANIFEST, self.EXPECTED_RUBY_GEM) - - self._verify_resource_property(str(self.built_template), - "OtherRelativePathResource", - "BodyS3Location", - os.path.relpath( - os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), - str(self.default_build_dir)) - ) + self._verify_built_artifact( + self.default_build_dir, + self.FUNCTION_LOGICAL_ID, + self.EXPECTED_FILES_PROJECT_MANIFEST, + self.EXPECTED_RUBY_GEM, + ) + + self._verify_resource_property( + str(self.built_template), + "OtherRelativePathResource", + "BodyS3Location", + os.path.relpath( + os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), + str(self.default_build_dir), + ), + ) self.verify_docker_container_cleanedup(runtime) def _verify_built_artifact(self, build_dir, function_logical_id, expected_files, expected_modules): @@ -219,75 +232,80 @@ def _verify_built_artifact(self, build_dir, function_logical_id, expected_files, resource_artifact_dir = build_dir.joinpath(function_logical_id) # Make sure the template has correct CodeUri for resource - self._verify_resource_property(str(template_path), - function_logical_id, - "CodeUri", - function_logical_id) + self._verify_resource_property(str(template_path), function_logical_id, "CodeUri", function_logical_id) all_artifacts = set(os.listdir(str(resource_artifact_dir))) actual_files = all_artifacts.intersection(expected_files) - self.assertEquals(actual_files, expected_files) + self.assertEqual(actual_files, expected_files) ruby_version = None ruby_bundled_path = None # Walk through ruby version to get to the gem path - for dirpath, dirname, _ in os.walk(str(resource_artifact_dir.joinpath('vendor', 'bundle', 'ruby'))): + for dirpath, dirname, _ in os.walk(str(resource_artifact_dir.joinpath("vendor", "bundle", "ruby"))): ruby_version = dirname ruby_bundled_path = Path(dirpath) break - gem_path = ruby_bundled_path.joinpath(ruby_version[0], 'gems') + gem_path = ruby_bundled_path.joinpath(ruby_version[0], "gems") self.assertTrue(any([True if self.EXPECTED_RUBY_GEM in gem else False for gem in os.listdir(str(gem_path))])) +@skipIf(IS_WINDOWS and RUNNING_ON_CI, "Skip build tests on windows when running in CI") class TestBuildCommand_Java(BuildIntegBase): - EXPECTED_FILES_PROJECT_MANIFEST_GRADLE = {'aws', 'lib', "META-INF"} - EXPECTED_FILES_PROJECT_MANIFEST_MAVEN = {'aws', 'lib'} - EXPECTED_DEPENDENCIES = {'annotations-2.1.0.jar', "aws-lambda-java-core-1.1.0.jar"} + EXPECTED_FILES_PROJECT_MANIFEST_GRADLE = {"aws", "lib", "META-INF"} + EXPECTED_FILES_PROJECT_MANIFEST_MAVEN = {"aws", "lib"} + EXPECTED_DEPENDENCIES = {"annotations-2.1.0.jar", "aws-lambda-java-core-1.1.0.jar"} FUNCTION_LOGICAL_ID = "Function" USING_GRADLE_PATH = os.path.join("Java", "gradle") USING_GRADLEW_PATH = os.path.join("Java", "gradlew") USING_GRADLE_KOTLIN_PATH = os.path.join("Java", "gradle-kotlin") - USING_MAVEN_PATH = str(Path('Java', 'maven')) - - @parameterized.expand([ - ("java8", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, False), - ("java8", USING_GRADLEW_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, False), - ("java8", USING_GRADLE_KOTLIN_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, False), - ("java8", USING_MAVEN_PATH, EXPECTED_FILES_PROJECT_MANIFEST_MAVEN, False), - ("java8", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, "use_container"), - ("java8", USING_GRADLEW_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, 
"use_container"), - ("java8", USING_GRADLE_KOTLIN_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, "use_container"), - ("java8", USING_MAVEN_PATH, EXPECTED_FILES_PROJECT_MANIFEST_MAVEN, "use_container") - ]) + USING_MAVEN_PATH = os.path.join("Java", "maven") + WINDOWS_LINE_ENDING = b"\r\n" + UNIX_LINE_ENDING = b"\n" + + @parameterized.expand( + [ + ("java8", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, False), + ("java8", USING_GRADLEW_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, False), + ("java8", USING_GRADLE_KOTLIN_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, False), + ("java8", USING_MAVEN_PATH, EXPECTED_FILES_PROJECT_MANIFEST_MAVEN, False), + ("java8", USING_GRADLE_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, "use_container"), + ("java8", USING_GRADLEW_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, "use_container"), + ("java8", USING_GRADLE_KOTLIN_PATH, EXPECTED_FILES_PROJECT_MANIFEST_GRADLE, "use_container"), + ("java8", USING_MAVEN_PATH, EXPECTED_FILES_PROJECT_MANIFEST_MAVEN, "use_container"), + ] + ) def test_with_building_java(self, runtime, code_path, expected_files, use_container): overrides = {"Runtime": runtime, "CodeUri": code_path, "Handler": "aws.example.Hello::myHandler"} - cmdlist = self.get_command_list(use_container=use_container, - parameter_overrides=overrides) + cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) + if code_path == self.USING_GRADLEW_PATH and use_container and IS_WINDOWS: + self._change_to_unix_line_ending(os.path.join(self.test_data_path, self.USING_GRADLEW_PATH, "gradlew")) LOG.info("Running Command: {}".format(cmdlist)) process = subprocess.Popen(cmdlist, cwd=self.working_dir) process.wait() - self._verify_built_artifact(self.default_build_dir, self.FUNCTION_LOGICAL_ID, - expected_files, self.EXPECTED_DEPENDENCIES) + self._verify_built_artifact( + self.default_build_dir, self.FUNCTION_LOGICAL_ID, expected_files, self.EXPECTED_DEPENDENCIES + ) - self._verify_resource_property(str(self.built_template), - "OtherRelativePathResource", - "BodyS3Location", - os.path.relpath( - os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), - str(self.default_build_dir)) - ) + self._verify_resource_property( + str(self.built_template), + "OtherRelativePathResource", + "BodyS3Location", + os.path.relpath( + os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), + str(self.default_build_dir), + ), + ) expected = "Hello World" - self._verify_invoke_built_function(self.built_template, - self.FUNCTION_LOGICAL_ID, - self._make_parameter_override_arg(overrides), - expected) + self._verify_invoke_built_function( + self.built_template, self.FUNCTION_LOGICAL_ID, self._make_parameter_override_arg(overrides), expected + ) self.verify_docker_container_cleanedup(runtime) @@ -303,38 +321,55 @@ def _verify_built_artifact(self, build_dir, function_logical_id, expected_files, resource_artifact_dir = build_dir.joinpath(function_logical_id) # Make sure the template has correct CodeUri for resource - self._verify_resource_property(str(template_path), - function_logical_id, - "CodeUri", - function_logical_id) + self._verify_resource_property(str(template_path), function_logical_id, "CodeUri", function_logical_id) all_artifacts = set(os.listdir(str(resource_artifact_dir))) actual_files = all_artifacts.intersection(expected_files) - self.assertEquals(actual_files, expected_files) + self.assertEqual(actual_files, expected_files) lib_dir_contents = 
set(os.listdir(str(resource_artifact_dir.joinpath("lib")))) - self.assertEquals(lib_dir_contents, expected_modules) + self.assertEqual(lib_dir_contents, expected_modules) + + def _change_to_unix_line_ending(self, path): + with open(os.path.abspath(path), "rb") as open_file: + content = open_file.read() + content = content.replace(self.WINDOWS_LINE_ENDING, self.UNIX_LINE_ENDING) + with open(os.path.abspath(path), "wb") as open_file: + open_file.write(content) + + +@skipIf(IS_WINDOWS and RUNNING_ON_CI, "Skip build tests on windows when running in CI") class TestBuildCommand_Dotnet_cli_package(BuildIntegBase): FUNCTION_LOGICAL_ID = "Function" - EXPECTED_FILES_PROJECT_MANIFEST = {"Amazon.Lambda.APIGatewayEvents.dll", "HelloWorld.pdb", - "Amazon.Lambda.Core.dll", "HelloWorld.runtimeconfig.json", - "Amazon.Lambda.Serialization.Json.dll", "Newtonsoft.Json.dll", - "HelloWorld.deps.json", "HelloWorld.dll"} - - @parameterized.expand([ - ("dotnetcore2.0", "Dotnetcore2.0", None), - ("dotnetcore2.1", "Dotnetcore2.1", None), - ("dotnetcore2.0", "Dotnetcore2.0", "debug"), - ("dotnetcore2.1", "Dotnetcore2.1", "debug"), - ]) + EXPECTED_FILES_PROJECT_MANIFEST = { + "Amazon.Lambda.APIGatewayEvents.dll", + "HelloWorld.pdb", + "Amazon.Lambda.Core.dll", + "HelloWorld.runtimeconfig.json", + "Amazon.Lambda.Serialization.Json.dll", + "Newtonsoft.Json.dll", + "HelloWorld.deps.json", + "HelloWorld.dll", + } + + @parameterized.expand( + [ + ("dotnetcore2.0", "Dotnetcore2.0", None), + ("dotnetcore2.1", "Dotnetcore2.1", None), + ("dotnetcore2.0", "Dotnetcore2.0", "debug"), + ("dotnetcore2.1", "Dotnetcore2.1", "debug"), + ] + ) def test_with_dotnetcore(self, runtime, code_uri, mode): - overrides = {"Runtime": runtime, "CodeUri": code_uri, - "Handler": "HelloWorld::HelloWorld.Function::FunctionHandler"} - cmdlist = self.get_command_list(use_container=False, - parameter_overrides=overrides) + overrides = { + "Runtime": runtime, + "CodeUri": code_uri, + "Handler": "HelloWorld::HelloWorld.Function::FunctionHandler", + } + cmdlist = self.get_command_list(use_container=False, parameter_overrides=overrides) LOG.info("Running Command: {}".format(cmdlist)) LOG.info("Running with SAM_BUILD_MODE={}".format(mode)) @@ -346,42 +381,43 @@ def test_with_dotnetcore(self, runtime, code_uri, mode): process = subprocess.Popen(cmdlist, cwd=self.working_dir, env=newenv) process.wait() - self._verify_built_artifact(self.default_build_dir, self.FUNCTION_LOGICAL_ID, - self.EXPECTED_FILES_PROJECT_MANIFEST) + self._verify_built_artifact( + self.default_build_dir, self.FUNCTION_LOGICAL_ID, self.EXPECTED_FILES_PROJECT_MANIFEST + ) - self._verify_resource_property(str(self.built_template), - "OtherRelativePathResource", - "BodyS3Location", - os.path.relpath( - os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), - str(self.default_build_dir)) - ) + self._verify_resource_property( + str(self.built_template), + "OtherRelativePathResource", + "BodyS3Location", + os.path.relpath( + os.path.normpath(os.path.join(str(self.test_data_path), "SomeRelativePath")), + str(self.default_build_dir), + ), + ) expected = "{'message': 'Hello World'}" - self._verify_invoke_built_function(self.built_template, - self.FUNCTION_LOGICAL_ID, - self._make_parameter_override_arg(overrides), - expected) + self._verify_invoke_built_function( + self.built_template, self.FUNCTION_LOGICAL_ID, self._make_parameter_override_arg(overrides), expected + ) self.verify_docker_container_cleanedup(runtime) - @parameterized.expand([ - ("dotnetcore2.0", 
"Dotnetcore2.0"), - ("dotnetcore2.1", "Dotnetcore2.1"), - ]) + @parameterized.expand([("dotnetcore2.0", "Dotnetcore2.0"), ("dotnetcore2.1", "Dotnetcore2.1")]) def test_must_fail_with_container(self, runtime, code_uri): use_container = True - overrides = {"Runtime": runtime, "CodeUri": code_uri, - "Handler": "HelloWorld::HelloWorld.Function::FunctionHandler"} - cmdlist = self.get_command_list(use_container=use_container, - parameter_overrides=overrides) + overrides = { + "Runtime": runtime, + "CodeUri": code_uri, + "Handler": "HelloWorld::HelloWorld.Function::FunctionHandler", + } + cmdlist = self.get_command_list(use_container=use_container, parameter_overrides=overrides) LOG.info("Running Command: {}".format(cmdlist)) process = subprocess.Popen(cmdlist, cwd=self.working_dir) process.wait() # Must error out, because container builds are not supported - self.assertEquals(process.returncode, 1) + self.assertEqual(process.returncode, 1) def _verify_built_artifact(self, build_dir, function_logical_id, expected_files): @@ -395,69 +431,68 @@ def _verify_built_artifact(self, build_dir, function_logical_id, expected_files) resource_artifact_dir = build_dir.joinpath(function_logical_id) # Make sure the template has correct CodeUri for resource - self._verify_resource_property(str(template_path), - function_logical_id, - "CodeUri", - function_logical_id) + self._verify_resource_property(str(template_path), function_logical_id, "CodeUri", function_logical_id) all_artifacts = set(os.listdir(str(resource_artifact_dir))) actual_files = all_artifacts.intersection(expected_files) - self.assertEquals(actual_files, expected_files) + self.assertEqual(actual_files, expected_files) +@skipIf(IS_WINDOWS and RUNNING_ON_CI, "Skip build tests on windows when running in CI") class TestBuildCommand_SingleFunctionBuilds(BuildIntegBase): template = "many-functions-template.yaml" EXPECTED_FILES_GLOBAL_MANIFEST = set() - EXPECTED_FILES_PROJECT_MANIFEST = {'__init__.py', 'main.py', 'numpy', - # 'cryptography', - "jinja2", - 'requirements.txt'} + EXPECTED_FILES_PROJECT_MANIFEST = { + "__init__.py", + "main.py", + "numpy", + # 'cryptography', + "jinja2", + "requirements.txt", + } def test_function_not_found(self): - overrides = {"Runtime": 'python3.7', "CodeUri": "Python", "Handler": "main.handler"} - cmdlist = self.get_command_list(parameter_overrides=overrides, - function_identifier="FunctionNotInTemplate") + overrides = {"Runtime": "python3.7", "CodeUri": "Python", "Handler": "main.handler"} + cmdlist = self.get_command_list(parameter_overrides=overrides, function_identifier="FunctionNotInTemplate") process = subprocess.Popen(cmdlist, cwd=self.working_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = process.communicate() - self.assertEquals(process.returncode, 1) - self.assertIn('FunctionNotInTemplate not found', str(stderr.decode('utf8'))) - - @parameterized.expand([ - ("python3.7", False, "FunctionOne"), - ("python3.7", "use_container", "FunctionOne"), - ("python3.7", False, "FunctionTwo"), - ("python3.7", "use_container", "FunctionTwo") - ]) + self.assertEqual(process.returncode, 1) + self.assertIn("FunctionNotInTemplate not found", str(stderr.decode("utf8"))) + + @parameterized.expand( + [ + ("python3.7", False, "FunctionOne"), + ("python3.7", "use_container", "FunctionOne"), + ("python3.7", False, "FunctionTwo"), + ("python3.7", "use_container", "FunctionTwo"), + ] + ) def test_build_single_function(self, runtime, use_container, function_identifier): # Don't run test on wrong Python versions 
py_version = self._get_python_version() if py_version != runtime: - self.skipTest("Current Python version '{}' does not match Lambda runtime version '{}'".format(py_version, - runtime)) + self.skipTest( + "Current Python version '{}' does not match Lambda runtime version '{}'".format(py_version, runtime) + ) overrides = {"Runtime": runtime, "CodeUri": "Python", "Handler": "main.handler"} - cmdlist = self.get_command_list(use_container=use_container, - parameter_overrides=overrides, - function_identifier=function_identifier) + cmdlist = self.get_command_list( + use_container=use_container, parameter_overrides=overrides, function_identifier=function_identifier + ) LOG.info("Running Command: {}", cmdlist) process = subprocess.Popen(cmdlist, cwd=self.working_dir) process.wait() - self._verify_built_artifact(self.default_build_dir, function_identifier, - self.EXPECTED_FILES_PROJECT_MANIFEST) + self._verify_built_artifact(self.default_build_dir, function_identifier, self.EXPECTED_FILES_PROJECT_MANIFEST) - expected = { - "pi": "3.14", - "jinja": "Hello World" - } - self._verify_invoke_built_function(self.built_template, - function_identifier, - self._make_parameter_override_arg(overrides), - expected) + expected = {"pi": "3.14", "jinja": "Hello World"} + self._verify_invoke_built_function( + self.built_template, function_identifier, self._make_parameter_override_arg(overrides), expected + ) self.verify_docker_container_cleanedup(runtime) def _verify_built_artifact(self, build_dir, function_logical_id, expected_files): @@ -471,14 +506,11 @@ def _verify_built_artifact(self, build_dir, function_logical_id, expected_files) resource_artifact_dir = build_dir.joinpath(function_logical_id) # Make sure the template has correct CodeUri for resource - self._verify_resource_property(str(template_path), - function_logical_id, - "CodeUri", - function_logical_id) + self._verify_resource_property(str(template_path), function_logical_id, "CodeUri", function_logical_id) all_artifacts = set(os.listdir(str(resource_artifact_dir))) actual_files = all_artifacts.intersection(expected_files) - self.assertEquals(actual_files, expected_files) + self.assertEqual(actual_files, expected_files) def _get_python_version(self): return "python{}.{}".format(sys.version_info.major, sys.version_info.minor) diff --git a/tests/integration/deprecation/test_deprecation_warning.py b/tests/integration/deprecation/test_deprecation_warning.py index aebf882f76..d4ea9e8c56 100644 --- a/tests/integration/deprecation/test_deprecation_warning.py +++ b/tests/integration/deprecation/test_deprecation_warning.py @@ -1,4 +1,5 @@ import os +import re import subprocess import sys @@ -8,7 +9,6 @@ class TestPy2DeprecationWarning(TestCase): - def base_command(self): command = "sam" if os.getenv("SAM_CLI_DEV"): @@ -26,6 +26,7 @@ def test_print_deprecation_warning_if_py2(self): process = self.run_cmd() (stdoutdata, stderrdata) = process.communicate() + expected_notice = re.sub(r"\n", os.linesep, DEPRECATION_NOTICE) # Deprecation notice should be part of the command output if running in python 2 if sys.version_info.major == 2: - self.assertIn(DEPRECATION_NOTICE, stderrdata.decode()) + self.assertIn(expected_notice, stderrdata.decode()) diff --git a/tests/integration/init/test_init_command.py b/tests/integration/init/test_init_command.py index f657ee53d7..79acb2527c 100644 --- a/tests/integration/init/test_init_command.py +++ b/tests/integration/init/test_init_command.py @@ -6,13 +6,12 @@ class TestBasicInitCommand(TestCase): - def 
test_init_command_passes_and_dir_created(self): with tempfile.TemporaryDirectory() as temp: process = Popen([TestBasicInitCommand._get_command(), "init", "-o", temp]) return_code = process.wait() - self.assertEquals(return_code, 0) + self.assertEqual(return_code, 0) self.assertTrue(os.path.isdir(temp + "/sam-app")) @staticmethod diff --git a/tests/integration/local/generate_event/test_cli_integ.py b/tests/integration/local/generate_event/test_cli_integ.py index 2bad1fb072..a5f33885b1 100644 --- a/tests/integration/local/generate_event/test_cli_integ.py +++ b/tests/integration/local/generate_event/test_cli_integ.py @@ -4,11 +4,10 @@ class Test_EventGeneration_Integ(TestCase): - def test_generate_event_substitution(self): process = Popen([Test_EventGeneration_Integ._get_command(), "local", "generate-event", "s3", "put"]) return_code = process.wait() - self.assertEquals(return_code, 0) + self.assertEqual(return_code, 0) @staticmethod def _get_command(): diff --git a/tests/integration/local/invoke/invoke_integ_base.py b/tests/integration/local/invoke/invoke_integ_base.py index 084f0fe846..fe13436acb 100644 --- a/tests/integration/local/invoke/invoke_integ_base.py +++ b/tests/integration/local/invoke/invoke_integ_base.py @@ -30,9 +30,19 @@ def base_command(cls): return command - def get_command_list(self, function_to_invoke, template_path=None, event_path=None, env_var_path=None, - parameter_overrides=None, region=None, no_event=None, profile=None, layer_cache=None, - docker_network=None): + def get_command_list( + self, + function_to_invoke, + template_path=None, + event_path=None, + env_var_path=None, + parameter_overrides=None, + region=None, + no_event=None, + profile=None, + layer_cache=None, + docker_network=None, + ): command_list = [self.cmd, "local", "invoke", function_to_invoke] if template_path: @@ -57,9 +67,9 @@ def get_command_list(self, function_to_invoke, template_path=None, event_path=No command_list = command_list + ["--docker-network", docker_network] if parameter_overrides: - arg_value = " ".join([ - "ParameterKey={},ParameterValue={}".format(key, value) for key, value in parameter_overrides.items() - ]) + arg_value = " ".join( + ["ParameterKey={},ParameterValue={}".format(key, value) for key, value in parameter_overrides.items()] + ) command_list = command_list + ["--parameter-overrides", arg_value] if region: diff --git a/tests/integration/local/invoke/layer_utils.py b/tests/integration/local/invoke/layer_utils.py index 0dd942f509..eb9f135002 100644 --- a/tests/integration/local/invoke/layer_utils.py +++ b/tests/integration/local/invoke/layer_utils.py @@ -12,38 +12,30 @@ class LayerUtils(object): - def __init__(self, region): self.region = region - self.layer_meta = namedtuple('LayerMeta', ['layer_name', 'layer_arn', 'layer_version']) - self.lambda_client = boto3.client('lambda', - region_name=region) + self.layer_meta = namedtuple("LayerMeta", ["layer_name", "layer_arn", "layer_version"]) + self.lambda_client = boto3.client("lambda", region_name=region) self.parameters_overrides = {} self.layers_meta = [] self.layer_zip_parent = InvokeIntegBase.get_integ_dir().joinpath("testdata", "invoke", "layer_zips") @staticmethod def generate_layer_name(): - return str(uuid.uuid4()).replace('-', '')[:10] + return str(uuid.uuid4()).replace("-", "")[:10] def upsert_layer(self, layer_name, ref_layer_name, layer_zip): - with open(str(Path.joinpath(self.layer_zip_parent, layer_zip)), 'rb') as zip_contents: + with open(str(Path.joinpath(self.layer_zip_parent, layer_zip)), "rb") as 
zip_contents: resp = self.lambda_client.publish_layer_version( - LayerName=layer_name, - Content={ - 'ZipFile': zip_contents.read() - }) - self.parameters_overrides[ref_layer_name] = resp['LayerVersionArn'] + LayerName=layer_name, Content={"ZipFile": zip_contents.read()} + ) + self.parameters_overrides[ref_layer_name] = resp["LayerVersionArn"] self.layers_meta.append( - self.layer_meta( - layer_name=layer_name, - layer_arn=resp['LayerArn'], - layer_version=resp['Version']) + self.layer_meta(layer_name=layer_name, layer_arn=resp["LayerArn"], layer_version=resp["Version"]) ) def delete_layers(self): for layer_meta in self.layers_meta: self.lambda_client.delete_layer_version( - LayerName=layer_meta.layer_arn, - VersionNumber=layer_meta.layer_version + LayerName=layer_meta.layer_arn, VersionNumber=layer_meta.layer_version ) diff --git a/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py b/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py index bf7264398d..e37f0fd402 100644 --- a/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py +++ b/tests/integration/local/invoke/runtimes/test_with_runtime_zips.py @@ -30,33 +30,29 @@ def setUp(self): def tearDown(self): os.remove(self.events_file_path) - @parameterized.expand([ - param("Go1xFunction"), - param("Java8Function") - ]) + @parameterized.expand([param("Go1xFunction"), param("Java8Function")]) def test_runtime_zip(self, function_name): - command_list = self.get_command_list(function_name, - template_path=self.template_path, - event_path=self.events_file_path) + command_list = self.get_command_list( + function_name, template_path=self.template_path, event_path=self.events_file_path + ) process = Popen(command_list, stdout=PIPE) return_code = process.wait() - self.assertEquals(return_code, 0) + self.assertEqual(return_code, 0) process_stdout = b"".join(process.stdout.readlines()).strip() - self.assertEquals(process_stdout.decode('utf-8'), '"Hello World"') + self.assertEqual(process_stdout.decode("utf-8"), '"Hello World"') def test_custom_provided_runtime(self): - command_list = self.get_command_list("CustomBashFunction", - template_path=self.template_path, - event_path=self.events_file_path) + command_list = self.get_command_list( + "CustomBashFunction", template_path=self.template_path, event_path=self.events_file_path + ) command_list = command_list + ["--skip-pull-image"] process = Popen(command_list, stdout=PIPE) return_code = process.wait() - self.assertEquals(return_code, 0) + self.assertEqual(return_code, 0) process_stdout = b"".join(process.stdout.readlines()).strip() - self.assertEquals(process_stdout.decode('utf-8'), - u'{"body":"hello 曰有冥 world 🐿","statusCode":200,"headers":{}}') + self.assertEqual(process_stdout.decode("utf-8"), u'{"body":"hello 曰有冥 world 🐿","statusCode":200,"headers":{}}') diff --git a/tests/integration/local/invoke/test_integrations_cli.py b/tests/integration/local/invoke/test_integrations_cli.py index dcd7586b8c..737a926df9 100644 --- a/tests/integration/local/invoke/test_integrations_cli.py +++ b/tests/integration/local/invoke/test_integrations_cli.py @@ -29,54 +29,49 @@ class TestSamPython36HelloWorldIntegration(InvokeIntegBase): template = Path("template.yml") def test_invoke_returncode_is_zero(self): - command_list = self.get_command_list("HelloWorldServerlessFunction", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "HelloWorldServerlessFunction", template_path=self.template_path, event_path=self.event_path + 
) process = Popen(command_list, stdout=PIPE) return_code = process.wait() - self.assertEquals(return_code, 0) + self.assertEqual(return_code, 0) def test_function_with_metadata(self): - command_list = self.get_command_list("FunctionWithMetadata", - template_path=self.template_path, - no_event=True) + command_list = self.get_command_list("FunctionWithMetadata", template_path=self.template_path, no_event=True) process = Popen(command_list, stdout=PIPE) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - self.assertEquals(process_stdout.decode('utf-8'), '"Hello World in a different dir"') + self.assertEqual(process_stdout.decode("utf-8"), '"Hello World in a different dir"') def test_invoke_returns_execpted_results(self): - command_list = self.get_command_list("HelloWorldServerlessFunction", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "HelloWorldServerlessFunction", template_path=self.template_path, event_path=self.event_path + ) process = Popen(command_list, stdout=PIPE) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - self.assertEquals(process_stdout.decode('utf-8'), '"Hello world"') + self.assertEqual(process_stdout.decode("utf-8"), '"Hello world"') def test_invoke_of_lambda_function(self): - command_list = self.get_command_list("HelloWorldLambdaFunction", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "HelloWorldLambdaFunction", template_path=self.template_path, event_path=self.event_path + ) process = Popen(command_list, stdout=PIPE) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - self.assertEquals(process_stdout.decode('utf-8'), '"Hello world"') + self.assertEqual(process_stdout.decode("utf-8"), '"Hello world"') - @parameterized.expand([ - ("TimeoutFunction"), - ("TimeoutFunctionWithParameter"), - ]) + @parameterized.expand([("TimeoutFunction"), ("TimeoutFunctionWithParameter")]) def test_invoke_with_timeout_set(self, function_name): - command_list = self.get_command_list(function_name, - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + function_name, template_path=self.template_path, event_path=self.event_path + ) start = timer() process = Popen(command_list, stdout=PIPE) @@ -91,25 +86,30 @@ def test_invoke_with_timeout_set(self, function_name): self.assertGreater(wall_clock_cli_duration, 5) self.assertLess(wall_clock_cli_duration, 20) - self.assertEquals(return_code, 0) - self.assertEquals(process_stdout.decode('utf-8'), "", msg="The return statement in the LambdaFunction " - "should never return leading to an empty string") + self.assertEqual(return_code, 0) + self.assertEqual( + process_stdout.decode("utf-8"), + "", + msg="The return statement in the LambdaFunction " "should never return leading to an empty string", + ) def test_invoke_with_env_vars(self): - command_list = self.get_command_list("EchoCustomEnvVarFunction", - template_path=self.template_path, - event_path=self.event_path, - env_var_path=self.env_var_path) + command_list = self.get_command_list( + "EchoCustomEnvVarFunction", + template_path=self.template_path, + event_path=self.event_path, + env_var_path=self.env_var_path, + ) process = Popen(command_list, stdout=PIPE) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - self.assertEquals(process_stdout.decode('utf-8'), '"MyVar"') + 
self.assertEqual(process_stdout.decode("utf-8"), '"MyVar"') def test_invoke_when_function_writes_stdout(self): - command_list = self.get_command_list("WriteToStdoutFunction", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "WriteToStdoutFunction", template_path=self.template_path, event_path=self.event_path + ) process = Popen(command_list, stdout=PIPE, stderr=PIPE) process.wait() @@ -117,20 +117,20 @@ def test_invoke_when_function_writes_stdout(self): process_stdout = b"".join(process.stdout.readlines()).strip() process_stderr = b"".join(process.stderr.readlines()).strip() - self.assertIn("Docker Lambda is writing to stdout", process_stderr.decode('utf-8')) - self.assertIn("wrote to stdout", process_stdout.decode('utf-8')) + self.assertIn("Docker Lambda is writing to stdout", process_stderr.decode("utf-8")) + self.assertIn("wrote to stdout", process_stdout.decode("utf-8")) def test_invoke_when_function_writes_stderr(self): - command_list = self.get_command_list("WriteToStderrFunction", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "WriteToStderrFunction", template_path=self.template_path, event_path=self.event_path + ) process = Popen(command_list, stderr=PIPE) process.wait() process_stderr = b"".join(process.stderr.readlines()).strip() - self.assertIn("Docker Lambda is writing to stderr", process_stderr.decode('utf-8')) + self.assertIn("Docker Lambda is writing to stderr", process_stderr.decode("utf-8")) def test_invoke_returns_expected_result_when_no_event_given(self): command_list = self.get_command_list("EchoEventFunction", template_path=self.template_path) @@ -139,61 +139,60 @@ def test_invoke_returns_expected_result_when_no_event_given(self): return_code = process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - self.assertEquals(return_code, 0) - self.assertEquals("{}", process_stdout.decode('utf-8')) + self.assertEqual(return_code, 0) + self.assertEqual("{}", process_stdout.decode("utf-8")) def test_invoke_raises_exception_with_noargs_and_event(self): - command_list = self.get_command_list("HelloWorldLambdaFunction", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "HelloWorldLambdaFunction", template_path=self.template_path, event_path=self.event_path + ) command_list.append("--no-event") process = Popen(command_list, stderr=PIPE) process.wait() process_stderr = b"".join(process.stderr.readlines()).strip() - error_output = process_stderr.decode('utf-8') + error_output = process_stderr.decode("utf-8") self.assertIn("no_event and event cannot be used together. 
Please provide only one.", error_output) def test_invoke_with_env_using_parameters(self): - command_list = self.get_command_list("EchoEnvWithParameters", - template_path=self.template_path, - event_path=self.event_path, - parameter_overrides={ - "MyRuntimeVersion": "v0", - "DefaultTimeout": "100" - }) + command_list = self.get_command_list( + "EchoEnvWithParameters", + template_path=self.template_path, + event_path=self.event_path, + parameter_overrides={"MyRuntimeVersion": "v0", "DefaultTimeout": "100"}, + ) process = Popen(command_list, stdout=PIPE) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - environ = json.loads(process_stdout.decode('utf-8')) + environ = json.loads(process_stdout.decode("utf-8")) - self.assertEquals(environ["Region"], "us-east-1") - self.assertEquals(environ["AccountId"], "123456789012") - self.assertEquals(environ["Partition"], "aws") - self.assertEquals(environ["StackName"], "local") - self.assertEquals(environ["StackId"], "arn:aws:cloudformation:us-east-1:123456789012:stack/" - "local/51af3dc0-da77-11e4-872e-1234567db123",) + self.assertEqual(environ["Region"], "us-east-1") + self.assertEqual(environ["AccountId"], "123456789012") + self.assertEqual(environ["Partition"], "aws") + self.assertEqual(environ["StackName"], "local") + self.assertEqual( + environ["StackId"], + "arn:aws:cloudformation:us-east-1:123456789012:stack/" "local/51af3dc0-da77-11e4-872e-1234567db123", + ) - self.assertEquals(environ["URLSuffix"], "localhost") - self.assertEquals(environ["Timeout"], "100") - self.assertEquals(environ["MyRuntimeVersion"], "v0") + self.assertEqual(environ["URLSuffix"], "localhost") + self.assertEqual(environ["Timeout"], "100") + self.assertEqual(environ["MyRuntimeVersion"], "v0") def test_invoke_with_env_using_parameters_with_custom_region(self): custom_region = "my-custom-region" - command_list = self.get_command_list("EchoEnvWithParameters", - template_path=self.template_path, - event_path=self.event_path, - region=custom_region - ) + command_list = self.get_command_list( + "EchoEnvWithParameters", template_path=self.template_path, event_path=self.event_path, region=custom_region + ) process = Popen(command_list, stdout=PIPE) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - environ = json.loads(process_stdout.decode('utf-8')) + environ = json.loads(process_stdout.decode("utf-8")) - self.assertEquals(environ["Region"], custom_region) + self.assertEqual(environ["Region"], custom_region) def test_invoke_with_env_with_aws_creds(self): custom_region = "my-custom-region" @@ -201,9 +200,9 @@ def test_invoke_with_env_with_aws_creds(self): secret = "secret" session = "session" - command_list = self.get_command_list("EchoEnvWithParameters", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "EchoEnvWithParameters", template_path=self.template_path, event_path=self.event_path + ) env = copy.deepcopy(dict(os.environ)) env["AWS_DEFAULT_REGION"] = custom_region @@ -215,40 +214,41 @@ def test_invoke_with_env_with_aws_creds(self): process = Popen(command_list, stdout=PIPE, env=env) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - environ = json.loads(process_stdout.decode('utf-8')) + environ = json.loads(process_stdout.decode("utf-8")) - self.assertEquals(environ["AWS_DEFAULT_REGION"], custom_region) - self.assertEquals(environ["AWS_REGION"], custom_region) - self.assertEquals(environ["AWS_ACCESS_KEY_ID"], key) - 
self.assertEquals(environ["AWS_SECRET_ACCESS_KEY"], secret) - self.assertEquals(environ["AWS_SESSION_TOKEN"], session) + self.assertEqual(environ["AWS_DEFAULT_REGION"], custom_region) + self.assertEqual(environ["AWS_REGION"], custom_region) + self.assertEqual(environ["AWS_ACCESS_KEY_ID"], key) + self.assertEqual(environ["AWS_SECRET_ACCESS_KEY"], secret) + self.assertEqual(environ["AWS_SESSION_TOKEN"], session) def test_invoke_with_docker_network_of_host(self): - command_list = self.get_command_list("HelloWorldServerlessFunction", - template_path=self.template_path, - event_path=self.event_path, - docker_network='host') + command_list = self.get_command_list( + "HelloWorldServerlessFunction", + template_path=self.template_path, + event_path=self.event_path, + docker_network="host", + ) process = Popen(command_list, stdout=PIPE) return_code = process.wait() - self.assertEquals(return_code, 0) + self.assertEqual(return_code, 0) - @skipIf(IS_WINDOWS, - "The test hangs on Windows due to trying to attach to a non-existing network") + @skipIf(IS_WINDOWS, "The test hangs on Windows due to trying to attach to a non-existing network") def test_invoke_with_docker_network_of_host_in_env_var(self): - command_list = self.get_command_list("HelloWorldServerlessFunction", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "HelloWorldServerlessFunction", template_path=self.template_path, event_path=self.event_path + ) env = os.environ.copy() - env["SAM_DOCKER_NETWORK"] = 'non-existing-network' + env["SAM_DOCKER_NETWORK"] = "non-existing-network" process = Popen(command_list, stderr=PIPE, env=env) process.wait() process_stderr = b"".join(process.stderr.readlines()).strip() - self.assertIn('Not Found ("network non-existing-network not found")', process_stderr.decode('utf-8')) + self.assertIn('Not Found ("network non-existing-network not found")', process_stderr.decode("utf-8")) def test_sam_template_file_env_var_set(self): command_list = self.get_command_list("HelloWorldFunctionInNonDefaultTemplate", event_path=self.event_path) @@ -261,14 +261,14 @@ def test_sam_template_file_env_var_set(self): process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - self.assertEquals(process_stdout.decode('utf-8'), '"Hello world"') + self.assertEqual(process_stdout.decode("utf-8"), '"Hello world"') def test_skip_pull_image_in_env_var(self): - docker.from_env().api.pull('lambci/lambda:python3.6') + docker.from_env().api.pull("lambci/lambda:python3.6") - command_list = self.get_command_list("HelloWorldLambdaFunction", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "HelloWorldLambdaFunction", template_path=self.template_path, event_path=self.event_path + ) env = os.environ.copy() env["SAM_SKIP_PULL_IMAGE"] = "True" @@ -276,7 +276,7 @@ def test_skip_pull_image_in_env_var(self): process = Popen(command_list, stderr=PIPE, env=env) process.wait() process_stderr = b"".join(process.stderr.readlines()).strip() - self.assertIn("Requested to skip pulling images", process_stderr.decode('utf-8')) + self.assertIn("Requested to skip pulling images", process_stderr.decode("utf-8")) class TestUsingConfigFiles(InvokeIntegBase): @@ -293,96 +293,95 @@ def test_existing_env_variables_precedence_over_profiles(self): custom_config = self._create_config_file(profile) custom_cred = self._create_cred_file(profile) - command_list = self.get_command_list("EchoEnvWithParameters", - template_path=self.template_path, - 
event_path=self.event_path) + command_list = self.get_command_list( + "EchoEnvWithParameters", template_path=self.template_path, event_path=self.event_path + ) env = os.environ.copy() # Explicitly set environment variables beforehand - env['AWS_DEFAULT_REGION'] = 'sa-east-1' - env['AWS_REGION'] = 'sa-east-1' - env['AWS_ACCESS_KEY_ID'] = 'priority_access_key_id' - env['AWS_SECRET_ACCESS_KEY'] = 'priority_secret_key_id' - env['AWS_SESSION_TOKEN'] = 'priority_secret_token' + env["AWS_DEFAULT_REGION"] = "sa-east-1" + env["AWS_REGION"] = "sa-east-1" + env["AWS_ACCESS_KEY_ID"] = "priority_access_key_id" + env["AWS_SECRET_ACCESS_KEY"] = "priority_secret_key_id" + env["AWS_SESSION_TOKEN"] = "priority_secret_token" # Setup a custom profile - env['AWS_CONFIG_FILE'] = custom_config - env['AWS_SHARED_CREDENTIALS_FILE'] = custom_cred + env["AWS_CONFIG_FILE"] = custom_config + env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred process = Popen(command_list, stdout=PIPE, env=env) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - environ = json.loads(process_stdout.decode('utf-8')) + environ = json.loads(process_stdout.decode("utf-8")) # Environment variables we explicitly set take priority over profiles. - self.assertEquals(environ["AWS_DEFAULT_REGION"], 'sa-east-1') - self.assertEquals(environ["AWS_REGION"], 'sa-east-1') - self.assertEquals(environ["AWS_ACCESS_KEY_ID"], 'priority_access_key_id') - self.assertEquals(environ["AWS_SECRET_ACCESS_KEY"], 'priority_secret_key_id') - self.assertEquals(environ["AWS_SESSION_TOKEN"], 'priority_secret_token') + self.assertEqual(environ["AWS_DEFAULT_REGION"], "sa-east-1") + self.assertEqual(environ["AWS_REGION"], "sa-east-1") + self.assertEqual(environ["AWS_ACCESS_KEY_ID"], "priority_access_key_id") + self.assertEqual(environ["AWS_SECRET_ACCESS_KEY"], "priority_secret_key_id") + self.assertEqual(environ["AWS_SESSION_TOKEN"], "priority_secret_token") def test_default_profile_with_custom_configs(self): profile = "default" custom_config = self._create_config_file(profile) custom_cred = self._create_cred_file(profile) - command_list = self.get_command_list("EchoEnvWithParameters", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "EchoEnvWithParameters", template_path=self.template_path, event_path=self.event_path + ) env = os.environ.copy() # Explicitly clean environment variables beforehand - env.pop('AWS_DEFAULT_REGION', None) - env.pop('AWS_REGION', None) - env.pop('AWS_ACCESS_KEY_ID', None) - env.pop('AWS_SECRET_ACCESS_KEY', None) - env.pop('AWS_SESSION_TOKEN', None) - env['AWS_CONFIG_FILE'] = custom_config - env['AWS_SHARED_CREDENTIALS_FILE'] = custom_cred + env.pop("AWS_DEFAULT_REGION", None) + env.pop("AWS_REGION", None) + env.pop("AWS_ACCESS_KEY_ID", None) + env.pop("AWS_SECRET_ACCESS_KEY", None) + env.pop("AWS_SESSION_TOKEN", None) + env["AWS_CONFIG_FILE"] = custom_config + env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred process = Popen(command_list, stdout=PIPE, env=env) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - environ = json.loads(process_stdout.decode('utf-8')) + environ = json.loads(process_stdout.decode("utf-8")) - self.assertEquals(environ["AWS_DEFAULT_REGION"], 'us-west-1') - self.assertEquals(environ["AWS_REGION"], 'us-west-1') - self.assertEquals(environ["AWS_ACCESS_KEY_ID"], 'someaccesskeyid') - self.assertEquals(environ["AWS_SECRET_ACCESS_KEY"], 'shhhhhthisisasecret') - self.assertEquals(environ["AWS_SESSION_TOKEN"], 
'sessiontoken') + self.assertEqual(environ["AWS_DEFAULT_REGION"], "us-west-1") + self.assertEqual(environ["AWS_REGION"], "us-west-1") + self.assertEqual(environ["AWS_ACCESS_KEY_ID"], "someaccesskeyid") + self.assertEqual(environ["AWS_SECRET_ACCESS_KEY"], "shhhhhthisisasecret") + self.assertEqual(environ["AWS_SESSION_TOKEN"], "sessiontoken") def test_custom_profile_with_custom_configs(self): custom_config = self._create_config_file("custom") custom_cred = self._create_cred_file("custom") - command_list = self.get_command_list("EchoEnvWithParameters", - template_path=self.template_path, - event_path=self.event_path, - profile='custom') + command_list = self.get_command_list( + "EchoEnvWithParameters", template_path=self.template_path, event_path=self.event_path, profile="custom" + ) env = os.environ.copy() # Explicitly clean environment variables beforehand - env.pop('AWS_DEFAULT_REGION', None) - env.pop('AWS_REGION', None) - env.pop('AWS_ACCESS_KEY_ID', None) - env.pop('AWS_SECRET_ACCESS_KEY', None) - env.pop('AWS_SESSION_TOKEN', None) - env['AWS_CONFIG_FILE'] = custom_config - env['AWS_SHARED_CREDENTIALS_FILE'] = custom_cred + env.pop("AWS_DEFAULT_REGION", None) + env.pop("AWS_REGION", None) + env.pop("AWS_ACCESS_KEY_ID", None) + env.pop("AWS_SECRET_ACCESS_KEY", None) + env.pop("AWS_SESSION_TOKEN", None) + env["AWS_CONFIG_FILE"] = custom_config + env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred process = Popen(command_list, stdout=PIPE, env=env) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - environ = json.loads(process_stdout.decode('utf-8')) + environ = json.loads(process_stdout.decode("utf-8")) - self.assertEquals(environ["AWS_DEFAULT_REGION"], 'us-west-1') - self.assertEquals(environ["AWS_REGION"], 'us-west-1') - self.assertEquals(environ["AWS_ACCESS_KEY_ID"], 'someaccesskeyid') - self.assertEquals(environ["AWS_SECRET_ACCESS_KEY"], 'shhhhhthisisasecret') - self.assertEquals(environ["AWS_SESSION_TOKEN"], 'sessiontoken') + self.assertEqual(environ["AWS_DEFAULT_REGION"], "us-west-1") + self.assertEqual(environ["AWS_REGION"], "us-west-1") + self.assertEqual(environ["AWS_ACCESS_KEY_ID"], "someaccesskeyid") + self.assertEqual(environ["AWS_SECRET_ACCESS_KEY"], "shhhhhthisisasecret") + self.assertEqual(environ["AWS_SESSION_TOKEN"], "sessiontoken") def test_custom_profile_through_envrionment_variables(self): # When using a custom profile in a custom location, you need both the config @@ -392,32 +391,32 @@ def test_custom_profile_through_envrionment_variables(self): custom_cred = self._create_cred_file("custom") - command_list = self.get_command_list("EchoEnvWithParameters", - template_path=self.template_path, - event_path=self.event_path) + command_list = self.get_command_list( + "EchoEnvWithParameters", template_path=self.template_path, event_path=self.event_path + ) env = os.environ.copy() # Explicitly clean environment variables beforehand - env.pop('AWS_DEFAULT_REGION', None) - env.pop('AWS_REGION', None) - env.pop('AWS_ACCESS_KEY_ID', None) - env.pop('AWS_SECRET_ACCESS_KEY', None) - env.pop('AWS_SESSION_TOKEN', None) - env['AWS_CONFIG_FILE'] = custom_config - env['AWS_SHARED_CREDENTIALS_FILE'] = custom_cred - env['AWS_PROFILE'] = "custom" + env.pop("AWS_DEFAULT_REGION", None) + env.pop("AWS_REGION", None) + env.pop("AWS_ACCESS_KEY_ID", None) + env.pop("AWS_SECRET_ACCESS_KEY", None) + env.pop("AWS_SESSION_TOKEN", None) + env["AWS_CONFIG_FILE"] = custom_config + env["AWS_SHARED_CREDENTIALS_FILE"] = custom_cred + env["AWS_PROFILE"] = "custom" process = 
Popen(command_list, stdout=PIPE, env=env) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() - environ = json.loads(process_stdout.decode('utf-8')) + environ = json.loads(process_stdout.decode("utf-8")) - self.assertEquals(environ["AWS_DEFAULT_REGION"], 'us-west-1') - self.assertEquals(environ["AWS_REGION"], 'us-west-1') - self.assertEquals(environ["AWS_ACCESS_KEY_ID"], 'someaccesskeyid') - self.assertEquals(environ["AWS_SECRET_ACCESS_KEY"], 'shhhhhthisisasecret') - self.assertEquals(environ["AWS_SESSION_TOKEN"], 'sessiontoken') + self.assertEqual(environ["AWS_DEFAULT_REGION"], "us-west-1") + self.assertEqual(environ["AWS_REGION"], "us-west-1") + self.assertEqual(environ["AWS_ACCESS_KEY_ID"], "someaccesskeyid") + self.assertEqual(environ["AWS_SECRET_ACCESS_KEY"], "shhhhhthisisasecret") + self.assertEqual(environ["AWS_SESSION_TOKEN"], "sessiontoken") def _create_config_file(self, profile): if profile == "default": @@ -432,18 +431,19 @@ def _create_config_file(self, profile): def _create_cred_file(self, profile): cred_file_content = "[{}]\naws_access_key_id = someaccesskeyid\naws_secret_access_key = shhhhhthisisasecret \ - \naws_session_token = sessiontoken".format(profile) + \naws_session_token = sessiontoken".format( + profile + ) custom_cred = os.path.join(self.config_dir, "customcred") with open(custom_cred, "w") as file: file.write(cred_file_content) return custom_cred -@skipIf(SKIP_LAYERS_TESTS, - "Skip layers tests in Travis only") +@skipIf(SKIP_LAYERS_TESTS, "Skip layers tests in Travis only") class TestLayerVersion(InvokeIntegBase): template = Path("layers", "layer-template.yml") - region = 'us-west-2' + region = "us-west-2" layer_utils = LayerUtils(region=region) def setUp(self): @@ -451,7 +451,7 @@ def setUp(self): def tearDown(self): docker_client = docker.from_env() - samcli_images = docker_client.images.list(name='samcli/lambda') + samcli_images = docker_client.images.list(name="samcli/lambda") for image in samcli_images: docker_client.images.remove(image.id) @@ -468,7 +468,7 @@ def tearDownClass(cls): cls.layer_utils.delete_layers() # Added to handle the case where ^C failed the test due to invalid cleanup of layers docker_client = docker.from_env() - samcli_images = docker_client.images.list(name='samcli/lambda') + samcli_images = docker_client.images.list(name="samcli/lambda") for image in samcli_images: docker_client.images.remove(image.id) integ_layer_cache_dir = Path().home().joinpath("integ_layer_cache") @@ -477,21 +477,24 @@ def tearDownClass(cls): super(TestLayerVersion, cls).tearDownClass() - @parameterized.expand([ - ("ReferenceServerlessLayerVersionServerlessFunction"), - ("ReferenceLambdaLayerVersionServerlessFunction"), - ("ReferenceServerlessLayerVersionLambdaFunction"), - ("ReferenceLambdaLayerVersionLambdaFunction"), - ("ReferenceServerlessLayerVersionServerlessFunction") - ]) + @parameterized.expand( + [ + ("ReferenceServerlessLayerVersionServerlessFunction"), + ("ReferenceLambdaLayerVersionServerlessFunction"), + ("ReferenceServerlessLayerVersionLambdaFunction"), + ("ReferenceLambdaLayerVersionLambdaFunction"), + ("ReferenceServerlessLayerVersionServerlessFunction"), + ] + ) def test_reference_of_layer_version(self, function_logical_id): - command_list = self.get_command_list(function_logical_id, - template_path=self.template_path, - no_event=True, - region=self.region, - layer_cache=str(self.layer_cache), - parameter_overrides=self.layer_utils.parameters_overrides - ) + command_list = self.get_command_list( + function_logical_id, + 
template_path=self.template_path, + no_event=True, + region=self.region, + layer_cache=str(self.layer_cache), + parameter_overrides=self.layer_utils.parameters_overrides, + ) process = Popen(command_list, stdout=PIPE) process.wait() @@ -500,20 +503,18 @@ def test_reference_of_layer_version(self, function_logical_id): expected_output = '"This is a Layer Ping from simple_python"' - self.assertEquals(process_stdout.decode('utf-8'), expected_output) + self.assertEqual(process_stdout.decode("utf-8"), expected_output) - @parameterized.expand([ - ("OneLayerVersionServerlessFunction"), - ("OneLayerVersionLambdaFunction") - ]) + @parameterized.expand([("OneLayerVersionServerlessFunction"), ("OneLayerVersionLambdaFunction")]) def test_download_one_layer(self, function_logical_id): - command_list = self.get_command_list(function_logical_id, - template_path=self.template_path, - no_event=True, - region=self.region, - layer_cache=str(self.layer_cache), - parameter_overrides=self.layer_utils.parameters_overrides - ) + command_list = self.get_command_list( + function_logical_id, + template_path=self.template_path, + no_event=True, + region=self.region, + layer_cache=str(self.layer_cache), + parameter_overrides=self.layer_utils.parameters_overrides, + ) process = Popen(command_list, stdout=PIPE) process.wait() @@ -521,25 +522,21 @@ def test_download_one_layer(self, function_logical_id): process_stdout = b"".join(process.stdout.readlines()[-1:]).strip() expected_output = '"Layer1"' - self.assertEquals(process_stdout.decode('utf-8'), expected_output) + self.assertEqual(process_stdout.decode("utf-8"), expected_output) - @parameterized.expand([ - ("ChangedLayerVersionServerlessFunction"), - ("ChangedLayerVersionLambdaFunction") - ]) + @parameterized.expand([("ChangedLayerVersionServerlessFunction"), ("ChangedLayerVersionLambdaFunction")]) def test_publish_changed_download_layer(self, function_logical_id): layer_name = self.layer_utils.generate_layer_name() - self.layer_utils.upsert_layer(layer_name=layer_name, - ref_layer_name="ChangedLayerArn", - layer_zip="layer1.zip") - - command_list = self.get_command_list(function_logical_id, - template_path=self.template_path, - no_event=True, - region=self.region, - layer_cache=str(self.layer_cache), - parameter_overrides=self.layer_utils.parameters_overrides - ) + self.layer_utils.upsert_layer(layer_name=layer_name, ref_layer_name="ChangedLayerArn", layer_zip="layer1.zip") + + command_list = self.get_command_list( + function_logical_id, + template_path=self.template_path, + no_event=True, + region=self.region, + layer_cache=str(self.layer_cache), + parameter_overrides=self.layer_utils.parameters_overrides, + ) process = Popen(command_list, stdout=PIPE) process.wait() @@ -547,19 +544,20 @@ def test_publish_changed_download_layer(self, function_logical_id): process_stdout = b"".join(process.stdout.readlines()[-1:]).strip() expected_output = '"Layer1"' - self.assertEquals(process_stdout.decode('utf-8'), expected_output) + self.assertEqual(process_stdout.decode("utf-8"), expected_output) - self.layer_utils.upsert_layer(layer_name=layer_name, - ref_layer_name="ChangedLayerArn", - layer_zip="changedlayer1.zip") + self.layer_utils.upsert_layer( + layer_name=layer_name, ref_layer_name="ChangedLayerArn", layer_zip="changedlayer1.zip" + ) - command_list = self.get_command_list(function_logical_id, - template_path=self.template_path, - no_event=True, - region=self.region, - layer_cache=str(self.layer_cache), - parameter_overrides=self.layer_utils.parameters_overrides - ) + 
command_list = self.get_command_list( + function_logical_id, + template_path=self.template_path, + no_event=True, + region=self.region, + layer_cache=str(self.layer_cache), + parameter_overrides=self.layer_utils.parameters_overrides, + ) process = Popen(command_list, stdout=PIPE) process.wait() @@ -567,21 +565,19 @@ def test_publish_changed_download_layer(self, function_logical_id): process_stdout = b"".join(process.stdout.readlines()[-1:]).strip() expected_output = '"Changed_Layer_1"' - self.assertEquals(process_stdout.decode('utf-8'), expected_output) + self.assertEqual(process_stdout.decode("utf-8"), expected_output) - @parameterized.expand([ - ("TwoLayerVersionServerlessFunction"), - ("TwoLayerVersionLambdaFunction") - ]) + @parameterized.expand([("TwoLayerVersionServerlessFunction"), ("TwoLayerVersionLambdaFunction")]) def test_download_two_layers(self, function_logical_id): - command_list = self.get_command_list(function_logical_id, - template_path=self.template_path, - no_event=True, - region=self.region, - layer_cache=str(self.layer_cache), - parameter_overrides=self.layer_utils.parameters_overrides - ) + command_list = self.get_command_list( + function_logical_id, + template_path=self.template_path, + no_event=True, + region=self.region, + layer_cache=str(self.layer_cache), + parameter_overrides=self.layer_utils.parameters_overrides, + ) process = Popen(command_list, stdout=PIPE) process.wait() @@ -591,31 +587,33 @@ def test_download_two_layers(self, function_logical_id): process_stdout = b"".join(stdout[-1:]).strip() expected_output = '"Layer2"' - self.assertEquals(process_stdout.decode('utf-8'), expected_output) + self.assertEqual(process_stdout.decode("utf-8"), expected_output) def test_caching_two_layers(self): - command_list = self.get_command_list("TwoLayerVersionServerlessFunction", - template_path=self.template_path, - no_event=True, - region=self.region, - layer_cache=str(self.layer_cache), - parameter_overrides=self.layer_utils.parameters_overrides - ) + command_list = self.get_command_list( + "TwoLayerVersionServerlessFunction", + template_path=self.template_path, + no_event=True, + region=self.region, + layer_cache=str(self.layer_cache), + parameter_overrides=self.layer_utils.parameters_overrides, + ) process = Popen(command_list, stdout=PIPE) process.wait() - self.assertEquals(2, len(os.listdir(str(self.layer_cache)))) + self.assertEqual(2, len(os.listdir(str(self.layer_cache)))) def test_caching_two_layers_with_layer_cache_env_set(self): - command_list = self.get_command_list("TwoLayerVersionServerlessFunction", - template_path=self.template_path, - no_event=True, - region=self.region, - parameter_overrides=self.layer_utils.parameters_overrides - ) + command_list = self.get_command_list( + "TwoLayerVersionServerlessFunction", + template_path=self.template_path, + no_event=True, + region=self.region, + parameter_overrides=self.layer_utils.parameters_overrides, + ) env = os.environ.copy() env["SAM_LAYER_CACHE_BASEDIR"] = str(self.layer_cache) @@ -623,14 +621,13 @@ def test_caching_two_layers_with_layer_cache_env_set(self): process = Popen(command_list, stdout=PIPE, env=env) process.wait() - self.assertEquals(2, len(os.listdir(str(self.layer_cache)))) + self.assertEqual(2, len(os.listdir(str(self.layer_cache)))) -@skipIf(SKIP_LAYERS_TESTS, - "Skip layers tests in Travis only") +@skipIf(SKIP_LAYERS_TESTS, "Skip layers tests in Travis only") class TestLayerVersionThatDoNotCreateCache(InvokeIntegBase): template = Path("layers", "layer-template.yml") - region = 'us-west-2' 
+ region = "us-west-2" layer_utils = LayerUtils(region=region) def setUp(self): @@ -638,29 +635,29 @@ def setUp(self): def tearDown(self): docker_client = docker.from_env() - samcli_images = docker_client.images.list(name='samcli/lambda') + samcli_images = docker_client.images.list(name="samcli/lambda") for image in samcli_images: docker_client.images.remove(image.id) def test_layer_does_not_exist(self): self.layer_utils.upsert_layer(LayerUtils.generate_layer_name(), "LayerOneArn", "layer1.zip") non_existent_layer_arn = self.layer_utils.parameters_overrides["LayerOneArn"].replace( - self.layer_utils.layers_meta[0].layer_name, 'non_existent_layer') + self.layer_utils.layers_meta[0].layer_name, "non_existent_layer" + ) - command_list = self.get_command_list("LayerVersionDoesNotExistFunction", - template_path=self.template_path, - no_event=True, - region=self.region, - parameter_overrides={ - 'NonExistentLayerArn': non_existent_layer_arn - } - ) + command_list = self.get_command_list( + "LayerVersionDoesNotExistFunction", + template_path=self.template_path, + no_event=True, + region=self.region, + parameter_overrides={"NonExistentLayerArn": non_existent_layer_arn}, + ) process = Popen(command_list, stderr=PIPE) process.wait() process_stderr = b"".join(process.stderr.readlines()).strip() - error_output = process_stderr.decode('utf-8') + error_output = process_stderr.decode("utf-8") expected_error_output = "{} was not found.".format(non_existent_layer_arn) @@ -668,19 +665,22 @@ def test_layer_does_not_exist(self): self.layer_utils.delete_layers() def test_account_does_not_exist_for_layer(self): - command_list = self.get_command_list("LayerVersionAccountDoesNotExistFunction", - template_path=self.template_path, - no_event=True, - region=self.region - ) + command_list = self.get_command_list( + "LayerVersionAccountDoesNotExistFunction", + template_path=self.template_path, + no_event=True, + region=self.region, + ) process = Popen(command_list, stderr=PIPE) process.wait() process_stderr = b"".join(process.stderr.readlines()).strip() - error_output = process_stderr.decode('utf-8') + error_output = process_stderr.decode("utf-8") - expected_error_output = "Credentials provided are missing lambda:Getlayerversion policy that is needed to " \ - "download the layer or you do not have permission to download the layer" + expected_error_output = ( + "Credentials provided are missing lambda:Getlayerversion policy that is needed to " + "download the layer or you do not have permission to download the layer" + ) self.assertIn(expected_error_output, error_output) diff --git a/tests/integration/local/start_api/start_api_integ_base.py b/tests/integration/local/start_api/start_api_integ_base.py index 08306a2649..a212eb1036 100644 --- a/tests/integration/local/start_api/start_api_integ_base.py +++ b/tests/integration/local/start_api/start_api_integ_base.py @@ -37,7 +37,7 @@ def start_api(cls): if os.getenv("SAM_CLI_DEV"): command = "samdev" - cls.start_api_process = Popen([command, "local", "start-api", "-t", cls.template, "-p", cls.port, "--debug"]) + cls.start_api_process = Popen([command, "local", "start-api", "-t", cls.template, "-p", cls.port]) # we need to wait some time for start-api to start, hence the sleep time.sleep(5) diff --git a/tests/integration/local/start_api/test_start_api.py b/tests/integration/local/start_api/test_start_api.py index 724eedcb43..08a68ed70c 100644 --- a/tests/integration/local/start_api/test_start_api.py +++ b/tests/integration/local/start_api/test_start_api.py @@ -34,15 +34,12 @@ 
def test_same_endpoint(self): end_time = time() - self.assertEquals(len(results), 10) + self.assertEqual(len(results), 10) self.assertGreater(end_time - start_time, 10) - self.assertLess(end_time - start_time, 20) for result in results: - self.assertEquals(result.status_code, 200) - self.assertEquals( - result.json(), {"message": "HelloWorld! I just slept and waking up."} - ) + self.assertEqual(result.status_code, 200) + self.assertEqual(result.json(), {"message": "HelloWorld! I just slept and waking up."}) def test_different_endpoints(self): """ @@ -53,31 +50,22 @@ def test_different_endpoints(self): start_time = time() thread_pool = ThreadPoolExecutor(10) - test_url_paths = [ - "/sleepfortenseconds/function0", - "/sleepfortenseconds/function1", - ] + test_url_paths = ["/sleepfortenseconds/function0", "/sleepfortenseconds/function1"] futures = [ - thread_pool.submit( - requests.get, - self.url + test_url_paths[function_num % len(test_url_paths)], - ) + thread_pool.submit(requests.get, self.url + test_url_paths[function_num % len(test_url_paths)]) for function_num in range(0, number_of_requests) ] results = [r.result() for r in as_completed(futures)] end_time = time() - self.assertEquals(len(results), 10) + self.assertEqual(len(results), 10) self.assertGreater(end_time - start_time, 10) - self.assertLess(end_time - start_time, 20) for result in results: - self.assertEquals(result.status_code, 200) - self.assertEquals( - result.json(), {"message": "HelloWorld! I just slept and waking up."} - ) + self.assertEqual(result.status_code, 200) + self.assertEqual(result.json(), {"message": "HelloWorld! I just slept and waking up."}) class TestServiceErrorResponses(StartApiIntegBaseClass): @@ -94,20 +82,20 @@ def setUp(self): def test_invalid_http_verb_for_endpoint(self): response = requests.get(self.url + "/id") - self.assertEquals(response.status_code, 403) - self.assertEquals(response.json(), {"message": "Missing Authentication Token"}) + self.assertEqual(response.status_code, 403) + self.assertEqual(response.json(), {"message": "Missing Authentication Token"}) def test_invalid_response_from_lambda(self): response = requests.get(self.url + "/invalidresponsereturned") - self.assertEquals(response.status_code, 502) - self.assertEquals(response.json(), {"message": "Internal server error"}) + self.assertEqual(response.status_code, 502) + self.assertEqual(response.json(), {"message": "Internal server error"}) def test_invalid_json_response_from_lambda(self): response = requests.get(self.url + "/invalidresponsehash") - self.assertEquals(response.status_code, 502) - self.assertEquals(response.json(), {"message": "Internal server error"}) + self.assertEqual(response.status_code, 502) + self.assertEqual(response.json(), {"message": "Internal server error"}) def test_request_timeout(self): pass @@ -129,8 +117,8 @@ def test_static_directory(self): def test_calling_proxy_endpoint(self): response = requests.get(self.url + "/proxypath/this/is/some/path") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_get_call_with_path_setup_with_any_implicit_api(self): """ @@ -138,8 +126,8 @@ def test_get_call_with_path_setup_with_any_implicit_api(self): """ response = requests.get(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + 
self.assertEqual(response.json(), {"hello": "world"}) def test_post_call_with_path_setup_with_any_implicit_api(self): """ @@ -147,8 +135,8 @@ def test_post_call_with_path_setup_with_any_implicit_api(self): """ response = requests.post(self.url + "/anyandall", json={}) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_put_call_with_path_setup_with_any_implicit_api(self): """ @@ -156,8 +144,8 @@ def test_put_call_with_path_setup_with_any_implicit_api(self): """ response = requests.put(self.url + "/anyandall", json={}) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_head_call_with_path_setup_with_any_implicit_api(self): """ @@ -165,7 +153,7 @@ def test_head_call_with_path_setup_with_any_implicit_api(self): """ response = requests.head(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) def test_delete_call_with_path_setup_with_any_implicit_api(self): """ @@ -173,8 +161,8 @@ def test_delete_call_with_path_setup_with_any_implicit_api(self): """ response = requests.delete(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_options_call_with_path_setup_with_any_implicit_api(self): """ @@ -182,7 +170,7 @@ def test_options_call_with_path_setup_with_any_implicit_api(self): """ response = requests.options(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) def test_patch_call_with_path_setup_with_any_implicit_api(self): """ @@ -190,8 +178,8 @@ def test_patch_call_with_path_setup_with_any_implicit_api(self): """ response = requests.patch(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) class TestStartApiWithSwaggerApis(StartApiIntegBaseClass): @@ -207,8 +195,8 @@ def test_get_call_with_path_setup_with_any_swagger(self): """ response = requests.get(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_post_call_with_path_setup_with_any_swagger(self): """ @@ -216,8 +204,8 @@ def test_post_call_with_path_setup_with_any_swagger(self): """ response = requests.post(self.url + "/anyandall", json={}) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_put_call_with_path_setup_with_any_swagger(self): """ @@ -225,8 +213,8 @@ def test_put_call_with_path_setup_with_any_swagger(self): """ response = requests.put(self.url + "/anyandall", json={}) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def 
test_head_call_with_path_setup_with_any_swagger(self): """ @@ -234,7 +222,7 @@ def test_head_call_with_path_setup_with_any_swagger(self): """ response = requests.head(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) def test_delete_call_with_path_setup_with_any_swagger(self): """ @@ -242,8 +230,8 @@ def test_delete_call_with_path_setup_with_any_swagger(self): """ response = requests.delete(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_options_call_with_path_setup_with_any_swagger(self): """ @@ -251,7 +239,7 @@ def test_options_call_with_path_setup_with_any_swagger(self): """ response = requests.options(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) def test_patch_call_with_path_setup_with_any_swagger(self): """ @@ -259,28 +247,26 @@ def test_patch_call_with_path_setup_with_any_swagger(self): """ response = requests.patch(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_function_not_defined_in_template(self): response = requests.get(self.url + "/nofunctionfound") - self.assertEquals(response.status_code, 502) - self.assertEquals( - response.json(), {"message": "No function defined for resource method"} - ) + self.assertEqual(response.status_code, 502) + self.assertEqual(response.json(), {"message": "No function defined for resource method"}) def test_function_with_no_api_event_is_reachable(self): response = requests.get(self.url + "/functionwithnoapievent") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_lambda_function_resource_is_reachable(self): response = requests.get(self.url + "/nonserverlessfunction") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_binary_request(self): """ @@ -288,14 +274,12 @@ def test_binary_request(self): """ input_data = self.get_binary_data(self.binary_data_file) response = requests.post( - self.url + "/echobase64eventbody", - headers={"Content-Type": "image/gif"}, - data=input_data, + self.url + "/echobase64eventbody", headers={"Content-Type": "image/gif"}, data=input_data ) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "image/gif") - self.assertEquals(response.content, input_data) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, input_data) def test_binary_response(self): """ @@ -305,9 +289,9 @@ def test_binary_response(self): response = requests.get(self.url + "/base64response") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "image/gif") - self.assertEquals(response.content, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") 
+ self.assertEqual(response.content, expected) class TestStartApiWithSwaggerRestApis(StartApiIntegBaseClass): @@ -323,8 +307,8 @@ def test_get_call_with_path_setup_with_any_swagger(self): """ response = requests.get(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_post_call_with_path_setup_with_any_swagger(self): """ @@ -332,8 +316,8 @@ def test_post_call_with_path_setup_with_any_swagger(self): """ response = requests.post(self.url + "/anyandall", json={}) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_put_call_with_path_setup_with_any_swagger(self): """ @@ -341,8 +325,8 @@ def test_put_call_with_path_setup_with_any_swagger(self): """ response = requests.put(self.url + "/anyandall", json={}) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_head_call_with_path_setup_with_any_swagger(self): """ @@ -350,7 +334,7 @@ def test_head_call_with_path_setup_with_any_swagger(self): """ response = requests.head(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) def test_delete_call_with_path_setup_with_any_swagger(self): """ @@ -358,8 +342,8 @@ def test_delete_call_with_path_setup_with_any_swagger(self): """ response = requests.delete(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_options_call_with_path_setup_with_any_swagger(self): """ @@ -367,7 +351,7 @@ def test_options_call_with_path_setup_with_any_swagger(self): """ response = requests.options(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) def test_patch_call_with_path_setup_with_any_swagger(self): """ @@ -375,22 +359,20 @@ def test_patch_call_with_path_setup_with_any_swagger(self): """ response = requests.patch(self.url + "/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_function_not_defined_in_template(self): response = requests.get(self.url + "/nofunctionfound") - self.assertEquals(response.status_code, 502) - self.assertEquals( - response.json(), {"message": "No function defined for resource method"} - ) + self.assertEqual(response.status_code, 502) + self.assertEqual(response.json(), {"message": "No function defined for resource method"}) def test_lambda_function_resource_is_reachable(self): response = requests.get(self.url + "/nonserverlessfunction") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_binary_request(self): """ @@ -398,14 +380,12 @@ def test_binary_request(self): """ input_data = self.get_binary_data(self.binary_data_file) response = requests.post( - 
self.url + "/echobase64eventbody", - headers={"Content-Type": "image/gif"}, - data=input_data, + self.url + "/echobase64eventbody", headers={"Content-Type": "image/gif"}, data=input_data ) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "image/gif") - self.assertEquals(response.content, input_data) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, input_data) def test_binary_response(self): """ @@ -415,9 +395,9 @@ def test_binary_response(self): response = requests.get(self.url + "/base64response") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "image/gif") - self.assertEquals(response.content, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, expected) class TestServiceResponses(StartApiIntegBaseClass): @@ -434,18 +414,16 @@ def setUp(self): def test_multiple_headers_response(self): response = requests.get(self.url + "/multipleheaders") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "text/plain") - self.assertEquals(response.headers.get("MyCustomHeader"), "Value1, Value2") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "text/plain") + self.assertEqual(response.headers.get("MyCustomHeader"), "Value1, Value2") def test_multiple_headers_overrides_headers_response(self): response = requests.get(self.url + "/multipleheadersoverridesheaders") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "text/plain") - self.assertEquals( - response.headers.get("MyCustomHeader"), "Value1, Value2, Custom" - ) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "text/plain") + self.assertEqual(response.headers.get("MyCustomHeader"), "Value1, Value2, Custom") def test_binary_response(self): """ @@ -455,9 +433,9 @@ def test_binary_response(self): response = requests.get(self.url + "/base64response") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "image/gif") - self.assertEquals(response.content, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, expected) def test_default_header_content_type(self): """ @@ -465,9 +443,9 @@ def test_default_header_content_type(self): """ response = requests.get(self.url + "/onlysetstatuscode") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.content.decode("utf-8"), "no data") - self.assertEquals(response.headers.get("Content-Type"), "application/json") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content.decode("utf-8"), "no data") + self.assertEqual(response.headers.get("Content-Type"), "application/json") def test_default_status_code(self): """ @@ -476,8 +454,8 @@ def test_default_status_code(self): """ response = requests.get(self.url + "/onlysetbody") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_string_status_code(self): """ @@ -485,7 +463,7 @@ 
def test_string_status_code(self): """ response = requests.get(self.url + "/stringstatuscode") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) def test_default_body(self): """ @@ -493,26 +471,26 @@ def test_default_body(self): """ response = requests.get(self.url + "/onlysetstatuscode") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.content.decode("utf-8"), "no data") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content.decode("utf-8"), "no data") def test_function_writing_to_stdout(self): response = requests.get(self.url + "/writetostdout") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_function_writing_to_stderr(self): response = requests.get(self.url + "/writetostderr") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_integer_body(self): response = requests.get(self.url + "/echo_integer_body") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.content.decode("utf-8"), "42") + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content.decode("utf-8"), "42") class TestServiceRequests(StartApiIntegBaseClass): @@ -532,59 +510,49 @@ def test_binary_request(self): """ input_data = self.get_binary_data(self.binary_data_file) response = requests.post( - self.url + "/echobase64eventbody", - headers={"Content-Type": "image/gif"}, - data=input_data, + self.url + "/echobase64eventbody", headers={"Content-Type": "image/gif"}, data=input_data ) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "image/gif") - self.assertEquals(response.content, input_data) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, input_data) def test_request_with_form_data(self): """ Form-encoded data should be put into the Event to Lambda """ response = requests.post( - self.url + "/echoeventbody", - headers={"Content-Type": "application/x-www-form-urlencoded"}, - data="key=value", + self.url + "/echoeventbody", headers={"Content-Type": "application/x-www-form-urlencoded"}, data="key=value" ) - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals( - response_data.get("headers").get("Content-Type"), - "application/x-www-form-urlencoded", - ) - self.assertEquals(response_data.get("body"), "key=value") + self.assertEqual(response_data.get("headers").get("Content-Type"), "application/x-www-form-urlencoded") + self.assertEqual(response_data.get("body"), "key=value") def test_request_to_an_endpoint_with_two_different_handlers(self): response = requests.get(self.url + "/echoeventbody") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("handler"), "echo_event_handler_2") + self.assertEqual(response_data.get("handler"), "echo_event_handler_2") def test_request_with_multi_value_headers(self): response = requests.get( - self.url + "/echoeventbody", - headers={"Content-Type": 
"application/x-www-form-urlencoded, image/gif"}, + self.url + "/echoeventbody", headers={"Content-Type": "application/x-www-form-urlencoded, image/gif"} ) - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals( - response_data.get("multiValueHeaders").get("Content-Type"), - ["application/x-www-form-urlencoded, image/gif"], + self.assertEqual( + response_data.get("multiValueHeaders").get("Content-Type"), ["application/x-www-form-urlencoded, image/gif"] ) - self.assertEquals( - response_data.get("headers").get("Content-Type"), - "application/x-www-form-urlencoded, image/gif", + self.assertEqual( + response_data.get("headers").get("Content-Type"), "application/x-www-form-urlencoded, image/gif" ) def test_request_with_query_params(self): @@ -593,14 +561,12 @@ def test_request_with_query_params(self): """ response = requests.get(self.url + "/id/4", params={"key": "value"}) - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("queryStringParameters"), {"key": "value"}) - self.assertEquals( - response_data.get("multiValueQueryStringParameters"), {"key": ["value"]} - ) + self.assertEqual(response_data.get("queryStringParameters"), {"key": "value"}) + self.assertEqual(response_data.get("multiValueQueryStringParameters"), {"key": ["value"]}) def test_request_with_list_of_query_params(self): """ @@ -608,15 +574,12 @@ def test_request_with_list_of_query_params(self): """ response = requests.get(self.url + "/id/4", params={"key": ["value", "value2"]}) - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("queryStringParameters"), {"key": "value2"}) - self.assertEquals( - response_data.get("multiValueQueryStringParameters"), - {"key": ["value", "value2"]}, - ) + self.assertEqual(response_data.get("queryStringParameters"), {"key": "value2"}) + self.assertEqual(response_data.get("multiValueQueryStringParameters"), {"key": ["value", "value2"]}) def test_request_with_path_params(self): """ @@ -624,11 +587,11 @@ def test_request_with_path_params(self): """ response = requests.get(self.url + "/id/4") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("pathParameters"), {"id": "4"}) + self.assertEqual(response_data.get("pathParameters"), {"id": "4"}) def test_request_with_many_path_params(self): """ @@ -636,13 +599,11 @@ def test_request_with_many_path_params(self): """ response = requests.get(self.url + "/id/4/user/jacob") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals( - response_data.get("pathParameters"), {"id": "4", "user": "jacob"} - ) + self.assertEqual(response_data.get("pathParameters"), {"id": "4", "user": "jacob"}) def test_forward_headers_are_added_to_event(self): """ @@ -652,16 +613,10 @@ def test_forward_headers_are_added_to_event(self): response_data = response.json() - self.assertEquals(response_data.get("headers").get("X-Forwarded-Proto"), "http") - self.assertEquals( - response_data.get("multiValueHeaders").get("X-Forwarded-Proto"), ["http"] - ) - self.assertEquals( - response_data.get("headers").get("X-Forwarded-Port"), self.port - ) - self.assertEquals( - 
response_data.get("multiValueHeaders").get("X-Forwarded-Port"), [self.port] - ) + self.assertEqual(response_data.get("headers").get("X-Forwarded-Proto"), "http") + self.assertEqual(response_data.get("multiValueHeaders").get("X-Forwarded-Proto"), ["http"]) + self.assertEqual(response_data.get("headers").get("X-Forwarded-Port"), self.port) + self.assertEqual(response_data.get("multiValueHeaders").get("X-Forwarded-Port"), [self.port]) class TestStartApiWithStage(StartApiIntegBaseClass): @@ -677,20 +632,20 @@ def setUp(self): def test_default_stage_name(self): response = requests.get(self.url + "/echoeventbody") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("requestContext", {}).get("stage"), "Prod") + self.assertEqual(response_data.get("requestContext", {}).get("stage"), "Prod") def test_global_stage_variables(self): response = requests.get(self.url + "/echoeventbody") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("stageVariables"), {"VarName": "varValue"}) + self.assertEqual(response_data.get("stageVariables"), {"VarName": "varValue"}) class TestStartApiWithStageAndSwagger(StartApiIntegBaseClass): @@ -706,18 +661,18 @@ def setUp(self): def test_swagger_stage_name(self): response = requests.get(self.url + "/echoeventbody") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("requestContext", {}).get("stage"), "dev") + self.assertEqual(response_data.get("requestContext", {}).get("stage"), "dev") def test_swagger_stage_variable(self): response = requests.get(self.url + "/echoeventbody") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("stageVariables"), {"VarName": "varValue"}) + self.assertEqual(response_data.get("stageVariables"), {"VarName": "varValue"}) class TestServiceCorsSwaggerRequests(StartApiIntegBaseClass): @@ -737,17 +692,12 @@ def test_cors_swagger_options(self): """ response = requests.options(self.url + "/echobase64eventbody") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) - self.assertEquals(response.headers.get("Access-Control-Allow-Origin"), "*") - self.assertEquals( - response.headers.get("Access-Control-Allow-Headers"), - "origin, x-requested-with", - ) - self.assertEquals( - response.headers.get("Access-Control-Allow-Methods"), "GET,OPTIONS" - ) - self.assertEquals(response.headers.get("Access-Control-Max-Age"), "510") + self.assertEqual(response.headers.get("Access-Control-Allow-Origin"), "*") + self.assertEqual(response.headers.get("Access-Control-Allow-Headers"), "origin, x-requested-with") + self.assertEqual(response.headers.get("Access-Control-Allow-Methods"), "GET,OPTIONS") + self.assertEqual(response.headers.get("Access-Control-Max-Age"), "510") class TestServiceCorsGlobalRequests(StartApiIntegBaseClass): @@ -766,14 +716,11 @@ def test_cors_global(self): """ response = requests.options(self.url + "/echobase64eventbody") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Access-Control-Allow-Origin"), "*") - self.assertEquals(response.headers.get("Access-Control-Allow-Headers"), None) - self.assertEquals( - 
response.headers.get("Access-Control-Allow-Methods"), - ",".join(sorted(Route.ANY_HTTP_METHODS)), - ) - self.assertEquals(response.headers.get("Access-Control-Max-Age"), None) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Access-Control-Allow-Origin"), "*") + self.assertEqual(response.headers.get("Access-Control-Allow-Headers"), None) + self.assertEqual(response.headers.get("Access-Control-Allow-Methods"), ",".join(sorted(Route.ANY_HTTP_METHODS))) + self.assertEqual(response.headers.get("Access-Control-Max-Age"), None) def test_cors_global_get(self): """ @@ -781,13 +728,13 @@ def test_cors_global_get(self): """ response = requests.get(self.url + "/onlysetstatuscode") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.content.decode("utf-8"), "no data") - self.assertEquals(response.headers.get("Content-Type"), "application/json") - self.assertEquals(response.headers.get("Access-Control-Allow-Origin"), None) - self.assertEquals(response.headers.get("Access-Control-Allow-Headers"), None) - self.assertEquals(response.headers.get("Access-Control-Allow-Methods"), None) - self.assertEquals(response.headers.get("Access-Control-Max-Age"), None) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content.decode("utf-8"), "no data") + self.assertEqual(response.headers.get("Content-Type"), "application/json") + self.assertEqual(response.headers.get("Access-Control-Allow-Origin"), None) + self.assertEqual(response.headers.get("Access-Control-Allow-Headers"), None) + self.assertEqual(response.headers.get("Access-Control-Allow-Methods"), None) + self.assertEqual(response.headers.get("Access-Control-Max-Age"), None) class TestStartApiWithCloudFormationStage(StartApiIntegBaseClass): @@ -803,19 +750,19 @@ def setUp(self): def test_default_stage_name(self): response = requests.get(self.url + "/echoeventbody") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("requestContext", {}).get("stage"), "Dev") + self.assertEqual(response_data.get("requestContext", {}).get("stage"), "Dev") def test_global_stage_variables(self): response = requests.get(self.url + "/echoeventbody") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response_data = response.json() - self.assertEquals(response_data.get("stageVariables"), {"Stack": "Dev"}) + self.assertEqual(response_data.get("stageVariables"), {"Stack": "Dev"}) class TestStartApiWithMethodsAndResources(StartApiIntegBaseClass): @@ -831,8 +778,8 @@ def test_get_call_with_path_setup_with_any_swagger(self): """ response = requests.get(self.url + "/root/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_post_call_with_path_setup_with_any_swagger(self): """ @@ -840,8 +787,8 @@ def test_post_call_with_path_setup_with_any_swagger(self): """ response = requests.post(self.url + "/root/anyandall", json={}) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_put_call_with_path_setup_with_any_swagger(self): """ @@ -849,8 +796,8 @@ def test_put_call_with_path_setup_with_any_swagger(self): """ response = 
requests.put(self.url + "/root/anyandall", json={}) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_head_call_with_path_setup_with_any_swagger(self): """ @@ -858,7 +805,7 @@ def test_head_call_with_path_setup_with_any_swagger(self): """ response = requests.head(self.url + "/root/anyandall") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) def test_delete_call_with_path_setup_with_any_swagger(self): """ @@ -866,8 +813,8 @@ def test_delete_call_with_path_setup_with_any_swagger(self): """ response = requests.delete(self.url + "/root/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_options_call_with_path_setup_with_any_swagger(self): """ @@ -875,7 +822,7 @@ def test_options_call_with_path_setup_with_any_swagger(self): """ response = requests.options(self.url + "/root/anyandall") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) def test_patch_call_with_path_setup_with_any_swagger(self): """ @@ -883,22 +830,20 @@ def test_patch_call_with_path_setup_with_any_swagger(self): """ response = requests.patch(self.url + "/root/anyandall") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_function_not_defined_in_template(self): response = requests.get(self.url + "/root/nofunctionfound") - self.assertEquals(response.status_code, 502) - self.assertEquals( - response.json(), {"message": "No function defined for resource method"} - ) + self.assertEqual(response.status_code, 502) + self.assertEqual(response.json(), {"message": "No function defined for resource method"}) def test_lambda_function_resource_is_reachable(self): response = requests.get(self.url + "/root/nonserverlessfunction") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) def test_binary_request(self): """ @@ -906,14 +851,12 @@ def test_binary_request(self): """ input_data = self.get_binary_data(self.binary_data_file) response = requests.post( - self.url + "/root/echobase64eventbody", - headers={"Content-Type": "image/gif"}, - data=input_data, + self.url + "/root/echobase64eventbody", headers={"Content-Type": "image/gif"}, data=input_data ) - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "image/gif") - self.assertEquals(response.content, input_data) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + self.assertEqual(response.content, input_data) def test_binary_response(self): """ @@ -923,9 +866,9 @@ def test_binary_response(self): response = requests.get(self.url + "/root/base64response") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.headers.get("Content-Type"), "image/gif") - self.assertEquals(response.content, expected) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.headers.get("Content-Type"), "image/gif") + 
self.assertEqual(response.content, expected) def test_proxy_response(self): """ @@ -933,8 +876,8 @@ def test_proxy_response(self): """ response = requests.get(self.url + "/root/v1/test") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {"hello": "world"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) class TestCDKApiGateway(StartApiIntegBaseClass): @@ -949,8 +892,8 @@ def test_get_with_cdk(self): """ response = requests.get(self.url + "/hello-world") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {'hello': 'world'}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) class TestServerlessApiGateway(StartApiIntegBaseClass): @@ -965,5 +908,5 @@ def test_get_with_serverless(self): """ response = requests.get(self.url + "/hello-world") - self.assertEquals(response.status_code, 200) - self.assertEquals(response.json(), {'hello': 'world'}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"hello": "world"}) diff --git a/tests/integration/local/start_lambda/start_lambda_api_integ_base.py b/tests/integration/local/start_lambda/start_lambda_api_integ_base.py index 4646ce5625..7fd5099f0a 100644 --- a/tests/integration/local/start_lambda/start_lambda_api_integ_base.py +++ b/tests/integration/local/start_lambda/start_lambda_api_integ_base.py @@ -33,8 +33,8 @@ def start_lambda(cls): if os.getenv("SAM_CLI_DEV"): command = "samdev" - cls.start_lambda_process = \ - Popen([command, "local", "start-lambda", "-t", cls.template, "-p", cls.port, "--debug"]) + cls.start_lambda_process = Popen([command, "local", "start-lambda", "-t", cls.template, "-p", cls.port]) + # we need to wait some time for start-lambda to start, hence the sleep time.sleep(5) diff --git a/tests/integration/local/start_lambda/test_start_lambda.py b/tests/integration/local/start_lambda/test_start_lambda.py index c931cfd735..daae71ffd6 100644 --- a/tests/integration/local/start_lambda/test_start_lambda.py +++ b/tests/integration/local/start_lambda/test_start_lambda.py @@ -14,14 +14,14 @@ class TestParallelRequests(StartLambdaIntegBaseClass): def setUp(self): self.url = "http://127.0.0.1:{}".format(self.port) - self.lambda_client = boto3.client('lambda', - endpoint_url=self.url, - region_name='us-east-1', - use_ssl=False, - verify=False, - config=Config(signature_version=UNSIGNED, - read_timeout=120, - retries={'max_attempts': 0})) + self.lambda_client = boto3.client( + "lambda", + endpoint_url=self.url, + region_name="us-east-1", + use_ssl=False, + verify=False, + config=Config(signature_version=UNSIGNED, read_timeout=120, retries={"max_attempts": 0}), + ) def test_same_endpoint(self): """ @@ -32,36 +32,19 @@ def test_same_endpoint(self): start_time = time() thread_pool = ThreadPoolExecutor(number_of_requests) - futures = [thread_pool.submit(self.lambda_client.invoke, FunctionName="HelloWorldSleepFunction") - for _ in range(0, number_of_requests)] + futures = [ + thread_pool.submit(self.lambda_client.invoke, FunctionName="HelloWorldSleepFunction") + for _ in range(0, number_of_requests) + ] results = [r.result() for r in as_completed(futures)] end_time = time() - self.assertEquals(len(results), 10) + self.assertEqual(len(results), 10) self.assertGreater(end_time - start_time, 10) - self.assertLess(end_time - start_time, 20) for result in results: - self.assertEquals(result.get("Payload").read().decode('utf-8'), '"Slept for 10s"') - - 
-class TestLambdaToLambdaInvoke(StartLambdaIntegBaseClass): - template_path = "/testdata/start_lambda/template.yml" - - def setUp(self): - self.url = "http://127.0.0.1:{}".format(self.port) - self.lambda_client = boto3.client('lambda', - endpoint_url=self.url, - region_name='us-east-1', - use_ssl=False, - verify=False, - config=Config(signature_version=UNSIGNED, - read_timeout=120, - retries={'max_attempts': 0})) - - def test_local_lambda_calling_local_lambda(self): - pass + self.assertEqual(result.get("Payload").read().decode("utf-8"), '"Slept for 10s"') class TestLambdaServiceErrorCases(StartLambdaIntegBaseClass): @@ -69,41 +52,47 @@ class TestLambdaServiceErrorCases(StartLambdaIntegBaseClass): def setUp(self): self.url = "http://127.0.0.1:{}".format(self.port) - self.lambda_client = boto3.client('lambda', - endpoint_url=self.url, - region_name='us-east-1', - use_ssl=False, - verify=False, - config=Config(signature_version=UNSIGNED, - read_timeout=120, - retries={'max_attempts': 0})) + self.lambda_client = boto3.client( + "lambda", + endpoint_url=self.url, + region_name="us-east-1", + use_ssl=False, + verify=False, + config=Config(signature_version=UNSIGNED, read_timeout=120, retries={"max_attempts": 0}), + ) def test_invoke_with_non_json_data(self): - expected_error_message = "An error occurred (InvalidRequestContent) when calling the Invoke operation: " \ - "Could not parse request body into json: No JSON object could be decoded" + expected_error_message = ( + "An error occurred (InvalidRequestContent) when calling the Invoke operation: " + "Could not parse request body into json: No JSON object could be decoded" + ) with self.assertRaises(ClientError) as error: - self.lambda_client.invoke(FunctionName="EchoEventFunction", Payload='notat:asdfasdf') + self.lambda_client.invoke(FunctionName="EchoEventFunction", Payload="notat:asdfasdf") - self.assertEquals(str(error.exception), expected_error_message) + self.assertEqual(str(error.exception), expected_error_message) def test_invoke_with_log_type_not_None(self): - expected_error_message = "An error occurred (NotImplemented) when calling the Invoke operation: " \ - "log-type: Tail is not supported. None is only supported." + expected_error_message = ( + "An error occurred (NotImplemented) when calling the Invoke operation: " + "log-type: Tail is not supported. None is only supported." + ) with self.assertRaises(ClientError) as error: self.lambda_client.invoke(FunctionName="EchoEventFunction", LogType="Tail") - self.assertEquals(str(error.exception), expected_error_message) + self.assertEqual(str(error.exception), expected_error_message) def test_invoke_with_invocation_type_not_RequestResponse(self): - expected_error_message = "An error occurred (NotImplemented) when calling the Invoke operation: " \ - "invocation-type: DryRun is not supported. RequestResponse is only supported." + expected_error_message = ( + "An error occurred (NotImplemented) when calling the Invoke operation: " + "invocation-type: DryRun is not supported. RequestResponse is only supported." 
+ ) with self.assertRaises(ClientError) as error: self.lambda_client.invoke(FunctionName="EchoEventFunction", InvocationType="DryRun") - self.assertEquals(str(error.exception), expected_error_message) + self.assertEqual(str(error.exception), expected_error_message) class TestLambdaService(StartLambdaIntegBaseClass): @@ -111,53 +100,55 @@ class TestLambdaService(StartLambdaIntegBaseClass): def setUp(self): self.url = "http://127.0.0.1:{}".format(self.port) - self.lambda_client = boto3.client('lambda', - endpoint_url=self.url, - region_name='us-east-1', - use_ssl=False, - verify=False, - config=Config(signature_version=UNSIGNED, - read_timeout=120, - retries={'max_attempts': 0})) + self.lambda_client = boto3.client( + "lambda", + endpoint_url=self.url, + region_name="us-east-1", + use_ssl=False, + verify=False, + config=Config(signature_version=UNSIGNED, read_timeout=120, retries={"max_attempts": 0}), + ) def test_invoke_with_data(self): response = self.lambda_client.invoke(FunctionName="EchoEventFunction", Payload='"This is json data"') - self.assertEquals(response.get("Payload").read().decode('utf-8'), '"This is json data"') + self.assertEqual(response.get("Payload").read().decode("utf-8"), '"This is json data"') self.assertIsNone(response.get("FunctionError")) - self.assertEquals(response.get("StatusCode"), 200) + self.assertEqual(response.get("StatusCode"), 200) def test_invoke_with_no_data(self): response = self.lambda_client.invoke(FunctionName="EchoEventFunction") - self.assertEquals(response.get("Payload").read().decode('utf-8'), '{}') + self.assertEqual(response.get("Payload").read().decode("utf-8"), "{}") self.assertIsNone(response.get("FunctionError")) - self.assertEquals(response.get("StatusCode"), 200) + self.assertEqual(response.get("StatusCode"), 200) def test_invoke_with_log_type_None(self): - response = self.lambda_client.invoke(FunctionName="EchoEventFunction", LogType='None') + response = self.lambda_client.invoke(FunctionName="EchoEventFunction", LogType="None") - self.assertEquals(response.get("Payload").read().decode('utf-8'), '{}') + self.assertEqual(response.get("Payload").read().decode("utf-8"), "{}") self.assertIsNone(response.get("FunctionError")) - self.assertEquals(response.get("StatusCode"), 200) + self.assertEqual(response.get("StatusCode"), 200) def test_invoke_with_invocation_type_RequestResponse(self): - response = self.lambda_client.invoke(FunctionName="EchoEventFunction", InvocationType='RequestResponse') + response = self.lambda_client.invoke(FunctionName="EchoEventFunction", InvocationType="RequestResponse") - self.assertEquals(response.get("Payload").read().decode('utf-8'), '{}') + self.assertEqual(response.get("Payload").read().decode("utf-8"), "{}") self.assertIsNone(response.get("FunctionError")) - self.assertEquals(response.get("StatusCode"), 200) + self.assertEqual(response.get("StatusCode"), 200) def test_lambda_function_raised_error(self): - response = self.lambda_client.invoke(FunctionName="RaiseExceptionFunction", InvocationType='RequestResponse') - - self.assertEquals(response.get("Payload").read().decode('utf-8'), - '{"errorMessage": "Lambda is raising an exception", ' - '"errorType": "Exception", ' - '"stackTrace": [["/var/task/main.py", 47, "raise_exception", ' - '"raise Exception(\\"Lambda is raising an exception\\")"]]}') - self.assertEquals(response.get("FunctionError"), 'Unhandled') - self.assertEquals(response.get("StatusCode"), 200) + response = self.lambda_client.invoke(FunctionName="RaiseExceptionFunction", 
InvocationType="RequestResponse") + + self.assertEqual( + response.get("Payload").read().decode("utf-8"), + '{"errorMessage": "Lambda is raising an exception", ' + '"errorType": "Exception", ' + '"stackTrace": [["/var/task/main.py", 48, "raise_exception", ' + '"raise Exception(\\"Lambda is raising an exception\\")"]]}', + ) + self.assertEqual(response.get("FunctionError"), "Unhandled") + self.assertEqual(response.get("StatusCode"), 200) def test_invoke_with_function_timeout(self): """ @@ -172,6 +163,6 @@ def test_invoke_with_function_timeout(self): """ response = self.lambda_client.invoke(FunctionName="TimeoutFunction") - self.assertEquals(response.get("Payload").read().decode('utf-8'), '') + self.assertEqual(response.get("Payload").read().decode("utf-8"), "") self.assertIsNone(response.get("FunctionError")) - self.assertEquals(response.get("StatusCode"), 200) + self.assertEqual(response.get("StatusCode"), 200) diff --git a/tests/integration/publish/publish_app_integ_base.py b/tests/integration/publish/publish_app_integ_base.py index 11176890c5..a937149a41 100644 --- a/tests/integration/publish/publish_app_integ_base.py +++ b/tests/integration/publish/publish_app_integ_base.py @@ -15,7 +15,6 @@ class PublishAppIntegBase(TestCase): - @classmethod def setUpClass(cls): cls.region_name = os.environ.get("AWS_DEFAULT_REGION") @@ -24,10 +23,10 @@ def setUpClass(cls): cls.application_name_placeholder = "" cls.temp_dir = Path(tempfile.mkdtemp()) cls.test_data_path = Path(__file__).resolve().parents[1].joinpath("testdata", "publish") - cls.sar_client = boto3.client('serverlessrepo', region_name=cls.region_name) + cls.sar_client = boto3.client("serverlessrepo", region_name=cls.region_name) # Create S3 bucket - s3 = boto3.resource('s3') + s3 = boto3.resource("s3") cls.s3_bucket = s3.Bucket(cls.bucket_name) cls.s3_bucket.create() @@ -53,12 +52,11 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - cls.s3_bucket.delete_objects(Delete={ - 'Objects': [ - {'Key': 'LICENSE'}, {'Key': 'README.md'}, - {'Key': 'README_UPDATE.md'}, {'Key': 'main.py'} - ] - }) + cls.s3_bucket.delete_objects( + Delete={ + "Objects": [{"Key": "LICENSE"}, {"Key": "README.md"}, {"Key": "README_UPDATE.md"}, {"Key": "main.py"}] + } + ) cls.s3_bucket.delete() @classmethod @@ -82,7 +80,7 @@ def tearDown(self): def assert_metadata_details(self, app_metadata, std_output): # Strip newlines and spaces in the std output - stripped_std_output = std_output.replace('\n', '').replace('\r', '').replace(' ', '') + stripped_std_output = std_output.replace("\n", "").replace("\r", "").replace(" ", "") # Assert expected app metadata in the std output regardless of key order for key, value in app_metadata.items(): self.assertIn('"{}":{}'.format(key, json.dumps(value)), stripped_std_output) diff --git a/tests/integration/publish/test_command_integ.py b/tests/integration/publish/test_command_integ.py index b51fde6174..0b509b9d73 100644 --- a/tests/integration/publish/test_command_integ.py +++ b/tests/integration/publish/test_command_integ.py @@ -16,15 +16,14 @@ @skipIf(SKIP_PUBLISH_TESTS, "Skip publish tests in CI/CD only") class TestPublishExistingApp(PublishAppIntegBase): - def setUp(self): super(TestPublishExistingApp, self).setUp() # Create application for each test app_metadata_text = self.temp_dir.joinpath("metadata_create_app.json").read_text() app_metadata = json.loads(app_metadata_text) - app_metadata['TemplateBody'] = self.temp_dir.joinpath("template_create_app.yaml").read_text() + app_metadata["TemplateBody"] = 
self.temp_dir.joinpath("template_create_app.yaml").read_text() response = self.sar_client.create_application(**app_metadata) - self.application_id = response['ApplicationId'] + self.application_id = response["ApplicationId"] # Sleep for a little bit to make server happy time.sleep(2) @@ -43,11 +42,11 @@ def test_update_application(self): process_stdout = b"".join(process.stdout.readlines()).strip() expected_msg = 'The following metadata of application "{}" has been updated:'.format(self.application_id) - self.assertIn(expected_msg, process_stdout.decode('utf-8')) + self.assertIn(expected_msg, process_stdout.decode("utf-8")) app_metadata_text = self.temp_dir.joinpath("metadata_update_app.json").read_text() app_metadata = json.loads(app_metadata_text) - self.assert_metadata_details(app_metadata, process_stdout.decode('utf-8')) + self.assert_metadata_details(app_metadata, process_stdout.decode("utf-8")) def test_create_application_version(self): template_path = self.temp_dir.joinpath("template_create_app_version.yaml") @@ -58,33 +57,33 @@ def test_create_application_version(self): process_stdout = b"".join(process.stdout.readlines()).strip() expected_msg = 'The following metadata of application "{}" has been updated:'.format(self.application_id) - self.assertIn(expected_msg, process_stdout.decode('utf-8')) + self.assertIn(expected_msg, process_stdout.decode("utf-8")) app_metadata_text = self.temp_dir.joinpath("metadata_create_app_version.json").read_text() app_metadata = json.loads(app_metadata_text) - self.assert_metadata_details(app_metadata, process_stdout.decode('utf-8')) + self.assert_metadata_details(app_metadata, process_stdout.decode("utf-8")) def test_create_application_version_with_semantic_version_option(self): template_path = self.temp_dir.joinpath("template_create_app_version.yaml") command_list = self.get_command_list( - template_path=template_path, region=self.region_name, semantic_version='0.1.0') + template_path=template_path, region=self.region_name, semantic_version="0.1.0" + ) process = Popen(command_list, stdout=PIPE) process.wait() process_stdout = b"".join(process.stdout.readlines()).strip() expected_msg = 'The following metadata of application "{}" has been updated:'.format(self.application_id) - self.assertIn(expected_msg, process_stdout.decode('utf-8')) + self.assertIn(expected_msg, process_stdout.decode("utf-8")) app_metadata_text = self.temp_dir.joinpath("metadata_create_app_version.json").read_text() app_metadata = json.loads(app_metadata_text) - app_metadata[SEMANTIC_VERSION] = '0.1.0' - self.assert_metadata_details(app_metadata, process_stdout.decode('utf-8')) + app_metadata[SEMANTIC_VERSION] = "0.1.0" + self.assert_metadata_details(app_metadata, process_stdout.decode("utf-8")) @skipIf(SKIP_PUBLISH_TESTS, "Skip publish tests in CI/CD only") class TestPublishNewApp(PublishAppIntegBase): - def setUp(self): super(TestPublishNewApp, self).setUp() self.application_id = None @@ -104,16 +103,16 @@ def test_create_application(self): process_stdout = b"".join(process.stdout.readlines()).strip() expected_msg = "Created new application with the following metadata:" - self.assertIn(expected_msg, process_stdout.decode('utf-8')) + self.assertIn(expected_msg, process_stdout.decode("utf-8")) app_metadata_text = self.temp_dir.joinpath("metadata_create_app.json").read_text() app_metadata = json.loads(app_metadata_text) - self.assert_metadata_details(app_metadata, process_stdout.decode('utf-8')) + self.assert_metadata_details(app_metadata, process_stdout.decode("utf-8")) # Get 
console link application id from stdout - pattern = r'arn:[\w\-]+:serverlessrepo:[\w\-]+:[0-9]+:applications\~[\S]+' - match = re.search(pattern, process_stdout.decode('utf-8')) - self.application_id = match.group().replace('~', '/') + pattern = r"arn:[\w\-]+:serverlessrepo:[\w\-]+:[0-9]+:applications\~[\S]+" + match = re.search(pattern, process_stdout.decode("utf-8")) + self.application_id = match.group().replace("~", "/") def test_publish_not_packaged_template(self): template_path = self.temp_dir.joinpath("template_not_packaged.yaml") @@ -124,7 +123,7 @@ def test_publish_not_packaged_template(self): process_stderr = b"".join(process.stderr.readlines()).strip() expected_msg = "Please make sure that you have uploaded application artifacts to S3" - self.assertIn(expected_msg, process_stderr.decode('utf-8')) + self.assertIn(expected_msg, process_stderr.decode("utf-8")) def test_create_application_infer_region_from_env(self): template_path = self.temp_dir.joinpath("template_create_app.yaml") @@ -135,10 +134,10 @@ def test_create_application_infer_region_from_env(self): process_stdout = b"".join(process.stdout.readlines()).strip() expected_msg = "Created new application with the following metadata:" - self.assertIn(expected_msg, process_stdout.decode('utf-8')) + self.assertIn(expected_msg, process_stdout.decode("utf-8")) # Get console link application id from stdout - pattern = r'arn:[\w\-]+:serverlessrepo:[\w\-]+:[0-9]+:applications\~[\S]+' - match = re.search(pattern, process_stdout.decode('utf-8')) - self.application_id = match.group().replace('~', '/') + pattern = r"arn:[\w\-]+:serverlessrepo:[\w\-]+:[0-9]+:applications\~[\S]+" + match = re.search(pattern, process_stdout.decode("utf-8")) + self.application_id = match.group().replace("~", "/") self.assertIn(self.region_name, self.application_id) diff --git a/tests/integration/telemetry/integ_base.py b/tests/integration/telemetry/integ_base.py index c8d0df5b60..314da3b181 100644 --- a/tests/integration/telemetry/integ_base.py +++ b/tests/integration/telemetry/integ_base.py @@ -28,11 +28,10 @@ TELEMETRY_ENDPOINT_URL = "http://{}:{}".format(TELEMETRY_ENDPOINT_HOST, TELEMETRY_ENDPOINT_PORT) # Convert line separators to work with Windows \r\n -EXPECTED_TELEMETRY_PROMPT = re.sub(r'\n', os.linesep, TELEMETRY_PROMPT) +EXPECTED_TELEMETRY_PROMPT = re.sub(r"\n", os.linesep, TELEMETRY_PROMPT) class IntegBase(TestCase): - @classmethod def setUpClass(cls): cls.cmd = cls.base_command() @@ -69,8 +68,9 @@ def run_cmd(self, stdin_data="", optout_envvar_value=None): env["__SAM_CLI_APP_DIR"] = self.config_dir env["__SAM_CLI_TELEMETRY_ENDPOINT_URL"] = "{}/metrics".format(TELEMETRY_ENDPOINT_URL) - process = subprocess.Popen(cmd_list, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, - env=env) + process = subprocess.Popen( + cmd_list, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env + ) return process def unset_config(self): @@ -134,16 +134,17 @@ def __init__(self): self.flask_app = Flask(__name__) - self.flask_app.add_url_rule("/metrics", - endpoint="/metrics", - view_func=self._request_handler, - methods=["POST"], - provide_automatic_options=False) + self.flask_app.add_url_rule( + "/metrics", + endpoint="/metrics", + view_func=self._request_handler, + methods=["POST"], + provide_automatic_options=False, + ) - self.flask_app.add_url_rule("/_shutdown", - endpoint="/_shutdown", - view_func=self._shutdown_flask, - methods=["GET"]) + self.flask_app.add_url_rule( + "/_shutdown", endpoint="/_shutdown", 
view_func=self._shutdown_flask, methods=["GET"] + ) # Thread-safe data structure to record requests sent to the server self._requests = deque() @@ -185,7 +186,7 @@ def _request_handler(self, **kwargs): "endpoint": request.endpoint, "method": request.method, "data": request.get_json(), - "headers": dict(request.headers) + "headers": dict(request.headers), } self._requests.append(request_data) @@ -194,6 +195,6 @@ def _request_handler(self, **kwargs): def _shutdown_flask(self): # Based on http://flask.pocoo.org/snippets/67/ - request.environ.get('werkzeug.server.shutdown')() - print('Server shutting down...') - return '' + request.environ.get("werkzeug.server.shutdown")() + print ("Server shutting down...") + return "" diff --git a/tests/integration/telemetry/test_installed_metric.py b/tests/integration/telemetry/test_installed_metric.py index b25fb1b19e..bc1518bf21 100644 --- a/tests/integration/telemetry/test_installed_metric.py +++ b/tests/integration/telemetry/test_installed_metric.py @@ -6,7 +6,6 @@ class TestSendInstalledMetric(IntegBase): - def test_send_installed_metric_on_first_run(self): """ On the first run, send the installed metric @@ -20,38 +19,39 @@ def test_send_installed_metric_on_first_run(self): (_, stderrdata) = process.communicate() retcode = process.poll() - self.assertEquals(retcode, 0, "Command should successfully complete") + self.assertEqual(retcode, 0, "Command should successfully complete") # Make sure the prompt was printed. Otherwise this test is not valid self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) all_requests = server.get_all_requests() - self.assertEquals(2, len(all_requests), "There should be exactly two metrics request") + self.assertEqual(2, len(all_requests), "There should be exactly two metrics request") # First one is usually the installed metric requests = filter_installed_metric_requests(all_requests) - self.assertEquals(1, len(requests), "There should be only one 'installed' metric") + self.assertEqual(1, len(requests), "There should be only one 'installed' metric") request = requests[0] self.assertIn("Content-Type", request["headers"]) - self.assertEquals(request["headers"]["Content-Type"], "application/json") + self.assertEqual(request["headers"]["Content-Type"], "application/json") expected_data = { - "metrics": [{ - "installed": { - "installationId": self.get_global_config().installation_id, - "samcliVersion": SAM_CLI_VERSION, - "osPlatform": platform.system(), - - "executionEnvironment": ANY, - "pyversion": ANY, - "sessionId": ANY, - "requestId": ANY, - "telemetryEnabled": True + "metrics": [ + { + "installed": { + "installationId": self.get_global_config().installation_id, + "samcliVersion": SAM_CLI_VERSION, + "osPlatform": platform.system(), + "executionEnvironment": ANY, + "pyversion": ANY, + "sessionId": ANY, + "requestId": ANY, + "telemetryEnabled": True, + } } - }] + ] } - self.assertEquals(request["data"], expected_data) + self.assertEqual(request["data"], expected_data) def test_must_not_send_installed_metric_when_prompt_is_disabled(self): """ @@ -69,12 +69,12 @@ def test_must_not_send_installed_metric_when_prompt_is_disabled(self): (stdoutdata, stderrdata) = process.communicate() retcode = process.poll() - self.assertEquals(retcode, 0, "Command should successfully complete") + self.assertEqual(retcode, 0, "Command should successfully complete") self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) requests = 
filter_installed_metric_requests(server.get_all_requests()) - self.assertEquals(0, len(requests), "'installed' metric should NOT be sent") + self.assertEqual(0, len(requests), "'installed' metric should NOT be sent") def test_must_not_send_installed_metric_on_second_run(self): """ @@ -90,20 +90,24 @@ def test_must_not_send_installed_metric_on_second_run(self): process1 = self.run_cmd() (_, stderrdata) = process1.communicate() retcode = process1.poll() - self.assertEquals(retcode, 0, "Command should successfully complete") + self.assertEqual(retcode, 0, "Command should successfully complete") self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) - self.assertEquals(1, len(filter_installed_metric_requests(server.get_all_requests())), - "'installed' metric should be sent") + self.assertEqual( + 1, len(filter_installed_metric_requests(server.get_all_requests())), "'installed' metric should be sent" + ) # Second Run process2 = self.run_cmd() (stdoutdata, stderrdata) = process2.communicate() retcode = process2.poll() - self.assertEquals(retcode, 0) + self.assertEqual(retcode, 0) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stdoutdata.decode()) self.assertNotIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) - self.assertEquals(1, len(filter_installed_metric_requests(server.get_all_requests())), - "Only one 'installed' metric should be sent") + self.assertEqual( + 1, + len(filter_installed_metric_requests(server.get_all_requests())), + "Only one 'installed' metric should be sent", + ) def filter_installed_metric_requests(all_requests): diff --git a/tests/integration/telemetry/test_prompt.py b/tests/integration/telemetry/test_prompt.py index 8dedf79473..876dd326cf 100644 --- a/tests/integration/telemetry/test_prompt.py +++ b/tests/integration/telemetry/test_prompt.py @@ -1,10 +1,8 @@ - from parameterized import parameterized from .integ_base import IntegBase, EXPECTED_TELEMETRY_PROMPT class TestTelemetryPrompt(IntegBase): - def test_must_prompt_if_config_is_not_set(self): """ Must print prompt if Telemetry config is not set. 
@@ -17,10 +15,7 @@ def test_must_prompt_if_config_is_not_set(self): # Telemetry prompt should be printed to the terminal self.assertIn(EXPECTED_TELEMETRY_PROMPT, stderrdata.decode()) - @parameterized.expand([ - (True, "Enable Telemetry"), - (False, "Disalbe Telemetry") - ]) + @parameterized.expand([(True, "Enable Telemetry"), (False, "Disalbe Telemetry")]) def test_must_not_prompt_if_config_is_set(self, telemetry_enabled, msg): """ If telemetry config is already set, prompt must not be displayed diff --git a/tests/integration/telemetry/test_telemetry_contract.py b/tests/integration/telemetry/test_telemetry_contract.py index f42f01effd..0d7f40afb8 100644 --- a/tests/integration/telemetry/test_telemetry_contract.py +++ b/tests/integration/telemetry/test_telemetry_contract.py @@ -1,4 +1,3 @@ - from .integ_base import IntegBase, TelemetryServer @@ -19,17 +18,17 @@ def test_must_not_send_metrics_if_disabled_using_envvar(self): process = self.run_cmd(optout_envvar_value="0") (_, stderrdata) = process.communicate() retcode = process.poll() - self.assertEquals(retcode, 0, "Command should successfully complete") + self.assertEqual(retcode, 0, "Command should successfully complete") all_requests = server.get_all_requests() - self.assertEquals(0, len(all_requests), "No metrics should be sent") + self.assertEqual(0, len(all_requests), "No metrics should be sent") # Now run again without the Env Var Opt out process = self.run_cmd() (_, stderrdata) = process.communicate() retcode = process.poll() - self.assertEquals(retcode, 0, "Command should successfully complete") + self.assertEqual(retcode, 0, "Command should successfully complete") all_requests = server.get_all_requests() - self.assertEquals(1, len(all_requests), "Command run metric should be sent") + self.assertEqual(1, len(all_requests), "Command run metric should be sent") def test_must_send_metrics_if_enabled_via_envvar(self): """ @@ -43,17 +42,17 @@ def test_must_send_metrics_if_enabled_via_envvar(self): process = self.run_cmd() (_, stderrdata) = process.communicate() retcode = process.poll() - self.assertEquals(retcode, 0, "Command should successfully complete") + self.assertEqual(retcode, 0, "Command should successfully complete") all_requests = server.get_all_requests() - self.assertEquals(0, len(all_requests), "No metric should be sent") + self.assertEqual(0, len(all_requests), "No metric should be sent") # Opt-in via env var process = self.run_cmd(optout_envvar_value="1") (_, stderrdata) = process.communicate() retcode = process.poll() - self.assertEquals(retcode, 0, "Command should successfully complete") + self.assertEqual(retcode, 0, "Command should successfully complete") all_requests = server.get_all_requests() - self.assertEquals(1, len(all_requests), "Command run metric must be sent") + self.assertEqual(1, len(all_requests), "Command run metric must be sent") def test_must_not_crash_when_offline(self): """ @@ -70,4 +69,4 @@ def test_must_not_crash_when_offline(self): (_, stderrdata) = process.communicate() retcode = process.poll() - self.assertEquals(retcode, 0, "Command should successfully complete") + self.assertEqual(retcode, 0, "Command should successfully complete") diff --git a/tests/integration/testdata/buildcmd/Python/main.py b/tests/integration/testdata/buildcmd/Python/main.py index fe02bd097e..fbd0085e12 100644 --- a/tests/integration/testdata/buildcmd/Python/main.py +++ b/tests/integration/testdata/buildcmd/Python/main.py @@ -1,4 +1,5 @@ import numpy + # from cryptography.fernet import Fernet from jinja2 import Template 
@@ -8,9 +9,6 @@ def handler(event, context): # Try using some of the modules to make sure they work & don't crash the process # print(Fernet.generate_key()) - template = Template('Hello {{ name }}') + template = Template("Hello {{ name }}") - return { - "pi": "{0:.2f}".format(numpy.pi), - "jinja": template.render(name='World') - } + return {"pi": "{0:.2f}".format(numpy.pi), "jinja": template.render(name="World")} diff --git a/tests/integration/testdata/invoke/layers/layer-main.py b/tests/integration/testdata/invoke/layers/layer-main.py index 44eed96c84..e92f7ca877 100644 --- a/tests/integration/testdata/invoke/layers/layer-main.py +++ b/tests/integration/testdata/invoke/layers/layer-main.py @@ -1,12 +1,12 @@ import sys import site -sys.path.insert(0, '/opt') +sys.path.insert(0, "/opt") site.addsitedir("/opt") def handler(event, context): - return 'hello' + return "hello" def custom_layer_handler(event, context): diff --git a/tests/integration/testdata/invoke/main.py b/tests/integration/testdata/invoke/main.py index 68d432f510..06afecfe2b 100644 --- a/tests/integration/testdata/invoke/main.py +++ b/tests/integration/testdata/invoke/main.py @@ -1,13 +1,14 @@ import time import os import sys -print('Loading function') + +print ("Loading function") def handler(event, context): - print("value1 = " + event['key1']) - print("value2 = " + event['key2']) - print("value3 = " + event['key3']) + print ("value1 = " + event["key1"]) + print ("value2 = " + event["key2"]) + print ("value3 = " + event["key3"]) sys.stdout.write("Docker Lambda is writing to stderr") diff --git a/tests/integration/testdata/start_api/main.py b/tests/integration/testdata/start_api/main.py index fc6bc06482..ae10aa8b0b 100644 --- a/tests/integration/testdata/start_api/main.py +++ b/tests/integration/testdata/start_api/main.py @@ -12,7 +12,7 @@ def echo_event_handler(event, context): def echo_event_handler_2(event, context): - event['handler'] = 'echo_event_handler_2' + event["handler"] = "echo_event_handler_2" return {"statusCode": 200, "body": json.dumps(event)} @@ -71,9 +71,7 @@ def base64_response(event, context): "statusCode": 200, "body": gifImageBase64, "isBase64Encoded": True, - "headers": { - "Content-Type": "image/gif" - } + "headers": {"Content-Type": "image/gif"}, } @@ -81,10 +79,8 @@ def echo_base64_event_body(event, context): return { "statusCode": 200, "body": event["body"], - "headers": { - "Content-Type": event["headers"]["Content-Type"] - }, - "isBase64Encoded": event["isBase64Encoded"] + "headers": {"Content-Type": event["headers"]["Content-Type"]}, + "isBase64Encoded": event["isBase64Encoded"], } @@ -93,7 +89,7 @@ def multiple_headers(event, context): "statusCode": 200, "body": "hello", "headers": {"Content-Type": "text/plain"}, - "multiValueHeaders": {"MyCustomHeader": ['Value1', 'Value2']} + "multiValueHeaders": {"MyCustomHeader": ["Value1", "Value2"]}, } @@ -101,6 +97,6 @@ def multiple_headers_overrides_headers(event, context): return { "statusCode": 200, "body": "hello", - "headers": {"Content-Type": "text/plain", "MyCustomHeader": 'Custom'}, - "multiValueHeaders": {"MyCustomHeader": ['Value1', 'Value2']} + "headers": {"Content-Type": "text/plain", "MyCustomHeader": "Custom"}, + "multiValueHeaders": {"MyCustomHeader": ["Value1", "Value2"]}, } diff --git a/tests/integration/testdata/start_api/swagger-template.yaml b/tests/integration/testdata/start_api/swagger-template.yaml index cff33b1f43..4073fbd032 100644 --- a/tests/integration/testdata/start_api/swagger-template.yaml +++ 
b/tests/integration/testdata/start_api/swagger-template.yaml @@ -15,10 +15,10 @@ Resources: Variables: VarName: varValue Cors: - AllowOrigin: "*" - AllowMethods: "GET" - AllowHeaders: "origin, x-requested-with" - MaxAge: 510 + AllowOrigin: "'*'" + AllowMethods: "'GET'" + AllowHeaders: "'origin, x-requested-with'" + MaxAge: "'510'" DefinitionBody: swagger: "2.0" info: diff --git a/tests/integration/testdata/start_api/template.yaml b/tests/integration/testdata/start_api/template.yaml index ec0b65978c..73b6bff06b 100644 --- a/tests/integration/testdata/start_api/template.yaml +++ b/tests/integration/testdata/start_api/template.yaml @@ -9,7 +9,7 @@ Globals: - image~1png Variables: VarName: varValue - Cors: "*" + Cors: "'*'" Resources: HelloWorldFunction: Type: AWS::Serverless::Function diff --git a/tests/smoke/download_sar_templates.py b/tests/smoke/download_sar_templates.py index 63b53467f8..aa195348f9 100644 --- a/tests/smoke/download_sar_templates.py +++ b/tests/smoke/download_sar_templates.py @@ -12,50 +12,55 @@ def download(count=100): - sar_browse_url = "https://shr32taah3.execute-api.us-east-1.amazonaws.com/Prod/applications/browse" - current_page = 1 - retry_count = 0 - apps = [] - - while len(apps) < count and retry_count < 10: - try: - response = requests.get(sar_browse_url, { - "pageSize": count if count < 10 else 10, - "pageNumber": current_page, - "includeAppsWithCapabilities": "CAPABILITY_IAM,CAPABILITY_NAMED_IAM,CAPABILITY_RESOURCE_POLICY,CAPABILITY_AUTO_EXPAND" - }) - - response.raise_for_status() - result = response.json() - - # Successful request - apps = apps + result["applications"] - current_page += 1 - retry_count = 0 - except requests.exceptions.RequestException as ex: - LOG.warning("Got throttled by SAR", exc_info=ex) - retry_count += 1 - - for index, app in enumerate(apps): - app_id = app["id"] - name = app["name"] - template_file_name = os.path.join(TEMPLATE_FOLDER, name+"-template.yaml") - LOG.info("[%s/%s] %s", index, count, name) - _download_templates(app_id, template_file_name) - time.sleep(0.1) # 100ms aka 10 TPS - + sar_browse_url = "https://shr32taah3.execute-api.us-east-1.amazonaws.com/Prod/applications/browse" + current_page = 1 + retry_count = 0 + apps = [] + + while len(apps) < count and retry_count < 10: + try: + response = requests.get( + sar_browse_url, + { + "pageSize": count if count < 10 else 10, + "pageNumber": current_page, + "includeAppsWithCapabilities": "CAPABILITY_IAM,CAPABILITY_NAMED_IAM,CAPABILITY_RESOURCE_POLICY,CAPABILITY_AUTO_EXPAND", + }, + ) + + response.raise_for_status() + result = response.json() + + # Successful request + apps = apps + result["applications"] + current_page += 1 + retry_count = 0 + except requests.exceptions.RequestException as ex: + LOG.warning("Got throttled by SAR", exc_info=ex) + retry_count += 1 + + for index, app in enumerate(apps): + app_id = app["id"] + name = app["name"] + template_file_name = os.path.join(TEMPLATE_FOLDER, name + "-template.yaml") + LOG.info("[%s/%s] %s", index, count, name) + _download_templates(app_id, template_file_name) + time.sleep(0.1) # 100ms aka 10 TPS + + def _download_templates(app_id, template_file_path): - sar = boto3.client('serverlessrepo') + sar = boto3.client("serverlessrepo") response = sar.get_application(ApplicationId=app_id) template_url = response["Version"]["TemplateUrl"] with open(template_file_path, "wb") as fp: - r = requests.get(template_url, stream=True) - for chunk in r.iter_content(chunk_size=128): - fp.write(chunk) + r = requests.get(template_url, stream=True) +
for chunk in r.iter_content(chunk_size=128): + fp.write(chunk) + if __name__ == "__main__": - count = 100 - LOG.info("Downloading %s templates", count) - download(count=count) \ No newline at end of file + count = 100 + LOG.info("Downloading %s templates", count) + download(count=count) diff --git a/tests/smoke/templates/sar/getatt-custom-resource.yaml b/tests/smoke/templates/sar/getatt-custom-resource.yaml new file mode 100644 index 0000000000..7837d31d8b --- /dev/null +++ b/tests/smoke/templates/sar/getatt-custom-resource.yaml @@ -0,0 +1,37 @@ + +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 + +Resources: + MyCustomResource: + Type: AWS::CloudFormation::CustomResource + Properties: + ServiceToken: String + + APIGetFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: + Bucket: <%REPO_BUCKET%> + Key: 7d354083-39c1-4a33-a435-fbf3dbab85b7 + Handler: main + Environment: + Variables: + SAFE_ENVIRONMENT_VARIABLE_1: + !Ref TemplateParameterOne + OFFENDING_ENVIRONMENT_VARIABLE: + !GetAtt MyCustomResource.Parameter.Value + SAFE_ENVIRONMENT_VARIABLE_2: + !Ref MyCustomResource + Events: + PostEvent: + Type: Api + Properties: + Path: / + Method: post + GetEvent: + Type: Api + Properties: + Path: / + Method: get + Runtime: go1.x diff --git a/tests/smoke/templates/sar/vpc-import-value.yaml b/tests/smoke/templates/sar/vpc-import-value.yaml new file mode 100644 index 0000000000..af9aaebdff --- /dev/null +++ b/tests/smoke/templates/sar/vpc-import-value.yaml @@ -0,0 +1,25 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Layer Test +Parameters: + LayerArn: + Default: 'arn:aws:lambda:us-east-1:764866452798:layer:chrome-aws-lambda:4' + Description: Layer Arn + Type: String +Resources: + FunctionWithLayer: + Type: AWS::Serverless::Function + Properties: + CodeUri: + Bucket: <%REPO_BUCKET%> + Key: a44a03c9-ccb1-4ddc-b196-8e2c9fdeec35 + Handler: app.lambda_handler + Runtime: python3.7 + Layers: + - Ref: LayerArn + VpcConfig: + SecurityGroupIds: + - Fn::ImportValue: !Sub ${VpcStackName}-AppSG + SubnetIds: + - Fn::ImportValue: !Sub ${VpcStackName}-AppSubnet1 + - Fn::ImportValue: !Sub ${VpcStackName}-AppSubnet2 diff --git a/tests/smoke/test_all_commands.py b/tests/smoke/test_all_commands.py index 132d278967..f46681b474 100644 --- a/tests/smoke/test_all_commands.py +++ b/tests/smoke/test_all_commands.py @@ -8,39 +8,49 @@ TEMPLATE_FOLDER = os.path.join(os.path.dirname(__file__), "templates", "sar") TEMPLATE_FILE_NAMES = [v for v in os.listdir(TEMPLATE_FOLDER) if "yaml" in v] -class TestAllCommands(TestCase): - @parameterized.expand(TEMPLATE_FILE_NAMES) - def test_build(self, template_file_name): - self.run_and_verify_no_crash("build", ["-t", os.path.join(TEMPLATE_FOLDER, template_file_name)]) - - @parameterized.expand(TEMPLATE_FILE_NAMES) - def test_validate(self, template_file_name): - self.run_and_verify_no_crash("validate", ["-t", os.path.join(TEMPLATE_FOLDER, template_file_name)]) - - @parameterized.expand(TEMPLATE_FILE_NAMES) - def test_local_invoke(self, template_file_name): - self.run_and_verify_no_crash("local invoke", ["-t", os.path.join(TEMPLATE_FOLDER, template_file_name)]) - - @parameterized.expand(TEMPLATE_FILE_NAMES) - def test_package(self, template_file_name): - self.run_and_verify_no_crash("package", [ - "--template-file", os.path.join(TEMPLATE_FOLDER, template_file_name), - "--s3-bucket", "sdfafds-random-bucket" - ]) - - @parameterized.expand(TEMPLATE_FILE_NAMES) - def test_deploy(self, 
template_file_name): - self.run_and_verify_no_crash("deploy", [ - "--template-file", os.path.join(TEMPLATE_FOLDER, template_file_name), - "--stack-name", "dsfafs-random-stack" - ]) - - def run_and_verify_no_crash(self, cmd_name, args): - sam_cmd = "samdev" if os.getenv("SAM_CLI_DEV", 0) else "sam" - process = subprocess.Popen([sam_cmd, cmd_name] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = process.communicate() - - # Just make sure "Traceback" is not in the stdout and stderr - aka the command didn't blow up with stacktrace - self.assertNotIn("Traceback", str(stdout.decode('utf-8'))) - self.assertNotIn("Traceback", str(stderr.decode('utf-8'))) \ No newline at end of file +class TestAllCommands(TestCase): + @parameterized.expand(TEMPLATE_FILE_NAMES) + def test_build(self, template_file_name): + self.run_and_verify_no_crash("build", ["-t", os.path.join(TEMPLATE_FOLDER, template_file_name)]) + + @parameterized.expand(TEMPLATE_FILE_NAMES) + def test_validate(self, template_file_name): + self.run_and_verify_no_crash("validate", ["-t", os.path.join(TEMPLATE_FOLDER, template_file_name)]) + + @parameterized.expand(TEMPLATE_FILE_NAMES) + def test_local_invoke(self, template_file_name): + self.run_and_verify_no_crash("local invoke", ["-t", os.path.join(TEMPLATE_FOLDER, template_file_name)]) + + @parameterized.expand(TEMPLATE_FILE_NAMES) + def test_package(self, template_file_name): + self.run_and_verify_no_crash( + "package", + [ + "--template-file", + os.path.join(TEMPLATE_FOLDER, template_file_name), + "--s3-bucket", + "sdfafds-random-bucket", + ], + ) + + @parameterized.expand(TEMPLATE_FILE_NAMES) + def test_deploy(self, template_file_name): + self.run_and_verify_no_crash( + "deploy", + [ + "--template-file", + os.path.join(TEMPLATE_FOLDER, template_file_name), + "--stack-name", + "dsfafs-random-stack", + ], + ) + + def run_and_verify_no_crash(self, cmd_name, args): + sam_cmd = "samdev" if os.getenv("SAM_CLI_DEV", 0) else "sam" + process = subprocess.Popen([sam_cmd, cmd_name] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + + # Just make sure "Traceback" is not in the stdout and stderr - aka the command didn't blow up with stacktrace + self.assertNotIn("Traceback", str(stdout.decode("utf-8"))) + self.assertNotIn("Traceback", str(stderr.decode("utf-8"))) diff --git a/tests/testing_utils.py b/tests/testing_utils.py index 1304ee156c..ce0b3fc0ff 100644 --- a/tests/testing_utils.py +++ b/tests/testing_utils.py @@ -1,6 +1,6 @@ import os import platform -IS_WINDOWS = platform.system().lower() == 'windows' +IS_WINDOWS = platform.system().lower() == "windows" RUNNING_ON_CI = os.environ.get("APPVEYOR", False) RUNNING_TEST_FOR_MASTER_ON_CI = os.environ.get("APPVEYOR_REPO_BRANCH", "master") != "master" diff --git a/tests/unit/cli/test_command.py b/tests/unit/cli/test_command.py index baa93e14a8..082af8bc8c 100644 --- a/tests/unit/cli/test_command.py +++ b/tests/unit/cli/test_command.py @@ -6,14 +6,9 @@ class TestBaseCommand(TestCase): - def setUp(self): - self.packages = [ - "a.b.cmd1", - "foo.cmd2", - "cmd3" - ] + self.packages = ["a.b.cmd1", "foo.cmd2", "cmd3"] def test_must_inherit(self): @@ -21,21 +16,17 @@ def test_must_inherit(self): self.assertTrue(isinstance(cmd, click.MultiCommand)) def test_set_commands_must_extract_command_name(self): - expected = { - "cmd1": "a.b.cmd1", - "cmd2": "foo.cmd2", - "cmd3": "cmd3" - } + expected = {"cmd1": "a.b.cmd1", "cmd2": "foo.cmd2", "cmd3": "cmd3"} result = 
BaseCommand._set_commands(self.packages) - self.assertEquals(result, expected) + self.assertEqual(result, expected) def test_list_commands_must_return_commands_name(self): expected = ["cmd1", "cmd2", "cmd3"].sort() cmd = BaseCommand(cmd_packages=self.packages) result = cmd.list_commands(ctx=None) - self.assertEquals(result.sort(), expected) + self.assertEqual(result.sort(), expected) @patch("samcli.cli.command.importlib") def test_get_command_must_return_command_module(self, importlib_mock): @@ -48,27 +39,23 @@ def test_get_command_must_return_command_module(self, importlib_mock): cmd = BaseCommand(cmd_packages=self.packages) result = cmd.get_command(None, "cmd1") - self.assertEquals(result, module_mock.cli) + self.assertEqual(result, module_mock.cli) result = cmd.get_command(None, "cmd2") - self.assertEquals(result, module_mock.cli) + self.assertEqual(result, module_mock.cli) result = cmd.get_command(None, "cmd3") - self.assertEquals(result, module_mock.cli) + self.assertEqual(result, module_mock.cli) # Library to import the modules must be called three times - importlib_mock.import_module.assert_has_calls([ - call("a.b.cmd1"), - call("foo.cmd2"), - call("cmd3") - ]) + importlib_mock.import_module.assert_has_calls([call("a.b.cmd1"), call("foo.cmd2"), call("cmd3")]) def test_get_command_must_skip_unknown_commands(self): cmd = BaseCommand(cmd_packages=self.packages) result = cmd.get_command(None, "unknown_command") - self.assertEquals(result, None, "must not return a command") + self.assertEqual(result, None, "must not return a command") @patch("samcli.cli.command.importlib") def test_get_command_must_skip_on_exception_loading_module(self, importlib_mock): @@ -79,7 +66,7 @@ def test_get_command_must_skip_on_exception_loading_module(self, importlib_mock) importlib_mock.import_module.side_effect = ImportError() result = cmd.get_command(None, "cmd1") - self.assertEquals(result, None, "must not return a command") + self.assertEqual(result, None, "must not return a command") @patch("samcli.cli.command.importlib") def test_get_command_must_skip_on_absence_of_cli_method(self, importlib_mock): @@ -90,4 +77,4 @@ def test_get_command_must_skip_on_absence_of_cli_method(self, importlib_mock): importlib_mock.import_module.return_value = {} # Returned Module does *not* have 'cli' property result = cmd.get_command(None, "cmd1") - self.assertEquals(result, None, "must not return a command") + self.assertEqual(result, None, "must not return a command") diff --git a/tests/unit/cli/test_context.py b/tests/unit/cli/test_context.py index 2ef94cff61..7bd04e4ca9 100644 --- a/tests/unit/cli/test_context.py +++ b/tests/unit/cli/test_context.py @@ -8,37 +8,36 @@ class TestContext(TestCase): - def test_must_initialize_with_defaults(self): ctx = Context() - self.assertEquals(ctx.debug, False, "debug must default to False") + self.assertEqual(ctx.debug, False, "debug must default to False") def test_must_set_get_debug_flag(self): ctx = Context() ctx.debug = True - self.assertEquals(ctx.debug, True, "debug must be set to True") - self.assertEquals(logging.getLogger('samcli').getEffectiveLevel(), logging.DEBUG) - self.assertEquals(logging.getLogger('aws_lambda_builders').getEffectiveLevel(), logging.DEBUG) + self.assertEqual(ctx.debug, True, "debug must be set to True") + self.assertEqual(logging.getLogger("samcli").getEffectiveLevel(), logging.DEBUG) + self.assertEqual(logging.getLogger("aws_lambda_builders").getEffectiveLevel(), logging.DEBUG) def test_must_unset_get_debug_flag(self): ctx = Context() ctx.debug = True - 
self.assertEquals(ctx.debug, True, "debug must be set to True") + self.assertEqual(ctx.debug, True, "debug must be set to True") # Flipping from True to False ctx.debug = False - self.assertEquals(ctx.debug, False, "debug must be set to False") + self.assertEqual(ctx.debug, False, "debug must be set to False") def test_must_set_aws_region_in_boto_session(self): region = "myregion" ctx = Context() ctx.region = region - self.assertEquals(ctx.region, region) - self.assertEquals(region, boto3._get_default_session().region_name) + self.assertEqual(ctx.region, region) + self.assertEqual(region, boto3._get_default_session().region_name) @patch("samcli.cli.context.boto3") def test_must_set_aws_profile_in_boto_session(self, boto_mock): @@ -47,7 +46,7 @@ def test_must_set_aws_profile_in_boto_session(self, boto_mock): ctx = Context() ctx.profile = profile - self.assertEquals(ctx.profile, profile) + self.assertEqual(ctx.profile, profile) boto_mock.setup_default_session.assert_called_with(region_name=None, profile_name=profile) @patch("samcli.cli.context.boto3") @@ -65,7 +64,7 @@ def test_must_set_session_id_to_uuid(self, uuid_mock): uuid_mock.uuid4.return_value = "abcd" ctx = Context() - self.assertEquals(ctx.session_id, "abcd") + self.assertEqual(ctx.session_id, "abcd") @patch("samcli.cli.context.click") def test_must_find_context(self, click_mock): @@ -73,7 +72,7 @@ def test_must_find_context(self, click_mock): ctx = Context() result = ctx.get_current_context() - self.assertEquals(click_mock.get_current_context.return_value.find_object.return_value, result) + self.assertEqual(click_mock.get_current_context.return_value.find_object.return_value, result) click_mock.get_current_context.return_value.find_object.assert_called_once_with(Context) @patch("samcli.cli.context.click") @@ -85,7 +84,7 @@ def test_create_new_context_if_not_found(self, click_mock): ctx = Context() result = ctx.get_current_context() - self.assertEquals(click_mock.get_current_context.return_value.ensure_object.return_value, result) + self.assertEqual(click_mock.get_current_context.return_value.ensure_object.return_value, result) click_mock.get_current_context.return_value.ensure_object.assert_called_once_with(Context) @patch("samcli.cli.context.click") diff --git a/tests/unit/cli/test_global_config.py b/tests/unit/cli/test_global_config.py index d59520e14b..776952dbfd 100644 --- a/tests/unit/cli/test_global_config.py +++ b/tests/unit/cli/test_global_config.py @@ -10,12 +10,11 @@ class TestGlobalConfig(TestCase): - def test_config_write_error(self): m = mock_open() m.side_effect = IOError("fail") gc = GlobalConfig() - with patch('samcli.cli.global_config.open', m): + with patch("samcli.cli.global_config.open", m): installation_id = gc.installation_id self.assertIsNone(installation_id) @@ -23,7 +22,7 @@ def test_unable_to_create_dir(self): m = mock_open() m.side_effect = OSError("Permission DENIED") gc = GlobalConfig() - with patch('samcli.cli.global_config.Path.mkdir', m): + with patch("samcli.cli.global_config.Path.mkdir", m): installation_id = gc.installation_id self.assertIsNone(installation_id) telemetry_enabled = gc.telemetry_enabled @@ -33,24 +32,24 @@ def test_setter_cannot_open_path(self): m = mock_open() m.side_effect = IOError("fail") gc = GlobalConfig() - with patch('samcli.cli.global_config.open', m): + with patch("samcli.cli.global_config.open", m): with self.assertRaises(IOError): gc.telemetry_enabled = True - @patch('samcli.cli.global_config.click') + @patch("samcli.cli.global_config.click") def 
test_config_dir_default(self, mock_click): mock_click.get_app_dir.return_value = "mock/folders" gc = GlobalConfig() self.assertEqual(Path("mock/folders"), gc.config_dir) - mock_click.get_app_dir.assert_called_once_with('AWS SAM', force_posix=True) + mock_click.get_app_dir.assert_called_once_with("AWS SAM", force_posix=True) def test_explicit_installation_id(self): gc = GlobalConfig(installation_id="foobar") self.assertEqual("foobar", gc.installation_id) - @patch('samcli.cli.global_config.uuid') - @patch('samcli.cli.global_config.Path') - @patch('samcli.cli.global_config.click') + @patch("samcli.cli.global_config.uuid") + @patch("samcli.cli.global_config.Path") + @patch("samcli.cli.global_config.click") def test_setting_installation_id(self, mock_click, mock_path, mock_uuid): gc = GlobalConfig() mock_uuid.uuid4.return_value = "SevenLayerDipMock" @@ -62,15 +61,15 @@ def test_setting_installation_id(self, mock_click, mock_path, mock_uuid): mock_click.get_app_dir.return_value = "mock/folders" mock_io = mock_open(Mock()) with patch("samcli.cli.global_config.open", mock_io): - self.assertEquals("SevenLayerDipMock", gc.installation_id) + self.assertEqual("SevenLayerDipMock", gc.installation_id) def test_explicit_telemetry_enabled(self): gc = GlobalConfig(telemetry_enabled=True) self.assertTrue(gc.telemetry_enabled) - @patch('samcli.cli.global_config.Path') - @patch('samcli.cli.global_config.click') - @patch('samcli.cli.global_config.os') + @patch("samcli.cli.global_config.Path") + @patch("samcli.cli.global_config.click") + @patch("samcli.cli.global_config.os") def test_missing_telemetry_flag(self, mock_os, mock_click, mock_path): gc = GlobalConfig() mock_click.get_app_dir.return_value = "mock/folders" @@ -82,9 +81,9 @@ def test_missing_telemetry_flag(self, mock_os, mock_click, mock_path): mock_os.environ = {} # env var is not set self.assertIsNone(gc.telemetry_enabled) - @patch('samcli.cli.global_config.Path') - @patch('samcli.cli.global_config.click') - @patch('samcli.cli.global_config.os') + @patch("samcli.cli.global_config.Path") + @patch("samcli.cli.global_config.click") + @patch("samcli.cli.global_config.os") def test_error_reading_telemetry_flag(self, mock_os, mock_click, mock_path): gc = GlobalConfig() mock_click.get_app_dir.return_value = "mock/folders" @@ -97,30 +96,31 @@ def test_error_reading_telemetry_flag(self, mock_os, mock_click, mock_path): m = mock_open() m.side_effect = IOError("fail") - with patch('samcli.cli.global_config.open', m): + with patch("samcli.cli.global_config.open", m): self.assertFalse(gc.telemetry_enabled) - @parameterized.expand([ - # Only values of '1' and 1 will enable Telemetry. Everything will disable. - (1, True), - ('1', True), - - (0, False), - ('0', False), - # words true, True, False, False etc will disable telemetry - ('true', False), - ('True', False), - ('False', False) - ]) - @patch('samcli.cli.global_config.os') - @patch('samcli.cli.global_config.click') + @parameterized.expand( + [ + # Only values of '1' and 1 will enable Telemetry. Everything will disable. 
+ (1, True), + ("1", True), + (0, False), + ("0", False), + # words true, True, False, False etc will disable telemetry + ("true", False), + ("True", False), + ("False", False), + ] + ) + @patch("samcli.cli.global_config.os") + @patch("samcli.cli.global_config.click") def test_set_telemetry_through_env_variable(self, env_value, expected_result, mock_click, mock_os): gc = GlobalConfig() mock_os.environ = {"SAM_CLI_TELEMETRY": env_value} mock_os.getenv.return_value = env_value - self.assertEquals(gc.telemetry_enabled, expected_result) + self.assertEqual(gc.telemetry_enabled, expected_result) mock_os.getenv.assert_called_once_with("SAM_CLI_TELEMETRY") diff --git a/tests/unit/cli/test_main.py b/tests/unit/cli/test_main.py index 827960a0f7..486d4154f1 100644 --- a/tests/unit/cli/test_main.py +++ b/tests/unit/cli/test_main.py @@ -6,40 +6,39 @@ class TestCliBase(TestCase): - def test_cli_base(self): """ Just invoke the CLI without any commands and assert that help text was printed :return: """ mock_cfg = mock.Mock() - with mock.patch('samcli.cli.main.global_cfg', mock_cfg): + with mock.patch("samcli.cli.main.global_cfg", mock_cfg): runner = CliRunner() result = runner.invoke(cli, []) - self.assertEquals(result.exit_code, 0) + self.assertEqual(result.exit_code, 0) self.assertTrue("--help" in result.output, "Help text must be printed") self.assertTrue("--debug" in result.output, "--debug option must be present in help text") def test_cli_some_command(self): mock_cfg = mock.Mock() - with mock.patch('samcli.cli.main.global_cfg', mock_cfg): + with mock.patch("samcli.cli.main.global_cfg", mock_cfg): runner = CliRunner() result = runner.invoke(cli, ["local", "generate-event", "s3"]) - self.assertEquals(result.exit_code, 0) + self.assertEqual(result.exit_code, 0) def test_cli_with_debug(self): mock_cfg = mock.Mock() - with mock.patch('samcli.cli.main.global_cfg', mock_cfg): + with mock.patch("samcli.cli.main.global_cfg", mock_cfg): runner = CliRunner() result = runner.invoke(cli, ["local", "generate-event", "s3", "put", "--debug"]) - self.assertEquals(result.exit_code, 0) + self.assertEqual(result.exit_code, 0) - @mock.patch('samcli.cli.main.send_installed_metric') + @mock.patch("samcli.cli.main.send_installed_metric") def test_cli_enable_telemetry_with_prompt(self, send_installed_metric_mock): with mock.patch( - 'samcli.cli.global_config.GlobalConfig.telemetry_enabled', new_callable=mock.PropertyMock + "samcli.cli.global_config.GlobalConfig.telemetry_enabled", new_callable=mock.PropertyMock ) as mock_flag: mock_flag.return_value = None runner = CliRunner() @@ -49,10 +48,10 @@ def test_cli_enable_telemetry_with_prompt(self, send_installed_metric_mock): # If telemetry is enabled, this should be called send_installed_metric_mock.assert_called_once() - @mock.patch('samcli.cli.main.send_installed_metric') + @mock.patch("samcli.cli.main.send_installed_metric") def test_prompt_skipped_when_value_set(self, send_installed_metric_mock): with mock.patch( - 'samcli.cli.global_config.GlobalConfig.telemetry_enabled', new_callable=mock.PropertyMock + "samcli.cli.global_config.GlobalConfig.telemetry_enabled", new_callable=mock.PropertyMock ) as mock_flag: mock_flag.return_value = True runner = CliRunner() diff --git a/tests/unit/cli/test_types.py b/tests/unit/cli/test_types.py index 966534b828..a46b72f9d1 100644 --- a/tests/unit/cli/test_types.py +++ b/tests/unit/cli/test_types.py @@ -1,4 +1,3 @@ - from unittest import TestCase from mock import Mock, ANY from nose_parameterized import parameterized @@ -7,91 +6,70 @@ 
class TestCfnParameterOverridesType(TestCase): - def setUp(self): self.param_type = CfnParameterOverridesType() - @parameterized.expand([ - ("some string"), - - # Key must not contain spaces - ('ParameterKey="Ke y",ParameterValue=Value'), - - # No value - ('ParameterKey=Key,ParameterValue='), - - # No key - ('ParameterKey=,ParameterValue=Value'), - - # Case sensitive - ('parameterkey=Key,ParameterValue=Value'), - - # No space after comma - ('ParameterKey=Key, ParameterValue=Value'), - - # Bad separator - ('ParameterKey:Key,ParameterValue:Value') - ]) + @parameterized.expand( + [ + ("some string"), + # Key must not contain spaces + ('ParameterKey="Ke y",ParameterValue=Value'), + # No value + ("ParameterKey=Key,ParameterValue="), + # No key + ("ParameterKey=,ParameterValue=Value"), + # Case sensitive + ("parameterkey=Key,ParameterValue=Value"), + # No space after comma + ("ParameterKey=Key, ParameterValue=Value"), + # Bad separator + ("ParameterKey:Key,ParameterValue:Value"), + ] + ) def test_must_fail_on_invalid_format(self, input): self.param_type.fail = Mock() self.param_type.convert(input, "param", "ctx") self.param_type.fail.assert_called_with(ANY, "param", "ctx") - @parameterized.expand([ - ( - "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro", - {"KeyPairName": "MyKey", "InstanceType": "t1.micro"}, - - ), - ( - 'ParameterKey="Key",ParameterValue=Val\\ ue', - {"Key": "Val ue"}, - ), - ( - 'ParameterKey="Key",ParameterValue="Val\\"ue"', - {"Key": 'Val"ue'}, - ), - ( - 'ParameterKey=Key,ParameterValue=Value', - {"Key": 'Value'}, - ), - ( - 'ParameterKey=Key,ParameterValue=""', - {"Key": ''}, - ), - ( - # Trailing and leading whitespaces - ' ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2 ', - {"Key": 'Value', 'Key2': 'Value2'}, - ), - ( - # Quotes at the end - 'ParameterKey=Key,ParameterValue=Value\\"', - {"Key": 'Value"'}, - ), - ( - # Quotes at the start - 'ParameterKey=Key,ParameterValue=\\"Value', - {"Key": '"Value'}, - ), - ( - # Value is spacial characters - 'ParameterKey=Key,ParameterValue==-_)(*&^%$#@!`~:;,. ParameterKey=Key2,ParameterValue=Value2', - {"Key": "=-_)(*&^%$#@!`~:;,.", "Key2": 'Value2'}, - ), - ( - 'ParameterKey=Key1230,ParameterValue="{\\"a\\":\\"b\\"}"', - {"Key1230": '{"a":"b"}'}, - ), - - ( - # Must ignore empty inputs - "", - {} - ) - - ]) + @parameterized.expand( + [ + ( + "ParameterKey=KeyPairName,ParameterValue=MyKey ParameterKey=InstanceType,ParameterValue=t1.micro", + {"KeyPairName": "MyKey", "InstanceType": "t1.micro"}, + ), + ('ParameterKey="Key",ParameterValue=Val\\ ue', {"Key": "Val ue"}), + ('ParameterKey="Key",ParameterValue="Val\\"ue"', {"Key": 'Val"ue'}), + ("ParameterKey=Key,ParameterValue=Value", {"Key": "Value"}), + ('ParameterKey=Key,ParameterValue=""', {"Key": ""}), + ( + # Trailing and leading whitespaces + " ParameterKey=Key,ParameterValue=Value ParameterKey=Key2,ParameterValue=Value2 ", + {"Key": "Value", "Key2": "Value2"}, + ), + ( + # Quotes at the end + 'ParameterKey=Key,ParameterValue=Value\\"', + {"Key": 'Value"'}, + ), + ( + # Quotes at the start + 'ParameterKey=Key,ParameterValue=\\"Value', + {"Key": '"Value'}, + ), + ( + # Value is spacial characters + "ParameterKey=Key,ParameterValue==-_)(*&^%$#@!`~:;,. 
ParameterKey=Key2,ParameterValue=Value2", + {"Key": "=-_)(*&^%$#@!`~:;,.", "Key2": "Value2"}, + ), + ('ParameterKey=Key1230,ParameterValue="{\\"a\\":\\"b\\"}"', {"Key1230": '{"a":"b"}'}), + ( + # Must ignore empty inputs + "", + {}, + ), + ] + ) def test_successful_parsing(self, input, expected): result = self.param_type.convert(input, None, None) - self.assertEquals(result, expected, msg="Failed with Input = " + input) + self.assertEqual(result, expected, msg="Failed with Input = " + input) diff --git a/tests/unit/commands/_utils/test_options.py b/tests/unit/commands/_utils/test_options.py index 83e533a71f..44bad654fb 100644 --- a/tests/unit/commands/_utils/test_options.py +++ b/tests/unit/commands/_utils/test_options.py @@ -10,13 +10,12 @@ class TestGetOrDefaultTemplateFileName(TestCase): - def test_must_return_abspath_of_user_provided_value(self): filename = "foo.txt" expected = os.path.abspath(filename) result = get_or_default_template_file_name(None, None, filename, include_build=False) - self.assertEquals(result, expected) + self.assertEqual(result, expected) @patch("samcli.commands._utils.options.os") def test_must_return_yml_extension(self, os_mock): @@ -26,7 +25,7 @@ def test_must_return_yml_extension(self, os_mock): os_mock.path.abspath.return_value = "absPath" result = get_or_default_template_file_name(None, None, _TEMPLATE_OPTION_DEFAULT_VALUE, include_build=False) - self.assertEquals(result, "absPath") + self.assertEqual(result, "absPath") os_mock.path.abspath.assert_called_with(expected) @patch("samcli.commands._utils.options.os") @@ -37,7 +36,7 @@ def test_must_return_yaml_extension(self, os_mock): os_mock.path.abspath.return_value = "absPath" result = get_or_default_template_file_name(None, None, _TEMPLATE_OPTION_DEFAULT_VALUE, include_build=False) - self.assertEquals(result, "absPath") + self.assertEqual(result, "absPath") os_mock.path.abspath.assert_called_with(expected) @patch("samcli.commands._utils.options.os") @@ -49,5 +48,5 @@ def test_must_return_built_template(self, os_mock): os_mock.path.abspath.return_value = "absPath" result = get_or_default_template_file_name(None, None, _TEMPLATE_OPTION_DEFAULT_VALUE, include_build=True) - self.assertEquals(result, "absPath") + self.assertEqual(result, "absPath") os_mock.path.abspath.assert_called_with(expected) diff --git a/tests/unit/commands/_utils/test_template.py b/tests/unit/commands/_utils/test_template.py index 96a24fda46..6d1b8575e6 100644 --- a/tests/unit/commands/_utils/test_template.py +++ b/tests/unit/commands/_utils/test_template.py @@ -1,4 +1,3 @@ - import os import copy import yaml @@ -7,12 +6,16 @@ from mock import patch, mock_open from parameterized import parameterized, param -from samcli.commands._utils.template import get_template_data, _METADATA_WITH_LOCAL_PATHS, \ - _RESOURCES_WITH_LOCAL_PATHS, _update_relative_paths, move_template +from samcli.commands._utils.template import ( + get_template_data, + _METADATA_WITH_LOCAL_PATHS, + _RESOURCES_WITH_LOCAL_PATHS, + _update_relative_paths, + move_template, +) class Test_get_template_data(TestCase): - def test_must_raise_if_file_does_not_exist(self): filename = "filename" @@ -20,7 +23,7 @@ def test_must_raise_if_file_does_not_exist(self): get_template_data(filename) ex = exception_ctx.exception - self.assertEquals(str(ex), "Template file not found at {}".format(filename)) + self.assertEqual(str(ex), "Template file not found at {}".format(filename)) @patch("samcli.commands._utils.template.yaml_parse") @patch("samcli.commands._utils.template.pathlib") @@ -37,15 
+40,12 @@ def test_must_read_file_and_parse(self, pathlib_mock, yaml_parse_mock): with patch("samcli.commands._utils.template.open", m): result = get_template_data(filename) - self.assertEquals(result, parse_result) + self.assertEqual(result, parse_result) - m.assert_called_with(filename, 'r') + m.assert_called_with(filename, "r") yaml_parse_mock.assert_called_with(file_data) - @parameterized.expand([ - param(ValueError()), - param(yaml.YAMLError()) - ]) + @parameterized.expand([param(ValueError()), param(yaml.YAMLError())]) @patch("samcli.commands._utils.template.yaml_parse") @patch("samcli.commands._utils.template.pathlib") def test_must_raise_on_parse_errors(self, exception, pathlib_mock, yaml_parse_mock): @@ -67,7 +67,6 @@ def test_must_raise_on_parse_errors(self, exception, pathlib_mock, yaml_parse_mo class Test_update_relative_paths(TestCase): - def setUp(self): self.s3path = "s3://foo/bar" @@ -78,138 +77,101 @@ def setUp(self): self.expected_result = os.path.join("..", "foo", "bar") - @parameterized.expand( - [(resource_type, props) for resource_type, props in _METADATA_WITH_LOCAL_PATHS.items()] - ) + @parameterized.expand([(resource_type, props) for resource_type, props in _METADATA_WITH_LOCAL_PATHS.items()]) def test_must_update_relative_metadata_paths(self, resource_type, properties): for propname in properties: for path in [self.s3path, self.abspath, self.curpath]: template_dict = { - "Metadata": { - resource_type: { - propname: path - }, - "AWS::Ec2::Instance": { - propname: path - } - }, - "Parameters": { - "a": "b" - } + "Metadata": {resource_type: {propname: path}, "AWS::Ec2::Instance": {propname: path}}, + "Parameters": {"a": "b"}, } expected_template_dict = copy.deepcopy(template_dict) if path == self.curpath: - expected_template_dict["Metadata"][resource_type][propname] = \ - self.expected_result + expected_template_dict["Metadata"][resource_type][propname] = self.expected_result result = _update_relative_paths(template_dict, self.src, self.dest) self.maxDiff = None - self.assertEquals(result, expected_template_dict) + self.assertEqual(result, expected_template_dict) - @parameterized.expand( - [(resource_type, props) for resource_type, props in _RESOURCES_WITH_LOCAL_PATHS.items()] - ) + @parameterized.expand([(resource_type, props) for resource_type, props in _RESOURCES_WITH_LOCAL_PATHS.items()]) def test_must_update_relative_resource_paths(self, resource_type, properties): for propname in properties: template_dict = { "Resources": { - "MyResourceWithRelativePath": { - "Type": resource_type, - "Properties": { - propname: self.curpath - } - }, - "MyResourceWithS3Path": { - "Type": resource_type, - "Properties": { - propname: self.s3path - } - }, - "MyResourceWithAbsolutePath": { - "Type": resource_type, - "Properties": { - propname: self.abspath - } - }, + "MyResourceWithRelativePath": {"Type": resource_type, "Properties": {propname: self.curpath}}, + "MyResourceWithS3Path": {"Type": resource_type, "Properties": {propname: self.s3path}}, + "MyResourceWithAbsolutePath": {"Type": resource_type, "Properties": {propname: self.abspath}}, "MyResourceWithInvalidPath": { "Type": resource_type, "Properties": { # Path is not a string propname: {"foo": "bar"} - } - }, - "MyResourceWithoutProperties": { - "Type": resource_type - }, - "UnsupportedResourceType": { - "Type": "AWS::Ec2::Instance", - "Properties": { - "Code": "bar" - } + }, }, + "MyResourceWithoutProperties": {"Type": resource_type}, + "UnsupportedResourceType": {"Type": "AWS::Ec2::Instance", "Properties": {"Code": 
"bar"}}, "ResourceWithoutType": {"foo": "bar"}, }, - "Parameters": { - "a": "b" - } + "Parameters": {"a": "b"}, } expected_template_dict = copy.deepcopy(template_dict) - expected_template_dict["Resources"]["MyResourceWithRelativePath"]["Properties"][propname] = \ - self.expected_result + expected_template_dict["Resources"]["MyResourceWithRelativePath"]["Properties"][ + propname + ] = self.expected_result result = _update_relative_paths(template_dict, self.src, self.dest) self.maxDiff = None - self.assertEquals(result, expected_template_dict) + self.assertEqual(result, expected_template_dict) def test_must_update_aws_include_also(self): template_dict = { "Resources": {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": self.curpath}}}, "list_prop": [ "a", - 1, 2, 3, + 1, + 2, + 3, {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": self.curpath}}}, - # S3 path {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": self.s3path}}}, ], "Fn::Transform": {"Name": "AWS::OtherTransform"}, "key1": {"Fn::Transform": "Invalid value"}, - "key2": {"Fn::Transform": {"no": "name"}} + "key2": {"Fn::Transform": {"no": "name"}}, } expected_template_dict = { "Resources": {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": self.expected_result}}}, "list_prop": [ "a", - 1, 2, 3, + 1, + 2, + 3, {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": self.expected_result}}}, # S3 path {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": self.s3path}}}, ], "Fn::Transform": {"Name": "AWS::OtherTransform"}, "key1": {"Fn::Transform": "Invalid value"}, - "key2": {"Fn::Transform": {"no": "name"}} + "key2": {"Fn::Transform": {"no": "name"}}, } result = _update_relative_paths(template_dict, self.src, self.dest) self.maxDiff = None - self.assertEquals(result, expected_template_dict) + self.assertEqual(result, expected_template_dict) class Test_move_template(TestCase): - @patch("samcli.commands._utils.template._update_relative_paths") @patch("samcli.commands._utils.template.yaml_dump") - def test_must_update_and_write_template(self, - yaml_dump_mock, - update_relative_paths_mock): + def test_must_update_and_write_template(self, yaml_dump_mock, update_relative_paths_mock): template_dict = {"a": "b"} # Moving from /tmp/original/root/template.yaml to /tmp/new/root/othertemplate.yaml @@ -223,9 +185,9 @@ def test_must_update_and_write_template(self, with patch("samcli.commands._utils.template.open", m): move_template(source, dest, template_dict) - update_relative_paths_mock.assert_called_once_with(template_dict, - os.path.dirname(source), - os.path.dirname(dest)) + update_relative_paths_mock.assert_called_once_with( + template_dict, os.path.dirname(source), os.path.dirname(dest) + ) yaml_dump_mock.assert_called_with(modified_template) - m.assert_called_with(dest, 'w') + m.assert_called_with(dest, "w") m.return_value.write.assert_called_with(dumped_yaml) diff --git a/tests/unit/commands/buildcmd/test_build_context.py b/tests/unit/commands/buildcmd/test_build_context.py index cf68e2ed9d..7d087336b7 100644 --- a/tests/unit/commands/buildcmd/test_build_context.py +++ b/tests/unit/commands/buildcmd/test_build_context.py @@ -3,16 +3,17 @@ from mock import patch, Mock from samcli.commands.build.build_context import BuildContext +from samcli.commands.build.exceptions import InvalidBuildDirException class TestBuildContext__enter__(TestCase): - @patch("samcli.commands.build.build_context.get_template_data") 
@patch("samcli.commands.build.build_context.SamFunctionProvider") @patch("samcli.commands.build.build_context.pathlib") @patch("samcli.commands.build.build_context.ContainerManager") - def test_must_setup_context(self, ContainerManagerMock, pathlib_mock, SamFunctionProviderMock, - get_template_data_mock): + def test_must_setup_context( + self, ContainerManagerMock, pathlib_mock, SamFunctionProviderMock, get_template_data_mock + ): template_dict = get_template_data_mock.return_value = "template dict" func_provider_mock = Mock() @@ -21,17 +22,19 @@ def test_must_setup_context(self, ContainerManagerMock, pathlib_mock, SamFunctio base_dir = pathlib_mock.Path.return_value.resolve.return_value.parent = "basedir" container_mgr_mock = ContainerManagerMock.return_value = Mock() - context = BuildContext("function_identifier", - "template_file", - None, # No base dir is provided - "build_dir", - manifest_path="manifest_path", - clean=True, - use_container=True, - docker_network="network", - parameter_overrides="overrides", - skip_pull_image=True, - mode="buildmode") + context = BuildContext( + "function_identifier", + "template_file", + None, # No base dir is provided + "build_dir", + manifest_path="manifest_path", + clean=True, + use_container=True, + docker_network="network", + parameter_overrides="overrides", + skip_pull_image=True, + mode="buildmode", + ) setup_build_dir_mock = Mock() build_dir_result = setup_build_dir_mock.return_value = "my/new/build/dir" context._setup_build_dir = setup_build_dir_mock @@ -39,32 +42,32 @@ def test_must_setup_context(self, ContainerManagerMock, pathlib_mock, SamFunctio # call the enter method result = context.__enter__() - self.assertEquals(result, context) # __enter__ must return self - self.assertEquals(context.template_dict, template_dict) - self.assertEquals(context.function_provider, funcprovider) - self.assertEquals(context.base_dir, base_dir) - self.assertEquals(context.container_manager, container_mgr_mock) - self.assertEquals(context.build_dir, build_dir_result) - self.assertEquals(context.use_container, True) - self.assertEquals(context.output_template_path, os.path.join(build_dir_result, "template.yaml")) - self.assertEquals(context.manifest_path_override, os.path.abspath("manifest_path")) + self.assertEqual(result, context) # __enter__ must return self + self.assertEqual(context.template_dict, template_dict) + self.assertEqual(context.function_provider, funcprovider) + self.assertEqual(context.base_dir, base_dir) + self.assertEqual(context.container_manager, container_mgr_mock) + self.assertEqual(context.build_dir, build_dir_result) + self.assertEqual(context.use_container, True) + self.assertEqual(context.output_template_path, os.path.join(build_dir_result, "template.yaml")) + self.assertEqual(context.manifest_path_override, os.path.abspath("manifest_path")) self.assertEqual(context.mode, "buildmode") - self.assertEquals(context.functions_to_build, ["function to build"]) + self.assertEqual(context.functions_to_build, ["function to build"]) get_template_data_mock.assert_called_once_with("template_file") SamFunctionProviderMock.assert_called_once_with(template_dict, "overrides") pathlib_mock.Path.assert_called_once_with("template_file") setup_build_dir_mock.assert_called_with("build_dir", True) - ContainerManagerMock.assert_called_once_with(docker_network_id="network", - skip_pull_image=True) + ContainerManagerMock.assert_called_once_with(docker_network_id="network", skip_pull_image=True) 
func_provider_mock.get.assert_called_once_with("function_identifier") @patch("samcli.commands.build.build_context.get_template_data") @patch("samcli.commands.build.build_context.SamFunctionProvider") @patch("samcli.commands.build.build_context.pathlib") @patch("samcli.commands.build.build_context.ContainerManager") - def test_must_return_many_functions_to_build(self, ContainerManagerMock, pathlib_mock, SamFunctionProviderMock, - get_template_data_mock): + def test_must_return_many_functions_to_build( + self, ContainerManagerMock, pathlib_mock, SamFunctionProviderMock, get_template_data_mock + ): template_dict = get_template_data_mock.return_value = "template dict" func_provider_mock = Mock() func_provider_mock.get_all.return_value = ["function to build", "and another function"] @@ -72,17 +75,19 @@ def test_must_return_many_functions_to_build(self, ContainerManagerMock, pathlib base_dir = pathlib_mock.Path.return_value.resolve.return_value.parent = "basedir" container_mgr_mock = ContainerManagerMock.return_value = Mock() - context = BuildContext(None, - "template_file", - None, # No base dir is provided - "build_dir", - manifest_path="manifest_path", - clean=True, - use_container=True, - docker_network="network", - parameter_overrides="overrides", - skip_pull_image=True, - mode="buildmode") + context = BuildContext( + None, + "template_file", + None, # No base dir is provided + "build_dir", + manifest_path="manifest_path", + clean=True, + use_container=True, + docker_network="network", + parameter_overrides="overrides", + skip_pull_image=True, + mode="buildmode", + ) setup_build_dir_mock = Mock() build_dir_result = setup_build_dir_mock.return_value = "my/new/build/dir" context._setup_build_dir = setup_build_dir_mock @@ -90,35 +95,35 @@ def test_must_return_many_functions_to_build(self, ContainerManagerMock, pathlib # call the enter method result = context.__enter__() - self.assertEquals(result, context) # __enter__ must return self - self.assertEquals(context.template_dict, template_dict) - self.assertEquals(context.function_provider, funcprovider) - self.assertEquals(context.base_dir, base_dir) - self.assertEquals(context.container_manager, container_mgr_mock) - self.assertEquals(context.build_dir, build_dir_result) - self.assertEquals(context.use_container, True) - self.assertEquals(context.output_template_path, os.path.join(build_dir_result, "template.yaml")) - self.assertEquals(context.manifest_path_override, os.path.abspath("manifest_path")) + self.assertEqual(result, context) # __enter__ must return self + self.assertEqual(context.template_dict, template_dict) + self.assertEqual(context.function_provider, funcprovider) + self.assertEqual(context.base_dir, base_dir) + self.assertEqual(context.container_manager, container_mgr_mock) + self.assertEqual(context.build_dir, build_dir_result) + self.assertEqual(context.use_container, True) + self.assertEqual(context.output_template_path, os.path.join(build_dir_result, "template.yaml")) + self.assertEqual(context.manifest_path_override, os.path.abspath("manifest_path")) self.assertEqual(context.mode, "buildmode") - self.assertEquals(context.functions_to_build, ["function to build", "and another function"]) + self.assertEqual(context.functions_to_build, ["function to build", "and another function"]) get_template_data_mock.assert_called_once_with("template_file") SamFunctionProviderMock.assert_called_once_with(template_dict, "overrides") pathlib_mock.Path.assert_called_once_with("template_file") 
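
Several of these assertions pre-configure nested call results in a single line, such as pathlib_mock.Path.return_value.resolve.return_value.parent. That works because Mock objects create attributes on first access; a small sketch of the idea, independent of SAM CLI's code:

from unittest.mock import Mock

# Configure the result of pathlib.Path(...).resolve().parent up front.
pathlib_mock = Mock()
pathlib_mock.Path.return_value.resolve.return_value.parent = "basedir"

path = pathlib_mock.Path("template_file")  # returns Path.return_value
assert path.resolve().parent == "basedir"
pathlib_mock.Path.assert_called_once_with("template_file")
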
setup_build_dir_mock.assert_called_with("build_dir", True) - ContainerManagerMock.assert_called_once_with(docker_network_id="network", - skip_pull_image=True) + ContainerManagerMock.assert_called_once_with(docker_network_id="network", skip_pull_image=True) func_provider_mock.get_all.assert_called_once() class TestBuildContext_setup_build_dir(TestCase): - @patch("samcli.commands.build.build_context.shutil") @patch("samcli.commands.build.build_context.os") @patch("samcli.commands.build.build_context.pathlib") def test_build_dir_exists_with_non_empty_dir(self, pathlib_patch, os_patch, shutil_patch): path_mock = Mock() pathlib_patch.Path.return_value = path_mock + os_patch.path.abspath.side_effect = ["/somepath", "/cwd/path"] + path_mock.cwd.return_value = "/cwd/path" os_patch.listdir.return_value = True path_mock.resolve.return_value = "long/full/path" path_mock.exists.return_value = True @@ -126,13 +131,15 @@ def test_build_dir_exists_with_non_empty_dir(self, pathlib_patch, os_patch, shut full_build_path = BuildContext._setup_build_dir(build_dir, True) - self.assertEquals(full_build_path, "long/full/path") + self.assertEqual(full_build_path, "long/full/path") + self.assertEqual(os_patch.path.abspath.call_count, 2) os_patch.listdir.assert_called_once() path_mock.exists.assert_called_once() path_mock.mkdir.assert_called_once_with(mode=0o755, parents=True, exist_ok=True) pathlib_patch.Path.assert_called_once_with(build_dir) shutil_patch.rmtree.assert_called_once_with(build_dir) + pathlib_patch.Path.cwd.assert_called_once() @patch("samcli.commands.build.build_context.shutil") @patch("samcli.commands.build.build_context.os") @@ -141,19 +148,23 @@ def test_build_dir_exists_with_empty_dir(self, pathlib_patch, os_patch, shutil_p path_mock = Mock() pathlib_patch.Path.return_value = path_mock os_patch.listdir.return_value = False + os_patch.path.abspath.side_effect = ["/somepath", "/cwd/path"] + path_mock.cwd.return_value = "/cwd/path" path_mock.resolve.return_value = "long/full/path" path_mock.exists.return_value = True build_dir = "/somepath" full_build_path = BuildContext._setup_build_dir(build_dir, True) - self.assertEquals(full_build_path, "long/full/path") + self.assertEqual(full_build_path, "long/full/path") + self.assertEqual(os_patch.path.abspath.call_count, 2) os_patch.listdir.assert_called_once() path_mock.exists.assert_called_once() path_mock.mkdir.assert_called_once_with(mode=0o755, parents=True, exist_ok=True) pathlib_patch.Path.assert_called_once_with(build_dir) shutil_patch.rmtree.assert_not_called() + pathlib_patch.Path.cwd.assert_called_once() @patch("samcli.commands.build.build_context.shutil") @patch("samcli.commands.build.build_context.os") @@ -161,19 +172,23 @@ def test_build_dir_exists_with_empty_dir(self, pathlib_patch, os_patch, shutil_p def test_build_dir_does_not_exist(self, pathlib_patch, os_patch, shutil_patch): path_mock = Mock() pathlib_patch.Path.return_value = path_mock + os_patch.path.abspath.side_effect = ["/somepath", "/cwd/path"] + path_mock.cwd.return_value = "/cwd/path" path_mock.resolve.return_value = "long/full/path" path_mock.exists.return_value = False build_dir = "/somepath" full_build_path = BuildContext._setup_build_dir(build_dir, True) - self.assertEquals(full_build_path, "long/full/path") + self.assertEqual(full_build_path, "long/full/path") + self.assertEqual(os_patch.path.abspath.call_count, 2) os_patch.listdir.assert_not_called() path_mock.exists.assert_called_once() path_mock.mkdir.assert_called_once_with(mode=0o755, parents=True, exist_ok=True) 
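
The new abspath/cwd assertions, together with the InvalidBuildDirException test added below, pin down a guard against building into the current working directory (which a clean build would wipe). A rough, hypothetical sketch of that kind of check, not the actual _setup_build_dir implementation:

import os
import pathlib
import shutil


class InvalidBuildDirException(Exception):
    # Stand-in for samcli.commands.build.exceptions.InvalidBuildDirException.
    pass


def setup_build_dir(build_dir, clean):
    build_path = pathlib.Path(build_dir)

    # Refuse to build into the directory the user is running from; with
    # clean=True the build would otherwise delete their working tree.
    if os.path.abspath(build_dir) == os.path.abspath(str(pathlib.Path.cwd())):
        raise InvalidBuildDirException("Build directory cannot be the current working directory")

    if build_path.exists() and os.listdir(build_dir) and clean:
        shutil.rmtree(build_dir)

    build_path.mkdir(mode=0o755, parents=True, exist_ok=True)
    return str(build_path.resolve())
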
pathlib_patch.Path.assert_called_once_with(build_dir) shutil_patch.rmtree.assert_not_called() + pathlib_patch.Path.cwd.assert_called_once() @patch("samcli.commands.build.build_context.shutil") @patch("samcli.commands.build.build_context.os") @@ -181,6 +196,8 @@ def test_build_dir_does_not_exist(self, pathlib_patch, os_patch, shutil_patch): def test_non_clean_build_when_dir_exists_with_non_empty_dir(self, pathlib_patch, os_patch, shutil_patch): path_mock = Mock() pathlib_patch.Path.return_value = path_mock + os_patch.path.abspath.side_effect = ["/somepath", "/cwd/path"] + path_mock.cwd.return_value = "/cwd/path" os_patch.listdir.return_value = True path_mock.resolve.return_value = "long/full/path" path_mock.exists.return_value = True @@ -188,10 +205,34 @@ def test_non_clean_build_when_dir_exists_with_non_empty_dir(self, pathlib_patch, full_build_path = BuildContext._setup_build_dir(build_dir, False) - self.assertEquals(full_build_path, "long/full/path") + self.assertEqual(full_build_path, "long/full/path") + self.assertEqual(os_patch.path.abspath.call_count, 2) os_patch.listdir.assert_called_once() path_mock.exists.assert_called_once() path_mock.mkdir.assert_called_once_with(mode=0o755, parents=True, exist_ok=True) pathlib_patch.Path.assert_called_once_with(build_dir) shutil_patch.rmtree.assert_not_called() + pathlib_patch.Path.cwd.assert_called_once() + + @patch("samcli.commands.build.build_context.shutil") + @patch("samcli.commands.build.build_context.os") + @patch("samcli.commands.build.build_context.pathlib") + def test_when_build_dir_is_cwd_raises_exception(self, pathlib_patch, os_patch, shutil_patch): + path_mock = Mock() + pathlib_patch.Path.return_value = path_mock + os_patch.path.abspath.side_effect = ["/somepath", "/somepath"] + path_mock.cwd.return_value = "/somepath" + build_dir = "/somepath" + + with self.assertRaises(InvalidBuildDirException): + BuildContext._setup_build_dir(build_dir, True) + + self.assertEqual(os_patch.path.abspath.call_count, 2) + + os_patch.listdir.assert_not_called() + path_mock.exists.assert_not_called() + path_mock.mkdir.assert_not_called() + pathlib_patch.Path.assert_called_once_with(build_dir) + shutil_patch.rmtree.assert_not_called() + pathlib_patch.Path.cwd.assert_called_once() diff --git a/tests/unit/commands/buildcmd/test_command.py b/tests/unit/commands/buildcmd/test_command.py index 1df0bbf946..f8008238a3 100644 --- a/tests/unit/commands/buildcmd/test_command.py +++ b/tests/unit/commands/buildcmd/test_command.py @@ -13,16 +13,11 @@ class TestDoCli(TestCase): - @patch("samcli.commands.build.command.BuildContext") @patch("samcli.commands.build.command.ApplicationBuilder") @patch("samcli.commands.build.command.move_template") @patch("samcli.commands.build.command.os") - def test_must_succeed_build(self, - os_mock, - move_template_mock, - ApplicationBuilderMock, - BuildContextMock): + def test_must_succeed_build(self, os_mock, move_template_mock, ApplicationBuilderMock, BuildContextMock): ctx_mock = Mock() BuildContextMock.return_value.__enter__ = Mock() @@ -31,28 +26,43 @@ def test_must_succeed_build(self, artifacts = builder_mock.build.return_value = "artifacts" modified_template = builder_mock.update_template.return_value = "modified template" - do_cli("function_identifier", "template", "base_dir", "build_dir", "clean", "use_container", - "manifest_path", "docker_network", "skip_pull", "parameter_overrides", "mode") - - ApplicationBuilderMock.assert_called_once_with(ctx_mock.functions_to_build, - ctx_mock.build_dir, - ctx_mock.base_dir, - 
manifest_path_override=ctx_mock.manifest_path_override, - container_manager=ctx_mock.container_manager, - mode=ctx_mock.mode) + do_cli( + "function_identifier", + "template", + "base_dir", + "build_dir", + "clean", + "use_container", + "manifest_path", + "docker_network", + "skip_pull", + "parameter_overrides", + "mode", + ) + + ApplicationBuilderMock.assert_called_once_with( + ctx_mock.functions_to_build, + ctx_mock.build_dir, + ctx_mock.base_dir, + manifest_path_override=ctx_mock.manifest_path_override, + container_manager=ctx_mock.container_manager, + mode=ctx_mock.mode, + ) builder_mock.build.assert_called_once() - builder_mock.update_template.assert_called_once_with(ctx_mock.template_dict, - ctx_mock.original_template_path, - artifacts) - move_template_mock.assert_called_once_with(ctx_mock.original_template_path, - ctx_mock.output_template_path, - modified_template) - - @parameterized.expand([ - (UnsupportedRuntimeException(), ), - (BuildError(), ), - (UnsupportedBuilderLibraryVersionError(container_name="name", error_msg="msg"), ) - ]) + builder_mock.update_template.assert_called_once_with( + ctx_mock.template_dict, ctx_mock.original_template_path, artifacts + ) + move_template_mock.assert_called_once_with( + ctx_mock.original_template_path, ctx_mock.output_template_path, modified_template + ) + + @parameterized.expand( + [ + (UnsupportedRuntimeException(),), + (BuildError(),), + (UnsupportedBuilderLibraryVersionError(container_name="name", error_msg="msg"),), + ] + ) @patch("samcli.commands.build.command.BuildContext") @patch("samcli.commands.build.command.ApplicationBuilder") def test_must_catch_known_exceptions(self, exception, ApplicationBuilderMock, BuildContextMock): @@ -65,10 +75,21 @@ def test_must_catch_known_exceptions(self, exception, ApplicationBuilderMock, Bu builder_mock.build.side_effect = exception with self.assertRaises(UserException) as ctx: - do_cli("function_identifier", "template", "base_dir", "build_dir", "clean", "use_container", - "manifest_path", "docker_network", "skip_pull", "parameteroverrides", "mode") - - self.assertEquals(str(ctx.exception), str(exception)) + do_cli( + "function_identifier", + "template", + "base_dir", + "build_dir", + "clean", + "use_container", + "manifest_path", + "docker_network", + "skip_pull", + "parameteroverrides", + "mode", + ) + + self.assertEqual(str(ctx.exception), str(exception)) @patch("samcli.commands.build.command.BuildContext") @patch("samcli.commands.build.command.ApplicationBuilder") @@ -76,17 +97,27 @@ def test_must_catch_function_not_found_exception(self, ApplicationBuilderMock, B ctx_mock = Mock() BuildContextMock.return_value.__enter__ = Mock() BuildContextMock.return_value.__enter__.return_value = ctx_mock - ApplicationBuilderMock.side_effect = FunctionNotFound('Function Not Found') + ApplicationBuilderMock.side_effect = FunctionNotFound("Function Not Found") with self.assertRaises(UserException) as ctx: - do_cli("function_identifier", "template", "base_dir", "build_dir", "clean", "use_container", - "manifest_path", "docker_network", "skip_pull", "parameteroverrides", "mode") - - self.assertEquals(str(ctx.exception), 'Function Not Found') + do_cli( + "function_identifier", + "template", + "base_dir", + "build_dir", + "clean", + "use_container", + "manifest_path", + "docker_network", + "skip_pull", + "parameteroverrides", + "mode", + ) + + self.assertEqual(str(ctx.exception), "Function Not Found") class TestGetModeValueFromEnvvar(TestCase): - def setUp(self): self.original = os.environ.copy() self.varname = 
"SOME_ENVVAR" @@ -100,7 +131,7 @@ def test_must_get_value(self): os.environ[self.varname] = "A" result = _get_mode_value_from_envvar(self.varname, self.choices) - self.assertEquals(result, "A") + self.assertEqual(result, "A") def test_must_raise_if_value_not_in_choice(self): diff --git a/tests/unit/commands/init/test_cli.py b/tests/unit/commands/init/test_cli.py index 25fe7142c3..8d15a2c274 100644 --- a/tests/unit/commands/init/test_cli.py +++ b/tests/unit/commands/init/test_cli.py @@ -7,7 +7,6 @@ class TestCli(TestCase): - def setUp(self): self.ctx = None self.location = None @@ -22,31 +21,41 @@ def test_init_cli(self, generate_project_patch): # GIVEN generate_project successfully created a project # WHEN a project name has been passed init_cli( - ctx=self.ctx, location=self.location, runtime=self.runtime, - dependency_manager=self.dependency_manager, output_dir=self.output_dir, - name=self.name, no_input=self.no_input) + ctx=self.ctx, + location=self.location, + runtime=self.runtime, + dependency_manager=self.dependency_manager, + output_dir=self.output_dir, + name=self.name, + no_input=self.no_input, + ) # THEN we should receive no errors generate_project_patch.assert_called_once_with( - self.location, self.runtime, self.dependency_manager, - self.output_dir, self.name, self.no_input) + self.location, self.runtime, self.dependency_manager, self.output_dir, self.name, self.no_input + ) @patch("samcli.commands.init.generate_project") def test_init_cli_generate_project_fails(self, generate_project_patch): # GIVEN generate_project fails to create a project generate_project_patch.side_effect = GenerateProjectFailedError( - project=self.name, provider_error="Something wrong happened" + project=self.name, provider_error="Something wrong happened" ) # WHEN generate_project returns an error # THEN we should receive a GenerateProjectFailedError Exception with self.assertRaises(UserException): init_cli( - self.ctx, location="self.location", runtime=self.runtime, - dependency_manager=self.dependency_manager, - output_dir=self.output_dir, name=self.name, no_input=self.no_input) + self.ctx, + location="self.location", + runtime=self.runtime, + dependency_manager=self.dependency_manager, + output_dir=self.output_dir, + name=self.name, + no_input=self.no_input, + ) generate_project_patch.assert_called_with( - self.location, self.runtime, self.dependency_manager, - self.output_dir, self.name, self.no_input) + self.location, self.runtime, self.dependency_manager, self.output_dir, self.name, self.no_input + ) diff --git a/tests/unit/commands/local/cli_common/test_invoke_context.py b/tests/unit/commands/local/cli_common/test_invoke_context.py index 48f35b2023..14e60e9030 100644 --- a/tests/unit/commands/local/cli_common/test_invoke_context.py +++ b/tests/unit/commands/local/cli_common/test_invoke_context.py @@ -12,7 +12,6 @@ class TestInvokeContext__enter__(TestCase): - @patch("samcli.commands.local.cli_common.invoke_context.SamFunctionProvider") def test_must_read_from_necessary_files(self, SamFunctionProviderMock): function_provider = Mock() @@ -23,19 +22,21 @@ def test_must_read_from_necessary_files(self, SamFunctionProviderMock): env_vars_file = "env_vars_file" log_file = "log_file" - invoke_context = InvokeContext(template_file=template_file, - function_identifier="id", - env_vars_file=env_vars_file, - docker_volume_basedir="volumedir", - docker_network="network", - log_file=log_file, - skip_pull_image=True, - debug_port=1111, - debugger_path="path-to-debugger", - debug_args='args', - 
parameter_overrides={}, - aws_region="region", - aws_profile="profile") + invoke_context = InvokeContext( + template_file=template_file, + function_identifier="id", + env_vars_file=env_vars_file, + docker_volume_basedir="volumedir", + docker_network="network", + log_file=log_file, + skip_pull_image=True, + debug_port=1111, + debugger_path="path-to-debugger", + debug_args="args", + parameter_overrides={}, + aws_region="region", + aws_profile="profile", + ) template_dict = "template_dict" invoke_context._get_template_data = Mock() @@ -86,8 +87,13 @@ def test_must_use_container_manager_to_check_docker_connectivity(self, SamFuncti container_manager_mock = Mock() - with patch.object(type(container_manager_mock), "is_docker_reachable", - create=True, new_callable=PropertyMock, return_value=True) as is_docker_reachable_mock: + with patch.object( + type(container_manager_mock), + "is_docker_reachable", + create=True, + new_callable=PropertyMock, + return_value=True, + ) as is_docker_reachable_mock: invoke_context._get_container_manager = Mock() invoke_context._get_container_manager.return_value = container_manager_mock @@ -106,8 +112,13 @@ def test_must_raise_if_docker_is_not_reachable(self, SamFunctionProviderMock): container_manager_mock = Mock() - with patch.object(type(container_manager_mock), "is_docker_reachable", - create=True, new_callable=PropertyMock, return_value=False): + with patch.object( + type(container_manager_mock), + "is_docker_reachable", + create=True, + new_callable=PropertyMock, + return_value=False, + ): invoke_context._get_container_manager = Mock() invoke_context._get_container_manager.return_value = container_manager_mock @@ -115,12 +126,13 @@ def test_must_raise_if_docker_is_not_reachable(self, SamFunctionProviderMock): with self.assertRaises(InvokeContextException) as ex_ctx: invoke_context.__enter__() - self.assertEqual("Running AWS SAM projects locally requires Docker. Have you got it installed?", - str(ex_ctx.exception)) + self.assertEqual( + "Running AWS SAM projects locally requires Docker. 
Have you got it installed?", + str(ex_ctx.exception), + ) class TestInvokeContext__exit__(TestCase): - def test_must_close_opened_logfile(self): context = InvokeContext(template_file="template") handle_mock = Mock() @@ -151,30 +163,31 @@ def test_must_work_in_with_statement(self, ExitMock, EnterMock): context_obj = Mock() EnterMock.return_value = context_obj - with InvokeContext(template_file="template_file", - function_identifier="id", - env_vars_file="env_vars_file", - docker_volume_basedir="volumedir", - docker_network="network", - log_file="log_file", - skip_pull_image=True, - debug_port=1111, - debugger_path="path-to-debugger", - debug_args='args', - aws_profile="profile") as context: - self.assertEquals(context_obj, context) + with InvokeContext( + template_file="template_file", + function_identifier="id", + env_vars_file="env_vars_file", + docker_volume_basedir="volumedir", + docker_network="network", + log_file="log_file", + skip_pull_image=True, + debug_port=1111, + debugger_path="path-to-debugger", + debug_args="args", + aws_profile="profile", + ) as context: + self.assertEqual(context_obj, context) EnterMock.assert_called_with() - self.assertEquals(1, ExitMock.call_count) + self.assertEqual(1, ExitMock.call_count) class TestInvokeContext_function_name_property(TestCase): - def test_must_return_function_name_if_present(self): id = "id" context = InvokeContext(template_file="template_file", function_identifier=id) - self.assertEquals(id, context.function_name) + self.assertEqual(id, context.function_name) def test_must_return_one_function_from_template(self): context = InvokeContext(template_file="template_file") @@ -184,7 +197,7 @@ def test_must_return_one_function_from_template(self): context._function_provider = Mock() context._function_provider.get_all.return_value = [function] # Provider returns only one function - self.assertEquals("myname", context.function_name) + self.assertEqual("myname", context.function_name) def test_must_raise_if_more_than_one_function(self): context = InvokeContext(template_file="template_file") @@ -197,33 +210,31 @@ def test_must_raise_if_more_than_one_function(self): class TestInvokeContext_local_lambda_runner(TestCase): - def setUp(self): - self.context = InvokeContext(template_file="template_file", - function_identifier="id", - env_vars_file="env_vars_file", - docker_volume_basedir="volumedir", - docker_network="network", - log_file="log_file", - skip_pull_image=True, - force_image_build=True, - debug_port=1111, - debugger_path="path-to-debugger", - debug_args='args', - aws_profile="profile", - aws_region="region") + self.context = InvokeContext( + template_file="template_file", + function_identifier="id", + env_vars_file="env_vars_file", + docker_volume_basedir="volumedir", + docker_network="network", + log_file="log_file", + skip_pull_image=True, + force_image_build=True, + debug_port=1111, + debugger_path="path-to-debugger", + debug_args="args", + aws_profile="profile", + aws_region="region", + ) @patch("samcli.commands.local.cli_common.invoke_context.LambdaImage") @patch("samcli.commands.local.cli_common.invoke_context.LayerDownloader") @patch("samcli.commands.local.cli_common.invoke_context.LambdaRuntime") @patch("samcli.commands.local.cli_common.invoke_context.LocalLambdaRunner") @patch("samcli.commands.local.cli_common.invoke_context.SamFunctionProvider") - def test_must_create_runner(self, - SamFunctionProviderMock, - LocalLambdaMock, - LambdaRuntimeMock, - download_layers_mock, - lambda_image_patch): + def test_must_create_runner( + 
self, SamFunctionProviderMock, LocalLambdaMock, LambdaRuntimeMock, download_layers_mock, lambda_image_patch + ): runtime_mock = Mock() LambdaRuntimeMock.return_value = runtime_mock @@ -252,27 +263,29 @@ def test_must_create_runner(self, with self.context: result = self.context.local_lambda_runner - self.assertEquals(result, runner_mock) + self.assertEqual(result, runner_mock) LambdaRuntimeMock.assert_called_with(container_manager_mock, image_mock) lambda_image_patch.assert_called_once_with(download_mock, True, True) - LocalLambdaMock.assert_called_with(local_runtime=runtime_mock, - function_provider=ANY, - cwd=cwd, - debug_context=None, - env_vars_values=ANY, - aws_profile="profile", - aws_region="region") + LocalLambdaMock.assert_called_with( + local_runtime=runtime_mock, + function_provider=ANY, + cwd=cwd, + debug_context=None, + env_vars_values=ANY, + aws_profile="profile", + aws_region="region", + ) class TestInvokeContext_stdout_property(TestCase): - @patch.object(InvokeContext, "__exit__") @patch("samcli.commands.local.cli_common.invoke_context.osutils.stdout") @patch("samcli.commands.local.cli_common.invoke_context.StreamWriter") @patch("samcli.commands.local.cli_common.invoke_context.SamFunctionProvider") - def test_must_enable_auto_flush_if_debug(self, SamFunctionProviderMock, StreamWriterMock, - osutils_stdout_mock, ExitMock): + def test_must_enable_auto_flush_if_debug( + self, SamFunctionProviderMock, StreamWriterMock, osutils_stdout_mock, ExitMock + ): context = InvokeContext(template_file="template", debug_port=6000) @@ -293,9 +306,9 @@ def test_must_enable_auto_flush_if_debug(self, SamFunctionProviderMock, StreamWr @patch("samcli.commands.local.cli_common.invoke_context.osutils.stdout") @patch("samcli.commands.local.cli_common.invoke_context.StreamWriter") @patch("samcli.commands.local.cli_common.invoke_context.SamFunctionProvider") - def test_must_not_enable_auto_flush_if_not_debug(self, - SamFunctionProviderMock, StreamWriterMock, - osutils_stdout_mock, ExitMock): + def test_must_not_enable_auto_flush_if_not_debug( + self, SamFunctionProviderMock, StreamWriterMock, osutils_stdout_mock, ExitMock + ): context = InvokeContext(template_file="template") @@ -316,9 +329,9 @@ def test_must_not_enable_auto_flush_if_not_debug(self, @patch("samcli.commands.local.cli_common.invoke_context.osutils.stdout") @patch("samcli.commands.local.cli_common.invoke_context.StreamWriter") @patch("samcli.commands.local.cli_common.invoke_context.SamFunctionProvider") - def test_must_use_stdout_if_no_log_file_handle(self, - SamFunctionProviderMock, StreamWriterMock, - osutils_stdout_mock, ExitMock): + def test_must_use_stdout_if_no_log_file_handle( + self, SamFunctionProviderMock, StreamWriterMock, osutils_stdout_mock, ExitMock + ): stream_writer_mock = Mock() StreamWriterMock.return_value = stream_writer_mock @@ -370,13 +383,13 @@ def test_must_use_log_file_handle(self, StreamWriterMock, SamFunctionProviderMoc class TestInvokeContext_stderr_property(TestCase): - @patch.object(InvokeContext, "__exit__") @patch("samcli.commands.local.cli_common.invoke_context.osutils.stderr") @patch("samcli.commands.local.cli_common.invoke_context.StreamWriter") @patch("samcli.commands.local.cli_common.invoke_context.SamFunctionProvider") - def test_must_enable_auto_flush_if_debug(self, SamFunctionProviderMock, StreamWriterMock, - osutils_stderr_mock, ExitMock): + def test_must_enable_auto_flush_if_debug( + self, SamFunctionProviderMock, StreamWriterMock, osutils_stderr_mock, ExitMock + ): context = 
InvokeContext(template_file="template", debug_port=6000) @@ -397,9 +410,9 @@ def test_must_enable_auto_flush_if_debug(self, SamFunctionProviderMock, StreamWr @patch("samcli.commands.local.cli_common.invoke_context.osutils.stderr") @patch("samcli.commands.local.cli_common.invoke_context.StreamWriter") @patch("samcli.commands.local.cli_common.invoke_context.SamFunctionProvider") - def test_must_not_enable_auto_flush_if_not_debug(self, - SamFunctionProviderMock, StreamWriterMock, - osutils_stderr_mock, ExitMock): + def test_must_not_enable_auto_flush_if_not_debug( + self, SamFunctionProviderMock, StreamWriterMock, osutils_stderr_mock, ExitMock + ): context = InvokeContext(template_file="template") @@ -420,9 +433,9 @@ def test_must_not_enable_auto_flush_if_not_debug(self, @patch("samcli.commands.local.cli_common.invoke_context.osutils.stderr") @patch("samcli.commands.local.cli_common.invoke_context.StreamWriter") @patch("samcli.commands.local.cli_common.invoke_context.SamFunctionProvider") - def test_must_use_stderr_if_no_log_file_handle(self, - SamFunctionProviderMock, StreamWriterMock, - osutils_stderr_mock, ExitMock): + def test_must_use_stderr_if_no_log_file_handle( + self, SamFunctionProviderMock, StreamWriterMock, osutils_stderr_mock, ExitMock + ): stream_writer_mock = Mock() StreamWriterMock.return_value = stream_writer_mock @@ -474,16 +487,14 @@ def test_must_use_log_file_handle(self, StreamWriterMock, SamFunctionProviderMoc class TestInvokeContext_template_property(TestCase): - def test_must_return_tempalte_dict(self): context = InvokeContext(template_file="file") context._template_dict = "My template" - self.assertEquals("My template", context.template) + self.assertEqual("My template", context.template) class TestInvokeContextget_cwd(TestCase): - def test_must_return_template_file_dir_name(self): filename = "filename" context = InvokeContext(template_file=filename) @@ -491,18 +502,17 @@ def test_must_return_template_file_dir_name(self): expected = os.path.dirname(os.path.abspath(filename)) result = context.get_cwd() - self.assertEquals(result, expected) + self.assertEqual(result, expected) def test_must_return_docker_volume_dir(self): filename = "filename" context = InvokeContext(template_file=filename, docker_volume_basedir="basedir") result = context.get_cwd() - self.assertEquals(result, "basedir") + self.assertEqual(result, "basedir") class TestInvokeContext_get_env_vars_value(TestCase): - def test_must_return_if_no_file(self): result = InvokeContext._get_env_vars_value(filename=None) self.assertIsNone(result, "No value must be returned") @@ -517,13 +527,13 @@ def test_must_read_file_and_parse_as_json(self): with patch("samcli.commands.local.cli_common.invoke_context.open", m): result = InvokeContext._get_env_vars_value(filename) - self.assertEquals(expected, result) + self.assertEqual(expected, result) - m.assert_called_with(filename, 'r') + m.assert_called_with(filename, "r") def test_must_raise_if_failed_to_parse_json(self): filename = "filename" - file_data = 'invalid json' + file_data = "invalid json" m = mock_open(read_data=file_data) @@ -533,12 +543,12 @@ def test_must_raise_if_failed_to_parse_json(self): InvokeContext._get_env_vars_value(filename) msg = str(ex_ctx.exception) - self.assertTrue(msg.startswith("Could not read environment variables overrides from file {}".format( - filename))) + self.assertTrue( + msg.startswith("Could not read environment variables overrides from file {}".format(filename)) + ) class TestInvokeContext_setup_log_file(TestCase): - def 
test_must_return_if_file_not_given(self): result = InvokeContext._setup_log_file(log_file=None) self.assertIsNone(result, "Log file must not be setup") @@ -550,11 +560,10 @@ def test_must_open_file_for_writing(self): with patch("samcli.commands.local.cli_common.invoke_context.open", m): InvokeContext._setup_log_file(filename) - m.assert_called_with(filename, 'wb') + m.assert_called_with(filename, "wb") class TestInvokeContext_get_debug_context(TestCase): - @patch("samcli.commands.local.cli_common.invoke_context.Path") def test_debugger_path_not_found(self, pathlib_mock): error = OSError() @@ -562,7 +571,7 @@ def test_debugger_path_not_found(self, pathlib_mock): pathlib_mock.side_effect = error with self.assertRaises(DebugContextException): - InvokeContext._get_debug_context(debug_port=1111, debug_args=None, debugger_path='somepath') + InvokeContext._get_debug_context(debug_port=1111, debug_args=None, debugger_path="somepath") @patch("samcli.commands.local.cli_common.invoke_context.Path") def test_debugger_path_not_dir(self, pathlib_mock): @@ -573,21 +582,21 @@ def test_debugger_path_not_dir(self, pathlib_mock): pathlib_mock.return_value = pathlib_path_mock with self.assertRaises(DebugContextException): - InvokeContext._get_debug_context(debug_port=1111, debug_args=None, debugger_path='somepath') + InvokeContext._get_debug_context(debug_port=1111, debug_args=None, debugger_path="somepath") def test_no_debug_port(self): debug_context = InvokeContext._get_debug_context(None, None, None) - self.assertEquals(debug_context.debugger_path, None) - self.assertEquals(debug_context.debug_port, None) - self.assertEquals(debug_context.debug_args, None) + self.assertEqual(debug_context.debugger_path, None) + self.assertEqual(debug_context.debug_port, None) + self.assertEqual(debug_context.debug_args, None) @patch("samcli.commands.local.cli_common.invoke_context.Path") def test_non_path_not_found_oserror_is_thrown(self, pathlib_mock): pathlib_mock.side_effect = OSError() with self.assertRaises(OSError): - InvokeContext._get_debug_context(debug_port=1111, debug_args=None, debugger_path='somepath') + InvokeContext._get_debug_context(debug_port=1111, debug_args=None, debugger_path="somepath") @patch("samcli.commands.local.cli_common.invoke_context.DebugContext") def test_debug_port_given_without_debugger_path(self, debug_context_mock): @@ -595,7 +604,7 @@ def test_debug_port_given_without_debugger_path(self, debug_context_mock): debug_context = InvokeContext._get_debug_context(1111, None, None) - self.assertEquals(debug_context, "I am the DebugContext") + self.assertEqual(debug_context, "I am the DebugContext") debug_context_mock.assert_called_once_with(debug_port=1111, debug_args=None, debugger_path=None) @@ -614,7 +623,7 @@ def test_debugger_path_resolves(self, pathlib_mock, debug_context_mock): debug_context = InvokeContext._get_debug_context(1111, "args", "./path") - self.assertEquals(debug_context, "I am the DebugContext") + self.assertEqual(debug_context, "I am the DebugContext") debug_context_mock.assert_called_once_with(debug_port=1111, debug_args="args", debugger_path="full/path") resolve_path_mock.is_dir.assert_called_once() diff --git a/tests/unit/commands/local/generate_event/test_event_generation.py b/tests/unit/commands/local/generate_event/test_event_generation.py index a017e1ba12..1b5a5ea730 100644 --- a/tests/unit/commands/local/generate_event/test_event_generation.py +++ b/tests/unit/commands/local/generate_event/test_event_generation.py @@ -10,39 +10,37 @@ class TestEvents(TestCase): - 
def setUp(self): self.values_to_sub = {"hello": "world"} def test_base64_encoding(self): tags = {"hello": {"encoding": "base64"}} - e = events.Events().encode(tags, 'encoding', self.values_to_sub) + e = events.Events().encode(tags, "encoding", self.values_to_sub) self.assertEqual(e, {"hello": "d29ybGQ="}) def test_url_encoding(self): tags = {"hello": {"encoding": "url"}} - e = events.Events().encode(tags, 'encoding', self.values_to_sub) + e = events.Events().encode(tags, "encoding", self.values_to_sub) self.assertEqual(e, {"hello": "world"}) def test_if_encoding_is_none(self): tags = {"hello": {"encoding": "None"}} - e = events.Events().encode(tags, 'encoding', self.values_to_sub) + e = events.Events().encode(tags, "encoding", self.values_to_sub) self.assertEqual(e, {"hello": "world"}) def test_if_tags_is_empty(self): tags = {} - e = events.Events().encode(tags, 'encoding', {}) + e = events.Events().encode(tags, "encoding", {}) self.assertEqual(e, {}) def test_if_tags_is_two_or_more(self): tags = {"hello": {"encoding": "base64"}, "hi": {"encoding": "url"}, "bop": {"encoding": "None"}} values_to_sub = {"bop": "dop", "hello": "world", "hi": "yo"} - e = events.Events().encode(tags, 'encoding', values_to_sub) + e = events.Events().encode(tags, "encoding", values_to_sub) self.assertEqual(e, {"bop": "dop", "hello": "d29ybGQ=", "hi": "yo"}) class TestServiceCommand(TestCase): - def setUp(self): self.service_cmd_name = "myservice" self.event_type_name = "myevent" @@ -69,7 +67,7 @@ def test_get_command_returns_none_when_not_in_all_cmds(self): def test_list_commands_must_return_commands_name(self): expected = self.s.list_commands(ctx=None) - self.assertEqual(expected, ['hello', 'hi']) + self.assertEqual(expected, ["hello", "hi"]) def test_get_command_return_value(self): command_name = "hello" @@ -80,7 +78,6 @@ def test_get_command_return_value(self): class TestEventTypeSubCommand(TestCase): - def setUp(self): self.service_cmd_name = "myservice" self.event_type_name = "myevent" @@ -127,40 +124,31 @@ def test_subcommand_get_command_return_value(self, click_mock, functools_mock, o functools_mock.partial.return_value = callback_object_mock s = EventTypeSubCommand(self.events_lib_mock, "hello", all_commands) s.get_command(None, "hi") - click_mock.Command.assert_called_once_with(name="hi", - short_help="Generates a hello Event", - params=[], - callback=callback_object_mock) + click_mock.Command.assert_called_once_with( + name="hi", short_help="Generates a hello Event", params=[], callback=callback_object_mock + ) def test_subcommand_list_return_value(self): subcmd_def = {"hello": "world", "hi": "you"} self.events_lib_mock.expose_event_metadata.return_value = subcmd_def s = EventTypeSubCommand(self.events_lib_mock, "hello", subcmd_def) - expected = ['hello', 'hi'] - self.assertEquals(s.list_commands(ctx=None), expected) + expected = ["hello", "hi"] + self.assertEqual(s.list_commands(ctx=None), expected) def test_must_print_sample_event_json(self): event_json = '{"hello": "world"}' self.events_lib_mock.generate_event.return_value = event_json s = EventTypeSubCommand(self.events_lib_mock, "hello", event_json) - event = s.cmd_implementation(self.events_lib_mock, - self.service_cmd_name, - self.event_type_name, - {}) - self.events_lib_mock.generate_event.assert_called_with(self.service_cmd_name, - self.event_type_name, - {}) + event = s.cmd_implementation(self.events_lib_mock, self.service_cmd_name, self.event_type_name, {}) + self.events_lib_mock.generate_event.assert_called_with(self.service_cmd_name, 
self.event_type_name, {}) self.assertEqual(event, event_json) def test_must_accept_keyword_args(self): event_json = '{"hello": "world"}' self.events_lib_mock.generate_event.return_value = event_json s = EventTypeSubCommand(self.events_lib_mock, "hello", event_json) - event = s.cmd_implementation(self.events_lib_mock, - self.service_cmd_name, - self.event_type_name, - key="value") - self.events_lib_mock.generate_event.assert_called_with(self.service_cmd_name, - self.event_type_name, - {"key": "value"}) + event = s.cmd_implementation(self.events_lib_mock, self.service_cmd_name, self.event_type_name, key="value") + self.events_lib_mock.generate_event.assert_called_with( + self.service_cmd_name, self.event_type_name, {"key": "value"} + ) self.assertEqual(event, event_json) diff --git a/tests/unit/commands/local/invoke/test_cli.py b/tests/unit/commands/local/invoke/test_cli.py index e762a434cc..99fb85dba4 100644 --- a/tests/unit/commands/local/invoke/test_cli.py +++ b/tests/unit/commands/local/invoke/test_cli.py @@ -20,7 +20,6 @@ class TestCli(TestCase): - def setUp(self): self.function_id = "id" self.template = "template" @@ -54,43 +53,46 @@ def test_cli_must_setup_context_and_invoke(self, get_event_mock, InvokeContextMo context_mock = Mock() InvokeContextMock.return_value.__enter__.return_value = context_mock - invoke_cli(ctx=ctx_mock, - function_identifier=self.function_id, - template=self.template, - event=self.eventfile, - no_event=self.no_event, - env_vars=self.env_vars, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build) - - InvokeContextMock.assert_called_with(template_file=self.template, - function_identifier=self.function_id, - env_vars_file=self.env_vars, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build, - aws_region=self.region_name, - aws_profile=self.profile) - - context_mock.local_lambda_runner.invoke.assert_called_with(context_mock.function_name, - event=event_data, - stdout=context_mock.stdout, - stderr=context_mock.stderr) + invoke_cli( + ctx=ctx_mock, + function_identifier=self.function_id, + template=self.template, + event=self.eventfile, + no_event=self.no_event, + env_vars=self.env_vars, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + ) + + InvokeContextMock.assert_called_with( + template_file=self.template, + function_identifier=self.function_id, + env_vars_file=self.env_vars, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + debug_port=self.debug_port, + 
debug_args=self.debug_args, + debugger_path=self.debugger_path, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + aws_region=self.region_name, + aws_profile=self.profile, + ) + + context_mock.local_lambda_runner.invoke.assert_called_with( + context_mock.function_name, event=event_data, stdout=context_mock.stdout, stderr=context_mock.stderr + ) get_event_mock.assert_called_with(self.eventfile) @patch("samcli.commands.local.invoke.cli.InvokeContext") @@ -105,43 +107,46 @@ def test_cli_must_invoke_with_no_event(self, get_event_mock, InvokeContextMock): # Mock the __enter__ method to return a object inside a context manager context_mock = Mock() InvokeContextMock.return_value.__enter__.return_value = context_mock - invoke_cli(ctx=ctx_mock, - function_identifier=self.function_id, - template=self.template, - event=STDIN_FILE_NAME, - no_event=self.no_event, - env_vars=self.env_vars, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build) - - InvokeContextMock.assert_called_with(template_file=self.template, - function_identifier=self.function_id, - env_vars_file=self.env_vars, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build, - aws_region=self.region_name, - aws_profile=self.profile) - - context_mock.local_lambda_runner.invoke.assert_called_with(context_mock.function_name, - event="{}", - stdout=context_mock.stdout, - stderr=context_mock.stderr) + invoke_cli( + ctx=ctx_mock, + function_identifier=self.function_id, + template=self.template, + event=STDIN_FILE_NAME, + no_event=self.no_event, + env_vars=self.env_vars, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + ) + + InvokeContextMock.assert_called_with( + template_file=self.template, + function_identifier=self.function_id, + env_vars_file=self.env_vars, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + aws_region=self.region_name, + aws_profile=self.profile, + ) + + context_mock.local_lambda_runner.invoke.assert_called_with( + context_mock.function_name, event="{}", stdout=context_mock.stdout, stderr=context_mock.stderr + ) get_event_mock.assert_not_called() @patch("samcli.commands.local.invoke.cli.InvokeContext") 
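
The following hunks rewrap the parameterized.expand cases for the exception tests into Black's style, with the case list pulled inside the call. For reference, a minimal standalone example of that pattern (the exceptions and messages here are made up, not from SAM CLI):

from unittest import TestCase

from parameterized import parameterized, param


class ErrorMessageSketch(TestCase):
    # Each param(...) entry generates one test case with positional arguments.
    @parameterized.expand(
        [
            param(ZeroDivisionError("division by zero"), "division by zero"),
            param(ValueError("bad input"), "bad input"),
        ]
    )
    def test_message_is_preserved(self, raised_exception, expected_message):
        with self.assertRaises(type(raised_exception)) as ctx:
            raise raised_exception
        self.assertEqual(str(ctx.exception), expected_message)
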
@@ -155,37 +160,39 @@ def test_must_raise_user_exception_on_no_event_and_event(self, get_event_mock, I with self.assertRaises(UserException) as ex_ctx: - invoke_cli(ctx=ctx_mock, - function_identifier=self.function_id, - template=self.template, - event=self.eventfile, - no_event=self.no_event, - env_vars=self.env_vars, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build) + invoke_cli( + ctx=ctx_mock, + function_identifier=self.function_id, + template=self.template, + event=self.eventfile, + no_event=self.no_event, + env_vars=self.env_vars, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + ) msg = str(ex_ctx.exception) - self.assertEquals(msg, "no_event and event cannot be used together. Please provide only one.") - - @parameterized.expand([ - param(FunctionNotFound("not found"), "Function id not found in template"), - param(DockerImagePullFailedException("Failed to pull image"), "Failed to pull image") - ]) + self.assertEqual(msg, "no_event and event cannot be used together. Please provide only one.") + + @parameterized.expand( + [ + param(FunctionNotFound("not found"), "Function id not found in template"), + param(DockerImagePullFailedException("Failed to pull image"), "Failed to pull image"), + ] + ) @patch("samcli.commands.local.invoke.cli.InvokeContext") @patch("samcli.commands.local.invoke.cli._get_event") - def test_must_raise_user_exception_on_function_not_found(self, - side_effect_exception, - expected_exectpion_message, - get_event_mock, - InvokeContextMock): + def test_must_raise_user_exception_on_function_not_found( + self, side_effect_exception, expected_exectpion_message, get_event_mock, InvokeContextMock + ): event_data = "data" get_event_mock.return_value = event_data @@ -201,38 +208,43 @@ def test_must_raise_user_exception_on_function_not_found(self, with self.assertRaises(UserException) as ex_ctx: - invoke_cli(ctx=ctx_mock, - function_identifier=self.function_id, - template=self.template, - event=self.eventfile, - no_event=self.no_event, - env_vars=self.env_vars, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build) + invoke_cli( + ctx=ctx_mock, + function_identifier=self.function_id, + template=self.template, + event=self.eventfile, + no_event=self.no_event, + env_vars=self.env_vars, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + 
parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + ) msg = str(ex_ctx.exception) - self.assertEquals(msg, expected_exectpion_message) - - @parameterized.expand([(InvalidSamDocumentException("bad template"), "bad template"), - (InvalidLayerReference(), "Layer References need to be of type " - "'AWS::Serverless::LayerVersion' or 'AWS::Lambda::LayerVersion'"), - (DebuggingNotSupported("Debugging not supported"), "Debugging not supported") - ]) + self.assertEqual(msg, expected_exectpion_message) + + @parameterized.expand( + [ + (InvalidSamDocumentException("bad template"), "bad template"), + ( + InvalidLayerReference(), + "Layer References need to be of type " "'AWS::Serverless::LayerVersion' or 'AWS::Lambda::LayerVersion'", + ), + (DebuggingNotSupported("Debugging not supported"), "Debugging not supported"), + ] + ) @patch("samcli.commands.local.invoke.cli.InvokeContext") @patch("samcli.commands.local.invoke.cli._get_event") - def test_must_raise_user_exception_on_invalid_sam_template(self, - exeception_to_raise, - execption_message, - get_event_mock, - InvokeContextMock): + def test_must_raise_user_exception_on_invalid_sam_template( + self, exeception_to_raise, execption_message, get_event_mock, InvokeContextMock + ): event_data = "data" get_event_mock.return_value = event_data @@ -244,25 +256,27 @@ def test_must_raise_user_exception_on_invalid_sam_template(self, with self.assertRaises(UserException) as ex_ctx: - invoke_cli(ctx=ctx_mock, - function_identifier=self.function_id, - template=self.template, - event=self.eventfile, - no_event=self.no_event, - env_vars=self.env_vars, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build) + invoke_cli( + ctx=ctx_mock, + function_identifier=self.function_id, + template=self.template, + event=self.eventfile, + no_event=self.no_event, + env_vars=self.env_vars, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + ) msg = str(ex_ctx.exception) - self.assertEquals(msg, execption_message) + self.assertEqual(msg, execption_message) @patch("samcli.commands.local.invoke.cli.InvokeContext") @patch("samcli.commands.local.invoke.cli._get_event") @@ -278,33 +292,31 @@ def test_must_raise_user_exception_on_invalid_env_vars(self, get_event_mock, Inv with self.assertRaises(UserException) as ex_ctx: - invoke_cli(ctx=ctx_mock, - function_identifier=self.function_id, - template=self.template, - event=self.eventfile, - no_event=self.no_event, - env_vars=self.env_vars, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - 
force_image_build=self.force_image_build) + invoke_cli( + ctx=ctx_mock, + function_identifier=self.function_id, + template=self.template, + event=self.eventfile, + no_event=self.no_event, + env_vars=self.env_vars, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + ) msg = str(ex_ctx.exception) - self.assertEquals(msg, "bad env vars") + self.assertEqual(msg, "bad env vars") class TestGetEvent(TestCase): - - @parameterized.expand([ - param(STDIN_FILE_NAME), - param("somefile") - ]) + @parameterized.expand([param(STDIN_FILE_NAME), param("somefile")]) @patch("samcli.commands.local.invoke.cli.click") def test_must_work_with_stdin(self, filename, click_mock): event_data = "some data" @@ -318,5 +330,5 @@ def test_must_work_with_stdin(self, filename, click_mock): result = invoke_cli_get_event(filename) - self.assertEquals(result, event_data) + self.assertEqual(result, event_data) fp_mock.read.assert_called_with() diff --git a/tests/unit/commands/local/lib/swagger/test_integration_uri.py b/tests/unit/commands/local/lib/swagger/test_integration_uri.py index 4eb9dc1813..7e045864f5 100644 --- a/tests/unit/commands/local/lib/swagger/test_integration_uri.py +++ b/tests/unit/commands/local/lib/swagger/test_integration_uri.py @@ -19,58 +19,50 @@ class TestLambdaUri(TestCase): SUCCESS_CASES = [ ( "URI is a string with fully resolved ARN", - "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1:123456789012:function:MyCoolFunction/invocations" # NOQA + "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1:123456789012:function:MyCoolFunction/invocations", # NOQA ), - ( "URI is a full ARN with any region and any account id", - "arn:aws:apigateway:<>:<>:path/2015-03-31/functions/arn:aws:lambda:region:accountid:function:MyCoolFunction/invocations" # NOQA + "arn:aws:apigateway:<>:<>:path/2015-03-31/functions/arn:aws:lambda:region:accountid:function:MyCoolFunction/invocations", # NOQA ), - ( - "URI is a Fn::Sub with a Lambda ARN as a variable", { "Fn::Sub": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${MyCoolFunction.Arn}/invocations" # NOQA - } + }, ), - ( "URI is a Fn::Sub with a Lambda Alias as a variable", { "Fn::Sub": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${MyCoolFunction.Alias}/invocations" # NOQA - } + }, ), - ( "URI is a Fn::Sub with a Lambda ARN as a variable in addition to others provided as string", { "Fn::Sub": "arn:${AWS::Partition}:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${MyCoolFunction.Arn}/invocations" # NOQA - } + }, ), - ( "URI is a Fn::Sub with a Lambda ARN as a variable in addition to others provided as array", { "Fn::Sub": [ "arn:aws:apigateway:${region}:lambda:path/2015-03-31/functions/${MyCoolFunction.Arn}/invocations", - {"region": {"Ref": "AWS::Region"}} + {"region": {"Ref": "AWS::Region"}}, ] - } + }, ), - ( "URI is a Fn::Sub resolvable intrinsic as an array", { "Fn::Sub": [ - "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${MyCoolFunction.Arn}/invocations", + "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${MyCoolFunction.Arn}/invocations" ] - } + }, ), - ( "URI is a string with just 
enough information to pass regex tests", - "foo/functions/bar:function:MyCoolFunction/invocations" # NOQA + "foo/functions/bar:function:MyCoolFunction/invocations", # NOQA ), ] @@ -78,57 +70,32 @@ class TestLambdaUri(TestCase): def test_get_function_name_success(self, test_case_name, uri): result = LambdaUri.get_function_name(uri) - self.assertEquals(result, self.FUNCTION_NAME) + self.assertEqual(result, self.FUNCTION_NAME) FAILURE_CASES = [ - ( "URI is a string with stage variables", - "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1:123456789012:function:${stageVariables.FunctionName}/invocations" # NOQA + "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1:123456789012:function:${stageVariables.FunctionName}/invocations", # NOQA ), - ( "URI is an ARN string of non-Lambda resource", - "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:something:us-east-1:123456789012:event:MyCoolFunction/invocations" # NOQA + "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:something:us-east-1:123456789012:event:MyCoolFunction/invocations", # NOQA ), - - ( - "URI is a random string", - "hello world" - ), - + ("URI is a random string", "hello world"), ( "URI is an integration ARN without proper Lambda function name", - "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1:123456789012:function:${ThisIntrinsicDidntGetSubstituted}/invocations" # NOQA - ), - - ( - "URI is a list", - [1, 2, 3] + "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1:123456789012:function:${ThisIntrinsicDidntGetSubstituted}/invocations", # NOQA ), - + ("URI is a list", [1, 2, 3]), ( "URI is a dictionary with more than one keys", { "Fn::Sub": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${MyCoolFunction.Arn}/invocations", # NOQA - "SomeKey": "value" - } - ), - - ( - "URI is a Ref", - { - "Ref": "MyCoolFunction" - } - ), - - ( - "URI is a GetAtt", - { - "Fn::GetAtt": "MyCoolFunction.Arn" - } + "SomeKey": "value", + }, ), - + ("URI is a Ref", {"Ref": "MyCoolFunction"}), + ("URI is a GetAtt", {"Fn::GetAtt": "MyCoolFunction.Arn"}), ( "URI is a Fn::Sub with array values that would resolve in CloudFormation", { @@ -136,43 +103,25 @@ def test_get_function_name_success(self, test_case_name, uri): # In CloudFormation this intrinsic function will resolve to correct function name. # But unfortunately we don't support this here "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${MyArn}/invocations", - { - "MyArn": {"Fn::GetAtt": "MyCoolFunction.Arn"} - } - ], - } + {"MyArn": {"Fn::GetAtt": "MyCoolFunction.Arn"}}, + ] + }, ), - ( "URI is a Fn::Sub with intrinsic that does not return an Arn or Alias", { - "Fn::Sub": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${MyCoolFunction}/invocations", # NOQA - } + "Fn::Sub": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${MyCoolFunction}/invocations" # NOQA + }, ), - ( "URI is a Fn::Sub with ignored variable created with ${! 
syntax", { - "Fn::Sub": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${!MyCoolFunction.Arn}/invocations", # NOQA - } - ), - - ( - "URI is a Fn::Sub is invalid in structure", - { - "Fn::Sub": {"foo": "bar"}, - } - ), - - ( - "URI is empty string", - "" - ), - - ( - "URI without enough information to pass regex test", - "bar:function:MyCoolFunction/invocations" + "Fn::Sub": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/${!MyCoolFunction.Arn}/invocations" # NOQA + }, ), + ("URI is a Fn::Sub is invalid in structure", {"Fn::Sub": {"foo": "bar"}}), + ("URI is empty string", ""), + ("URI without enough information to pass regex test", "bar:function:MyCoolFunction/invocations"), ] @parameterized.expand(FAILURE_CASES) diff --git a/tests/unit/commands/local/lib/swagger/test_parser.py b/tests/unit/commands/local/lib/swagger/test_parser.py index 827f49be1c..f5cf36bce9 100644 --- a/tests/unit/commands/local/lib/swagger/test_parser.py +++ b/tests/unit/commands/local/lib/swagger/test_parser.py @@ -11,20 +11,10 @@ class TestSwaggerParser_get_apis(TestCase): - def test_with_one_path_method(self): function_name = "myfunction" swagger = { - "paths": { - "/path1": { - "get": { - "x-amazon-apigateway-integration": { - "type": "aws_proxy", - "uri": "someuri" - } - } - } - } + "paths": {"/path1": {"get": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}}} } parser = SwaggerParser(swagger) @@ -34,41 +24,20 @@ def test_with_one_path_method(self): expected = [Route(path="/path1", methods=["get"], function_name=function_name)] result = parser.get_routes() - self.assertEquals(expected, result) - parser._get_integration_function_name.assert_called_with({ - "x-amazon-apigateway-integration": { - "type": "aws_proxy", - "uri": "someuri" - } - }) + self.assertEqual(expected, result) + parser._get_integration_function_name.assert_called_with( + {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}} + ) def test_with_combination_of_paths_methods(self): function_name = "myfunction" swagger = { "paths": { "/path1": { - "get": { - "x-amazon-apigateway-integration": { - "type": "aws_proxy", - "uri": "someuri" - } - }, - "delete": { - "x-amazon-apigateway-integration": { - "type": "aws_proxy", - "uri": "someuri" - } - } + "get": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}, + "delete": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}, }, - - "/path2": { - "post": { - "x-amazon-apigateway-integration": { - "type": "aws_proxy", - "uri": "someuri" - } - } - } + "/path2": {"post": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}}, } } @@ -83,7 +52,7 @@ def test_with_combination_of_paths_methods(self): } result = parser.get_routes() - self.assertEquals(expected, set(result)) + self.assertEqual(expected, set(result)) def test_with_any_method(self): function_name = "myfunction" @@ -91,10 +60,7 @@ def test_with_any_method(self): "paths": { "/path1": { "x-amazon-apigateway-any-method": { - "x-amazon-apigateway-integration": { - "type": "aws_proxy", - "uri": "someuri" - } + "x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"} } } } @@ -104,24 +70,14 @@ def test_with_any_method(self): parser._get_integration_function_name = Mock() parser._get_integration_function_name.return_value = function_name - expected = [Route(methods=["ANY"], path="/path1", - function_name=function_name)] + expected = [Route(methods=["ANY"], path="/path1", 
function_name=function_name)] result = parser.get_routes() - self.assertEquals(expected, result) + self.assertEqual(expected, result) def test_does_not_have_function_name(self): swagger = { - "paths": { - "/path1": { - "post": { - "x-amazon-apigateway-integration": { - "type": "aws_proxy", - "uri": "someuri" - } - } - } - } + "paths": {"/path1": {"post": {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}}}} } parser = SwaggerParser(swagger) @@ -131,62 +87,50 @@ def test_does_not_have_function_name(self): expected = [] result = parser.get_routes() - self.assertEquals(expected, result) - - @parameterized.expand([ - param("empty swagger", {}), - param("'paths' property is absent", {"foo": "bar"}), - param("no paths", {"paths": {}}), - param("no methods", {"paths": { - "/path1": {} - }}), - param("no integration", {"paths": { - "/path1": { - "get": {} - } - }}) - ]) + self.assertEqual(expected, result) + + @parameterized.expand( + [ + param("empty swagger", {}), + param("'paths' property is absent", {"foo": "bar"}), + param("no paths", {"paths": {}}), + param("no methods", {"paths": {"/path1": {}}}), + param("no integration", {"paths": {"/path1": {"get": {}}}}), + ] + ) def test_invalid_swagger(self, test_case_name, swagger): parser = SwaggerParser(swagger) result = parser.get_routes() expected = [] - self.assertEquals(expected, result) + self.assertEqual(expected, result) class TestSwaggerParser_get_integration_function_name(TestCase): - - @patch('samcli.commands.local.lib.swagger.parser.LambdaUri') + @patch("samcli.commands.local.lib.swagger.parser.LambdaUri") def test_valid_integration(self, LambdaUriMock): function_name = "name" LambdaUriMock.get_function_name.return_value = function_name - method_config = { - "x-amazon-apigateway-integration": { - "type": "aws_proxy", - "uri": "someuri" - } - } + method_config = {"x-amazon-apigateway-integration": {"type": "aws_proxy", "uri": "someuri"}} parser = SwaggerParser({}) result = parser._get_integration_function_name(method_config) - self.assertEquals(function_name, result) + self.assertEqual(function_name, result) LambdaUriMock.get_function_name.assert_called_with("someuri") - @parameterized.expand([ - param("config is not dict", "myconfig"), - param("integration key is not in config", {"key": "value"}), - param("integration value is empty", {"x-amazon-apigateway-integration": {}}), - param("integration value is not dict", {"x-amazon-apigateway-integration": "someval"}), - param("integration type is not aws_proxy", {"x-amazon-apigateway-integration": { - "type": "mock", - }}), - param("integration uri is not present", {"x-amazon-apigateway-integration": { - "type": "aws_proxy" - }}) - ]) - @patch('samcli.commands.local.lib.swagger.parser.LambdaUri') + @parameterized.expand( + [ + param("config is not dict", "myconfig"), + param("integration key is not in config", {"key": "value"}), + param("integration value is empty", {"x-amazon-apigateway-integration": {}}), + param("integration value is not dict", {"x-amazon-apigateway-integration": "someval"}), + param("integration type is not aws_proxy", {"x-amazon-apigateway-integration": {"type": "mock"}}), + param("integration uri is not present", {"x-amazon-apigateway-integration": {"type": "aws_proxy"}}), + ] + ) + @patch("samcli.commands.local.lib.swagger.parser.LambdaUri") def test_invalid_integration(self, test_case_name, method_config, LambdaUriMock): LambdaUriMock.get_function_name.return_value = None @@ -197,15 +141,18 @@ def test_invalid_integration(self, 
test_case_name, method_config, LambdaUriMock) class TestSwaggerParser_get_binary_media_types(TestCase): - - @parameterized.expand([ - param("Swagger was none", None, []), - param("Swagger is has no binary media types defined", {}, []), - param("Swagger define binary media types", - {"x-amazon-apigateway-binary-media-types": ["image/gif", "application/json"]}, - ["image/gif", "application/json"]) - ]) + @parameterized.expand( + [ + param("Swagger was none", None, []), + param("Swagger is has no binary media types defined", {}, []), + param( + "Swagger define binary media types", + {"x-amazon-apigateway-binary-media-types": ["image/gif", "application/json"]}, + ["image/gif", "application/json"], + ), + ] + ) def test_binary_media_type_returned(self, test_case_name, swagger, expected_result): parser = SwaggerParser(swagger) - self.assertEquals(parser.get_binary_media_types(), expected_result) + self.assertEqual(parser.get_binary_media_types(), expected_result) diff --git a/tests/unit/commands/local/lib/swagger/test_reader.py b/tests/unit/commands/local/lib/swagger/test_reader.py index f329e464e4..9e71ce7bdc 100644 --- a/tests/unit/commands/local/lib/swagger/test_reader.py +++ b/tests/unit/commands/local/lib/swagger/test_reader.py @@ -1,4 +1,3 @@ - import tempfile import json import os @@ -12,48 +11,36 @@ class TestParseAwsIncludeTransform(TestCase): - def test_must_return_location(self): - data = { - "Fn::Transform": { - "Name": "AWS::Include", - "Parameters": { - "Location": "s3://bucket/swagger.yaml" - } - } - } + data = {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "s3://bucket/swagger.yaml"}}} expected = "s3://bucket/swagger.yaml" result = parse_aws_include_transform(data) - self.assertEquals(result, expected) - - @parameterized.expand([ - param({}), - param(None), - param({ - "Name": "AWS::Include", - "Parameters": { - "Location": "s3://bucket/swagger.yaml" - } - }), - param({ - "Fn::Transform": { - "Name": "AWS::SomeTransformName", - "Parameters": { - "Location": "s3://bucket/swagger.yaml" + self.assertEqual(result, expected) + + @parameterized.expand( + [ + param({}), + param(None), + param({"Name": "AWS::Include", "Parameters": {"Location": "s3://bucket/swagger.yaml"}}), + param( + { + "Fn::Transform": { + "Name": "AWS::SomeTransformName", + "Parameters": {"Location": "s3://bucket/swagger.yaml"}, + } } - } - }) - ]) + ), + ] + ) def test_invalid_aws_include_data(self, data): result = parse_aws_include_transform(data) self.assertIsNone(result) class TestSamSwaggerReader_init(TestCase): - def test_definition_body_and_uri_required(self): with self.assertRaises(ValueError): @@ -61,7 +48,6 @@ def test_definition_body_and_uri_required(self): class TestSamSwaggerReader_read(TestCase): - def test_must_read_first_from_definition_body(self): body = {"this is": "swagger"} uri = "./file.txt" @@ -73,7 +59,7 @@ def test_must_read_first_from_definition_body(self): reader._read_from_definition_body.return_value = expected actual = reader.read() - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) reader._read_from_definition_body.assert_called_with() reader._download_swagger.assert_not_called() @@ -87,7 +73,7 @@ def test_read_from_definition_uri(self): reader._download_swagger.return_value = expected actual = reader.read() - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) reader._download_swagger.assert_called_with(uri) @@ -105,18 +91,17 @@ def test_must_use_definition_uri_if_body_does_not_exist(self): 
reader._read_from_definition_body.return_value = None actual = reader.read() - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) reader._read_from_definition_body.assert_called_with() reader._download_swagger.assert_called_with(uri) class TestSamSwaggerReader_read_from_definition_body(TestCase): - @patch("samcli.commands.local.lib.swagger.reader.parse_aws_include_transform") def test_must_work_with_include_transform(self, parse_mock): - body = {'this': 'swagger'} - expected = {'k': 'v'} + body = {"this": "swagger"} + expected = {"k": "v"} location = "some location" reader = SwaggerReader(definition_body=body) @@ -125,29 +110,24 @@ def test_must_work_with_include_transform(self, parse_mock): parse_mock.return_value = location actual = reader._read_from_definition_body() - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) parse_mock.assert_called_with(body) @patch("samcli.commands.local.lib.swagger.reader.parse_aws_include_transform") def test_must_get_body_directly(self, parse_mock): - body = {'this': 'swagger'} + body = {"this": "swagger"} reader = SwaggerReader(definition_body=body) parse_mock.return_value = None # No location is returned from aws_include parser actual = reader._read_from_definition_body() - self.assertEquals(actual, body) + self.assertEqual(actual, body) class TestSamSwaggerReader_download_swagger(TestCase): - @patch("samcli.commands.local.lib.swagger.reader.yaml_parse") def test_must_download_from_s3_for_s3_locations(self, yaml_parse_mock): - location = { - "Bucket": "mybucket", - "Key": "swagger.yaml", - "Version": "versionId" - } + location = {"Bucket": "mybucket", "Key": "swagger.yaml", "Version": "versionId"} swagger_str = "some swagger str" expected = "some data" @@ -158,10 +138,8 @@ def test_must_download_from_s3_for_s3_locations(self, yaml_parse_mock): actual = reader._download_swagger(location) - self.assertEquals(actual, expected) - reader._download_from_s3.assert_called_with(location["Bucket"], - location["Key"], - location["Version"]) + self.assertEqual(actual, expected) + reader._download_from_s3.assert_called_with(location["Bucket"], location["Key"], location["Version"]) yaml_parse_mock.assert_called_with(swagger_str) @patch("samcli.commands.local.lib.swagger.reader.yaml_parse") @@ -184,7 +162,7 @@ def test_must_read_from_local_file(self, yaml_parse_mock): expected = "parsed result" yaml_parse_mock.return_value = expected - with tempfile.NamedTemporaryFile(mode='w', delete=False) as fp: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as fp: filepath = fp.name json.dump(data, fp) @@ -196,7 +174,7 @@ def test_must_read_from_local_file(self, yaml_parse_mock): reader = SwaggerReader(definition_uri=filename, working_dir=cwd) actual = reader._download_swagger(filename) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) yaml_parse_mock.assert_called_with('{"some": "value"}') # data was read back from the file as JSON string @patch("samcli.commands.local.lib.swagger.reader.yaml_parse") @@ -205,7 +183,7 @@ def test_must_read_from_local_file_without_working_directory(self, yaml_parse_mo expected = "parsed result" yaml_parse_mock.return_value = expected - with tempfile.NamedTemporaryFile(mode='w', delete=False) as fp: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as fp: filepath = fp.name json.dump(data, fp) @@ -214,7 +192,7 @@ def test_must_read_from_local_file_without_working_directory(self, yaml_parse_mo reader = SwaggerReader(definition_uri=filepath) actual = 
reader._download_swagger(filepath) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) yaml_parse_mock.assert_called_with('{"some": "value"}') # data was read back from the file as JSON string @patch("samcli.commands.local.lib.swagger.reader.yaml_parse") @@ -237,14 +215,13 @@ def test_with_invalid_location(self): class TestSamSwaggerReaderDownloadFromS3(TestCase): - def setUp(self): self.bucket = "mybucket" self.key = "mykey" self.version = "versionid" - @patch('samcli.commands.local.lib.swagger.reader.boto3') - @patch('samcli.commands.local.lib.swagger.reader.tempfile') + @patch("samcli.commands.local.lib.swagger.reader.boto3") + @patch("samcli.commands.local.lib.swagger.reader.tempfile") def test_must_download_file_from_s3(self, tempfilemock, botomock): s3_mock = Mock() @@ -257,32 +234,31 @@ def test_must_download_file_from_s3(self, tempfilemock, botomock): fp_mock.read.return_value = expected actual = SwaggerReader._download_from_s3(self.bucket, self.key, self.version) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) - s3_mock.download_fileobj.assert_called_with(self.bucket, self.key, fp_mock, - ExtraArgs={"VersionId": self.version}) + s3_mock.download_fileobj.assert_called_with( + self.bucket, self.key, fp_mock, ExtraArgs={"VersionId": self.version} + ) fp_mock.seek.assert_called_with(0) # make sure we seek the file before reading fp_mock.read.assert_called_with() - @patch('samcli.commands.local.lib.swagger.reader.boto3') - @patch('samcli.commands.local.lib.swagger.reader.tempfile') + @patch("samcli.commands.local.lib.swagger.reader.boto3") + @patch("samcli.commands.local.lib.swagger.reader.tempfile") def test_must_fail_on_download_from_s3(self, tempfilemock, botomock): s3_mock = Mock() botomock.client.return_value = s3_mock fp_mock = Mock() tempfilemock.TemporaryFile.return_value.__enter__.return_value = fp_mock # mocking context manager - s3_mock.download_fileobj.side_effect = botocore.exceptions.ClientError({"Error": {}}, - "download_file") + s3_mock.download_fileobj.side_effect = botocore.exceptions.ClientError({"Error": {}}, "download_file") with self.assertRaises(Exception) as cm: SwaggerReader._download_from_s3(self.bucket, self.key) - self.assertIn(cm.exception.__class__, - (botocore.exceptions.NoCredentialsError, botocore.exceptions.ClientError)) + self.assertIn(cm.exception.__class__, (botocore.exceptions.NoCredentialsError, botocore.exceptions.ClientError)) - @patch('samcli.commands.local.lib.swagger.reader.boto3') - @patch('samcli.commands.local.lib.swagger.reader.tempfile') + @patch("samcli.commands.local.lib.swagger.reader.boto3") + @patch("samcli.commands.local.lib.swagger.reader.tempfile") def test_must_work_without_object_version_id(self, tempfilemock, botomock): s3_mock = Mock() @@ -295,13 +271,12 @@ def test_must_work_without_object_version_id(self, tempfilemock, botomock): fp_mock.read.return_value = expected actual = SwaggerReader._download_from_s3(self.bucket, self.key) - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) - s3_mock.download_fileobj.assert_called_with(self.bucket, self.key, fp_mock, - ExtraArgs={}) + s3_mock.download_fileobj.assert_called_with(self.bucket, self.key, fp_mock, ExtraArgs={}) - @patch('samcli.commands.local.lib.swagger.reader.boto3') - @patch('samcli.commands.local.lib.swagger.reader.tempfile') + @patch("samcli.commands.local.lib.swagger.reader.boto3") + @patch("samcli.commands.local.lib.swagger.reader.tempfile") def test_must_log_on_download_exception(self, 
tempfilemock, botomock): s3_mock = Mock() @@ -309,8 +284,7 @@ def test_must_log_on_download_exception(self, tempfilemock, botomock): fp_mock = Mock() tempfilemock.TemporaryFile.return_value.__enter__.return_value = fp_mock # mocking context manager - s3_mock.download_fileobj.side_effect = botocore.exceptions.ClientError({"Error": {}}, - "download_file") + s3_mock.download_fileobj.side_effect = botocore.exceptions.ClientError({"Error": {}}, "download_file") with self.assertRaises(botocore.exceptions.ClientError): SwaggerReader._download_from_s3(self.bucket, self.key) @@ -319,50 +293,44 @@ def test_must_log_on_download_exception(self, tempfilemock, botomock): class TestSamSwaggerReader_parse_s3_location(TestCase): - def setUp(self): self.bucket = "mybucket" self.key = "mykey" self.version = "myversion" def test_must_parse_valid_dict(self): - location = { - "Bucket": self.bucket, - "Key": self.key, - "Version": self.version - } + location = {"Bucket": self.bucket, "Key": self.key, "Version": self.version} result = SwaggerReader._parse_s3_location(location) - self.assertEquals(result, (self.bucket, self.key, self.version)) + self.assertEqual(result, (self.bucket, self.key, self.version)) def test_must_parse_dict_without_version(self): - location = { - "Bucket": self.bucket, - "Key": self.key - } + location = {"Bucket": self.bucket, "Key": self.key} result = SwaggerReader._parse_s3_location(location) - self.assertEquals(result, (self.bucket, self.key, None)) + self.assertEqual(result, (self.bucket, self.key, None)) def test_must_parse_s3_uri_string(self): location = "s3://{}/{}?versionId={}".format(self.bucket, self.key, self.version) result = SwaggerReader._parse_s3_location(location) - self.assertEquals(result, (self.bucket, self.key, self.version)) + self.assertEqual(result, (self.bucket, self.key, self.version)) def test_must_parse_s3_uri_string_without_version_id(self): location = "s3://{}/{}".format(self.bucket, self.key) result = SwaggerReader._parse_s3_location(location) - self.assertEquals(result, (self.bucket, self.key, None)) - - @parameterized.expand([ - param("http://s3.amazonaws.com/bucket/key"), - param("./foo/bar.txt"), - param("/home/user/bar.txt"), - param({"k": "v"}), - ]) + self.assertEqual(result, (self.bucket, self.key, None)) + + @parameterized.expand( + [ + param("http://s3.amazonaws.com/bucket/key"), + param("./foo/bar.txt"), + param("/home/user/bar.txt"), + param({"k": "v"}), + ] + ) def test_must_parse_invalid_location(self, location): result = SwaggerReader._parse_s3_location(location) - self.assertEquals(result, (None, None, None)) + self.assertEqual(result, (None, None, None)) diff --git a/tests/unit/commands/local/lib/test_api_provider.py b/tests/unit/commands/local/lib/test_api_provider.py index 013405429a..426fa37d05 100644 --- a/tests/unit/commands/local/lib/test_api_provider.py +++ b/tests/unit/commands/local/lib/test_api_provider.py @@ -10,7 +10,6 @@ class TestApiProvider_init(TestCase): - @patch.object(ApiProvider, "_extract_api") @patch("samcli.commands.local.lib.api_provider.SamBaseProvider") def test_provider_with_valid_template(self, SamBaseProviderMock, extract_api_mock): @@ -19,11 +18,11 @@ def test_provider_with_valid_template(self, SamBaseProviderMock, extract_api_moc SamBaseProviderMock.get_template.return_value = template provider = ApiProvider(template) - self.assertEquals(len(provider.routes), 3) - self.assertEquals(provider.routes, set(["set", "of", "values"])) + self.assertEqual(len(provider.routes), 3) + self.assertEqual(provider.routes, 
set(["set", "of", "values"])) - self.assertEquals(provider.template_dict, {"Resources": {"a": "b"}}) - self.assertEquals(provider.resources, {"a": "b"}) + self.assertEqual(provider.template_dict, {"Resources": {"a": "b"}}) + self.assertEqual(provider.resources, {"a": "b"}) class TestApiProviderSelection(TestCase): @@ -42,16 +41,15 @@ def test_default_provider(self): "type": "aws_proxy", "uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", + "/functions/${NoApiEventFunction.Arn}/invocations" }, "responses": {}, - }, + } } } - } - } - } + }, + }, } } @@ -73,16 +71,15 @@ def test_api_provider_sam_api(self): "type": "aws_proxy", "uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", + "/functions/${NoApiEventFunction.Arn}/invocations" }, "responses": {}, - }, + } } } - } - } - } + }, + }, } } @@ -104,16 +101,15 @@ def test_api_provider_sam_function(self): "type": "aws_proxy", "uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", + "/functions/${NoApiEventFunction.Arn}/invocations" }, "responses": {}, - }, + } } } - } - } - } + }, + }, } } @@ -136,16 +132,15 @@ def test_api_provider_cloud_formation(self): "type": "aws_proxy", "uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", + "/functions/${NoApiEventFunction.Arn}/invocations" }, "responses": {}, - }, + } } } - } - } - } + }, + }, } } @@ -167,16 +162,15 @@ def test_multiple_api_provider_cloud_formation(self): "type": "aws_proxy", "uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", + "/functions/${NoApiEventFunction.Arn}/invocations" }, "responses": {}, - }, + } } } - } - } - } + }, + }, } resources["OtherApi"] = { "Type": "AWS::Serverless::Api", @@ -191,16 +185,15 @@ def test_multiple_api_provider_cloud_formation(self): "type": "aws_proxy", "uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", + "/functions/${NoApiEventFunction.Arn}/invocations" }, "responses": {}, - }, + } } } - } - } - } + }, + }, } provider = ApiProvider.find_api_provider(resources) diff --git a/tests/unit/commands/local/lib/test_cfn_api_provider.py b/tests/unit/commands/local/lib/test_cfn_api_provider.py index 85388467b3..cbf5d40d2e 100644 --- a/tests/unit/commands/local/lib/test_cfn_api_provider.py +++ b/tests/unit/commands/local/lib/test_cfn_api_provider.py @@ -13,42 +13,25 @@ class TestApiProviderWithApiGatewayRestRoute(TestCase): - def setUp(self): self.binary_types = ["image/png", "image/jpg"] self.input_routes = [ Route(path="/path1", methods=["GET", "POST"], function_name="SamFunc1"), Route(path="/path2", methods=["PUT", "GET"], function_name="SamFunc1"), - Route(path="/path3", methods=["DELETE"], function_name="SamFunc1") + Route(path="/path3", methods=["DELETE"], function_name="SamFunc1"), ] def test_with_no_apis(self): - template = { - "Resources": { - - "Api1": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": { - }, - - } - } - } + template = {"Resources": {"Api1": {"Type": "AWS::ApiGateway::RestApi", "Properties": {}}}} provider = ApiProvider(template) - self.assertEquals(provider.routes, []) + self.assertEqual(provider.routes, []) def test_with_inline_swagger_apis(self): template = { 
"Resources": { - - "Api1": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": { - "Body": make_swagger(self.input_routes) - } - } + "Api1": {"Type": "AWS::ApiGateway::RestApi", "Properties": {"Body": make_swagger(self.input_routes)}} } } @@ -56,7 +39,7 @@ def test_with_inline_swagger_apis(self): assertCountEqual(self, self.input_routes, provider.routes) def test_with_swagger_as_local_file(self): - with tempfile.NamedTemporaryFile(mode='w', delete=False) as fp: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as fp: filename = fp.name swagger = make_swagger(self.input_routes) @@ -65,22 +48,14 @@ def test_with_swagger_as_local_file(self): fp.flush() template = { - "Resources": { - - "Api1": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": { - "BodyS3Location": filename - } - } - } + "Resources": {"Api1": {"Type": "AWS::ApiGateway::RestApi", "Properties": {"BodyS3Location": filename}}} } provider = ApiProvider(template) assertCountEqual(self, self.input_routes, provider.routes) def test_body_with_swagger_as_local_file_expect_fail(self): - with tempfile.NamedTemporaryFile(mode='w', delete=False) as fp: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as fp: filename = fp.name swagger = make_swagger(self.input_routes) @@ -88,17 +63,7 @@ def test_body_with_swagger_as_local_file_expect_fail(self): json.dump(swagger, fp) fp.flush() - template = { - "Resources": { - - "Api1": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": { - "Body": filename - } - } - } - } + template = {"Resources": {"Api1": {"Type": "AWS::ApiGateway::RestApi", "Properties": {"Body": filename}}}} self.assertRaises(Exception, ApiProvider, template) @patch("samcli.commands.local.lib.cfn_base_api_provider.SwaggerReader") @@ -108,14 +73,7 @@ def test_with_swagger_as_both_body_and_uri_called(self, SwaggerReaderMock): template = { "Resources": { - - "Api1": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": { - "BodyS3Location": filename, - "Body": body - } - } + "Api1": {"Type": "AWS::ApiGateway::RestApi", "Properties": {"BodyS3Location": filename, "Body": body}} } } @@ -127,29 +85,18 @@ def test_with_swagger_as_both_body_and_uri_called(self, SwaggerReaderMock): SwaggerReaderMock.assert_called_with(definition_body=body, definition_uri=filename, working_dir=cwd) def test_swagger_with_any_method(self): - routes = [ - Route(path="/path", methods=["any"], function_name="SamFunc1") - ] + routes = [Route(path="/path", methods=["any"], function_name="SamFunc1")] expected_routes = [ - Route(path="/path", methods=["GET", - "DELETE", - "PUT", - "POST", - "HEAD", - "OPTIONS", - "PATCH"], function_name="SamFunc1") + Route( + path="/path", + methods=["GET", "DELETE", "PUT", "POST", "HEAD", "OPTIONS", "PATCH"], + function_name="SamFunc1", + ) ] template = { - "Resources": { - "Api1": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": { - "Body": make_swagger(routes) - } - } - } + "Resources": {"Api1": {"Type": "AWS::ApiGateway::RestApi", "Properties": {"Body": make_swagger(routes)}}} } provider = ApiProvider(template) @@ -158,12 +105,9 @@ def test_swagger_with_any_method(self): def test_with_binary_media_types(self): template = { "Resources": { - "Api1": { "Type": "AWS::ApiGateway::RestApi", - "Properties": { - "Body": make_swagger(self.input_routes, binary_media_types=self.binary_types) - } + "Properties": {"Body": make_swagger(self.input_routes, binary_media_types=self.binary_types)}, } } } @@ -172,7 +116,7 @@ def test_with_binary_media_types(self): expected_apis = [ Route(path="/path1", 
methods=["GET", "POST"], function_name="SamFunc1"), Route(path="/path2", methods=["PUT", "GET"], function_name="SamFunc1"), - Route(path="/path3", methods=["DELETE"], function_name="SamFunc1") + Route(path="/path3", methods=["DELETE"], function_name="SamFunc1"), ] provider = ApiProvider(template) @@ -180,28 +124,23 @@ def test_with_binary_media_types(self): assertCountEqual(self, provider.api.binary_media_types, expected_binary_types) def test_with_binary_media_types_in_swagger_and_on_resource(self): - input_routes = [ - Route(path="/path", methods=["OPTIONS"], function_name="SamFunc1"), - ] + input_routes = [Route(path="/path", methods=["OPTIONS"], function_name="SamFunc1")] extra_binary_types = ["text/html"] template = { "Resources": { - "Api1": { "Type": "AWS::ApiGateway::RestApi", "Properties": { "BinaryMediaTypes": extra_binary_types, - "Body": make_swagger(input_routes, binary_media_types=self.binary_types) - } + "Body": make_swagger(input_routes, binary_media_types=self.binary_types), + }, } } } expected_binary_types = sorted(self.binary_types + extra_binary_types) - expected_routes = [ - Route(path="/path", methods=["OPTIONS"], function_name="SamFunc1"), - ] + expected_routes = [Route(path="/path", methods=["OPTIONS"], function_name="SamFunc1")] provider = ApiProvider(template) assertCountEqual(self, expected_routes, provider.routes) @@ -214,19 +153,13 @@ def setUp(self): self.input_routes = [ Route(path="/path1", methods=["GET", "POST"], function_name="SamFunc1"), Route(path="/path2", methods=["PUT", "GET"], function_name="SamFunc1"), - Route(path="/path3", methods=["DELETE"], function_name="SamFunc1") + Route(path="/path3", methods=["DELETE"], function_name="SamFunc1"), ] def test_provider_parse_stage_name(self): template = { "Resources": { - "Stage": { - "Type": "AWS::ApiGateway::Stage", - "Properties": { - "StageName": "dev", - "RestApiId": "TestApi" - } - }, + "Stage": {"Type": "AWS::ApiGateway::Stage", "Properties": {"StageName": "dev", "RestApiId": "TestApi"}}, "TestApi": { "Type": "AWS::ApiGateway::RestApi", "Properties": { @@ -239,25 +172,24 @@ def test_provider_parse_stage_name(self): "type": "aws_proxy", "uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", + "/functions/${NoApiEventFunction.Arn}/invocations" }, "responses": {}, - }, + } } } - } } - } - } + }, + }, } } provider = ApiProvider(template) - route1 = Route(path='/path', methods=['GET'], function_name='NoApiEventFunction') + route1 = Route(path="/path", methods=["GET"], function_name="NoApiEventFunction") self.assertIn(route1, provider.routes) - self.assertEquals(provider.api.stage_name, "dev") - self.assertEquals(provider.api.stage_variables, None) + self.assertEqual(provider.api.stage_name, "dev") + self.assertEqual(provider.api.stage_variables, None) def test_provider_stage_variables(self): template = { @@ -266,13 +198,9 @@ def test_provider_stage_variables(self): "Type": "AWS::ApiGateway::Stage", "Properties": { "StageName": "dev", - "Variables": { - "vis": "data", - "random": "test", - "foo": "bar" - }, - "RestApiId": "TestApi" - } + "Variables": {"vis": "data", "random": "test", "foo": "bar"}, + "RestApiId": "TestApi", + }, }, "TestApi": { "Type": "AWS::ApiGateway::RestApi", @@ -286,90 +214,78 @@ def test_provider_stage_variables(self): "type": "aws_proxy", "uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", + 
"/functions/${NoApiEventFunction.Arn}/invocations" }, "responses": {}, - }, + } } } - } } - } - } + }, + }, } } provider = ApiProvider(template) - route1 = Route(path='/path', methods=['GET'], function_name='NoApiEventFunction') + route1 = Route(path="/path", methods=["GET"], function_name="NoApiEventFunction") self.assertIn(route1, provider.routes) - self.assertEquals(provider.api.stage_name, "dev") - self.assertEquals(provider.api.stage_variables, { - "vis": "data", - "random": "test", - "foo": "bar" - }) + self.assertEqual(provider.api.stage_name, "dev") + self.assertEqual(provider.api.stage_variables, {"vis": "data", "random": "test", "foo": "bar"}) def test_multi_stage_get_all(self): - resources = OrderedDict({ - "ProductionApi": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": { - "Body": { - "paths": { - "/path": { - "get": { - "x-amazon-apigateway-integration": { - "httpMethod": "POST", - "type": "aws_proxy", - "uri": { - "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", - }, - "responses": {}, - }, - } - }, - "/anotherpath": { - "post": { - "x-amazon-apigateway-integration": { - "httpMethod": "POST", - "type": "aws_proxy", - "uri": { - "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" - "/functions/${NoApiEventFunction.Arn}/invocations", - }, - "responses": {}, - }, - } + resources = OrderedDict( + { + "ProductionApi": { + "Type": "AWS::ApiGateway::RestApi", + "Properties": { + "Body": { + "paths": { + "/path": { + "get": { + "x-amazon-apigateway-integration": { + "httpMethod": "POST", + "type": "aws_proxy", + "uri": { + "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" + "/functions/${NoApiEventFunction.Arn}/invocations" + }, + "responses": {}, + } + } + }, + "/anotherpath": { + "post": { + "x-amazon-apigateway-integration": { + "httpMethod": "POST", + "type": "aws_proxy", + "uri": { + "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" + "/functions/${NoApiEventFunction.Arn}/invocations" + }, + "responses": {}, + } + } + }, } - } - } + }, } } - }) + ) resources["StageDev"] = { "Type": "AWS::ApiGateway::Stage", "Properties": { "StageName": "dev", - "Variables": { - "vis": "data", - "random": "test", - "foo": "bar" - }, - "RestApiId": "ProductionApi" - } + "Variables": {"vis": "data", "random": "test", "foo": "bar"}, + "RestApiId": "ProductionApi", + }, } resources["StageProd"] = { "Type": "AWS::ApiGateway::Stage", "Properties": { "StageName": "Production", - "Variables": { - "vis": "prod data", - "random": "test", - "foo": "bar" - }, - "RestApiId": "ProductionApi" + "Variables": {"vis": "prod data", "random": "test", "foo": "bar"}, + "RestApiId": "ProductionApi", }, } template = {"Resources": resources} @@ -378,217 +294,144 @@ def test_multi_stage_get_all(self): result = [f for f in provider.get_all()] routes = result[0].routes - route1 = Route(path='/path', methods=['GET'], function_name='NoApiEventFunction') - route2 = Route(path='/anotherpath', methods=['POST'], function_name='NoApiEventFunction') - self.assertEquals(len(routes), 2) + route1 = Route(path="/path", methods=["GET"], function_name="NoApiEventFunction") + route2 = Route(path="/anotherpath", methods=["POST"], function_name="NoApiEventFunction") + self.assertEqual(len(routes), 2) self.assertIn(route1, routes) self.assertIn(route2, routes) - self.assertEquals(provider.api.stage_name, "Production") - self.assertEquals(provider.api.stage_variables, { - "vis": "prod data", - "random": 
"test", - "foo": "bar" - }) + self.assertEqual(provider.api.stage_name, "Production") + self.assertEqual(provider.api.stage_variables, {"vis": "prod data", "random": "test", "foo": "bar"}) class TestCloudFormationResourceMethod(TestCase): - def setUp(self): self.binary_types = ["image/png", "image/jpg"] self.input_routes = [ Route(path="/path1", methods=["GET", "POST"], function_name="SamFunc1"), Route(path="/path2", methods=["PUT", "GET"], function_name="SamFunc1"), - Route(path="/path3", methods=["DELETE"], function_name="SamFunc1") + Route(path="/path3", methods=["DELETE"], function_name="SamFunc1"), ] def test_basic_rest_api_resource_method(self): template = { "Resources": { - "TestApi": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": { - "StageName": "Prod" - } - }, - "ApiResource": { - "Properties": { - "PathPart": "{proxy+}", - "RestApiId": "TestApi", - } - }, + "TestApi": {"Type": "AWS::ApiGateway::RestApi", "Properties": {"StageName": "Prod"}}, + "ApiResource": {"Properties": {"PathPart": "{proxy+}", "RestApiId": "TestApi"}}, "ApiMethod": { "Type": "AWS::ApiGateway::Method", - "Properties": { - "HttpMethod": "POST", - "RestApiId": "TestApi", - "ResourceId": "ApiResource" - }, - } + "Properties": {"HttpMethod": "POST", "RestApiId": "TestApi", "ResourceId": "ApiResource"}, + }, } } provider = ApiProvider(template) - self.assertEquals(provider.routes, [Route(function_name=None, path="/{proxy+}", methods=["POST"])]) + self.assertEqual(provider.routes, [Route(function_name=None, path="/{proxy+}", methods=["POST"])]) def test_resolve_correct_resource_path(self): resources = { "RootApiResource": { "Tyoe": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "root", - "ResourceId": "TestApi", - } + "Properties": {"PathPart": "root", "ResourceId": "TestApi"}, } } beta_resource = { "Tyoe": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "beta", - "ResourceId": "TestApi", - "ParentId": "RootApiResource" - } + "Properties": {"PathPart": "beta", "ResourceId": "TestApi", "ParentId": "RootApiResource"}, } resources["BetaApiResource"] = beta_resource provider = CfnApiProvider() full_path = provider.resolve_resource_path(resources, beta_resource, "/test") - self.assertEquals(full_path, "/root/beta/test") + self.assertEqual(full_path, "/root/beta/test") def test_resolve_correct_multi_parent_resource_path(self): template = { "Resources": { - "TestApi": { - "Type": "AWS::ApiGateway::Resource", - "Properties": { - "StageName": "Prod" - } - }, + "TestApi": {"Type": "AWS::ApiGateway::Resource", "Properties": {"StageName": "Prod"}}, "RootApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "root", - "ResourceId": "TestApi", - } + "Properties": {"PathPart": "root", "ResourceId": "TestApi"}, }, "V1ApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "v1", - "ResourceId": "TestApi", - "ParentId": "RootApiResource" - } + "Properties": {"PathPart": "v1", "ResourceId": "TestApi", "ParentId": "RootApiResource"}, }, "AlphaApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "alpha", - "ResourceId": "TestApi", - "ParentId": "V1ApiResource" - } + "Properties": {"PathPart": "alpha", "ResourceId": "TestApi", "ParentId": "V1ApiResource"}, }, "BetaApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "beta", - "ResourceId": "TestApi", - "ParentId": "V1ApiResource" - } + "Properties": {"PathPart": "beta", "ResourceId": "TestApi", "ParentId": "V1ApiResource"}, }, 
"AlphaApiMethod": { "Type": "AWS::ApiGateway::Method", - "Properties": { - "HttpMethod": "GET", - "RestApiId": "TestApi", - "ResourceId": "AlphaApiResource" - }, + "Properties": {"HttpMethod": "GET", "RestApiId": "TestApi", "ResourceId": "AlphaApiResource"}, }, "BetaAlphaApiMethod": { "Type": "AWS::ApiGateway::Method", - "Properties": { - "HttpMethod": "POST", - "RestApiId": "TestApi", - "ResourceId": "BetaApiResource" - }, - } + "Properties": {"HttpMethod": "POST", "RestApiId": "TestApi", "ResourceId": "BetaApiResource"}, + }, } } provider = ApiProvider(template) - assertCountEqual(self, provider.routes, [Route(path="/root/v1/beta", methods=["POST"], function_name=None), - Route(path="/root/v1/alpha", methods=["GET"], function_name=None)]) + assertCountEqual( + self, + provider.routes, + [ + Route(path="/root/v1/beta", methods=["POST"], function_name=None), + Route(path="/root/v1/alpha", methods=["GET"], function_name=None), + ], + ) def test_resource_with_method_correct_routes(self): template = { "Resources": { - "TestApi": { - "Type": "AWS::ApiGateway::Resource", - "Properties": { - "StageName": "Prod" - } - }, + "TestApi": {"Type": "AWS::ApiGateway::Resource", "Properties": {"StageName": "Prod"}}, "BetaApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "beta", - "ResourceId": "TestApi", - } + "Properties": {"PathPart": "beta", "ResourceId": "TestApi"}, }, "BetaAlphaApiMethod": { "Type": "AWS::ApiGateway::Method", - "Properties": { - "HttpMethod": "ANY", - "RestApiId": "TestApi", - "ResourceId": "BetaApiResource", - }, - } + "Properties": {"HttpMethod": "ANY", "RestApiId": "TestApi", "ResourceId": "BetaApiResource"}, + }, } } provider = ApiProvider(template) - assertCountEqual(self, provider.routes, - [Route(path="/beta", methods=["POST", "GET", "DELETE", "HEAD", "OPTIONS", "PATCH", "PUT"], - function_name=None), - ]) + assertCountEqual( + self, + provider.routes, + [ + Route( + path="/beta", + methods=["POST", "GET", "DELETE", "HEAD", "OPTIONS", "PATCH", "PUT"], + function_name=None, + ) + ], + ) def test_method_integration_uri(self): template = { "Resources": { - "TestApi": { - "Type": "AWS::ApiGateway::Resource", - "Properties": { - "StageName": "Prod" - } - }, + "TestApi": {"Type": "AWS::ApiGateway::Resource", "Properties": {"StageName": "Prod"}}, "RootApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "root", - "ResourceId": "TestApi", - } + "Properties": {"PathPart": "root", "ResourceId": "TestApi"}, }, "V1ApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "v1", - "ResourceId": "TestApi", - "ParentId": "RootApiResource" - } + "Properties": {"PathPart": "v1", "ResourceId": "TestApi", "ParentId": "RootApiResource"}, }, "AlphaApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "alpha", - "ResourceId": "TestApi", - "ParentId": "V1ApiResource" - } + "Properties": {"PathPart": "alpha", "ResourceId": "TestApi", "ParentId": "V1ApiResource"}, }, "BetaApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "beta", - "ResourceId": "TestApi", - "ParentId": "V1ApiResource" - } + "Properties": {"PathPart": "beta", "ResourceId": "TestApi", "ParentId": "V1ApiResource"}, }, "AlphaApiMethod": { "Type": "AWS::ApiGateway::Method", @@ -599,10 +442,10 @@ def test_method_integration_uri(self): "Integration": { "Uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/" - "functions" - 
"/${AWSBetaLambdaFunction.Arn}/invocations} " + "functions" + "/${AWSBetaLambdaFunction.Arn}/invocations} " } - } + }, }, }, "BetaAlphaApiMethod": { @@ -614,75 +457,52 @@ def test_method_integration_uri(self): "Integration": { "Uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/" - "functions" - "/${AWSLambdaFunction.Arn}/invocations}" + "functions" + "/${AWSLambdaFunction.Arn}/invocations}" } - } + }, }, }, "AWSAlphaLambdaFunction": { "Type": "AWS::Lambda::Function", - "Properties": { - "Code": ".", - "Handler": "main.run_test", - "Runtime": "Python3.6" - } + "Properties": {"Code": ".", "Handler": "main.run_test", "Runtime": "Python3.6"}, }, "AWSBetaLambdaFunction": { "Type": "AWS::Lambda::Function", - "Properties": { - "Code": ".", - "Handler": "main.run_test", - "Runtime": "Python3.6" - } - } + "Properties": {"Code": ".", "Handler": "main.run_test", "Runtime": "Python3.6"}, + }, } } provider = ApiProvider(template) - assertCountEqual(self, provider.routes, - [Route(path="/root/v1/beta", methods=["POST"], function_name="AWSLambdaFunction"), - Route(path="/root/v1/alpha", methods=["GET"], function_name="AWSBetaLambdaFunction")]) + assertCountEqual( + self, + provider.routes, + [ + Route(path="/root/v1/beta", methods=["POST"], function_name="AWSLambdaFunction"), + Route(path="/root/v1/alpha", methods=["GET"], function_name="AWSBetaLambdaFunction"), + ], + ) def test_binary_media_types_method(self): template = { "Resources": { - "TestApi": { - "Type": "AWS::ApiGateway::Resource", - "Properties": { - "StageName": "Prod" - } - }, + "TestApi": {"Type": "AWS::ApiGateway::Resource", "Properties": {"StageName": "Prod"}}, "RootApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "root", - "ResourceId": "TestApi", - } + "Properties": {"PathPart": "root", "ResourceId": "TestApi"}, }, "V1ApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "v1", - "ResourceId": "TestApi", - "ParentId": "RootApiResource" - } + "Properties": {"PathPart": "v1", "ResourceId": "TestApi", "ParentId": "RootApiResource"}, }, "AlphaApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "alpha", - "ResourceId": "TestApi", - "ParentId": "V1ApiResource" - } + "Properties": {"PathPart": "alpha", "ResourceId": "TestApi", "ParentId": "V1ApiResource"}, }, "BetaApiResource": { "Type": "AWS::ApiGateway::Resource", - "Properties": { - "PathPart": "beta", - "ResourceId": "TestApi", - "ParentId": "V1ApiResource" - } + "Properties": {"PathPart": "beta", "ResourceId": "TestApi", "ParentId": "V1ApiResource"}, }, "AlphaApiMethod": { "Type": "AWS::ApiGateway::Method", @@ -693,12 +513,12 @@ def test_binary_media_types_method(self): "Integration": { "Uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/" - "functions" - "/${AWSBetaLambdaFunction.Arn}/invocations} " + "functions" + "/${AWSBetaLambdaFunction.Arn}/invocations} " }, "ContentHandling": "CONVERT_TO_BINARY", - "ContentType": "image~1jpg" - } + "ContentType": "image~1jpg", + }, }, }, "BetaAlphaApiMethod": { @@ -710,30 +530,22 @@ def test_binary_media_types_method(self): "Integration": { "Uri": { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/" - "functions" - "/${AWSLambdaFunction.Arn}/invocations}" + "functions" + "/${AWSLambdaFunction.Arn}/invocations}" }, "ContentHandling": "CONVERT_TO_BINARY", - "ContentType": "image~1png" - } + "ContentType": "image~1png", + }, }, }, "AWSAlphaLambdaFunction": { "Type": 
"AWS::Lambda::Function", - "Properties": { - "Code": ".", - "Handler": "main.run_test", - "Runtime": "Python3.6" - } + "Properties": {"Code": ".", "Handler": "main.run_test", "Runtime": "Python3.6"}, }, "AWSBetaLambdaFunction": { "Type": "AWS::Lambda::Function", - "Properties": { - "Code": ".", - "Handler": "main.run_test", - "Runtime": "Python3.6" - } - } + "Properties": {"Code": ".", "Handler": "main.run_test", "Runtime": "Python3.6"}, + }, } } @@ -746,58 +558,32 @@ def test_cdk(self): "Resources": { "HelloHandler2E4FBA4D": { "Type": "AWS::Lambda::Function", - "Properties": { - "Code": ".", - "Handler": "main.handler", - "Runtime": "python3.6", - }, - "DependsOn": [ - "HelloHandlerServiceRole11EF7C63" - ], - }, - "EndpointEEF1FD8F": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": { - "Name": "Endpoint" - } + "Properties": {"Code": ".", "Handler": "main.handler", "Runtime": "python3.6"}, + "DependsOn": ["HelloHandlerServiceRole11EF7C63"], }, + "EndpointEEF1FD8F": {"Type": "AWS::ApiGateway::RestApi", "Properties": {"Name": "Endpoint"}}, "EndpointDeploymentStageprodB78BEEA0": { "Type": "AWS::ApiGateway::Stage", "Properties": { - "RestApiId": { - "Ref": "EndpointEEF1FD8F" - }, - "DeploymentId": { - "Ref": "EndpointDeployment318525DA37c0e38727e25b4317827bf43e918fbf" - }, - "StageName": "prod" - } + "RestApiId": {"Ref": "EndpointEEF1FD8F"}, + "DeploymentId": {"Ref": "EndpointDeployment318525DA37c0e38727e25b4317827bf43e918fbf"}, + "StageName": "prod", + }, }, "Endpointproxy39E2174E": { "Type": "AWS::ApiGateway::Resource", "Properties": { - "ParentId": { - "Fn::GetAtt": [ - "EndpointEEF1FD8F", - "RootResourceId" - ] - }, + "ParentId": {"Fn::GetAtt": ["EndpointEEF1FD8F", "RootResourceId"]}, "PathPart": "{proxy+}", - "RestApiId": { - "Ref": "EndpointEEF1FD8F" - } - } + "RestApiId": {"Ref": "EndpointEEF1FD8F"}, + }, }, "EndpointproxyANYC09721C5": { "Type": "AWS::ApiGateway::Method", "Properties": { "HttpMethod": "ANY", - "ResourceId": { - "Ref": "Endpointproxy39E2174E" - }, - "RestApiId": { - "Ref": "EndpointEEF1FD8F" - }, + "ResourceId": {"Ref": "Endpointproxy39E2174E"}, + "RestApiId": {"Ref": "EndpointEEF1FD8F"}, "AuthorizationType": "NONE", "Integration": { "IntegrationHttpMethod": "POST", @@ -807,40 +593,24 @@ def test_cdk(self): "", [ "arn:", - { - "Ref": "AWS::Partition" - }, + {"Ref": "AWS::Partition"}, ":apigateway:", - { - "Ref": "AWS::Region" - }, + {"Ref": "AWS::Region"}, "lambda:path/2015-03-31/functions/", - { - "Fn::GetAtt": [ - "HelloHandler2E4FBA4D", - "Arn" - ] - }, - "/invocations" - ] + {"Fn::GetAtt": ["HelloHandler2E4FBA4D", "Arn"]}, + "/invocations", + ], ] - } - } - } + }, + }, + }, }, "EndpointANY485C938B": { "Type": "AWS::ApiGateway::Method", "Properties": { "HttpMethod": "ANY", - "ResourceId": { - "Fn::GetAtt": [ - "EndpointEEF1FD8F", - "RootResourceId" - ] - }, - "RestApiId": { - "Ref": "EndpointEEF1FD8F" - }, + "ResourceId": {"Fn::GetAtt": ["EndpointEEF1FD8F", "RootResourceId"]}, + "RestApiId": {"Ref": "EndpointEEF1FD8F"}, "AuthorizationType": "NONE", "Integration": { "IntegrationHttpMethod": "POST", @@ -850,37 +620,28 @@ def test_cdk(self): "", [ "arn:", - { - "Ref": "AWS::Partition" - }, + {"Ref": "AWS::Partition"}, ":apigateway:", - { - "Ref": "AWS::Region" - }, + {"Ref": "AWS::Region"}, "lambda:path/2015-03-31/functions/", - { - "Fn::GetAtt": [ - "HelloHandler2E4FBA4D", - "Arn" - ] - }, - "/invocations" - ] + {"Fn::GetAtt": ["HelloHandler2E4FBA4D", "Arn"]}, + "/invocations", + ], ] - } - } - } - } + }, + }, + }, + }, }, "Parameters": { 
"HelloHandlerCodeS3Bucket4359A483": { "Type": "String", - "Description": "S3 bucket for asset \"CdkWorkshopStack/HelloHandler/Code\"" + "Description": 'S3 bucket for asset "CdkWorkshopStack/HelloHandler/Code"', }, "HelloHandlerCodeS3VersionKey07D12610": { "Type": "String", - "Description": "S3 key for asset version \"CdkWorkshopStack/HelloHandler/Code\"" - } + "Description": 'S3 key for asset version "CdkWorkshopStack/HelloHandler/Code"', + }, }, "Outputs": { "Endpoint8024A810": { @@ -889,30 +650,20 @@ def test_cdk(self): "", [ "https://", - { - "Ref": "EndpointEEF1FD8F" - }, + {"Ref": "EndpointEEF1FD8F"}, ".execute-api.", - { - "Ref": "AWS::Region" - }, + {"Ref": "AWS::Region"}, ".", - { - "Ref": "AWS::URLSuffix" - }, + {"Ref": "AWS::URLSuffix"}, "/", - { - "Ref": "EndpointDeploymentStageprodB78BEEA0" - }, - "/" - ] + {"Ref": "EndpointDeploymentStageprodB78BEEA0"}, + "/", + ], ] }, - "Export": { - "Name": "CdkWorkshopStack:Endpoint8024A810" - } + "Export": {"Name": "CdkWorkshopStack:Endpoint8024A810"}, } - } + }, } provider = ApiProvider(template) proxy_paths = [Route(path="/{proxy+}", methods=Route.ANY_HTTP_METHODS, function_name="HelloHandler2E4FBA4D")] diff --git a/tests/unit/commands/local/lib/test_debug_context.py b/tests/unit/commands/local/lib/test_debug_context.py index 6d8f0a347a..9c19346e5a 100644 --- a/tests/unit/commands/local/lib/test_debug_context.py +++ b/tests/unit/commands/local/lib/test_debug_context.py @@ -6,61 +6,68 @@ class TestDebugContext(TestCase): - def test_init(self): - context = DebugContext('port', 'debuggerpath', 'debug_args') + context = DebugContext("port", "debuggerpath", "debug_args") - self.assertEquals(context.debug_port, 'port') - self.assertEquals(context.debugger_path, 'debuggerpath') - self.assertEquals(context.debug_args, 'debug_args') + self.assertEqual(context.debug_port, "port") + self.assertEqual(context.debugger_path, "debuggerpath") + self.assertEqual(context.debug_args, "debug_args") - @parameterized.expand([ - ('1000', 'debuggerpath', 'debug_args'), - ('1000', None, None), - ('1000', None, 'debug_args'), - ('1000', 'debuggerpath', None), - (1000, 'debuggerpath', 'debug_args'), - (1000, None, None), - (1000, None, 'debug_args'), - (1000, 'debuggerpath', None) - ]) + @parameterized.expand( + [ + ("1000", "debuggerpath", "debug_args"), + ("1000", None, None), + ("1000", None, "debug_args"), + ("1000", "debuggerpath", None), + (1000, "debuggerpath", "debug_args"), + (1000, None, None), + (1000, None, "debug_args"), + (1000, "debuggerpath", None), + ] + ) def test_bool_truthy(self, port, debug_path, debug_ars): debug_context = DebugContext(port, debug_path, debug_ars) self.assertTrue(debug_context.__bool__()) - @parameterized.expand([ - (None, 'debuggerpath', 'debug_args'), - (None, None, None), - (None, None, 'debug_args'), - (None, 'debuggerpath', None), - ]) + @parameterized.expand( + [ + (None, "debuggerpath", "debug_args"), + (None, None, None), + (None, None, "debug_args"), + (None, "debuggerpath", None), + ] + ) def test_bool_falsy(self, port, debug_path, debug_ars): debug_context = DebugContext(port, debug_path, debug_ars) self.assertFalse(debug_context.__bool__()) - @parameterized.expand([ - ('1000', 'debuggerpath', 'debug_args'), - ('1000', None, None), - ('1000', None, 'debug_args'), - ('1000', 'debuggerpath', None), - (1000, 'debuggerpath', 'debug_args'), - (1000, None, None), - (1000, None, 'debug_args'), - (1000, 'debuggerpath', None) - ]) + @parameterized.expand( + [ + ("1000", "debuggerpath", "debug_args"), + ("1000", None, 
None), + ("1000", None, "debug_args"), + ("1000", "debuggerpath", None), + (1000, "debuggerpath", "debug_args"), + (1000, None, None), + (1000, None, "debug_args"), + (1000, "debuggerpath", None), + ] + ) def test_nonzero_thruthy(self, port, debug_path, debug_ars): debug_context = DebugContext(port, debug_path, debug_ars) self.assertTrue(debug_context.__nonzero__()) - @parameterized.expand([ - (None, 'debuggerpath', 'debug_args'), - (None, None, None), - (None, None, 'debug_args'), - (None, 'debuggerpath', None) - ]) + @parameterized.expand( + [ + (None, "debuggerpath", "debug_args"), + (None, None, None), + (None, None, "debug_args"), + (None, "debuggerpath", None), + ] + ) def test_nonzero_falsy(self, port, debug_path, debug_ars): debug_context = DebugContext(port, debug_path, debug_ars) diff --git a/tests/unit/commands/local/lib/test_local_api_service.py b/tests/unit/commands/local/lib/test_local_api_service.py index f43f93713e..758007c6c5 100644 --- a/tests/unit/commands/local/lib/test_local_api_service.py +++ b/tests/unit/commands/local/lib/test_local_api_service.py @@ -15,7 +15,6 @@ class TestLocalApiService_start(TestCase): - def setUp(self): self.port = 123 self.host = "abc" @@ -39,11 +38,7 @@ def setUp(self): @patch("samcli.commands.local.lib.local_api_service.ApiProvider") @patch.object(LocalApiService, "_make_static_dir_path") @patch.object(LocalApiService, "_print_routes") - def test_must_start_service(self, - log_routes_mock, - make_static_dir_mock, - SamApiProviderMock, - ApiGwServiceMock): + def test_must_start_service(self, log_routes_mock, make_static_dir_mock, SamApiProviderMock, ApiGwServiceMock): routing_list = [1, 2, 3] # something static_dir_path = "/foo/bar" @@ -58,18 +53,20 @@ def test_must_start_service(self, local_service.start() # Make sure the right methods are called - SamApiProviderMock.assert_called_with(self.template, - cwd=self.cwd, - parameter_overrides=self.lambda_invoke_context_mock.parameter_overrides) + SamApiProviderMock.assert_called_with( + self.template, cwd=self.cwd, parameter_overrides=self.lambda_invoke_context_mock.parameter_overrides + ) log_routes_mock.assert_called_with(routing_list, self.host, self.port) make_static_dir_mock.assert_called_with(self.cwd, self.static_dir) - ApiGwServiceMock.assert_called_with(api=self.api_provider_mock.api, - lambda_runner=self.lambda_runner_mock, - static_dir=static_dir_path, - port=self.port, - host=self.host, - stderr=self.stderr_mock) + ApiGwServiceMock.assert_called_with( + api=self.api_provider_mock.api, + lambda_runner=self.lambda_runner_mock, + static_dir=static_dir_path, + port=self.port, + host=self.host, + stderr=self.stderr_mock, + ) self.apigw_service.create.assert_called_with() self.apigw_service.run.assert_called_with() @@ -79,12 +76,9 @@ def test_must_start_service(self, @patch.object(LocalApiService, "_make_static_dir_path") @patch.object(LocalApiService, "_print_routes") @patch.object(ApiProvider, "_extract_api") - def test_must_raise_if_route_not_available(self, - extract_api, - log_routes_mock, - make_static_dir_mock, - SamApiProviderMock, - ApiGwServiceMock): + def test_must_raise_if_route_not_available( + self, extract_api, log_routes_mock, make_static_dir_mock, SamApiProviderMock, ApiGwServiceMock + ): routing_list = [] # Empty api = Api() extract_api.return_value = api @@ -100,7 +94,6 @@ def test_must_raise_if_route_not_available(self, class TestLocalApiService_print_routes(TestCase): - def test_must_print_routes(self): host = "host" port = 123 @@ -113,17 +106,18 @@ def 
test_must_print_routes(self): Route(path="/3", methods=["GET3"], function_name="name3"), ] apis = ApiCollector.dedupe_function_routes(apis) - expected = {"Mounting name1 at http://host:123/1 [GET, POST]", - "Mounting othername1 at http://host:123/1 [DELETE]", - "Mounting name2 at http://host:123/2 [GET2]", - "Mounting name3 at http://host:123/3 [GET3]"} + expected = { + "Mounting name1 at http://host:123/1 [GET, POST]", + "Mounting othername1 at http://host:123/1 [DELETE]", + "Mounting name2 at http://host:123/2 [GET2]", + "Mounting name3 at http://host:123/3 [GET3]", + } actual = LocalApiService._print_routes(apis, host, port) - self.assertEquals(expected, set(actual)) + self.assertEqual(expected, set(actual)) class TestLocalApiService_make_static_dir_path(TestCase): - def test_must_skip_if_none(self): result = LocalApiService._make_static_dir_path("something", None) self.assertIsNone(result) @@ -138,7 +132,7 @@ def test_must_resolve_with_respect_to_cwd(self, os_mock): os_mock.path.exists.return_value = True # Fake the path to exist result = LocalApiService._make_static_dir_path(cwd, static_dir) - self.assertEquals(resolved_path, result) + self.assertEqual(resolved_path, result) os_mock.path.join.assert_called_with(cwd, static_dir) os_mock.path.exists.assert_called_with(resolved_path) diff --git a/tests/unit/commands/local/lib/test_local_lambda.py b/tests/unit/commands/local/lib/test_local_lambda.py index bdc4e14143..ee9f5ced3f 100644 --- a/tests/unit/commands/local/lib/test_local_lambda.py +++ b/tests/unit/commands/local/lib/test_local_lambda.py @@ -12,7 +12,6 @@ class TestLocalLambda_get_aws_creds(TestCase): - def setUp(self): self.region = "region" self.key = "key" @@ -27,13 +26,15 @@ def setUp(self): self.aws_profile = "myprofile" self.aws_region = "region" - self.local_lambda = LocalLambdaRunner(self.runtime_mock, - self.function_provider_mock, - self.cwd, - env_vars_values=self.env_vars_values, - debug_context=self.debug_context, - aws_profile=self.aws_profile, - aws_region=self.aws_region) + self.local_lambda = LocalLambdaRunner( + self.runtime_mock, + self.function_provider_mock, + self.cwd, + env_vars_values=self.env_vars_values, + debug_context=self.debug_context, + aws_profile=self.aws_profile, + aws_region=self.aws_region, + ) @patch("samcli.commands.local.lib.local_lambda.boto3") def test_must_get_from_boto_session(self, boto3_mock): @@ -48,15 +49,10 @@ def test_must_get_from_boto_session(self, boto3_mock): boto3_mock.session.Session.return_value = mock_session mock_session.get_credentials.return_value = creds - expected = { - "region": self.region, - "key": self.key, - "secret": self.secret, - "sessiontoken": self.token - } + expected = {"region": self.region, "key": self.key, "secret": self.secret, "sessiontoken": self.token} actual = self.local_lambda.get_aws_creds() - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) boto3_mock.session.Session.assert_called_with(profile_name=self.aws_profile, region_name=self.aws_region) @@ -68,26 +64,22 @@ def test_must_work_with_no_region_name(self, boto3_mock): creds.token = self.token mock_session = Mock() - del mock_session.region_name # Ask mock to return AttributeError when 'region_name' is accessed + del mock_session.region_name # Ask mock to return AttributeError when 'region_name' is accessed boto3_mock.session.Session.return_value = mock_session mock_session.get_credentials.return_value = creds - expected = { - "key": self.key, - "secret": self.secret, - "sessiontoken": self.token - } + expected = {"key": 
self.key, "secret": self.secret, "sessiontoken": self.token} actual = self.local_lambda.get_aws_creds() - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) boto3_mock.session.Session.assert_called_with(profile_name=self.aws_profile, region_name=self.aws_region) @patch("samcli.commands.local.lib.local_lambda.boto3") def test_must_work_with_no_access_key(self, boto3_mock): creds = Mock() - del creds.access_key # No access key + del creds.access_key # No access key creds.secret_key = self.secret creds.token = self.token @@ -97,14 +89,10 @@ def test_must_work_with_no_access_key(self, boto3_mock): boto3_mock.session.Session.return_value = mock_session mock_session.get_credentials.return_value = creds - expected = { - "region": self.region, - "secret": self.secret, - "sessiontoken": self.token - } + expected = {"region": self.region, "secret": self.secret, "sessiontoken": self.token} actual = self.local_lambda.get_aws_creds() - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) boto3_mock.session.Session.assert_called_with(profile_name=self.aws_profile, region_name=self.aws_region) @@ -112,7 +100,7 @@ def test_must_work_with_no_access_key(self, boto3_mock): def test_must_work_with_no_secret_key(self, boto3_mock): creds = Mock() creds.access_key = self.key - del creds.secret_key # No secret key + del creds.secret_key # No secret key creds.token = self.token mock_session = Mock() @@ -121,14 +109,10 @@ def test_must_work_with_no_secret_key(self, boto3_mock): boto3_mock.session.Session.return_value = mock_session mock_session.get_credentials.return_value = creds - expected = { - "region": self.region, - "key": self.key, - "sessiontoken": self.token - } + expected = {"region": self.region, "key": self.key, "sessiontoken": self.token} actual = self.local_lambda.get_aws_creds() - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) boto3_mock.session.Session.assert_called_with(profile_name=self.aws_profile, region_name=self.aws_region) @@ -137,7 +121,7 @@ def test_must_work_with_no_session_token(self, boto3_mock): creds = Mock() creds.access_key = self.key creds.secret_key = self.secret - del creds.token # No Token + del creds.token # No Token mock_session = Mock() mock_session.region_name = self.region @@ -146,14 +130,10 @@ def test_must_work_with_no_session_token(self, boto3_mock): boto3_mock.session.Session.return_value = mock_session mock_session.get_credentials.return_value = creds - expected = { - "region": self.region, - "key": self.key, - "secret": self.secret - } + expected = {"region": self.region, "key": self.key, "secret": self.secret} actual = self.local_lambda.get_aws_creds() - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) boto3_mock.session.Session.assert_called() @@ -166,7 +146,7 @@ def test_must_work_with_no_credentials(self, boto3_mock): expected = {} actual = self.local_lambda.get_aws_creds() - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) boto3_mock.session.Session.assert_called() @@ -177,13 +157,12 @@ def test_must_work_with_no_session(self, boto3_mock): expected = {} actual = self.local_lambda.get_aws_creds() - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) boto3_mock.session.Session.assert_called() class TestLocalLambda_make_env_vars(TestCase): - def setUp(self): self.runtime_mock = Mock() self.function_provider_mock = Mock() @@ -193,125 +172,135 @@ def setUp(self): self.aws_region = "region" self.env_vars_values = {} - self.environ = { 
- "Variables": { - "var1": "value1", - } - } + self.environ = {"Variables": {"var1": "value1"}} - self.local_lambda = LocalLambdaRunner(self.runtime_mock, - self.function_provider_mock, - self.cwd, - env_vars_values=self.env_vars_values, - debug_context=self.debug_context) + self.local_lambda = LocalLambdaRunner( + self.runtime_mock, + self.function_provider_mock, + self.cwd, + env_vars_values=self.env_vars_values, + debug_context=self.debug_context, + ) self.aws_creds = {"key": "key"} self.local_lambda.get_aws_creds = Mock() self.local_lambda.get_aws_creds.return_value = self.aws_creds - @parameterized.expand([ - # Override for the function exists - ({"function_name": {"a": "b"}}, {"a": "b"}), - - # Override for the function does *not* exist - ({"otherfunction": {"c": "d"}}, None), - - # Using a CloudFormation parameter file format - ({"Parameters": {"p1": "v1"}}, {"p1": "v1"}) - ]) + @parameterized.expand( + [ + # Override for the function exists + ({"function_name": {"a": "b"}}, {"a": "b"}), + # Override for the function does *not* exist + ({"otherfunction": {"c": "d"}}, None), + # Using a CloudFormation parameter file format + ({"Parameters": {"p1": "v1"}}, {"p1": "v1"}), + ] + ) @patch("samcli.commands.local.lib.local_lambda.EnvironmentVariables") @patch("samcli.commands.local.lib.local_lambda.os") - def test_must_work_with_override_values(self, env_vars_values, expected_override_value, os_mock, - EnvironmentVariablesMock): + def test_must_work_with_override_values( + self, env_vars_values, expected_override_value, os_mock, EnvironmentVariablesMock + ): os_environ = {"some": "value"} os_mock.environ = os_environ - function = Function(name="function_name", - runtime="runtime", - memory=1234, - timeout=12, - handler="handler", - codeuri="codeuri", - environment=self.environ, - rolearn=None, - layers=[]) + function = Function( + name="function_name", + runtime="runtime", + memory=1234, + timeout=12, + handler="handler", + codeuri="codeuri", + environment=self.environ, + rolearn=None, + layers=[], + ) self.local_lambda.env_vars_values = env_vars_values self.local_lambda._make_env_vars(function) - EnvironmentVariablesMock.assert_called_with(function.memory, - function.timeout, - function.handler, - variables={"var1": "value1"}, - shell_env_values=os_environ, - override_values=expected_override_value, - aws_creds=self.aws_creds) - - @parameterized.expand([ - # Using a invalid file format - ({"a": "b"}, OverridesNotWellDefinedError), - - ({"a": False}, OverridesNotWellDefinedError), - - ({"a": [True, False]}, OverridesNotWellDefinedError) - ]) + EnvironmentVariablesMock.assert_called_with( + function.memory, + function.timeout, + function.handler, + variables={"var1": "value1"}, + shell_env_values=os_environ, + override_values=expected_override_value, + aws_creds=self.aws_creds, + ) + + @parameterized.expand( + [ + # Using a invalid file format + ({"a": "b"}, OverridesNotWellDefinedError), + ({"a": False}, OverridesNotWellDefinedError), + ({"a": [True, False]}, OverridesNotWellDefinedError), + ] + ) @patch("samcli.commands.local.lib.local_lambda.os") def test_must_not_work_with_invalid_override_values(self, env_vars_values, expected_exception, os_mock): os_environ = {"some": "value"} os_mock.environ = os_environ - function = Function(name="function_name", - runtime="runtime", - memory=1234, - timeout=12, - handler="handler", - codeuri="codeuri", - environment=self.environ, - rolearn=None, - layers=[]) + function = Function( + name="function_name", + runtime="runtime", + memory=1234, + 
timeout=12, + handler="handler", + codeuri="codeuri", + environment=self.environ, + rolearn=None, + layers=[], + ) self.local_lambda.env_vars_values = env_vars_values with self.assertRaises(expected_exception): self.local_lambda._make_env_vars(function) - @parameterized.expand([ - param({"a": "b"}), # Does not have the "Variables" Key - param("somestring"), # Must be a dict type - param(None) - ]) + @parameterized.expand( + [ + param({"a": "b"}), # Does not have the "Variables" Key + param("somestring"), # Must be a dict type + param(None), + ] + ) @patch("samcli.commands.local.lib.local_lambda.EnvironmentVariables") @patch("samcli.commands.local.lib.local_lambda.os") def test_must_work_with_invalid_environment_variable(self, environment_variable, os_mock, EnvironmentVariablesMock): os_environ = {"some": "value"} os_mock.environ = os_environ - function = Function(name="function_name", - runtime="runtime", - memory=1234, - timeout=12, - handler="handler", - codeuri="codeuri", - environment=environment_variable, - rolearn=None, - layers=[]) + function = Function( + name="function_name", + runtime="runtime", + memory=1234, + timeout=12, + handler="handler", + codeuri="codeuri", + environment=environment_variable, + rolearn=None, + layers=[], + ) self.local_lambda.env_vars_values = {} self.local_lambda._make_env_vars(function) - EnvironmentVariablesMock.assert_called_with(function.memory, - function.timeout, - function.handler, - variables=None, - shell_env_values=os_environ, - override_values=None, - aws_creds=self.aws_creds) + EnvironmentVariablesMock.assert_called_with( + function.memory, + function.timeout, + function.handler, + variables=None, + shell_env_values=os_environ, + override_values=None, + aws_creds=self.aws_creds, + ) class TestLocalLambda_get_invoke_config(TestCase): - def setUp(self): self.runtime_mock = Mock() self.function_provider_mock = Mock() @@ -321,15 +310,17 @@ def setUp(self): self.env_vars_values = {} self.aws_region = "region" - self.local_lambda = LocalLambdaRunner(self.runtime_mock, - self.function_provider_mock, - self.cwd, - env_vars_values=self.env_vars_values, - debug_context=self.debug_context) - - @patch('samcli.commands.local.lib.local_lambda.resolve_code_path') - @patch('samcli.commands.local.lib.local_lambda.LocalLambdaRunner.is_debugging') - @patch('samcli.commands.local.lib.local_lambda.FunctionConfig') + self.local_lambda = LocalLambdaRunner( + self.runtime_mock, + self.function_provider_mock, + self.cwd, + env_vars_values=self.env_vars_values, + debug_context=self.debug_context, + ) + + @patch("samcli.commands.local.lib.local_lambda.resolve_code_path") + @patch("samcli.commands.local.lib.local_lambda.LocalLambdaRunner.is_debugging") + @patch("samcli.commands.local.lib.local_lambda.FunctionConfig") def test_must_work(self, FunctionConfigMock, is_debugging_mock, resolve_code_path_patch): is_debugging_mock.return_value = False @@ -340,38 +331,42 @@ def test_must_work(self, FunctionConfigMock, is_debugging_mock, resolve_code_pat codepath = "codepath" resolve_code_path_patch.return_value = codepath - layers = ['layer1', 'layer2'] + layers = ["layer1", "layer2"] - function = Function(name="function_name", - runtime="runtime", - memory=1234, - timeout=12, - handler="handler", - codeuri="codeuri", - environment=None, - rolearn=None, - layers=layers) + function = Function( + name="function_name", + runtime="runtime", + memory=1234, + timeout=12, + handler="handler", + codeuri="codeuri", + environment=None, + rolearn=None, + layers=layers, + ) config = 
"someconfig" FunctionConfigMock.return_value = config actual = self.local_lambda._get_invoke_config(function) - self.assertEquals(actual, config) - - FunctionConfigMock.assert_called_with(name=function.name, - runtime=function.runtime, - handler=function.handler, - code_abs_path=codepath, - layers=layers, - memory=function.memory, - timeout=function.timeout, - env_vars=env_vars) + self.assertEqual(actual, config) + + FunctionConfigMock.assert_called_with( + name=function.name, + runtime=function.runtime, + handler=function.handler, + code_abs_path=codepath, + layers=layers, + memory=function.memory, + timeout=function.timeout, + env_vars=env_vars, + ) resolve_code_path_patch.assert_called_with(self.cwd, function.codeuri) self.local_lambda._make_env_vars.assert_called_with(function) - @patch('samcli.commands.local.lib.local_lambda.resolve_code_path') - @patch('samcli.commands.local.lib.local_lambda.LocalLambdaRunner.is_debugging') - @patch('samcli.commands.local.lib.local_lambda.FunctionConfig') + @patch("samcli.commands.local.lib.local_lambda.resolve_code_path") + @patch("samcli.commands.local.lib.local_lambda.LocalLambdaRunner.is_debugging") + @patch("samcli.commands.local.lib.local_lambda.FunctionConfig") def test_timeout_set_to_max_during_debugging(self, FunctionConfigMock, is_debugging_mock, resolve_code_path_patch): is_debugging_mock.return_value = True @@ -382,36 +377,39 @@ def test_timeout_set_to_max_during_debugging(self, FunctionConfigMock, is_debugg codepath = "codepath" resolve_code_path_patch.return_value = codepath - function = Function(name="function_name", - runtime="runtime", - memory=1234, - timeout=36000, - handler="handler", - codeuri="codeuri", - environment=None, - rolearn=None, - layers=[]) + function = Function( + name="function_name", + runtime="runtime", + memory=1234, + timeout=36000, + handler="handler", + codeuri="codeuri", + environment=None, + rolearn=None, + layers=[], + ) config = "someconfig" FunctionConfigMock.return_value = config actual = self.local_lambda._get_invoke_config(function) - self.assertEquals(actual, config) - - FunctionConfigMock.assert_called_with(name=function.name, - runtime=function.runtime, - handler=function.handler, - code_abs_path=codepath, - layers=[], - memory=function.memory, - timeout=function.timeout, - env_vars=env_vars) + self.assertEqual(actual, config) + + FunctionConfigMock.assert_called_with( + name=function.name, + runtime=function.runtime, + handler=function.handler, + code_abs_path=codepath, + layers=[], + memory=function.memory, + timeout=function.timeout, + env_vars=env_vars, + ) resolve_code_path_patch.assert_called_with(self.cwd, function.codeuri) self.local_lambda._make_env_vars.assert_called_with(function) class TestLocalLambda_invoke(TestCase): - def setUp(self): self.runtime_mock = Mock() self.function_provider_mock = Mock() @@ -421,11 +419,13 @@ def setUp(self): self.aws_region = "region" self.env_vars_values = {} - self.local_lambda = LocalLambdaRunner(self.runtime_mock, - self.function_provider_mock, - self.cwd, - env_vars_values=self.env_vars_values, - debug_context=self.debug_context) + self.local_lambda = LocalLambdaRunner( + self.runtime_mock, + self.function_provider_mock, + self.cwd, + env_vars_values=self.env_vars_values, + debug_context=self.debug_context, + ) def test_must_work(self): name = "name" @@ -441,13 +441,13 @@ def test_must_work(self): self.local_lambda.invoke(name, event, stdout, stderr) - self.runtime_mock.invoke.assert_called_with(invoke_config, event, - debug_context=None, - 
stdout=stdout, stderr=stderr) + self.runtime_mock.invoke.assert_called_with( + invoke_config, event, debug_context=None, stdout=stdout, stderr=stderr + ) def test_must_raise_if_function_not_found(self): function = Mock() - function.name = 'FunctionLogicalId' + function.name = "FunctionLogicalId" self.function_provider_mock.get.return_value = None # function not found self.function_provider_mock.get_all.return_value = [function] @@ -456,7 +456,6 @@ def test_must_raise_if_function_not_found(self): class TestLocalLambda_is_debugging(TestCase): - def setUp(self): self.runtime_mock = Mock() self.function_provider_mock = Mock() @@ -466,21 +465,25 @@ def setUp(self): self.aws_region = "region" self.env_vars_values = {} - self.local_lambda = LocalLambdaRunner(self.runtime_mock, - self.function_provider_mock, - self.cwd, - env_vars_values=self.env_vars_values, - debug_context=self.debug_context) + self.local_lambda = LocalLambdaRunner( + self.runtime_mock, + self.function_provider_mock, + self.cwd, + env_vars_values=self.env_vars_values, + debug_context=self.debug_context, + ) def test_must_be_on(self): self.assertTrue(self.local_lambda.is_debugging()) def test_must_be_off(self): - self.local_lambda = LocalLambdaRunner(self.runtime_mock, - self.function_provider_mock, - self.cwd, - env_vars_values=self.env_vars_values, - debug_context=None) + self.local_lambda = LocalLambdaRunner( + self.runtime_mock, + self.function_provider_mock, + self.cwd, + env_vars_values=self.env_vars_values, + debug_context=None, + ) self.assertFalse(self.local_lambda.is_debugging()) diff --git a/tests/unit/commands/local/lib/test_local_lambda_service.py b/tests/unit/commands/local/lib/test_local_lambda_service.py index bbbd728b34..f4efd8a148 100644 --- a/tests/unit/commands/local/lib/test_local_lambda_service.py +++ b/tests/unit/commands/local/lib/test_local_lambda_service.py @@ -5,7 +5,6 @@ class TestLocalLambdaService(TestCase): - def test_initialization(self): lambda_runner_mock = Mock() stderr_mock = Mock() @@ -14,14 +13,14 @@ def test_initialization(self): lambda_invoke_context_mock.local_lambda_runner = lambda_runner_mock lambda_invoke_context_mock.stderr = stderr_mock - service = LocalLambdaService(lambda_invoke_context=lambda_invoke_context_mock, port=3000, host='localhost') + service = LocalLambdaService(lambda_invoke_context=lambda_invoke_context_mock, port=3000, host="localhost") - self.assertEquals(service.port, 3000) - self.assertEquals(service.host, 'localhost') - self.assertEquals(service.lambda_runner, lambda_runner_mock) - self.assertEquals(service.stderr_stream, stderr_mock) + self.assertEqual(service.port, 3000) + self.assertEqual(service.host, "localhost") + self.assertEqual(service.lambda_runner, lambda_runner_mock) + self.assertEqual(service.stderr_stream, stderr_mock) - @patch('samcli.commands.local.lib.local_lambda_service.LocalLambdaInvokeService') + @patch("samcli.commands.local.lib.local_lambda_service.LocalLambdaInvokeService") def test_start(self, local_lambda_invoke_service_mock): lambda_runner_mock = Mock() stderr_mock = Mock() @@ -33,13 +32,12 @@ def test_start(self, local_lambda_invoke_service_mock): lambda_invoke_context_mock.local_lambda_runner = lambda_runner_mock lambda_invoke_context_mock.stderr = stderr_mock - service = LocalLambdaService(lambda_invoke_context=lambda_invoke_context_mock, port=3000, host='localhost') + service = LocalLambdaService(lambda_invoke_context=lambda_invoke_context_mock, port=3000, host="localhost") service.start() - 
local_lambda_invoke_service_mock.assert_called_once_with(lambda_runner=lambda_runner_mock, - port=3000, - host='localhost', - stderr=stderr_mock) + local_lambda_invoke_service_mock.assert_called_once_with( + lambda_runner=lambda_runner_mock, port=3000, host="localhost", stderr=stderr_mock + ) lambda_context_mock.create.assert_called_once() lambda_context_mock.run.assert_called_once() diff --git a/tests/unit/commands/local/lib/test_provider.py b/tests/unit/commands/local/lib/test_provider.py index 20492868e9..7960e94c0d 100644 --- a/tests/unit/commands/local/lib/test_provider.py +++ b/tests/unit/commands/local/lib/test_provider.py @@ -7,12 +7,13 @@ class TestLayerVersion(TestCase): - - @parameterized.expand([ - ("arn:aws:lambda:region:account-id:layer:layer-name:a"), - ("arn:aws:lambda:region:account-id:layer"), - ("a string without delimiter") - ]) + @parameterized.expand( + [ + ("arn:aws:lambda:region:account-id:layer:layer-name:a"), + ("arn:aws:lambda:region:account-id:layer"), + ("a string without delimiter"), + ] + ) def test_invalid_arn(self, arn): with self.assertRaises(InvalidLayerVersionArn): LayerVersion(arn, None) @@ -20,23 +21,23 @@ def test_invalid_arn(self, arn): def test_layer_version_returned(self): layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None) - self.assertEquals(layer_version.version, 1) + self.assertEqual(layer_version.version, 1) def test_layer_arn_returned(self): layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None) - self.assertEquals(layer_version.layer_arn, "arn:aws:lambda:region:account-id:layer:layer-name") + self.assertEqual(layer_version.layer_arn, "arn:aws:lambda:region:account-id:layer:layer-name") def test_codeuri_is_setable(self): layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None) layer_version.codeuri = "./some_value" - self.assertEquals(layer_version.codeuri, "./some_value") + self.assertEqual(layer_version.codeuri, "./some_value") def test_name_is_computed(self): layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None) - self.assertEquals(layer_version.name, "layer-name-1-8cebcd0539") + self.assertEqual(layer_version.name, "layer-name-1-8cebcd0539") def test_layer_version_is_defined_in_template(self): layer_version = LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", ".") @@ -45,13 +46,7 @@ def test_layer_version_is_defined_in_template(self): def test_layer_version_raises_unsupported_intrinsic(self): intrinsic_arn = { - "Fn::Sub": - [ - "arn:aws:lambda:region:account-id:layer:{layer_name}:1", - { - "layer_name": "layer-name" - } - ] + "Fn::Sub": ["arn:aws:lambda:region:account-id:layer:{layer_name}:1", {"layer_name": "layer-name"}] } with self.assertRaises(UnsupportedIntrinsic): diff --git a/tests/unit/commands/local/lib/test_sam_api_provider.py b/tests/unit/commands/local/lib/test_sam_api_provider.py index 6215196fa8..7530dcb6bf 100644 --- a/tests/unit/commands/local/lib/test_sam_api_provider.py +++ b/tests/unit/commands/local/lib/test_sam_api_provider.py @@ -19,7 +19,7 @@ def test_provider_with_no_resource_properties(self): provider = ApiProvider(template) - self.assertEquals(provider.routes, []) + self.assertEqual(provider.routes, []) @parameterized.expand([("GET"), ("get")]) def test_provider_has_correct_api(self, method): @@ -31,12 +31,7 @@ def test_provider_has_correct_api(self, method): "CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler", - "Events": { - 
"Event1": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": method}, - } - }, + "Events": {"Event1": {"Type": "Api", "Properties": {"Path": "/path", "Method": method}}}, }, } } @@ -44,11 +39,8 @@ def test_provider_has_correct_api(self, method): provider = ApiProvider(template) - self.assertEquals(len(provider.routes), 1) - self.assertEquals( - list(provider.routes)[0], - Route(path="/path", methods=["GET"], function_name="SamFunc1"), - ) + self.assertEqual(len(provider.routes), 1) + self.assertEqual(list(provider.routes)[0], Route(path="/path", methods=["GET"], function_name="SamFunc1")) def test_provider_creates_api_for_all_events(self): template = { @@ -60,14 +52,8 @@ def test_provider_creates_api_for_all_events(self): "Runtime": "nodejs4.3", "Handler": "index.handler", "Events": { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": "GET"}, - }, - "Event2": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": "POST"}, - }, + "Event1": {"Type": "Api", "Properties": {"Path": "/path", "Method": "GET"}}, + "Event2": {"Type": "Api", "Properties": {"Path": "/path", "Method": "POST"}}, }, }, } @@ -79,7 +65,7 @@ def test_provider_creates_api_for_all_events(self): api = Route(path="/path", methods=["GET", "POST"], function_name="SamFunc1") self.assertIn(api, provider.routes) - self.assertEquals(len(provider.routes), 1) + self.assertEqual(len(provider.routes), 1) def test_provider_has_correct_template(self): template = { @@ -90,12 +76,7 @@ def test_provider_has_correct_template(self): "CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler", - "Events": { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": "GET"}, - } - }, + "Events": {"Event1": {"Type": "Api", "Properties": {"Path": "/path", "Method": "GET"}}}, }, }, "SamFunc2": { @@ -104,12 +85,7 @@ def test_provider_has_correct_template(self): "CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler", - "Events": { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": "POST"}, - } - }, + "Events": {"Event1": {"Type": "Api", "Properties": {"Path": "/path", "Method": "POST"}}}, }, }, } @@ -132,12 +108,7 @@ def test_provider_with_no_api_events(self): "CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler", - "Events": { - "Event1": { - "Type": "S3", - "Properties": {"Property1": "value"}, - } - }, + "Events": {"Event1": {"Type": "S3", "Properties": {"Property1": "value"}}}, }, } } @@ -145,25 +116,21 @@ def test_provider_with_no_api_events(self): provider = ApiProvider(template) - self.assertEquals(provider.routes, []) + self.assertEqual(provider.routes, []) def test_provider_with_no_serverless_function(self): template = { "Resources": { "SamFunc1": { "Type": "AWS::Lambda::Function", - "Properties": { - "CodeUri": "/usr/foo/bar", - "Runtime": "nodejs4.3", - "Handler": "index.handler", - }, + "Properties": {"CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler"}, } } } provider = ApiProvider(template) - self.assertEquals(provider.routes, []) + self.assertEqual(provider.routes, []) def test_provider_get_all(self): template = { @@ -174,12 +141,7 @@ def test_provider_get_all(self): "CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler", - "Events": { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": "GET"}, - } - }, + "Events": {"Event1": {"Type": "Api", "Properties": {"Path": "/path", "Method": "GET"}}}, }, }, "SamFunc2": { @@ 
-188,12 +150,7 @@ def test_provider_get_all(self): "CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler", - "Events": { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": "POST"}, - } - }, + "Events": {"Event1": {"Type": "Api", "Properties": {"Path": "/path", "Method": "POST"}}}, }, }, } @@ -217,7 +174,7 @@ def test_provider_get_all_with_no_routes(self): result = [f for f in provider.get_all()] routes = result[0].routes - self.assertEquals(routes, []) + self.assertEqual(routes, []) @parameterized.expand([("ANY"), ("any")]) def test_provider_with_any_method(self, method): @@ -229,12 +186,7 @@ def test_provider_with_any_method(self, method): "CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler", - "Events": { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": method}, - } - }, + "Events": {"Event1": {"Type": "Api", "Properties": {"Path": "/path", "Method": method}}}, }, } } @@ -243,24 +195,16 @@ def test_provider_with_any_method(self, method): provider = ApiProvider(template) api1 = Route( - path="/path", - methods=["GET", "DELETE", "PUT", "POST", "HEAD", "OPTIONS", "PATCH"], - function_name="SamFunc1", + path="/path", methods=["GET", "DELETE", "PUT", "POST", "HEAD", "OPTIONS", "PATCH"], function_name="SamFunc1" ) - self.assertEquals(len(provider.routes), 1) + self.assertEqual(len(provider.routes), 1) self.assertIn(api1, provider.routes) def test_provider_must_support_binary_media_types(self): template = { "Globals": { - "Api": { - "BinaryMediaTypes": [ - "image~1gif", - "image~1png", - "image~1png", # Duplicates must be ignored - ] - } + "Api": {"BinaryMediaTypes": ["image~1gif", "image~1png", "image~1png"]} # Duplicates must be ignored }, "Resources": { "SamFunc1": { @@ -269,12 +213,7 @@ def test_provider_must_support_binary_media_types(self): "CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler", - "Events": { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": "get"}, - } - }, + "Events": {"Event1": {"Type": "Api", "Properties": {"Path": "/path", "Method": "get"}}}, }, } }, @@ -282,22 +221,15 @@ def test_provider_must_support_binary_media_types(self): provider = ApiProvider(template) - self.assertEquals(len(provider.routes), 1) - self.assertEquals( - list(provider.routes)[0], - Route(path="/path", methods=["GET"], function_name="SamFunc1"), - ) + self.assertEqual(len(provider.routes), 1) + self.assertEqual(list(provider.routes)[0], Route(path="/path", methods=["GET"], function_name="SamFunc1")) - assertCountEqual( - self, provider.api.binary_media_types, ["image/gif", "image/png"] - ) - self.assertEquals(provider.api.stage_name, "Prod") + assertCountEqual(self, provider.api.binary_media_types, ["image/gif", "image/png"]) + self.assertEqual(provider.api.stage_name, "Prod") def test_provider_must_support_binary_media_types_with_any_method(self): template = { - "Globals": { - "Api": {"BinaryMediaTypes": ["image~1gif", "image~1png", "text/html"]} - }, + "Globals": {"Api": {"BinaryMediaTypes": ["image~1gif", "image~1png", "text/html"]}}, "Resources": { "SamFunc1": { "Type": "AWS::Serverless::Function", @@ -305,12 +237,7 @@ def test_provider_must_support_binary_media_types_with_any_method(self): "CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler", - "Events": { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/path", "Method": "any"}, - } - }, + "Events": {"Event1": {"Type": "Api", "Properties": {"Path": "/path", 
"Method": "any"}}}, }, } }, @@ -343,28 +270,18 @@ def setUp(self): ] def test_with_no_routes(self): - template = { - "Resources": { - "Api1": { - "Type": "AWS::Serverless::Api", - "Properties": {"StageName": "Prod"}, - } - } - } + template = {"Resources": {"Api1": {"Type": "AWS::Serverless::Api", "Properties": {"StageName": "Prod"}}}} provider = ApiProvider(template) - self.assertEquals(provider.routes, []) + self.assertEqual(provider.routes, []) def test_with_inline_swagger_routes(self): template = { "Resources": { "Api1": { "Type": "AWS::Serverless::Api", - "Properties": { - "StageName": "Prod", - "DefinitionBody": make_swagger(self.input_routes), - }, + "Properties": {"StageName": "Prod", "DefinitionBody": make_swagger(self.input_routes)}, } } } @@ -401,25 +318,17 @@ def test_with_swagger_as_both_body_and_uri_called(self, SwaggerReaderMock): "Resources": { "Api1": { "Type": "AWS::Serverless::Api", - "Properties": { - "StageName": "Prod", - "DefinitionUri": filename, - "DefinitionBody": body, - }, + "Properties": {"StageName": "Prod", "DefinitionUri": filename, "DefinitionBody": body}, } } } - SwaggerReaderMock.return_value.read.return_value = make_swagger( - self.input_routes - ) + SwaggerReaderMock.return_value.read.return_value = make_swagger(self.input_routes) cwd = "foo" provider = ApiProvider(template, cwd=cwd) assertCountEqual(self, self.input_routes, provider.routes) - SwaggerReaderMock.assert_called_with( - definition_body=body, definition_uri=filename, working_dir=cwd - ) + SwaggerReaderMock.assert_called_with(definition_body=body, definition_uri=filename, working_dir=cwd) def test_swagger_with_any_method(self): routes = [Route(path="/path", methods=["any"], function_name="SamFunc1")] @@ -436,10 +345,7 @@ def test_swagger_with_any_method(self): "Resources": { "Api1": { "Type": "AWS::Serverless::Api", - "Properties": { - "StageName": "Prod", - "DefinitionBody": make_swagger(routes), - }, + "Properties": {"StageName": "Prod", "DefinitionBody": make_swagger(routes)}, } } } @@ -454,9 +360,7 @@ def test_with_binary_media_types(self): "Type": "AWS::Serverless::Api", "Properties": { "StageName": "Prod", - "DefinitionBody": make_swagger( - self.input_routes, binary_media_types=self.binary_types - ), + "DefinitionBody": make_swagger(self.input_routes, binary_media_types=self.binary_types), }, } } @@ -474,9 +378,7 @@ def test_with_binary_media_types(self): assertCountEqual(self, provider.api.binary_media_types, expected_binary_types) def test_with_binary_media_types_in_swagger_and_on_resource(self): - input_routes = [ - Route(path="/path", methods=["OPTIONS"], function_name="SamFunc1") - ] + input_routes = [Route(path="/path", methods=["OPTIONS"], function_name="SamFunc1")] extra_binary_types = ["text/html"] template = { @@ -486,18 +388,14 @@ def test_with_binary_media_types_in_swagger_and_on_resource(self): "Properties": { "BinaryMediaTypes": extra_binary_types, "StageName": "Prod", - "DefinitionBody": make_swagger( - input_routes, binary_media_types=self.binary_types - ), + "DefinitionBody": make_swagger(input_routes, binary_media_types=self.binary_types), }, } } } expected_binary_types = sorted(self.binary_types + extra_binary_types) - expected_routes = [ - Route(path="/path", methods=["OPTIONS"], function_name="SamFunc1") - ] + expected_routes = [Route(path="/path", methods=["OPTIONS"], function_name="SamFunc1")] provider = ApiProvider(template) assertCountEqual(self, expected_routes, provider.routes) @@ -517,40 +415,22 @@ def setUp(self): self.template = { "Resources": { - "Api1": { 
- "Type": "AWS::Serverless::Api", - "Properties": {"StageName": "Prod"}, - }, + "Api1": {"Type": "AWS::Serverless::Api", "Properties": {"StageName": "Prod"}}, "ImplicitFunc": { "Type": "AWS::Serverless::Function", - "Properties": { - "CodeUri": "/usr/foo/bar", - "Runtime": "nodejs4.3", - "Handler": "index.handler", - }, + "Properties": {"CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler"}, }, } } def test_must_union_implicit_and_explicit(self): events = { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/path1", "Method": "POST"}, - }, - "Event2": { - "Type": "Api", - "Properties": {"Path": "/path2", "Method": "POST"}, - }, - "Event3": { - "Type": "Api", - "Properties": {"Path": "/path3", "Method": "POST"}, - }, + "Event1": {"Type": "Api", "Properties": {"Path": "/path1", "Method": "POST"}}, + "Event2": {"Type": "Api", "Properties": {"Path": "/path2", "Method": "POST"}}, + "Event3": {"Type": "Api", "Properties": {"Path": "/path3", "Method": "POST"}}, } - self.template["Resources"]["Api1"]["Properties"][ - "DefinitionBody" - ] = self.swagger + self.template["Resources"]["Api1"]["Properties"]["DefinitionBody"] = self.swagger self.template["Resources"]["ImplicitFunc"]["Properties"]["Events"] = events expected_routes = [ @@ -577,18 +457,11 @@ def test_must_prefer_implicit_api_over_explicit(self): "Method": "get", }, }, - "Event2": { - "Type": "Api", - "Properties": {"Path": "/path2", "Method": "POST"}, - }, + "Event2": {"Type": "Api", "Properties": {"Path": "/path2", "Method": "POST"}}, } - self.template["Resources"]["Api1"]["Properties"][ - "DefinitionBody" - ] = self.swagger - self.template["Resources"]["ImplicitFunc"]["Properties"][ - "Events" - ] = implicit_routes + self.template["Resources"]["Api1"]["Properties"]["DefinitionBody"] = self.swagger + self.template["Resources"]["ImplicitFunc"]["Properties"]["Events"] = implicit_routes expected_routes = [ Route(path="/path1", methods=["GET"], function_name="ImplicitFunc"), @@ -620,12 +493,8 @@ def test_must_prefer_implicit_with_any_method(self): Route(path="/path", methods=["DELETE"], function_name="explicitfunction"), ] - self.template["Resources"]["Api1"]["Properties"][ - "DefinitionBody" - ] = make_swagger(explicit_routes) - self.template["Resources"]["ImplicitFunc"]["Properties"][ - "Events" - ] = implicit_routes + self.template["Resources"]["Api1"]["Properties"]["DefinitionBody"] = make_swagger(explicit_routes) + self.template["Resources"]["ImplicitFunc"]["Properties"]["Events"] = implicit_routes expected_routes = [ Route( @@ -664,12 +533,8 @@ def test_with_any_method_on_both(self): Route(path="/path2", methods=["POST"], function_name="explicitfunction"), ] - self.template["Resources"]["Api1"]["Properties"][ - "DefinitionBody" - ] = make_swagger(explicit_routes) - self.template["Resources"]["ImplicitFunc"]["Properties"][ - "Events" - ] = implicit_routes + self.template["Resources"]["Api1"]["Properties"]["DefinitionBody"] = make_swagger(explicit_routes) + self.template["Resources"]["ImplicitFunc"]["Properties"]["Events"] = implicit_routes expected_routes = [ Route( @@ -699,16 +564,12 @@ def test_must_add_explicit_api_when_ref_with_rest_api_id(self): "Properties": { "Path": "/newpath2", "Method": "POST", - "RestApiId": { - "Ref": "Api1" - }, # This path must get added to this API + "RestApiId": {"Ref": "Api1"}, # This path must get added to this API }, }, } - self.template["Resources"]["Api1"]["Properties"][ - "DefinitionBody" - ] = self.swagger + 
self.template["Resources"]["Api1"]["Properties"]["DefinitionBody"] = self.swagger self.template["Resources"]["ImplicitFunc"]["Properties"]["Events"] = events expected_routes = [ @@ -726,38 +587,20 @@ def test_must_add_explicit_api_when_ref_with_rest_api_id(self): def test_both_routes_must_get_binary_media_types(self): events = { - "Event1": { - "Type": "Api", - "Properties": {"Path": "/newpath1", "Method": "POST"}, - }, - "Event2": { - "Type": "Api", - "Properties": {"Path": "/newpath2", "Method": "POST"}, - }, + "Event1": {"Type": "Api", "Properties": {"Path": "/newpath1", "Method": "POST"}}, + "Event2": {"Type": "Api", "Properties": {"Path": "/newpath2", "Method": "POST"}}, } # Binary type for implicit - self.template["Globals"] = { - "Api": {"BinaryMediaTypes": ["image~1gif", "image~1png"]} - } + self.template["Globals"] = {"Api": {"BinaryMediaTypes": ["image~1gif", "image~1png"]}} self.template["Resources"]["ImplicitFunc"]["Properties"]["Events"] = events - self.template["Resources"]["Api1"]["Properties"][ - "DefinitionBody" - ] = self.swagger + self.template["Resources"]["Api1"]["Properties"]["DefinitionBody"] = self.swagger # Binary type for explicit - self.template["Resources"]["Api1"]["Properties"]["BinaryMediaTypes"] = [ - "explicit/type1", - "explicit/type2", - ] + self.template["Resources"]["Api1"]["Properties"]["BinaryMediaTypes"] = ["explicit/type1", "explicit/type2"] # Because of Globals, binary types will be concatenated on the explicit API - expected_explicit_binary_types = [ - "explicit/type1", - "explicit/type2", - "image/gif", - "image/png", - ] + expected_explicit_binary_types = ["explicit/type1", "explicit/type2", "image/gif", "image/png"] expected_routes = [ # From Explicit APIs @@ -771,48 +614,27 @@ def test_both_routes_must_get_binary_media_types(self): provider = ApiProvider(self.template) assertCountEqual(self, expected_routes, provider.routes) - assertCountEqual( - self, provider.api.binary_media_types, expected_explicit_binary_types - ) + assertCountEqual(self, provider.api.binary_media_types, expected_explicit_binary_types) def test_binary_media_types_with_rest_api_id_reference(self): events = { "Event1": { "Type": "Api", - "Properties": { - "Path": "/connected-to-explicit-path", - "Method": "POST", - "RestApiId": "Api1", - }, - }, - "Event2": { - "Type": "Api", - "Properties": {"Path": "/true-implicit-path", "Method": "POST"}, + "Properties": {"Path": "/connected-to-explicit-path", "Method": "POST", "RestApiId": "Api1"}, }, + "Event2": {"Type": "Api", "Properties": {"Path": "/true-implicit-path", "Method": "POST"}}, } # Binary type for implicit - self.template["Globals"] = { - "Api": {"BinaryMediaTypes": ["image~1gif", "image~1png"]} - } + self.template["Globals"] = {"Api": {"BinaryMediaTypes": ["image~1gif", "image~1png"]}} self.template["Resources"]["ImplicitFunc"]["Properties"]["Events"] = events - self.template["Resources"]["Api1"]["Properties"][ - "DefinitionBody" - ] = self.swagger + self.template["Resources"]["Api1"]["Properties"]["DefinitionBody"] = self.swagger # Binary type for explicit - self.template["Resources"]["Api1"]["Properties"]["BinaryMediaTypes"] = [ - "explicit/type1", - "explicit/type2", - ] + self.template["Resources"]["Api1"]["Properties"]["BinaryMediaTypes"] = ["explicit/type1", "explicit/type2"] # Because of Globals, binary types will be concatenated on the explicit API - expected_explicit_binary_types = [ - "explicit/type1", - "explicit/type2", - "image/gif", - "image/png", - ] + expected_explicit_binary_types = ["explicit/type1", 
"explicit/type2", "image/gif", "image/png"] # expected_implicit_binary_types = ["image/gif", "image/png"] expected_routes = [ @@ -822,24 +644,14 @@ def test_binary_media_types_with_rest_api_id_reference(self): Route(path="/path3", methods=["GET"], function_name="explicitfunction"), # Because of the RestApiId, Implicit APIs will also get the binary media types inherited from # the corresponding Explicit API - Route( - path="/connected-to-explicit-path", - methods=["POST"], - function_name="ImplicitFunc", - ), + Route(path="/connected-to-explicit-path", methods=["POST"], function_name="ImplicitFunc"), # This is still just a true implicit API because it does not have RestApiId property - Route( - path="/true-implicit-path", - methods=["POST"], - function_name="ImplicitFunc", - ), + Route(path="/true-implicit-path", methods=["POST"], function_name="ImplicitFunc"), ] provider = ApiProvider(self.template) assertCountEqual(self, expected_routes, provider.routes) - assertCountEqual( - self, provider.api.binary_media_types, expected_explicit_binary_types - ) + assertCountEqual(self, provider.api.binary_media_types, expected_explicit_binary_types) class TestSamStageValues(TestCase): @@ -872,13 +684,11 @@ def test_provider_parse_stage_name(self): } } provider = ApiProvider(template) - route1 = Route( - path="/path", methods=["GET"], function_name="NoApiEventFunction" - ) + route1 = Route(path="/path", methods=["GET"], function_name="NoApiEventFunction") self.assertIn(route1, provider.routes) - self.assertEquals(provider.api.stage_name, "dev") - self.assertEquals(provider.api.stage_variables, None) + self.assertEqual(provider.api.stage_name, "dev") + self.assertEqual(provider.api.stage_variables, None) def test_provider_stage_variables(self): template = { @@ -910,16 +720,11 @@ def test_provider_stage_variables(self): } } provider = ApiProvider(template) - route1 = Route( - path="/path", methods=["GET"], function_name="NoApiEventFunction" - ) + route1 = Route(path="/path", methods=["GET"], function_name="NoApiEventFunction") self.assertIn(route1, provider.routes) - self.assertEquals(provider.api.stage_name, "dev") - self.assertEquals( - provider.api.stage_variables, - {"vis": "data", "random": "test", "foo": "bar"}, - ) + self.assertEqual(provider.api.stage_name, "dev") + self.assertEqual(provider.api.stage_variables, {"vis": "data", "random": "test", "foo": "bar"}) def test_multi_stage_get_all(self): template = OrderedDict({"Resources": {}}) @@ -991,25 +796,16 @@ def test_multi_stage_get_all(self): result = [f for f in provider.get_all()] routes = result[0].routes - route1 = Route( - path="/path2", methods=["GET"], function_name="NoApiEventFunction" - ) - route2 = Route( - path="/path", methods=["GET"], function_name="NoApiEventFunction" - ) - route3 = Route( - path="/anotherpath", methods=["POST"], function_name="NoApiEventFunction" - ) - self.assertEquals(len(routes), 3) + route1 = Route(path="/path2", methods=["GET"], function_name="NoApiEventFunction") + route2 = Route(path="/path", methods=["GET"], function_name="NoApiEventFunction") + route3 = Route(path="/anotherpath", methods=["POST"], function_name="NoApiEventFunction") + self.assertEqual(len(routes), 3) self.assertIn(route1, routes) self.assertIn(route2, routes) self.assertIn(route3, routes) - self.assertEquals(provider.api.stage_name, "Production") - self.assertEquals( - provider.api.stage_variables, - {"vis": "prod data", "random": "test", "foo": "bar"}, - ) + self.assertEqual(provider.api.stage_name, "Production") + 
self.assertEqual(provider.api.stage_variables, {"vis": "prod data", "random": "test", "foo": "bar"}) class TestSamCors(TestCase): @@ -1020,7 +816,7 @@ def test_provider_parse_cors_string(self): "Type": "AWS::Serverless::Api", "Properties": { "StageName": "Prod", - "Cors": "*", + "Cors": "'*'", "DefinitionBody": { "paths": { "/path2": { @@ -1059,23 +855,15 @@ def test_provider_parse_cors_string(self): routes = provider.routes cors = Cors( allow_origin="*", - allow_methods=",".join( - sorted(["GET", "DELETE", "PUT", "POST", "HEAD", "OPTIONS", "PATCH"]) - ), - ) - route1 = Route( - path="/path2", - methods=["POST", "OPTIONS"], - function_name="NoApiEventFunction", - ) - route2 = Route( - path="/path", methods=["GET", "OPTIONS"], function_name="NoApiEventFunction" + allow_methods=",".join(sorted(["GET", "DELETE", "PUT", "POST", "HEAD", "OPTIONS", "PATCH"])), ) + route1 = Route(path="/path2", methods=["POST", "OPTIONS"], function_name="NoApiEventFunction") + route2 = Route(path="/path", methods=["GET", "OPTIONS"], function_name="NoApiEventFunction") - self.assertEquals(len(routes), 2) + self.assertEqual(len(routes), 2) self.assertIn(route1, routes) self.assertIn(route2, routes) - self.assertEquals(provider.api.cors, cors) + self.assertEqual(provider.api.cors, cors) def test_provider_parse_cors_dict(self): template = { @@ -1085,10 +873,10 @@ def test_provider_parse_cors_dict(self): "Properties": { "StageName": "Prod", "Cors": { - "AllowMethods": "POST, GET", - "AllowOrigin": "*", - "AllowHeaders": "Upgrade-Insecure-Requests", - "MaxAge": 600, + "AllowMethods": "'POST, GET'", + "AllowOrigin": "'*'", + "AllowHeaders": "'Upgrade-Insecure-Requests'", + "MaxAge": "'600'", }, "DefinitionBody": { "paths": { @@ -1130,23 +918,15 @@ def test_provider_parse_cors_dict(self): allow_origin="*", allow_methods=",".join(sorted(["POST", "GET", "OPTIONS"])), allow_headers="Upgrade-Insecure-Requests", - max_age=600, - ) - route1 = Route( - path="/path2", - methods=["POST", "OPTIONS"], - function_name="NoApiEventFunction", - ) - route2 = Route( - path="/path", - methods=["POST", "OPTIONS"], - function_name="NoApiEventFunction", + max_age="600", ) + route1 = Route(path="/path2", methods=["POST", "OPTIONS"], function_name="NoApiEventFunction") + route2 = Route(path="/path", methods=["POST", "OPTIONS"], function_name="NoApiEventFunction") - self.assertEquals(len(routes), 2) + self.assertEqual(len(routes), 2) self.assertIn(route1, routes) self.assertIn(route2, routes) - self.assertEquals(provider.api.cors, cors) + self.assertEqual(provider.api.cors, cors) def test_provider_parse_cors_dict_star_allow(self): template = { @@ -1156,10 +936,10 @@ def test_provider_parse_cors_dict_star_allow(self): "Properties": { "StageName": "Prod", "Cors": { - "AllowMethods": "*", - "AllowOrigin": "*", - "AllowHeaders": "Upgrade-Insecure-Requests", - "MaxAge": 600, + "AllowMethods": "'*'", + "AllowOrigin": "'*'", + "AllowHeaders": "'Upgrade-Insecure-Requests'", + "MaxAge": "'600'", }, "DefinitionBody": { "paths": { @@ -1201,25 +981,17 @@ def test_provider_parse_cors_dict_star_allow(self): allow_origin="*", allow_methods=",".join(sorted(Route.ANY_HTTP_METHODS)), allow_headers="Upgrade-Insecure-Requests", - max_age=600, - ) - route1 = Route( - path="/path2", - methods=["POST", "OPTIONS"], - function_name="NoApiEventFunction", - ) - route2 = Route( - path="/path", - methods=["POST", "OPTIONS"], - function_name="NoApiEventFunction", + max_age="600", ) + route1 = Route(path="/path2", methods=["POST", "OPTIONS"], 
function_name="NoApiEventFunction") + route2 = Route(path="/path", methods=["POST", "OPTIONS"], function_name="NoApiEventFunction") - self.assertEquals(len(routes), 2) + self.assertEqual(len(routes), 2) self.assertIn(route1, routes) self.assertIn(route2, routes) - self.assertEquals(provider.api.cors, cors) + self.assertEqual(provider.api.cors, cors) - def test_invalid_cors_dict_allow_methods(self): + def test_raises_error_when_cors_allowmethods_not_single_quoted(self): template = { "Resources": { "TestApi": { @@ -1228,9 +1000,104 @@ def test_invalid_cors_dict_allow_methods(self): "StageName": "Prod", "Cors": { "AllowMethods": "GET, INVALID_METHOD", - "AllowOrigin": "*", - "AllowHeaders": "Upgrade-Insecure-Requests", - "MaxAge": 600, + "AllowOrigin": "'*'", + "AllowHeaders": "'Upgrade-Insecure-Requests'", + "MaxAge": "'600'", + }, + "DefinitionBody": { + "paths": { + "/path2": { + "post": { + "x-amazon-apigateway-integration": { + "type": "aws_proxy", + "uri": { + "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" + "/functions/${NoApiEventFunction.Arn}/invocations" + }, + "responses": {}, + } + } + }, + "/path": { + "post": { + "x-amazon-apigateway-integration": { + "type": "aws_proxy", + "uri": { + "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" + "/functions/${NoApiEventFunction.Arn}/invocations" + }, + "responses": {}, + } + } + }, + } + }, + }, + } + } + } + with self.assertRaises( + InvalidSamDocumentException, msg="ApiProvider should fail for Invalid Cors AllowMethods not single quoted" + ): + ApiProvider(template) + + def test_raises_error_when_cors_value_not_single_quoted(self): + template = { + "Resources": { + "TestApi": { + "Type": "AWS::Serverless::Api", + "Properties": { + "StageName": "Prod", + "Cors": "example.com", + "DefinitionBody": { + "paths": { + "/path2": { + "post": { + "x-amazon-apigateway-integration": { + "type": "aws_proxy", + "uri": { + "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" + "/functions/${NoApiEventFunction.Arn}/invocations" + }, + "responses": {}, + } + } + }, + "/path": { + "post": { + "x-amazon-apigateway-integration": { + "type": "aws_proxy", + "uri": { + "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31" + "/functions/${NoApiEventFunction.Arn}/invocations" + }, + "responses": {}, + } + } + }, + } + }, + }, + } + } + } + with self.assertRaises( + InvalidSamDocumentException, msg="ApiProvider should fail for Invalid Cors value not single quoted" + ): + ApiProvider(template) + + def test_invalid_cors_dict_allow_methods(self): + template = { + "Resources": { + "TestApi": { + "Type": "AWS::Serverless::Api", + "Properties": { + "StageName": "Prod", + "Cors": { + "AllowMethods": "'GET, INVALID_METHOD'", + "AllowOrigin": "'*'", + "AllowHeaders": "'Upgrade-Insecure-Requests'", + "MaxAge": "'600'", }, "DefinitionBody": { "paths": { @@ -1265,8 +1132,7 @@ def test_invalid_cors_dict_allow_methods(self): } } with self.assertRaises( - InvalidSamDocumentException, - msg="ApiProvider should fail for Invalid Cors Allow method", + InvalidSamDocumentException, msg="ApiProvider should fail for Invalid Cors Allow method" ): ApiProvider(template) @@ -1277,7 +1143,7 @@ def test_default_cors_dict_prop(self): "Type": "AWS::Serverless::Api", "Properties": { "StageName": "Prod", - "Cors": {"AllowOrigin": "www.domain.com"}, + "Cors": {"AllowOrigin": "'www.domain.com'"}, "DefinitionBody": { "paths": { "/path2": { @@ -1303,28 +1169,21 @@ def test_default_cors_dict_prop(self): provider = 
ApiProvider(template) routes = provider.routes - cors = Cors( - allow_origin="www.domain.com", - allow_methods=",".join(sorted(Route.ANY_HTTP_METHODS)), - ) - route1 = Route( - path="/path2", - methods=["GET", "OPTIONS"], - function_name="NoApiEventFunction", - ) - self.assertEquals(len(routes), 1) + cors = Cors(allow_origin="www.domain.com", allow_methods=",".join(sorted(Route.ANY_HTTP_METHODS))) + route1 = Route(path="/path2", methods=["GET", "OPTIONS"], function_name="NoApiEventFunction") + self.assertEqual(len(routes), 1) self.assertIn(route1, routes) - self.assertEquals(provider.api.cors, cors) + self.assertEqual(provider.api.cors, cors) def test_global_cors(self): template = { "Globals": { "Api": { "Cors": { - "AllowMethods": "GET", - "AllowOrigin": "*", - "AllowHeaders": "Upgrade-Insecure-Requests", - "MaxAge": 600, + "AllowMethods": "'GET'", + "AllowOrigin": "'*'", + "AllowHeaders": "'Upgrade-Insecure-Requests'", + "MaxAge": "'600'", } } }, @@ -1373,21 +1232,15 @@ def test_global_cors(self): allow_origin="*", allow_headers="Upgrade-Insecure-Requests", allow_methods=",".join(["GET", "OPTIONS"]), - max_age=600, - ) - route1 = Route( - path="/path2", - methods=["GET", "OPTIONS"], - function_name="NoApiEventFunction", - ) - route2 = Route( - path="/path", methods=["GET", "OPTIONS"], function_name="NoApiEventFunction" + max_age="600", ) + route1 = Route(path="/path2", methods=["GET", "OPTIONS"], function_name="NoApiEventFunction") + route2 = Route(path="/path", methods=["GET", "OPTIONS"], function_name="NoApiEventFunction") - self.assertEquals(len(routes), 2) + self.assertEqual(len(routes), 2) self.assertIn(route1, routes) self.assertIn(route2, routes) - self.assertEquals(provider.api.cors, cors) + self.assertEqual(provider.api.cors, cors) def make_swagger(routes, binary_media_types=None): @@ -1414,9 +1267,7 @@ def make_swagger(routes, binary_media_types=None): "x-amazon-apigateway-integration": { "type": "aws_proxy", "uri": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1" - ":123456789012:function:{}/invocations".format( - api.function_name - ), # NOQA + ":123456789012:function:{}/invocations".format(api.function_name), # NOQA } } for method in api.methods: diff --git a/tests/unit/commands/local/lib/test_sam_base_provider.py b/tests/unit/commands/local/lib/test_sam_base_provider.py index b516437948..0eebe68522 100644 --- a/tests/unit/commands/local/lib/test_sam_base_provider.py +++ b/tests/unit/commands/local/lib/test_sam_base_provider.py @@ -9,10 +9,7 @@ class TestSamBaseProvider_get_template(TestCase): @patch("samcli.commands.local.lib.sam_base_provider.SamTranslatorWrapper") @patch.object(IntrinsicResolver, "resolve_template") def test_must_run_translator_plugins( - self, - resolve_template_mock, - SamTranslatorWrapperMock, - resource_metadata_normalizer_patch, + self, resolve_template_mock, SamTranslatorWrapperMock, resource_metadata_normalizer_patch ): resource_metadata_normalizer_patch.normalize.return_value = True resolve_template_mock.return_value = {} diff --git a/tests/unit/commands/local/lib/test_sam_function_provider.py b/tests/unit/commands/local/lib/test_sam_function_provider.py index c2ac653dc0..6973356e0d 100644 --- a/tests/unit/commands/local/lib/test_sam_function_provider.py +++ b/tests/unit/commands/local/lib/test_sam_function_provider.py @@ -15,14 +15,9 @@ class TestSamFunctionProviderEndToEnd(TestCase): TEMPLATE = { "Resources": { - "SamFunc1": { "Type": "AWS::Serverless::Function", - "Properties": { - "CodeUri": 
"/usr/foo/bar", - "Runtime": "nodejs4.3", - "Handler": "index.handler" - } + "Properties": {"CodeUri": "/usr/foo/bar", "Runtime": "nodejs4.3", "Handler": "index.handler"}, }, "SamFunc2": { "Type": "AWS::Serverless::Function", @@ -30,47 +25,34 @@ class TestSamFunctionProviderEndToEnd(TestCase): # CodeUri is unsupported S3 location "CodeUri": "s3://bucket/key", "Runtime": "nodejs4.3", - "Handler": "index.handler" - } + "Handler": "index.handler", + }, }, "SamFunc3": { "Type": "AWS::Serverless::Function", "Properties": { # CodeUri is unsupported S3 location - "CodeUri": { - "Bucket": "bucket", - "Key": "key" - }, + "CodeUri": {"Bucket": "bucket", "Key": "key"}, "Runtime": "nodejs4.3", - "Handler": "index.handler" - } + "Handler": "index.handler", + }, }, "LambdaFunc1": { "Type": "AWS::Lambda::Function", "Properties": { - "Code": { - "S3Bucket": "bucket", - "S3Key": "key" - }, + "Code": {"S3Bucket": "bucket", "S3Key": "key"}, "Runtime": "nodejs4.3", - "Handler": "index.handler" - } + "Handler": "index.handler", + }, }, "LambdaFuncWithLocalPath": { "Type": "AWS::Lambda::Function", - "Properties": { - "Code": "./some/path/to/code", - "Runtime": "nodejs4.3", - "Handler": "index.handler" - } + "Properties": {"Code": "./some/path/to/code", "Runtime": "nodejs4.3", "Handler": "index.handler"}, }, "OtherResource": { "Type": "AWS::Serverless::Api", - "Properties": { - "StageName": "prod", - "DefinitionUri": "s3://bucket/key" - } - } + "Properties": {"StageName": "prod", "DefinitionUri": "s3://bucket/key"}, + }, } } @@ -80,78 +62,94 @@ def setUp(self): self.parameter_overrides = {} self.provider = SamFunctionProvider(self.TEMPLATE, parameter_overrides=self.parameter_overrides) - @parameterized.expand([ - ("SamFunc1", Function( - name="SamFunc1", - runtime="nodejs4.3", - handler="index.handler", - codeuri="/usr/foo/bar", - memory=None, - timeout=None, - environment=None, - rolearn=None, - layers=[] - )), - ("SamFunc2", Function( - name="SamFunc2", - runtime="nodejs4.3", - handler="index.handler", - codeuri=".", - memory=None, - timeout=None, - environment=None, - rolearn=None, - layers=[] - )), - ("SamFunc3", Function( - name="SamFunc3", - runtime="nodejs4.3", - handler="index.handler", - codeuri=".", - memory=None, - timeout=None, - environment=None, - rolearn=None, - layers=[] - )), - ("LambdaFunc1", Function( - name="LambdaFunc1", - runtime="nodejs4.3", - handler="index.handler", - codeuri=".", - memory=None, - timeout=None, - environment=None, - rolearn=None, - layers=[] - )), - ("LambdaFuncWithLocalPath", Function( - name="LambdaFuncWithLocalPath", - runtime="nodejs4.3", - handler="index.handler", - codeuri="./some/path/to/code", - memory=None, - timeout=None, - environment=None, - rolearn=None, - layers=[] - )) - ]) + @parameterized.expand( + [ + ( + "SamFunc1", + Function( + name="SamFunc1", + runtime="nodejs4.3", + handler="index.handler", + codeuri="/usr/foo/bar", + memory=None, + timeout=None, + environment=None, + rolearn=None, + layers=[], + ), + ), + ( + "SamFunc2", + Function( + name="SamFunc2", + runtime="nodejs4.3", + handler="index.handler", + codeuri=".", + memory=None, + timeout=None, + environment=None, + rolearn=None, + layers=[], + ), + ), + ( + "SamFunc3", + Function( + name="SamFunc3", + runtime="nodejs4.3", + handler="index.handler", + codeuri=".", + memory=None, + timeout=None, + environment=None, + rolearn=None, + layers=[], + ), + ), + ( + "LambdaFunc1", + Function( + name="LambdaFunc1", + runtime="nodejs4.3", + handler="index.handler", + codeuri=".", + memory=None, + 
timeout=None, + environment=None, + rolearn=None, + layers=[], + ), + ), + ( + "LambdaFuncWithLocalPath", + Function( + name="LambdaFuncWithLocalPath", + runtime="nodejs4.3", + handler="index.handler", + codeuri="./some/path/to/code", + memory=None, + timeout=None, + environment=None, + rolearn=None, + layers=[], + ), + ), + ] + ) def test_get_must_return_each_function(self, name, expected_output): actual = self.provider.get(name) - self.assertEquals(actual, expected_output) + self.assertEqual(actual, expected_output) def test_get_all_must_return_all_functions(self): result = {f.name for f in self.provider.get_all()} expected = {"SamFunc1", "SamFunc2", "SamFunc3", "LambdaFunc1", "LambdaFuncWithLocalPath"} - self.assertEquals(result, expected) + self.assertEqual(result, expected) class TestSamFunctionProvider_init(TestCase): - def setUp(self): self.parameter_overrides = {} @@ -167,7 +165,7 @@ def test_must_extract_functions(self, SamBaseProviderMock, extract_mock): extract_mock.assert_called_with({"a": "b"}) SamBaseProviderMock.get_template.assert_called_with(template, self.parameter_overrides) - self.assertEquals(provider.functions, extract_result) + self.assertEqual(provider.functions, extract_result) @patch.object(SamFunctionProvider, "_extract_functions") @patch("samcli.commands.local.lib.sam_function_provider.SamBaseProvider") @@ -180,31 +178,23 @@ def test_must_default_to_empty_resources(self, SamBaseProviderMock, extract_mock provider = SamFunctionProvider(template, parameter_overrides=self.parameter_overrides) extract_mock.assert_called_with({}) # Empty Resources value must be passed - self.assertEquals(provider.functions, extract_result) - self.assertEquals(provider.resources, {}) + self.assertEqual(provider.functions, extract_result) + self.assertEqual(provider.resources, {}) class TestSamFunctionProvider_extract_functions(TestCase): - @patch.object(SamFunctionProvider, "_convert_sam_function_resource") def test_must_work_for_sam_function(self, convert_mock): convertion_result = "some result" convert_mock.return_value = convertion_result - resources = { - "Func1": { - "Type": "AWS::Serverless::Function", - "Properties": {"a": "b"} - } - } + resources = {"Func1": {"Type": "AWS::Serverless::Function", "Properties": {"a": "b"}}} - expected = { - "Func1": "some result" - } + expected = {"Func1": "some result"} result = SamFunctionProvider._extract_functions(resources) - self.assertEquals(expected, result) - convert_mock.assert_called_with('Func1', {"a": "b"}, []) + self.assertEqual(expected, result) + convert_mock.assert_called_with("Func1", {"a": "b"}, []) @patch.object(SamFunctionProvider, "_convert_sam_function_resource") def test_must_work_with_no_properties(self, convert_mock): @@ -218,50 +208,35 @@ def test_must_work_with_no_properties(self, convert_mock): } } - expected = { - "Func1": "some result" - } + expected = {"Func1": "some result"} result = SamFunctionProvider._extract_functions(resources) - self.assertEquals(expected, result) - convert_mock.assert_called_with('Func1', {}, []) + self.assertEqual(expected, result) + convert_mock.assert_called_with("Func1", {}, []) @patch.object(SamFunctionProvider, "_convert_lambda_function_resource") def test_must_work_for_lambda_function(self, convert_mock): convertion_result = "some result" convert_mock.return_value = convertion_result - resources = { - "Func1": { - "Type": "AWS::Lambda::Function", - "Properties": {"a": "b"} - } - } + resources = {"Func1": {"Type": "AWS::Lambda::Function", "Properties": {"a": "b"}}} - expected = { - 
"Func1": "some result" - } + expected = {"Func1": "some result"} result = SamFunctionProvider._extract_functions(resources) - self.assertEquals(expected, result) - convert_mock.assert_called_with('Func1', {"a": "b"}, []) + self.assertEqual(expected, result) + convert_mock.assert_called_with("Func1", {"a": "b"}, []) def test_must_skip_unknown_resource(self): - resources = { - "Func1": { - "Type": "AWS::SomeOther::Function", - "Properties": {"a": "b"} - } - } + resources = {"Func1": {"Type": "AWS::SomeOther::Function", "Properties": {"a": "b"}}} expected = {} result = SamFunctionProvider._extract_functions(resources) - self.assertEquals(expected, result) + self.assertEqual(expected, result) class TestSamFunctionProvider_convert_sam_function_resource(TestCase): - def test_must_convert(self): name = "myname" @@ -273,7 +248,7 @@ def test_must_convert(self): "Handler": "myhandler", "Environment": "myenvironment", "Role": "myrole", - "Layers": ["Layer1", "Layer2"] + "Layers": ["Layer1", "Layer2"], } expected = Function( @@ -285,19 +260,17 @@ def test_must_convert(self): codeuri="/usr/local", environment="myenvironment", rolearn="myrole", - layers=["Layer1", "Layer2"] + layers=["Layer1", "Layer2"], ) result = SamFunctionProvider._convert_sam_function_resource(name, properties, ["Layer1", "Layer2"]) - self.assertEquals(expected, result) + self.assertEqual(expected, result) def test_must_skip_non_existent_properties(self): name = "myname" - properties = { - "CodeUri": "/usr/local" - } + properties = {"CodeUri": "/usr/local"} expected = Function( name="myname", @@ -308,22 +281,20 @@ def test_must_skip_non_existent_properties(self): codeuri="/usr/local", environment=None, rolearn=None, - layers=[] + layers=[], ) result = SamFunctionProvider._convert_sam_function_resource(name, properties, []) - self.assertEquals(expected, result) + self.assertEqual(expected, result) def test_must_default_missing_code_uri(self): name = "myname" - properties = { - "Runtime": "myruntime" - } + properties = {"Runtime": "myruntime"} result = SamFunctionProvider._convert_sam_function_resource(name, properties, []) - self.assertEquals(result.codeuri, ".") # Default value + self.assertEqual(result.codeuri, ".") # Default value def test_must_handle_code_dict(self): @@ -336,35 +307,30 @@ def test_must_handle_code_dict(self): } result = SamFunctionProvider._convert_sam_function_resource(name, properties, []) - self.assertEquals(result.codeuri, ".") # Default value + self.assertEqual(result.codeuri, ".") # Default value def test_must_handle_code_s3_uri(self): name = "myname" - properties = { - "CodeUri": "s3://bucket/key" - } + properties = {"CodeUri": "s3://bucket/key"} result = SamFunctionProvider._convert_sam_function_resource(name, properties, []) - self.assertEquals(result.codeuri, ".") # Default value + self.assertEqual(result.codeuri, ".") # Default value class TestSamFunctionProvider_convert_lambda_function_resource(TestCase): - def test_must_convert(self): name = "myname" properties = { - "Code": { - "Bucket": "bucket" - }, + "Code": {"Bucket": "bucket"}, "Runtime": "myruntime", "MemorySize": "mymemorysize", "Timeout": "mytimeout", "Handler": "myhandler", "Environment": "myenvironment", "Role": "myrole", - "Layers": ["Layer1", "Layer2"] + "Layers": ["Layer1", "Layer2"], } expected = Function( @@ -376,21 +342,17 @@ def test_must_convert(self): codeuri=".", environment="myenvironment", rolearn="myrole", - layers=["Layer1", "Layer2"] + layers=["Layer1", "Layer2"], ) result = 
SamFunctionProvider._convert_lambda_function_resource(name, properties, ["Layer1", "Layer2"]) - self.assertEquals(expected, result) + self.assertEqual(expected, result) def test_must_skip_non_existent_properties(self): name = "myname" - properties = { - "Code": { - "Bucket": "bucket" - } - } + properties = {"Code": {"Bucket": "bucket"}} expected = Function( name="myname", @@ -401,39 +363,33 @@ def test_must_skip_non_existent_properties(self): codeuri=".", environment=None, rolearn=None, - layers=[] + layers=[], ) result = SamFunctionProvider._convert_lambda_function_resource(name, properties, []) - self.assertEquals(expected, result) + self.assertEqual(expected, result) class TestSamFunctionProvider_parse_layer_info(TestCase): - - @parameterized.expand([ - ({ - "Function": { - "Type": "AWS::Serverless::Function", - "Properties": { - } - } - }, {"Ref": "Function"}), - ({}, {"Ref": "LayerDoesNotExist"}) - ]) + @parameterized.expand( + [ + ({"Function": {"Type": "AWS::Serverless::Function", "Properties": {}}}, {"Ref": "Function"}), + ({}, {"Ref": "LayerDoesNotExist"}), + ] + ) def test_raise_on_invalid_layer_resource(self, resources, layer_reference): with self.assertRaises(InvalidLayerReference): SamFunctionProvider._parse_layer_info([layer_reference], resources) - @parameterized.expand([ - ({ - "Function": { - "Type": "AWS::Serverless::Function", - "Properties": { - } - } - }, "arn:aws:lambda:::awslayer:AmazonLinux1703") - ]) + @parameterized.expand( + [ + ( + {"Function": {"Type": "AWS::Serverless::Function", "Properties": {}}}, + "arn:aws:lambda:::awslayer:AmazonLinux1703", + ) + ] + ) def test_raise_on_AmazonLinux1703_layer_provided(self, resources, layer_reference): with self.assertRaises(InvalidLayerVersionArn): SamFunctionProvider._parse_layer_info([layer_reference], resources) @@ -441,62 +397,50 @@ def test_raise_on_AmazonLinux1703_layer_provided(self, resources, layer_referenc def test_must_ignore_opt_in_AmazonLinux1803_layer(self): resources = {} - list_of_layers = ["arn:aws:lambda:region:account-id:layer:layer-name:1", - "arn:aws:lambda:::awslayer:AmazonLinux1803"] + list_of_layers = [ + "arn:aws:lambda:region:account-id:layer:layer-name:1", + "arn:aws:lambda:::awslayer:AmazonLinux1803", + ] actual = SamFunctionProvider._parse_layer_info(list_of_layers, resources) - for (actual_layer, expected_layer) in zip(actual, [LayerVersion( - "arn:aws:lambda:region:account-id:layer:layer-name:1", - None)]): - self.assertEquals(actual_layer, expected_layer) + for (actual_layer, expected_layer) in zip( + actual, [LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None)] + ): + self.assertEqual(actual_layer, expected_layer) def test_layers_created_from_template_resources(self): resources = { - "Layer": { - "Type": "AWS::Lambda::LayerVersion", - "Properties": { - "Content": { - "Bucket": "bucket" - } - } - }, - "ServerlessLayer": { - "Type": "AWS::Serverless::LayerVersion", - "Properties": { - "ContentUri": "/somepath" - } - } + "Layer": {"Type": "AWS::Lambda::LayerVersion", "Properties": {"Content": {"Bucket": "bucket"}}}, + "ServerlessLayer": {"Type": "AWS::Serverless::LayerVersion", "Properties": {"ContentUri": "/somepath"}}, } - list_of_layers = [{"Ref": "Layer"}, - {"Ref": "ServerlessLayer"}, - "arn:aws:lambda:region:account-id:layer:layer-name:1", - {"NonRef": "Something"}] + list_of_layers = [ + {"Ref": "Layer"}, + {"Ref": "ServerlessLayer"}, + "arn:aws:lambda:region:account-id:layer:layer-name:1", + {"NonRef": "Something"}, + ] actual = 
SamFunctionProvider._parse_layer_info(list_of_layers, resources) - for (actual_layer, expected_layer) in zip(actual, [LayerVersion("Layer", "."), - LayerVersion("ServerlessLayer", "/somepath"), - LayerVersion( - "arn:aws:lambda:region:account-id:layer:layer-name:1", - None)]): - self.assertEquals(actual_layer, expected_layer) + for (actual_layer, expected_layer) in zip( + actual, + [ + LayerVersion("Layer", "."), + LayerVersion("ServerlessLayer", "/somepath"), + LayerVersion("arn:aws:lambda:region:account-id:layer:layer-name:1", None), + ], + ): + self.assertEqual(actual_layer, expected_layer) def test_return_empty_list_on_no_layers(self): - resources = { - "Function": { - "Type": "AWS::Serverless::Function", - "Properties": { - } - } - } + resources = {"Function": {"Type": "AWS::Serverless::Function", "Properties": {}}} actual = SamFunctionProvider._parse_layer_info([], resources) - self.assertEquals(actual, []) + self.assertEqual(actual, []) class TestSamFunctionProvider_get(TestCase): - def test_raise_on_invalid_name(self): provider = SamFunctionProvider({}) @@ -507,7 +451,7 @@ def test_must_return_function_value(self): provider = SamFunctionProvider({}) provider.functions = {"func1": "value"} # Cheat a bit here by setting the value of this property directly - self.assertEquals("value", provider.get("func1")) + self.assertEqual("value", provider.get("func1")) def test_return_none_if_function_not_found(self): provider = SamFunctionProvider({}) @@ -516,9 +460,8 @@ def test_return_none_if_function_not_found(self): class TestSamFunctionProvider_get_all(TestCase): - def test_must_work_with_no_functions(self): provider = SamFunctionProvider({}) result = [f for f in provider.get_all()] - self.assertEquals(result, []) + self.assertEqual(result, []) diff --git a/tests/unit/commands/local/start_api/test_cli.py b/tests/unit/commands/local/start_api/test_cli.py index 7b09b1ed1f..c697092975 100644 --- a/tests/unit/commands/local/start_api/test_cli.py +++ b/tests/unit/commands/local/start_api/test_cli.py @@ -16,7 +16,6 @@ class TestCli(TestCase): - def setUp(self): self.template = "template" self.env_vars = "env-vars" @@ -43,8 +42,7 @@ def setUp(self): @patch("samcli.commands.local.start_api.cli.InvokeContext") @patch("samcli.commands.local.start_api.cli.LocalApiService") - def test_cli_must_setup_context_and_start_service(self, local_api_service_mock, - invoke_context_mock): + def test_cli_must_setup_context_and_start_service(self, local_api_service_mock, invoke_context_mock): # Mock the __enter__ method to return a object inside a context manager context_mock = Mock() invoke_context_mock.return_value.__enter__.return_value = context_mock @@ -54,26 +52,27 @@ def test_cli_must_setup_context_and_start_service(self, local_api_service_mock, self.call_cli() - invoke_context_mock.assert_called_with(template_file=self.template, - function_identifier=None, - env_vars_file=self.env_vars, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build, - aws_region=self.region_name, - aws_profile=self.profile) - - local_api_service_mock.assert_called_with(lambda_invoke_context=context_mock, - port=self.port, - host=self.host, - static_dir=self.static_dir) + 
invoke_context_mock.assert_called_with( + template_file=self.template, + function_identifier=None, + env_vars_file=self.env_vars, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + aws_region=self.region_name, + aws_profile=self.profile, + ) + + local_api_service_mock.assert_called_with( + lambda_invoke_context=context_mock, port=self.port, host=self.host, static_dir=self.static_dir + ) service_mock.start.assert_called_with() @@ -94,18 +93,22 @@ def test_must_raise_if_no_api_defined(self, local_api_service_mock, invoke_conte msg = str(context.exception) expected = "Template does not have any APIs connected to Lambda functions" - self.assertEquals(msg, expected) - - @parameterized.expand([(InvalidSamDocumentException("bad template"), "bad template"), - (InvalidLayerReference(), "Layer References need to be of type " - "'AWS::Serverless::LayerVersion' or 'AWS::Lambda::LayerVersion'"), - (DebuggingNotSupported("Debugging not supported"), "Debugging not supported") - ]) + self.assertEqual(msg, expected) + + @parameterized.expand( + [ + (InvalidSamDocumentException("bad template"), "bad template"), + ( + InvalidLayerReference(), + "Layer References need to be of type " "'AWS::Serverless::LayerVersion' or 'AWS::Lambda::LayerVersion'", + ), + (DebuggingNotSupported("Debugging not supported"), "Debugging not supported"), + ] + ) @patch("samcli.commands.local.start_api.cli.InvokeContext") - def test_must_raise_user_exception_on_invalid_sam_template(self, - exeception_to_raise, - execption_message, - invoke_context_mock): + def test_must_raise_user_exception_on_invalid_sam_template( + self, exeception_to_raise, execption_message, invoke_context_mock + ): invoke_context_mock.side_effect = exeception_to_raise @@ -114,7 +117,7 @@ def test_must_raise_user_exception_on_invalid_sam_template(self, msg = str(context.exception) expected = execption_message - self.assertEquals(msg, expected) + self.assertEqual(msg, expected) @patch("samcli.commands.local.start_api.cli.InvokeContext") def test_must_raise_user_exception_on_invalid_env_vars(self, invoke_context_mock): @@ -125,22 +128,24 @@ def test_must_raise_user_exception_on_invalid_env_vars(self, invoke_context_mock msg = str(context.exception) expected = "bad env vars" - self.assertEquals(msg, expected) + self.assertEqual(msg, expected) def call_cli(self): - start_api_cli(ctx=self.ctx_mock, - host=self.host, - port=self.port, - static_dir=self.static_dir, - template=self.template, - env_vars=self.env_vars, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build) + start_api_cli( + ctx=self.ctx_mock, + host=self.host, + port=self.port, + static_dir=self.static_dir, + template=self.template, + env_vars=self.env_vars, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + docker_volume_basedir=self.docker_volume_basedir, + 
docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + ) diff --git a/tests/unit/commands/local/start_lambda/test_cli.py b/tests/unit/commands/local/start_lambda/test_cli.py index dfc02cc3c0..54f6496775 100644 --- a/tests/unit/commands/local/start_lambda/test_cli.py +++ b/tests/unit/commands/local/start_lambda/test_cli.py @@ -12,7 +12,6 @@ class TestCli(TestCase): - def setUp(self): self.template = "template" self.env_vars = "env-vars" @@ -38,8 +37,7 @@ def setUp(self): @patch("samcli.commands.local.start_lambda.cli.InvokeContext") @patch("samcli.commands.local.start_lambda.cli.LocalLambdaService") - def test_cli_must_setup_context_and_start_service(self, local_lambda_service_mock, - invoke_context_mock): + def test_cli_must_setup_context_and_start_service(self, local_lambda_service_mock, invoke_context_mock): # Mock the __enter__ method to return a object inside a context manager context_mock = Mock() invoke_context_mock.return_value.__enter__.return_value = context_mock @@ -49,39 +47,42 @@ def test_cli_must_setup_context_and_start_service(self, local_lambda_service_moc self.call_cli() - invoke_context_mock.assert_called_with(template_file=self.template, - function_identifier=None, - env_vars_file=self.env_vars, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build, - aws_region=self.region_name, - aws_profile=self.profile) - - local_lambda_service_mock.assert_called_with(lambda_invoke_context=context_mock, - port=self.port, - host=self.host) + invoke_context_mock.assert_called_with( + template_file=self.template, + function_identifier=None, + env_vars_file=self.env_vars, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + aws_region=self.region_name, + aws_profile=self.profile, + ) + + local_lambda_service_mock.assert_called_with(lambda_invoke_context=context_mock, port=self.port, host=self.host) service_mock.start.assert_called_with() - @parameterized.expand([(InvalidSamDocumentException("bad template"), "bad template"), - (InvalidLayerReference(), "Layer References need to be of type " - "'AWS::Serverless::LayerVersion' or 'AWS::Lambda::LayerVersion'"), - (DebuggingNotSupported("Debugging not supported"), "Debugging not supported") - ]) + @parameterized.expand( + [ + (InvalidSamDocumentException("bad template"), "bad template"), + ( + InvalidLayerReference(), + "Layer References need to be of type " "'AWS::Serverless::LayerVersion' or 'AWS::Lambda::LayerVersion'", + ), + (DebuggingNotSupported("Debugging not supported"), "Debugging not supported"), + ] + ) @patch("samcli.commands.local.start_lambda.cli.InvokeContext") - def test_must_raise_user_exception_on_invalid_sam_template(self, - exeception_to_raise, - execption_message, - 
invoke_context_mock - ): + def test_must_raise_user_exception_on_invalid_sam_template( + self, exeception_to_raise, execption_message, invoke_context_mock + ): invoke_context_mock.side_effect = exeception_to_raise with self.assertRaises(UserException) as context: @@ -89,7 +90,7 @@ def test_must_raise_user_exception_on_invalid_sam_template(self, msg = str(context.exception) expected = execption_message - self.assertEquals(msg, expected) + self.assertEqual(msg, expected) @patch("samcli.commands.local.start_lambda.cli.InvokeContext") def test_must_raise_user_exception_on_invalid_env_vars(self, invoke_context_mock): @@ -100,21 +101,23 @@ def test_must_raise_user_exception_on_invalid_env_vars(self, invoke_context_mock msg = str(context.exception) expected = "bad env vars" - self.assertEquals(msg, expected) + self.assertEqual(msg, expected) def call_cli(self): - start_lambda_cli(ctx=self.ctx_mock, - host=self.host, - port=self.port, - template=self.template, - env_vars=self.env_vars, - debug_port=self.debug_port, - debug_args=self.debug_args, - debugger_path=self.debugger_path, - docker_volume_basedir=self.docker_volume_basedir, - docker_network=self.docker_network, - log_file=self.log_file, - skip_pull_image=self.skip_pull_image, - parameter_overrides=self.parameter_overrides, - layer_cache_basedir=self.layer_cache_basedir, - force_image_build=self.force_image_build) + start_lambda_cli( + ctx=self.ctx_mock, + host=self.host, + port=self.port, + template=self.template, + env_vars=self.env_vars, + debug_port=self.debug_port, + debug_args=self.debug_args, + debugger_path=self.debugger_path, + docker_volume_basedir=self.docker_volume_basedir, + docker_network=self.docker_network, + log_file=self.log_file, + skip_pull_image=self.skip_pull_image, + parameter_overrides=self.parameter_overrides, + layer_cache_basedir=self.layer_cache_basedir, + force_image_build=self.force_image_build, + ) diff --git a/tests/unit/commands/logs/test_command.py b/tests/unit/commands/logs/test_command.py index 2ea7c3bf10..aacd8c313b 100644 --- a/tests/unit/commands/logs/test_command.py +++ b/tests/unit/commands/logs/test_command.py @@ -5,7 +5,6 @@ class TestLogsCliCommand(TestCase): - def setUp(self): self.function_name = "name" @@ -27,20 +26,23 @@ def test_without_tail(self, LogsCommandContextMock, click_mock): context_mock.fetcher.fetch.return_value = events_iterable context_mock.formatter.do_format.return_value = formatted_events - do_cli(self.function_name, self.stack_name, self.filter_pattern, tailing, - self.start_time, self.end_time) + do_cli(self.function_name, self.stack_name, self.filter_pattern, tailing, self.start_time, self.end_time) - LogsCommandContextMock.assert_called_with(self.function_name, - stack_name=self.stack_name, - filter_pattern=self.filter_pattern, - start_time=self.start_time, - end_time=self.end_time, - output_file=None) + LogsCommandContextMock.assert_called_with( + self.function_name, + stack_name=self.stack_name, + filter_pattern=self.filter_pattern, + start_time=self.start_time, + end_time=self.end_time, + output_file=None, + ) - context_mock.fetcher.fetch.assert_called_with(context_mock.log_group_name, - filter_pattern=context_mock.filter_pattern, - start=context_mock.start_time, - end=context_mock.end_time) + context_mock.fetcher.fetch.assert_called_with( + context_mock.log_group_name, + filter_pattern=context_mock.filter_pattern, + start=context_mock.start_time, + end=context_mock.end_time, + ) context_mock.formatter.do_format.assert_called_with(events_iterable) 
click_mock.echo.assert_has_calls([call(v, nl=False) for v in formatted_events]) @@ -58,19 +60,20 @@ def test_with_tailing(self, LogsCommandContextMock, click_mock): context_mock.fetcher.tail.return_value = events_iterable context_mock.formatter.do_format.return_value = formatted_events - do_cli(self.function_name, self.stack_name, self.filter_pattern, tailing, - self.start_time, self.end_time) + do_cli(self.function_name, self.stack_name, self.filter_pattern, tailing, self.start_time, self.end_time) - LogsCommandContextMock.assert_called_with(self.function_name, - stack_name=self.stack_name, - filter_pattern=self.filter_pattern, - start_time=self.start_time, - end_time=self.end_time, - output_file=None) + LogsCommandContextMock.assert_called_with( + self.function_name, + stack_name=self.stack_name, + filter_pattern=self.filter_pattern, + start_time=self.start_time, + end_time=self.end_time, + output_file=None, + ) - context_mock.fetcher.tail.assert_called_with(context_mock.log_group_name, - filter_pattern=context_mock.filter_pattern, - start=context_mock.start_time) + context_mock.fetcher.tail.assert_called_with( + context_mock.log_group_name, filter_pattern=context_mock.filter_pattern, start=context_mock.start_time + ) context_mock.formatter.do_format.assert_called_with(events_iterable) click_mock.echo.assert_has_calls([call(v, nl=False) for v in formatted_events]) diff --git a/tests/unit/commands/logs/test_logs_context.py b/tests/unit/commands/logs/test_logs_context.py index 738a205156..630a1413dd 100644 --- a/tests/unit/commands/logs/test_logs_context.py +++ b/tests/unit/commands/logs/test_logs_context.py @@ -1,4 +1,3 @@ - import botocore.session from botocore.stub import Stubber @@ -10,7 +9,6 @@ class TestLogsCommandContext(TestCase): - def setUp(self): self.function_name = "name" self.stack_name = "stack name" @@ -19,12 +17,14 @@ def setUp(self): self.end_time = "end" self.output_file = "somefile" - self.context = LogsCommandContext(self.function_name, - stack_name=self.stack_name, - filter_pattern=self.filter_pattern, - start_time=self.start_time, - end_time=self.end_time, - output_file=self.output_file) + self.context = LogsCommandContext( + self.function_name, + stack_name=self.stack_name, + filter_pattern=self.filter_pattern, + start_time=self.start_time, + end_time=self.end_time, + output_file=self.output_file, + ) def test_basic_properties(self): self.assertEqual(self.context.filter_pattern, self.filter_pattern) @@ -34,20 +34,14 @@ def test_basic_properties(self): def test_fetcher_property(self, LogsFetcherMock): LogsFetcherMock.return_value = Mock() - self.assertEqual( - self.context.fetcher, - LogsFetcherMock.return_value - ) + self.assertEqual(self.context.fetcher, LogsFetcherMock.return_value) LogsFetcherMock.assert_called_with(self.context._logs_client) @patch("samcli.commands.logs.logs_context.Colored") def test_colored_property(self, ColoredMock): ColoredMock.return_value = Mock() - self.assertEqual( - self.context.colored, - ColoredMock.return_value - ) + self.assertEqual(self.context.colored, ColoredMock.return_value) ColoredMock.assert_called_with(colorize=False) @patch("samcli.commands.logs.logs_context.Colored") @@ -55,17 +49,16 @@ def test_colored_property_without_output_file(self, ColoredMock): ColoredMock.return_value = Mock() # No output file. It means we are printing to Terminal. 
Hence set the color - ctx = LogsCommandContext(self.function_name, - stack_name=self.stack_name, - filter_pattern=self.filter_pattern, - start_time=self.start_time, - end_time=self.end_time, - output_file=None) - - self.assertEqual( - ctx.colored, - ColoredMock.return_value + ctx = LogsCommandContext( + self.function_name, + stack_name=self.stack_name, + filter_pattern=self.filter_pattern, + start_time=self.start_time, + end_time=self.end_time, + output_file=None, ) + + self.assertEqual(ctx.colored, ColoredMock.return_value) ColoredMock.assert_called_with(colorize=True) # Must enable colors @patch("samcli.commands.logs.logs_context.LogsFormatter") @@ -74,12 +67,8 @@ def test_formatter_property(self, ColoredMock, LogsFormatterMock): LogsFormatterMock.return_value = Mock() ColoredMock.return_value = Mock() - self.assertEqual( - self.context.formatter, - LogsFormatterMock.return_value - ) - LogsFormatterMock.assert_called_with(ColoredMock.return_value, - ANY) + self.assertEqual(self.context.formatter, LogsFormatterMock.return_value) + LogsFormatterMock.assert_called_with(ColoredMock.return_value, ANY) @patch("samcli.commands.logs.logs_context.LogGroupProvider") @patch.object(LogsCommandContext, "_get_resource_id_from_stack") @@ -90,10 +79,7 @@ def test_log_group_name_property_with_stack_name(self, get_resource_id_mock, Log LogGroupProviderMock.for_lambda_function.return_value = group get_resource_id_mock.return_value = logical_id - self.assertEqual( - self.context.log_group_name, - group - ) + self.assertEqual(self.context.log_group_name, group) LogGroupProviderMock.for_lambda_function.assert_called_with(logical_id) get_resource_id_mock.assert_called_with(ANY, self.stack_name, self.function_name) @@ -105,18 +91,17 @@ def test_log_group_name_property_without_stack_name(self, get_resource_id_mock, LogGroupProviderMock.for_lambda_function.return_value = group - ctx = LogsCommandContext(self.function_name, - stack_name=None, # No Stack Name - filter_pattern=self.filter_pattern, - start_time=self.start_time, - end_time=self.end_time, - output_file=self.output_file) - - self.assertEqual( - ctx.log_group_name, - group + ctx = LogsCommandContext( + self.function_name, + stack_name=None, # No Stack Name + filter_pattern=self.filter_pattern, + start_time=self.start_time, + end_time=self.end_time, + output_file=self.output_file, ) + self.assertEqual(ctx.log_group_name, group) + LogGroupProviderMock.for_lambda_function.assert_called_with(self.function_name) get_resource_id_mock.assert_not_called() @@ -124,16 +109,16 @@ def test_start_time_property(self): self.context._parse_time = Mock() self.context._parse_time.return_value = "foo" - self.assertEquals(self.context.start_time, "foo") + self.assertEqual(self.context.start_time, "foo") def test_end_time_property(self): self.context._parse_time = Mock() self.context._parse_time.return_value = "foo" - self.assertEquals(self.context.end_time, "foo") + self.assertEqual(self.context.end_time, "foo") - @patch('samcli.commands.logs.logs_context.parse_date') - @patch('samcli.commands.logs.logs_context.to_utc') + @patch("samcli.commands.logs.logs_context.parse_date") + @patch("samcli.commands.logs.logs_context.to_utc") def test_parse_time(self, to_utc_mock, parse_date_mock): input = "some time" parsed_result = "parsed" @@ -142,12 +127,12 @@ def test_parse_time(self, to_utc_mock, parse_date_mock): to_utc_mock.return_value = expected actual = LogsCommandContext._parse_time(input, "some prop") - self.assertEquals(actual, expected) + self.assertEqual(actual, expected) 
parse_date_mock.assert_called_with(input) to_utc_mock.assert_called_with(parsed_result) - @patch('samcli.commands.logs.logs_context.parse_date') + @patch("samcli.commands.logs.logs_context.parse_date") def test_parse_time_raises_exception(self, parse_date_mock): input = "some time" parsed_result = None @@ -156,8 +141,7 @@ def test_parse_time_raises_exception(self, parse_date_mock): with self.assertRaises(UserException) as ctx: LogsCommandContext._parse_time(input, "some prop") - self.assertEquals(str(ctx.exception), - "Unable to parse the time provided by 'some prop'") + self.assertEqual(str(ctx.exception), "Unable to parse the time provided by 'some prop'") def test_parse_time_empty_time(self): result = LogsCommandContext._parse_time(None, "some prop") @@ -169,7 +153,7 @@ def test_setup_output_file(self, open_mock): open_mock.return_value = "handle" result = LogsCommandContext._setup_output_file(self.output_file) - self.assertEquals(result, "handle") + self.assertEqual(result, "handle") open_mock.assert_called_with(self.output_file, "wb") def test_setup_output_file_without_file(self): @@ -180,13 +164,15 @@ def test_context_manager_with_output_file(self, setup_output_file_mock): handle = Mock() setup_output_file_mock.return_value = handle - with LogsCommandContext(self.function_name, - stack_name=self.stack_name, - filter_pattern=self.filter_pattern, - start_time=self.start_time, - end_time=self.end_time, - output_file=self.output_file) as context: - self.assertEquals(context._output_file_handle, handle) + with LogsCommandContext( + self.function_name, + stack_name=self.stack_name, + filter_pattern=self.filter_pattern, + start_time=self.start_time, + end_time=self.end_time, + output_file=self.output_file, + ) as context: + self.assertEqual(context._output_file_handle, handle) # Context should be reset self.assertIsNone(self.context._output_file_handle) @@ -197,23 +183,24 @@ def test_context_manager_with_output_file(self, setup_output_file_mock): def test_context_manager_no_output_file(self, setup_output_file_mock): setup_output_file_mock.return_value = None - with LogsCommandContext(self.function_name, - stack_name=self.stack_name, - filter_pattern=self.filter_pattern, - start_time=self.start_time, - end_time=self.end_time, - output_file=None) as context: - self.assertEquals(context._output_file_handle, None) + with LogsCommandContext( + self.function_name, + stack_name=self.stack_name, + filter_pattern=self.filter_pattern, + start_time=self.start_time, + end_time=self.end_time, + output_file=None, + ) as context: + self.assertEqual(context._output_file_handle, None) # Context should be reset setup_output_file_mock.assert_called_with(None) class TestLogsCommandContext_get_resource_id_from_stack(TestCase): - def setUp(self): - self.real_client = botocore.session.get_session().create_client('cloudformation', region_name="us-east-1") + self.real_client = botocore.session.get_session().create_client("cloudformation", region_name="us-east-1") self.cfn_client_stubber = Stubber(self.real_client) self.logical_id = "name" @@ -222,10 +209,7 @@ def setUp(self): def test_must_get_from_cfn(self): - expected_params = { - "StackName": self.stack_name, - "LogicalResourceId": self.logical_id - } + expected_params = {"StackName": self.stack_name, "LogicalResourceId": self.logical_id} mock_response = { "StackResourceDetail": { @@ -233,33 +217,30 @@ def test_must_get_from_cfn(self): "LogicalResourceId": self.logical_id, "ResourceType": "AWS::Lambda::Function", "ResourceStatus": "UPDATE_COMPLETE", - 
"LastUpdatedTimestamp": "2017-07-28T23:34:13.435Z" + "LastUpdatedTimestamp": "2017-07-28T23:34:13.435Z", } } self.cfn_client_stubber.add_response("describe_stack_resource", mock_response, expected_params) with self.cfn_client_stubber: - result = LogsCommandContext._get_resource_id_from_stack(self.real_client, - self.stack_name, - self.logical_id) + result = LogsCommandContext._get_resource_id_from_stack(self.real_client, self.stack_name, self.logical_id) - self.assertEquals(result, self.physical_id) + self.assertEqual(result, self.physical_id) def test_must_handle_resource_not_found(self): errmsg = "Something went wrong" errcode = "SomeException" - self.cfn_client_stubber.add_client_error("describe_stack_resource", - service_error_code=errcode, - service_message=errmsg) + self.cfn_client_stubber.add_client_error( + "describe_stack_resource", service_error_code=errcode, service_message=errmsg + ) expected_error_msg = "An error occurred ({}) when calling the DescribeStackResource operation: {}".format( - errcode, errmsg) + errcode, errmsg + ) with self.cfn_client_stubber: with self.assertRaises(UserException) as context: - LogsCommandContext._get_resource_id_from_stack(self.real_client, - self.stack_name, - self.logical_id) + LogsCommandContext._get_resource_id_from_stack(self.real_client, self.stack_name, self.logical_id) - self.assertEquals(expected_error_msg, str(context.exception)) + self.assertEqual(expected_error_msg, str(context.exception)) diff --git a/tests/unit/commands/publish/test_command.py b/tests/unit/commands/publish/test_command.py index f0f82c1c4d..892e49c848 100644 --- a/tests/unit/commands/publish/test_command.py +++ b/tests/unit/commands/publish/test_command.py @@ -12,17 +12,18 @@ class TestCli(TestCase): - def setUp(self): self.template = "./template" self.application_id = "arn:aws:serverlessrepo:us-east-1:123456789012:applications/hello" self.ctx_mock = Mock() self.ctx_mock.region = "us-east-1" - self.console_link = "Click the link below to view your application in AWS console:\n" \ + self.console_link = ( + "Click the link below to view your application in AWS console:\n" "https://console.aws.amazon.com/serverlessrepo/home?region={}#/published-applications/{}" + ) - @patch('samcli.commands.publish.command.get_template_data') - @patch('samcli.commands.publish.command.click') + @patch("samcli.commands.publish.command.get_template_data") + @patch("samcli.commands.publish.command.click") def test_must_raise_if_value_error(self, click_mock, get_template_data_mock): get_template_data_mock.side_effect = ValueError("Template not found") with self.assertRaises(UserException) as context: @@ -32,9 +33,9 @@ def test_must_raise_if_value_error(self, click_mock, get_template_data_mock): self.assertEqual("Template not found", message) click_mock.secho.assert_called_with("Publish Failed", fg="red") - @patch('samcli.commands.publish.command.get_template_data', Mock(return_value={})) - @patch('samcli.commands.publish.command.publish_application') - @patch('samcli.commands.publish.command.click') + @patch("samcli.commands.publish.command.get_template_data", Mock(return_value={})) + @patch("samcli.commands.publish.command.publish_application") + @patch("samcli.commands.publish.command.click") def test_must_raise_if_serverlessrepo_error(self, click_mock, publish_application_mock): publish_application_mock.side_effect = ServerlessRepoError() with self.assertRaises(UserException): @@ -42,9 +43,9 @@ def test_must_raise_if_serverlessrepo_error(self, click_mock, publish_applicatio 
click_mock.secho.assert_called_with("Publish Failed", fg="red") - @patch('samcli.commands.publish.command.get_template_data', Mock(return_value={})) - @patch('samcli.commands.publish.command.publish_application') - @patch('samcli.commands.publish.command.click') + @patch("samcli.commands.publish.command.get_template_data", Mock(return_value={})) + @patch("samcli.commands.publish.command.publish_application") + @patch("samcli.commands.publish.command.click") def test_must_raise_if_invalid_S3_uri_error(self, click_mock, publish_application_mock): publish_application_mock.side_effect = InvalidS3UriError(message="") with self.assertRaises(UserException) as context: @@ -54,63 +55,60 @@ def test_must_raise_if_invalid_S3_uri_error(self, click_mock, publish_applicatio self.assertTrue("Your SAM template contains invalid S3 URIs" in message) click_mock.secho.assert_called_with("Publish Failed", fg="red") - @patch('samcli.commands.publish.command.get_template_data', Mock(return_value={})) - @patch('samcli.commands.publish.command.publish_application') - @patch('samcli.commands.publish.command.click') + @patch("samcli.commands.publish.command.get_template_data", Mock(return_value={})) + @patch("samcli.commands.publish.command.publish_application") + @patch("samcli.commands.publish.command.click") def test_must_succeed_to_create_application(self, click_mock, publish_application_mock): publish_application_mock.return_value = { - 'application_id': self.application_id, - 'details': {'attr1': 'value1'}, - 'actions': [CREATE_APPLICATION] + "application_id": self.application_id, + "details": {"attr1": "value1"}, + "actions": [CREATE_APPLICATION], } publish_cli(self.ctx_mock, self.template, None) - details_str = json.dumps({'attr1': 'value1'}, indent=2) + details_str = json.dumps({"attr1": "value1"}, indent=2) expected_msg = "Created new application with the following metadata:\n{}" - expected_link = self.console_link.format( - self.ctx_mock.region, - self.application_id.replace('/', '~') + expected_link = self.console_link.format(self.ctx_mock.region, self.application_id.replace("/", "~")) + click_mock.secho.assert_has_calls( + [ + call("Publish Succeeded", fg="green"), + call(expected_msg.format(details_str)), + call(expected_link, fg="yellow"), + ] ) - click_mock.secho.assert_has_calls([ - call("Publish Succeeded", fg="green"), - call(expected_msg.format(details_str)), - call(expected_link, fg="yellow") - ]) - - @patch('samcli.commands.publish.command.get_template_data', Mock(return_value={})) - @patch('samcli.commands.publish.command.publish_application') - @patch('samcli.commands.publish.command.click') + + @patch("samcli.commands.publish.command.get_template_data", Mock(return_value={})) + @patch("samcli.commands.publish.command.publish_application") + @patch("samcli.commands.publish.command.click") def test_must_succeed_to_update_application(self, click_mock, publish_application_mock): publish_application_mock.return_value = { - 'application_id': self.application_id, - 'details': {'attr1': 'value1'}, - 'actions': [UPDATE_APPLICATION] + "application_id": self.application_id, + "details": {"attr1": "value1"}, + "actions": [UPDATE_APPLICATION], } publish_cli(self.ctx_mock, self.template, None) - details_str = json.dumps({'attr1': 'value1'}, indent=2) + details_str = json.dumps({"attr1": "value1"}, indent=2) expected_msg = 'The following metadata of application "{}" has been updated:\n{}' - expected_link = self.console_link.format( - self.ctx_mock.region, - self.application_id.replace('/', '~') + 
expected_link = self.console_link.format(self.ctx_mock.region, self.application_id.replace("/", "~")) + click_mock.secho.assert_has_calls( + [ + call("Publish Succeeded", fg="green"), + call(expected_msg.format(self.application_id, details_str)), + call(expected_link, fg="yellow"), + ] ) - click_mock.secho.assert_has_calls([ - call("Publish Succeeded", fg="green"), - call(expected_msg.format(self.application_id, details_str)), - call(expected_link, fg="yellow") - ]) - - @patch('samcli.commands.publish.command.get_template_data', Mock(return_value={})) - @patch('samcli.commands.publish.command.publish_application') - @patch('samcli.commands.publish.command.boto3') - @patch('samcli.commands.publish.command.click') - def test_print_console_link_if_context_region_not_set(self, click_mock, boto3_mock, - publish_application_mock): + + @patch("samcli.commands.publish.command.get_template_data", Mock(return_value={})) + @patch("samcli.commands.publish.command.publish_application") + @patch("samcli.commands.publish.command.boto3") + @patch("samcli.commands.publish.command.click") + def test_print_console_link_if_context_region_not_set(self, click_mock, boto3_mock, publish_application_mock): self.ctx_mock.region = None publish_application_mock.return_value = { - 'application_id': self.application_id, - 'details': {'attr1': 'value1'}, - 'actions': [UPDATE_APPLICATION] + "application_id": self.application_id, + "details": {"attr1": "value1"}, + "actions": [UPDATE_APPLICATION], } session_mock = Mock() @@ -118,48 +116,25 @@ def test_print_console_link_if_context_region_not_set(self, click_mock, boto3_mo boto3_mock.Session.return_value = session_mock publish_cli(self.ctx_mock, self.template, None) - expected_link = self.console_link.format( - session_mock.region_name, - self.application_id.replace('/', '~') - ) + expected_link = self.console_link.format(session_mock.region_name, self.application_id.replace("/", "~")) click_mock.secho.assert_called_with(expected_link, fg="yellow") - @patch('samcli.commands.publish.command.get_template_data') - @patch('samcli.commands.publish.command.publish_application') - def test_must_use_template_semantic_version(self, publish_application_mock, - get_template_data_mock): - template_data = { - METADATA: { - SERVERLESS_REPO_APPLICATION: {SEMANTIC_VERSION: '0.1'} - } - } + @patch("samcli.commands.publish.command.get_template_data") + @patch("samcli.commands.publish.command.publish_application") + def test_must_use_template_semantic_version(self, publish_application_mock, get_template_data_mock): + template_data = {METADATA: {SERVERLESS_REPO_APPLICATION: {SEMANTIC_VERSION: "0.1"}}} get_template_data_mock.return_value = template_data - publish_application_mock.return_value = { - 'application_id': self.application_id, - 'details': {}, 'actions': {} - } + publish_application_mock.return_value = {"application_id": self.application_id, "details": {}, "actions": {}} publish_cli(self.ctx_mock, self.template, None) publish_application_mock.assert_called_with(template_data) - @patch('samcli.commands.publish.command.get_template_data') - @patch('samcli.commands.publish.command.publish_application') - def test_must_override_template_semantic_version(self, publish_application_mock, - get_template_data_mock): - template_data = { - METADATA: { - SERVERLESS_REPO_APPLICATION: {SEMANTIC_VERSION: '0.1'} - } - } + @patch("samcli.commands.publish.command.get_template_data") + @patch("samcli.commands.publish.command.publish_application") + def test_must_override_template_semantic_version(self, 
publish_application_mock, get_template_data_mock): + template_data = {METADATA: {SERVERLESS_REPO_APPLICATION: {SEMANTIC_VERSION: "0.1"}}} get_template_data_mock.return_value = template_data - publish_application_mock.return_value = { - 'application_id': self.application_id, - 'details': {}, 'actions': {} - } + publish_application_mock.return_value = {"application_id": self.application_id, "details": {}, "actions": {}} - publish_cli(self.ctx_mock, self.template, '0.2') - expected_template_data = { - METADATA: { - SERVERLESS_REPO_APPLICATION: {SEMANTIC_VERSION: '0.2'} - } - } + publish_cli(self.ctx_mock, self.template, "0.2") + expected_template_data = {METADATA: {SERVERLESS_REPO_APPLICATION: {SEMANTIC_VERSION: "0.2"}}} publish_application_mock.assert_called_with(expected_template_data) diff --git a/tests/unit/commands/test_deploy.py b/tests/unit/commands/test_deploy.py index a8b74a2706..119a7ddd2c 100644 --- a/tests/unit/commands/test_deploy.py +++ b/tests/unit/commands/test_deploy.py @@ -9,13 +9,12 @@ class TestCli(TestCase): - def setUp(self): - self.args = ('--force-upload',) + self.args = ("--force-upload",) self.expected_args = self.args + ("--stack-name", "stackName") @patch("samcli.commands.deploy.execute_command") def test_deploy_must_pass_args(self, execute_command_mock): execute_command_mock.return_value = True - deploy_cli(self.args, "file.yaml", 'stackName') - execute_command_mock.assert_called_with("deploy", self.expected_args, template_file='file.yaml') + deploy_cli(self.args, "file.yaml", "stackName") + execute_command_mock.assert_called_with("deploy", self.expected_args, template_file="file.yaml") diff --git a/tests/unit/commands/test_package.py b/tests/unit/commands/test_package.py index 56d89674fd..a603b10a66 100644 --- a/tests/unit/commands/test_package.py +++ b/tests/unit/commands/test_package.py @@ -9,13 +9,12 @@ class TestCli(TestCase): - def setUp(self): - self.args = (' --use - json',) + self.args = (" --use - json",) self.expected_args = self.args + ("--s3-bucket", "bucketName") @patch("samcli.commands.package.execute_command") def test_package_must_pass_args(self, execute_command_mock): execute_command_mock.return_value = True - package_cli(self.args, "template_file", 'bucketName') + package_cli(self.args, "template_file", "bucketName") execute_command_mock.assert_called_with("package", self.expected_args, "template_file") diff --git a/tests/unit/commands/validate/lib/test_sam_template_validator.py b/tests/unit/commands/validate/lib/test_sam_template_validator.py index b8a485b743..6a53ee2d87 100644 --- a/tests/unit/commands/validate/lib/test_sam_template_validator.py +++ b/tests/unit/commands/validate/lib/test_sam_template_validator.py @@ -8,9 +8,8 @@ class TestSamTemplateValidator(TestCase): - - @patch('samcli.commands.validate.lib.sam_template_validator.Translator') - @patch('samcli.commands.validate.lib.sam_template_validator.parser') + @patch("samcli.commands.validate.lib.sam_template_validator.Translator") + @patch("samcli.commands.validate.lib.sam_template_validator.parser") def test_is_valid_returns_true(self, sam_parser, sam_translator): managed_policy_mock = Mock() managed_policy_mock.load.return_value = {"policy": "SomePolicy"} @@ -28,14 +27,14 @@ def test_is_valid_returns_true(self, sam_parser, sam_translator): # Should not throw an Exception validator.is_valid() - sam_translator.assert_called_once_with(managed_policy_map={"policy": "SomePolicy"}, - sam_parser=parser, - plugins=[]) + sam_translator.assert_called_once_with( + managed_policy_map={"policy": 
"SomePolicy"}, sam_parser=parser, plugins=[] + ) translate_mock.translate.assert_called_once_with(sam_template=template, parameter_values={}) sam_parser.Parser.assert_called_once() - @patch('samcli.commands.validate.lib.sam_template_validator.Translator') - @patch('samcli.commands.validate.lib.sam_template_validator.parser') + @patch("samcli.commands.validate.lib.sam_template_validator.Translator") + @patch("samcli.commands.validate.lib.sam_template_validator.parser") def test_is_valid_raises_exception(self, sam_parser, sam_translator): managed_policy_mock = Mock() managed_policy_mock.load.return_value = {"policy": "SomePolicy"} @@ -45,7 +44,7 @@ def test_is_valid_raises_exception(self, sam_parser, sam_translator): sam_parser.Parser.return_value = parser translate_mock = Mock() - translate_mock.translate.side_effect = InvalidDocumentException([Exception('message')]) + translate_mock.translate.side_effect = InvalidDocumentException([Exception("message")]) sam_translator.return_value = translate_mock validator = SamTemplateValidator(template, managed_policy_mock) @@ -53,9 +52,9 @@ def test_is_valid_raises_exception(self, sam_parser, sam_translator): with self.assertRaises(InvalidSamDocumentException): validator.is_valid() - sam_translator.assert_called_once_with(managed_policy_map={"policy": "SomePolicy"}, - sam_parser=parser, - plugins=[]) + sam_translator.assert_called_once_with( + managed_policy_map={"policy": "SomePolicy"}, sam_parser=parser, plugins=[] + ) translate_mock.translate.assert_called_once_with(sam_template=template, parameter_values={}) sam_parser.Parser.assert_called_once() @@ -65,8 +64,8 @@ def test_init(self): validator = SamTemplateValidator(template, managed_policy_mock) - self.assertEquals(validator.managed_policy_loader, managed_policy_mock) - self.assertEquals(validator.sam_template, template) + self.assertEqual(validator.managed_policy_loader, managed_policy_mock) + self.assertEqual(validator.sam_template, template) # check to see if SamParser was created self.assertIsNotNone(validator.sam_parser) @@ -84,20 +83,21 @@ def test_update_to_s3_uri_with_non_s3_uri(self): property_value = {"CodeUri": "somevalue"} SamTemplateValidator._update_to_s3_uri("CodeUri", property_value) - self.assertEquals(property_value.get("CodeUri"), "s3://bucket/value") + self.assertEqual(property_value.get("CodeUri"), "s3://bucket/value") def test_update_to_s3_url_with_dict(self): property_value = {"CodeUri": {"Bucket": "mybucket-name", "Key": "swagger", "Version": 121212}} SamTemplateValidator._update_to_s3_uri("CodeUri", property_value) - self.assertEquals(property_value.get("CodeUri"), - {"Bucket": "mybucket-name", "Key": "swagger", "Version": 121212}) + self.assertEqual( + property_value.get("CodeUri"), {"Bucket": "mybucket-name", "Key": "swagger", "Version": 121212} + ) def test_update_to_s3_url_with_s3_uri(self): property_value = {"CodeUri": "s3://bucket/key/version"} SamTemplateValidator._update_to_s3_uri("CodeUri", property_value) - self.assertEquals(property_value.get("CodeUri"), "s3://bucket/key/version") + self.assertEqual(property_value.get("CodeUri"), "s3://bucket/key/version") def test_replace_local_codeuri(self): template = { @@ -106,27 +106,14 @@ def test_replace_local_codeuri(self): "Resources": { "ServerlessApi": { "Type": "AWS::Serverless::Api", - "Properties": { - "StageName": "Prod", - "DefinitionUri": "./" - } + "Properties": {"StageName": "Prod", "DefinitionUri": "./"}, }, "ServerlessFunction": { "Type": "AWS::Serverless::Function", - "Properties": { - "Handler": 
"index.handler", - "CodeUri": "./", - "Runtime": "nodejs6.10", - "Timeout": 60 - } + "Properties": {"Handler": "index.handler", "CodeUri": "./", "Runtime": "nodejs6.10", "Timeout": 60}, }, - "ServerlessLayerVersion": { - "Type": "AWS::Serverless::LayerVersion", - "Properties": { - "ContentUri": "./" - } - } - } + "ServerlessLayerVersion": {"Type": "AWS::Serverless::LayerVersion", "Properties": {"ContentUri": "./"}}, + }, } managed_policy_mock = Mock() @@ -137,33 +124,27 @@ def test_replace_local_codeuri(self): # check template template_resources = validator.sam_template.get("Resources") - self.assertEquals(template_resources.get("ServerlessApi").get("Properties").get("DefinitionUri"), - "s3://bucket/value") - self.assertEquals(template_resources.get("ServerlessFunction").get("Properties").get("CodeUri"), - "s3://bucket/value") - self.assertEquals(template_resources.get("ServerlessLayerVersion").get("Properties").get("ContentUri"), - "s3://bucket/value") + self.assertEqual( + template_resources.get("ServerlessApi").get("Properties").get("DefinitionUri"), "s3://bucket/value" + ) + self.assertEqual( + template_resources.get("ServerlessFunction").get("Properties").get("CodeUri"), "s3://bucket/value" + ) + self.assertEqual( + template_resources.get("ServerlessLayerVersion").get("Properties").get("ContentUri"), "s3://bucket/value" + ) def test_replace_local_codeuri_when_no_codeuri_given(self): template = { "AWSTemplateFormatVersion": "2010-09-09", "Transform": "AWS::Serverless-2016-10-31", "Resources": { - "ServerlessApi": { - "Type": "AWS::Serverless::Api", - "Properties": { - "StageName": "Prod", - } - }, + "ServerlessApi": {"Type": "AWS::Serverless::Api", "Properties": {"StageName": "Prod"}}, "ServerlessFunction": { "Type": "AWS::Serverless::Function", - "Properties": { - "Handler": "index.handler", - "Runtime": "nodejs6.10", - "Timeout": 60 - } - } - } + "Properties": {"Handler": "index.handler", "Runtime": "nodejs6.10", "Timeout": 60}, + }, + }, } managed_policy_mock = Mock() @@ -174,8 +155,9 @@ def test_replace_local_codeuri_when_no_codeuri_given(self): # check template tempalte_resources = validator.sam_template.get("Resources") - self.assertEquals(tempalte_resources.get("ServerlessFunction").get("Properties").get("CodeUri"), - "s3://bucket/value") + self.assertEqual( + tempalte_resources.get("ServerlessFunction").get("Properties").get("CodeUri"), "s3://bucket/value" + ) def test_DefinitionUri_does_not_get_added_to_template_when_DefinitionBody_given(self): template = { @@ -184,14 +166,9 @@ def test_DefinitionUri_does_not_get_added_to_template_when_DefinitionBody_given( "Resources": { "ServerlessApi": { "Type": "AWS::Serverless::Api", - "Properties": { - "StageName": "Prod", - "DefinitionBody": { - "swagger": {} - } - } + "Properties": {"StageName": "Prod", "DefinitionBody": {"swagger": {}}}, } - } + }, } managed_policy_mock = Mock() @@ -209,7 +186,7 @@ def test_replace_local_codeuri_with_no_resources(self): template = { "AWSTemplateFormatVersion": "2010-09-09", "Transform": "AWS::Serverless-2016-10-31", - "Resources": {} + "Resources": {}, } managed_policy_mock = Mock() @@ -219,4 +196,4 @@ def test_replace_local_codeuri_with_no_resources(self): validator._replace_local_codeuri() # check template - self.assertEquals(validator.sam_template.get("Resources"), {}) + self.assertEqual(validator.sam_template.get("Resources"), {}) diff --git a/tests/unit/commands/validate/test_cli.py b/tests/unit/commands/validate/test_cli.py index 1fe75d5fe0..452dbbdc1b 100644 --- 
a/tests/unit/commands/validate/test_cli.py +++ b/tests/unit/commands/validate/test_cli.py @@ -10,22 +10,21 @@ class TestValidateCli(TestCase): - - @patch('samcli.commands.validate.validate.click') - @patch('samcli.commands.validate.validate.os.path.exists') + @patch("samcli.commands.validate.validate.click") + @patch("samcli.commands.validate.validate.os.path.exists") def test_file_not_found(self, path_exists_patch, click_patch): - template_path = 'path_to_template' + template_path = "path_to_template" path_exists_patch.return_value = False with self.assertRaises(SamTemplateNotFoundException): _read_sam_file(template_path) - @patch('samcli.commands.validate.validate.yaml_parse') - @patch('samcli.commands.validate.validate.click') - @patch('samcli.commands.validate.validate.os.path.exists') + @patch("samcli.commands.validate.validate.yaml_parse") + @patch("samcli.commands.validate.validate.click") + @patch("samcli.commands.validate.validate.os.path.exists") def test_file_parsed(self, path_exists_patch, click_patch, yaml_parse_patch): - template_path = 'path_to_template' + template_path = "path_to_template" path_exists_patch.return_value = True @@ -33,13 +32,13 @@ def test_file_parsed(self, path_exists_patch, click_patch, yaml_parse_patch): actual_template = _read_sam_file(template_path) - self.assertEquals(actual_template, {"a": "b"}) + self.assertEqual(actual_template, {"a": "b"}) - @patch('samcli.commands.validate.validate.SamTemplateValidator') - @patch('samcli.commands.validate.validate.click') - @patch('samcli.commands.validate.validate._read_sam_file') + @patch("samcli.commands.validate.validate.SamTemplateValidator") + @patch("samcli.commands.validate.validate.click") + @patch("samcli.commands.validate.validate._read_sam_file") def test_template_fails_validation(self, read_sam_file_patch, click_patch, template_valiadator): - template_path = 'path_to_template' + template_path = "path_to_template" read_sam_file_patch.return_value = {"a": "b"} is_valid_mock = Mock() @@ -47,14 +46,13 @@ def test_template_fails_validation(self, read_sam_file_patch, click_patch, templ template_valiadator.return_value = is_valid_mock with self.assertRaises(InvalidSamTemplateException): - do_cli(ctx=None, - template=template_path) + do_cli(ctx=None, template=template_path) - @patch('samcli.commands.validate.validate.SamTemplateValidator') - @patch('samcli.commands.validate.validate.click') - @patch('samcli.commands.validate.validate._read_sam_file') + @patch("samcli.commands.validate.validate.SamTemplateValidator") + @patch("samcli.commands.validate.validate.click") + @patch("samcli.commands.validate.validate._read_sam_file") def test_no_credentials_provided(self, read_sam_file_patch, click_patch, template_valiadator): - template_path = 'path_to_template' + template_path = "path_to_template" read_sam_file_patch.return_value = {"a": "b"} is_valid_mock = Mock() @@ -62,19 +60,17 @@ def test_no_credentials_provided(self, read_sam_file_patch, click_patch, templat template_valiadator.return_value = is_valid_mock with self.assertRaises(UserException): - do_cli(ctx=None, - template=template_path) + do_cli(ctx=None, template=template_path) - @patch('samcli.commands.validate.validate.SamTemplateValidator') - @patch('samcli.commands.validate.validate.click') - @patch('samcli.commands.validate.validate._read_sam_file') + @patch("samcli.commands.validate.validate.SamTemplateValidator") + @patch("samcli.commands.validate.validate.click") + @patch("samcli.commands.validate.validate._read_sam_file") def 
test_template_passes_validation(self, read_sam_file_patch, click_patch, template_valiadator): - template_path = 'path_to_template' + template_path = "path_to_template" read_sam_file_patch.return_value = {"a": "b"} is_valid_mock = Mock() is_valid_mock.is_valid.return_value = True template_valiadator.return_value = is_valid_mock - do_cli(ctx=None, - template=template_path) + do_cli(ctx=None, template=template_path) diff --git a/tests/unit/lib/build_module/test_app_builder.py b/tests/unit/lib/build_module/test_app_builder.py index 03aca7a436..cb617ebfa5 100644 --- a/tests/unit/lib/build_module/test_app_builder.py +++ b/tests/unit/lib/build_module/test_app_builder.py @@ -1,4 +1,3 @@ - import os import docker import json @@ -12,19 +11,20 @@ from pathlib2 import Path -from samcli.lib.build.app_builder import ApplicationBuilder,\ - UnsupportedBuilderLibraryVersionError, BuildError, \ - LambdaBuilderError, ContainerBuildNotSupported +from samcli.lib.build.app_builder import ( + ApplicationBuilder, + UnsupportedBuilderLibraryVersionError, + BuildError, + LambdaBuilderError, + ContainerBuildNotSupported, +) class TestApplicationBuilder_build(TestCase): - def setUp(self): self.func1 = Mock() self.func2 = Mock() - self.builder = ApplicationBuilder([self.func1, self.func2], - "builddir", - "basedir") + self.builder = ApplicationBuilder([self.func1, self.func2], "builddir", "basedir") def test_must_iterate_on_functions(self): build_function_mock = Mock() @@ -33,94 +33,64 @@ def test_must_iterate_on_functions(self): result = self.builder.build() - self.assertEquals(result, { - self.func1.name: build_function_mock.return_value, - self.func2.name: build_function_mock.return_value, - }) + self.assertEqual( + result, + {self.func1.name: build_function_mock.return_value, self.func2.name: build_function_mock.return_value}, + ) - build_function_mock.assert_has_calls([ - call(self.func1.name, self.func1.codeuri, self.func1.runtime), - call(self.func2.name, self.func2.codeuri, self.func2.runtime), - ], any_order=False) + build_function_mock.assert_has_calls( + [ + call(self.func1.name, self.func1.codeuri, self.func1.runtime), + call(self.func2.name, self.func2.codeuri, self.func2.runtime), + ], + any_order=False, + ) class TestApplicationBuilder_update_template(TestCase): - def setUp(self): - self.builder = ApplicationBuilder(Mock(), - "builddir", - "basedir") + self.builder = ApplicationBuilder(Mock(), "builddir", "basedir") self.template_dict = { "Resources": { - "MyFunction1": { - "Type": "AWS::Serverless::Function", - "Properties": { - "CodeUri": "oldvalue" - } - }, - "MyFunction2": { - "Type": "AWS::Lambda::Function", - "Properties": { - "Code": "oldvalue" - } - }, - "OtherResource": { - "Type": "AWS::Lambda::Version", - "Properties": { - "CodeUri": "something" - } - } + "MyFunction1": {"Type": "AWS::Serverless::Function", "Properties": {"CodeUri": "oldvalue"}}, + "MyFunction2": {"Type": "AWS::Lambda::Function", "Properties": {"Code": "oldvalue"}}, + "OtherResource": {"Type": "AWS::Lambda::Version", "Properties": {"CodeUri": "something"}}, } } def test_must_write_relative_build_artifacts_path(self): original_template_path = "/path/to/tempate.txt" - built_artifacts = { - "MyFunction1": "/path/to/build/MyFunction1", - "MyFunction2": "/path/to/build/MyFunction2" - } + built_artifacts = {"MyFunction1": "/path/to/build/MyFunction1", "MyFunction2": "/path/to/build/MyFunction2"} expected_result = { "Resources": { "MyFunction1": { "Type": "AWS::Serverless::Function", - "Properties": { - "CodeUri": 
os.path.join("build", "MyFunction1") - } + "Properties": {"CodeUri": os.path.join("build", "MyFunction1")}, }, "MyFunction2": { "Type": "AWS::Lambda::Function", - "Properties": { - "Code": os.path.join("build", "MyFunction2") - } + "Properties": {"Code": os.path.join("build", "MyFunction2")}, }, - "OtherResource": { - "Type": "AWS::Lambda::Version", - "Properties": { - "CodeUri": "something" - } - } + "OtherResource": {"Type": "AWS::Lambda::Version", "Properties": {"CodeUri": "something"}}, } } actual = self.builder.update_template(self.template_dict, original_template_path, built_artifacts) - self.assertEquals(actual, expected_result) + self.assertEqual(actual, expected_result) def test_must_skip_if_no_artifacts(self): built_artifacts = {} actual = self.builder.update_template(self.template_dict, "/foo/bar/template.txt", built_artifacts) - self.assertEquals(actual, self.template_dict) + self.assertEqual(actual, self.template_dict) class TestApplicationBuilder_build_function(TestCase): - def setUp(self): - self.builder = ApplicationBuilder(Mock(), - "/build/dir", - "/base/dir") + self.builder = ApplicationBuilder(Mock(), "/build/dir", "/base/dir") @patch("samcli.lib.build.app_builder.get_workflow_config") @patch("samcli.lib.build.app_builder.osutils") @@ -143,12 +113,9 @@ def test_must_build_in_process(self, osutils_mock, get_workflow_config_mock): self.builder._build_function(function_name, codeuri, runtime) - self.builder._build_function_in_process.assert_called_with(config_mock, - code_dir, - artifacts_dir, - scratch_dir, - manifest_path, - runtime) + self.builder._build_function_in_process.assert_called_with( + config_mock, code_dir, artifacts_dir, scratch_dir, manifest_path, runtime + ) @patch("samcli.lib.build.app_builder.get_workflow_config") @patch("samcli.lib.build.app_builder.osutils") @@ -173,46 +140,40 @@ def test_must_build_in_container(self, osutils_mock, get_workflow_config_mock): self.builder._container_manager = Mock() self.builder._build_function(function_name, codeuri, runtime) - self.builder._build_function_on_container.assert_called_with(config_mock, - code_dir, - artifacts_dir, - scratch_dir, - manifest_path, - runtime) + self.builder._build_function_on_container.assert_called_with( + config_mock, code_dir, artifacts_dir, scratch_dir, manifest_path, runtime + ) class TestApplicationBuilder_build_function_in_process(TestCase): - def setUp(self): - self.builder = ApplicationBuilder(Mock(), - "/build/dir", - "/base/dir", - mode="mode") + self.builder = ApplicationBuilder(Mock(), "/build/dir", "/base/dir", mode="mode") @patch("samcli.lib.build.app_builder.LambdaBuilder") def test_must_use_lambda_builder(self, lambda_builder_mock): config_mock = Mock() builder_instance_mock = lambda_builder_mock.return_value = Mock() - result = self.builder._build_function_in_process(config_mock, - "source_dir", - "artifacts_dir", - "scratch_dir", - "manifest_path", - "runtime",) - self.assertEquals(result, "artifacts_dir") - - lambda_builder_mock.assert_called_with(language=config_mock.language, - dependency_manager=config_mock.dependency_manager, - application_framework=config_mock.application_framework) - - builder_instance_mock.build.assert_called_with("source_dir", - "artifacts_dir", - "scratch_dir", - "manifest_path", - runtime="runtime", - executable_search_paths=config_mock.executable_search_paths, - mode="mode") + result = self.builder._build_function_in_process( + config_mock, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime" + ) + self.assertEqual(result, 
"artifacts_dir") + + lambda_builder_mock.assert_called_with( + language=config_mock.language, + dependency_manager=config_mock.dependency_manager, + application_framework=config_mock.application_framework, + ) + + builder_instance_mock.build.assert_called_with( + "source_dir", + "artifacts_dir", + "scratch_dir", + "manifest_path", + runtime="runtime", + executable_search_paths=config_mock.executable_search_paths, + mode="mode", + ) @patch("samcli.lib.build.app_builder.LambdaBuilder") def test_must_raise_on_error(self, lambda_builder_mock): @@ -222,23 +183,17 @@ def test_must_raise_on_error(self, lambda_builder_mock): self.builder._get_build_options = Mock(return_value=None) with self.assertRaises(BuildError): - self.builder._build_function_in_process(config_mock, - "source_dir", - "artifacts_dir", - "scratch_dir", - "manifest_path", - "runtime") + self.builder._build_function_in_process( + config_mock, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime" + ) class TestApplicationBuilder_build_function_on_container(TestCase): - def setUp(self): self.container_manager = Mock() - self.builder = ApplicationBuilder(Mock(), - "/build/dir", - "/base/dir", - container_manager=self.container_manager, - mode="mode") + self.builder = ApplicationBuilder( + Mock(), "/build/dir", "/base/dir", container_manager=self.container_manager, mode="mode" + ) self.builder._parse_builder_response = Mock() @patch("samcli.lib.build.app_builder.LambdaBuildContainer") @@ -247,48 +202,41 @@ def setUp(self): @patch("samcli.lib.build.app_builder.osutils") def test_must_build_in_container(self, osutils_mock, LOGMock, protocol_version_mock, LambdaBuildContainerMock): config = Mock() - log_level = LOGMock.getEffectiveLevel.return_value = 'foo' + log_level = LOGMock.getEffectiveLevel.return_value = "foo" stdout_data = "container stdout response data" - response = { - "result": { - "artifacts_dir": "/some/dir" - } - - } + response = {"result": {"artifacts_dir": "/some/dir"}} def mock_wait_for_logs(stdout, stderr): - stdout.write(stdout_data.encode('utf-8')) + stdout.write(stdout_data.encode("utf-8")) # Wire all mocks correctly container_mock = LambdaBuildContainerMock.return_value = Mock() container_mock.wait_for_logs = mock_wait_for_logs self.builder._parse_builder_response.return_value = response - result = self.builder._build_function_on_container(config, - "source_dir", - "artifacts_dir", - "scratch_dir", - "manifest_path", - "runtime") - self.assertEquals(result, "artifacts_dir") - - LambdaBuildContainerMock.assert_called_once_with(protocol_version_mock, - config.language, - config.dependency_manager, - config.application_framework, - "source_dir", - "manifest_path", - "runtime", - log_level=log_level, - optimizations=None, - options=None, - executable_search_paths=config.executable_search_paths, - mode="mode") + result = self.builder._build_function_on_container( + config, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime" + ) + self.assertEqual(result, "artifacts_dir") + + LambdaBuildContainerMock.assert_called_once_with( + protocol_version_mock, + config.language, + config.dependency_manager, + config.application_framework, + "source_dir", + "manifest_path", + "runtime", + log_level=log_level, + optimizations=None, + options=None, + executable_search_paths=config.executable_search_paths, + mode="mode", + ) self.container_manager.run.assert_called_with(container_mock) self.builder._parse_builder_response.assert_called_once_with(stdout_data, container_mock.image) - 
container_mock.copy.assert_called_with(response["result"]["artifacts_dir"] + "/.", - "artifacts_dir") + container_mock.copy.assert_called_with(response["result"]["artifacts_dir"] + "/.", "artifacts_dir") self.container_manager.stop.assert_called_with(container_mock) @patch("samcli.lib.build.app_builder.LambdaBuildContainer") @@ -299,22 +247,22 @@ def test_must_raise_on_unsupported_container(self, LambdaBuildContainerMock): container_mock.image = "image name" container_mock.executable_name = "myexecutable" - self.container_manager.run.side_effect = docker.errors.APIError("Bad Request: 'lambda-builders' " - "executable file not found in $PATH") + self.container_manager.run.side_effect = docker.errors.APIError( + "Bad Request: 'lambda-builders' " "executable file not found in $PATH" + ) with self.assertRaises(UnsupportedBuilderLibraryVersionError) as ctx: - self.builder._build_function_on_container(config, - "source_dir", - "artifacts_dir", - "scratch_dir", - "manifest_path", - "runtime") - - msg = "You are running an outdated version of Docker container 'image name' that is not compatible with" \ - "this version of SAM CLI. Please upgrade to continue to continue with build. " \ - "Reason: 'myexecutable executable not found in container'" - - self.assertEquals(str(ctx.exception), msg) + self.builder._build_function_on_container( + config, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime" + ) + + msg = ( + "You are running an outdated version of Docker container 'image name' that is not compatible with" + "this version of SAM CLI. Please upgrade to continue to continue with build. " + "Reason: 'myexecutable executable not found in container'" + ) + + self.assertEqual(str(ctx.exception), msg) self.container_manager.stop.assert_called_with(container_mock) def test_must_raise_on_docker_not_running(self): @@ -323,15 +271,13 @@ def test_must_raise_on_docker_not_running(self): self.container_manager.is_docker_reachable = False with self.assertRaises(BuildError) as ctx: - self.builder._build_function_on_container(config, - "source_dir", - "artifacts_dir", - "scratch_dir", - "manifest_path", - "runtime") + self.builder._build_function_on_container( + config, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime" + ) - self.assertEquals(str(ctx.exception), - "Docker is unreachable. Docker needs to be running to build inside a container.") + self.assertEqual( + str(ctx.exception), "Docker is unreachable. Docker needs to be running to build inside a container." 
+ ) @patch("samcli.lib.build.app_builder.supports_build_in_container") def test_must_raise_on_unsupported_container_build(self, supports_build_in_container_mock): @@ -341,29 +287,23 @@ def test_must_raise_on_unsupported_container_build(self, supports_build_in_conta supports_build_in_container_mock.return_value = (False, reason) with self.assertRaises(ContainerBuildNotSupported) as ctx: - self.builder._build_function_on_container(config, - "source_dir", - "artifacts_dir", - "scratch_dir", - "manifest_path", - "runtime") + self.builder._build_function_on_container( + config, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime" + ) - self.assertEquals(str(ctx.exception), reason) + self.assertEqual(str(ctx.exception), reason) class TestApplicationBuilder_parse_builder_response(TestCase): - def setUp(self): self.image_name = "name" - self.builder = ApplicationBuilder(Mock(), - "/build/dir", - "/base/dir") + self.builder = ApplicationBuilder(Mock(), "/build/dir", "/base/dir") def test_must_parse_json(self): data = {"valid": "json"} result = self.builder._parse_builder_response(json.dumps(data), self.image_name) - self.assertEquals(result, data) + self.assertEqual(result, data) def test_must_fail_on_invalid_json(self): data = "{invalid: json}" @@ -378,7 +318,7 @@ def test_must_raise_on_user_error(self): with self.assertRaises(BuildError) as ctx: self.builder._parse_builder_response(json.dumps(data), self.image_name) - self.assertEquals(str(ctx.exception), msg) + self.assertEqual(str(ctx.exception), msg) def test_must_raise_on_version_mismatch(self): msg = "invalid params" @@ -388,7 +328,7 @@ def test_must_raise_on_version_mismatch(self): self.builder._parse_builder_response(json.dumps(data), self.image_name) expected = str(UnsupportedBuilderLibraryVersionError(self.image_name, msg)) - self.assertEquals(str(ctx.exception), expected) + self.assertEqual(str(ctx.exception), expected) def test_must_raise_on_method_not_found(self): msg = "invalid method" @@ -398,7 +338,7 @@ def test_must_raise_on_method_not_found(self): self.builder._parse_builder_response(json.dumps(data), self.image_name) expected = str(UnsupportedBuilderLibraryVersionError(self.image_name, msg)) - self.assertEquals(str(ctx.exception), expected) + self.assertEqual(str(ctx.exception), expected) def test_must_raise_on_all_other_codes(self): msg = "builder crashed" @@ -407,4 +347,4 @@ def test_must_raise_on_all_other_codes(self): with self.assertRaises(ValueError) as ctx: self.builder._parse_builder_response(json.dumps(data), self.image_name) - self.assertEquals(str(ctx.exception), msg) + self.assertEqual(str(ctx.exception), msg) diff --git a/tests/unit/lib/build_module/test_workflow_config.py b/tests/unit/lib/build_module/test_workflow_config.py index 9939d58ff3..14a29fee2f 100644 --- a/tests/unit/lib/build_module/test_workflow_config.py +++ b/tests/unit/lib/build_module/test_workflow_config.py @@ -6,73 +6,59 @@ class Test_get_workflow_config(TestCase): - def setUp(self): - self.code_dir = '' - self.project_dir = '' + self.code_dir = "" + self.project_dir = "" - @parameterized.expand([ - ("python2.7", ), - ("python3.6", ) - ]) + @parameterized.expand([("python2.7",), ("python3.6",)]) def test_must_work_for_python(self, runtime): result = get_workflow_config(runtime, self.code_dir, self.project_dir) - self.assertEquals(result.language, "python") - self.assertEquals(result.dependency_manager, "pip") - self.assertEquals(result.application_framework, None) - self.assertEquals(result.manifest_name, "requirements.txt") + 
self.assertEqual(result.language, "python") + self.assertEqual(result.dependency_manager, "pip") + self.assertEqual(result.application_framework, None) + self.assertEqual(result.manifest_name, "requirements.txt") self.assertIsNone(result.executable_search_paths) - @parameterized.expand([ - ("nodejs4.3", ), - ("nodejs6.10", ), - ("nodejs8.10", ), - ]) + @parameterized.expand([("nodejs4.3",), ("nodejs6.10",), ("nodejs8.10",)]) def test_must_work_for_nodejs(self, runtime): result = get_workflow_config(runtime, self.code_dir, self.project_dir) - self.assertEquals(result.language, "nodejs") - self.assertEquals(result.dependency_manager, "npm") - self.assertEquals(result.application_framework, None) - self.assertEquals(result.manifest_name, "package.json") + self.assertEqual(result.language, "nodejs") + self.assertEqual(result.dependency_manager, "npm") + self.assertEqual(result.application_framework, None) + self.assertEqual(result.manifest_name, "package.json") self.assertIsNone(result.executable_search_paths) - @parameterized.expand([ - ("ruby2.5", ) - ]) + @parameterized.expand([("ruby2.5",)]) def test_must_work_for_ruby(self, runtime): result = get_workflow_config(runtime, self.code_dir, self.project_dir) - self.assertEquals(result.language, "ruby") - self.assertEquals(result.dependency_manager, "bundler") - self.assertEquals(result.application_framework, None) - self.assertEquals(result.manifest_name, "Gemfile") + self.assertEqual(result.language, "ruby") + self.assertEqual(result.dependency_manager, "bundler") + self.assertEqual(result.application_framework, None) + self.assertEqual(result.manifest_name, "Gemfile") self.assertIsNone(result.executable_search_paths) - @parameterized.expand([ - ("java8", "build.gradle", "gradle"), - ("java8", "build.gradle.kts", "gradle"), - ("java8", "pom.xml", "maven") - ]) + @parameterized.expand( + [("java8", "build.gradle", "gradle"), ("java8", "build.gradle.kts", "gradle"), ("java8", "pom.xml", "maven")] + ) @patch("samcli.lib.build.workflow_config.os") def test_must_work_for_java(self, runtime, build_file, dep_manager, os_mock): os_mock.path.join.side_effect = lambda dirname, v: v os_mock.path.exists.side_effect = lambda v: v == build_file result = get_workflow_config(runtime, self.code_dir, self.project_dir) - self.assertEquals(result.language, "java") - self.assertEquals(result.dependency_manager, dep_manager) - self.assertEquals(result.application_framework, None) - self.assertEquals(result.manifest_name, build_file) + self.assertEqual(result.language, "java") + self.assertEqual(result.dependency_manager, dep_manager) + self.assertEqual(result.application_framework, None) + self.assertEqual(result.manifest_name, build_file) if dep_manager == "gradle": - self.assertEquals(result.executable_search_paths, [self.code_dir, self.project_dir]) + self.assertEqual(result.executable_search_paths, [self.code_dir, self.project_dir]) else: self.assertIsNone(result.executable_search_paths) - @parameterized.expand([ - ("java8", "unknown.manifest") - ]) + @parameterized.expand([("java8", "unknown.manifest")]) @patch("samcli.lib.build.workflow_config.os") def test_must_fail_when_manifest_not_found(self, runtime, build_file, os_mock): @@ -82,8 +68,7 @@ def test_must_fail_when_manifest_not_found(self, runtime, build_file, os_mock): with self.assertRaises(UnsupportedRuntimeException) as ctx: get_workflow_config(runtime, self.code_dir, self.project_dir) - self.assertIn("Unable to find a supported build workflow for runtime '{}'.".format(runtime), - str(ctx.exception)) 
+ self.assertIn("Unable to find a supported build workflow for runtime '{}'.".format(runtime), str(ctx.exception)) def test_must_raise_for_unsupported_runtimes(self): @@ -92,5 +77,4 @@ def test_must_raise_for_unsupported_runtimes(self): with self.assertRaises(UnsupportedRuntimeException) as ctx: get_workflow_config(runtime, self.code_dir, self.project_dir) - self.assertEquals(str(ctx.exception), - "'foobar' runtime is not supported") + self.assertEqual(str(ctx.exception), "'foobar' runtime is not supported") diff --git a/tests/unit/lib/intrinsic_resolver/test_intrinsic_resolver.py b/tests/unit/lib/intrinsic_resolver/test_intrinsic_resolver.py index b257604ab1..991fa1cb20 100644 --- a/tests/unit/lib/intrinsic_resolver/test_intrinsic_resolver.py +++ b/tests/unit/lib/intrinsic_resolver/test_intrinsic_resolver.py @@ -12,9 +12,7 @@ from samcli.lib.intrinsic_resolver.intrinsic_property_resolver import IntrinsicResolver from samcli.lib.intrinsic_resolver.intrinsics_symbol_table import IntrinsicsSymbolTable -from samcli.lib.intrinsic_resolver.invalid_intrinsic_exception import ( - InvalidIntrinsicException, -) +from samcli.lib.intrinsic_resolver.invalid_intrinsic_exception import InvalidIntrinsicException class TestIntrinsicFnJoinResolver(TestCase): @@ -23,77 +21,55 @@ def setUp(self): def test_basic_fn_join(self): intrinsic = {"Fn::Join": [",", ["a", "b", "c", "d"]]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "a,b,c,d") def test_nested_fn_join(self): intrinsic_base_1 = {"Fn::Join": [",", ["a", "b", "c", "d"]]} intrinsic_base_2 = {"Fn::Join": [";", ["g", "h", "i", intrinsic_base_1]]} intrinsic = {"Fn::Join": [":", [intrinsic_base_1, "e", "f", intrinsic_base_2]]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "a,b,c,d:e:f:g;h;i;a,b,c,d") @parameterized.expand( [ - ( - "Fn::Join should fail for values that are not lists: {}".format(item), - item, - ) + ("Fn::Join should fail for values that are not lists: {}".format(item), item) for item in [True, False, "Test", {}, 42, None] ] ) def test_fn_join_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Join": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Join": intrinsic}, True) @parameterized.expand( [ - ( - "Fn::Join should fail if the first argument does not resolve to a string: {}".format( - item - ), - item, - ) + ("Fn::Join should fail if the first argument does not resolve to a string: {}".format(item), item) for item in [True, False, {}, 42, None] ] ) def test_fn_join_delimiter_invalid_type(self, name, delimiter): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Join": [delimiter, []]}) + self.resolver.intrinsic_property_resolver({"Fn::Join": [delimiter, []]}, True) @parameterized.expand( [ - ( - "Fn::Join should fail if the list_of_objects is not a valid list: {}".format( - item - ), - item, - ) + ("Fn::Join should fail if the list_of_objects is not a valid list: {}".format(item), item) for item in [True, False, {}, 42, "t", None] ] ) def test_fn_list_of_objects_invalid_type(self, name, list_of_objects): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::Join": ["", 
list_of_objects]} - ) + self.resolver.intrinsic_property_resolver({"Fn::Join": ["", list_of_objects]}, True) @parameterized.expand( [ - ( - "Fn::Join should require that all items in the list_of_objects resolve to string: {}".format( - item - ), - item, - ) + ("Fn::Join should require that all items in the list_of_objects resolve to string: {}".format(item), item) for item in [True, False, {}, 42, None] ] ) def test_fn_join_items_all_str(self, name, single_obj): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::Join": ["", ["test", single_obj, "abcd"]]} - ) + self.resolver.intrinsic_property_resolver({"Fn::Join": ["", ["test", single_obj, "abcd"]]}, True) class TestIntrinsicFnSplitResolver(TestCase): @@ -102,66 +78,46 @@ def setUp(self): def test_basic_fn_split(self): intrinsic = {"Fn::Split": ["|", "a|b|c"]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, ["a", "b", "c"]) def test_nested_fn_split(self): intrinsic_base_1 = {"Fn::Split": [";", {"Fn::Join": [";", ["a", "b", "c"]]}]} intrinsic_base_2 = {"Fn::Join": [",", intrinsic_base_1]} - intrinsic = { - "Fn::Split": [ - ",", - {"Fn::Join": [",", [intrinsic_base_2, ",e", ",f,", intrinsic_base_2]]}, - ] - } - result = self.resolver.intrinsic_property_resolver(intrinsic) + intrinsic = {"Fn::Split": [",", {"Fn::Join": [",", [intrinsic_base_2, ",e", ",f,", intrinsic_base_2]]}]} + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, ["a", "b", "c", "", "e", "", "f", "", "a", "b", "c"]) @parameterized.expand( [ - ( - "Fn::Split should fail for values that are not lists: {}".format(item), - item, - ) + ("Fn::Split should fail for values that are not lists: {}".format(item), item) for item in [True, False, "Test", {}, 42] ] ) def test_fn_split_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Split": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Split": intrinsic}, True) @parameterized.expand( [ - ( - "Fn::Split should fail if the first argument does not resolve to a string: {}".format( - item - ), - item, - ) + ("Fn::Split should fail if the first argument does not resolve to a string: {}".format(item), item) for item in [True, False, {}, 42] ] ) def test_fn_split_delimiter_invalid_type(self, name, delimiter): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Split": [delimiter, []]}) + self.resolver.intrinsic_property_resolver({"Fn::Split": [delimiter, []]}, True) @parameterized.expand( [ - ( - "Fn::Split should fail if the second argument does not resolve to a string: {}".format( - item - ), - item, - ) + ("Fn::Split should fail if the second argument does not resolve to a string: {}".format(item), item) for item in [True, False, {}, 42] ] ) def test_fn_split_source_string_invalid_type(self, name, source_string): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::Split": ["", source_string]} - ) + self.resolver.intrinsic_property_resolver({"Fn::Split": ["", source_string]}, True) class TestIntrinsicFnBase64Resolver(TestCase): @@ -170,39 +126,29 @@ def setUp(self): def test_basic_fn_split(self): intrinsic = {"Fn::Base64": "AWS CloudFormation"} - result = 
self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "QVdTIENsb3VkRm9ybWF0aW9u") def test_nested_fn_base64(self): intrinsic_base_1 = {"Fn::Base64": "AWS CloudFormation"} intrinsic_base_2 = {"Fn::Base64": intrinsic_base_1} - intrinsic = { - "Fn::Base64": { - "Fn::Join": [",", [intrinsic_base_2, ",e", ",f,", intrinsic_base_2]] - } - } - result = self.resolver.intrinsic_property_resolver(intrinsic) + intrinsic = {"Fn::Base64": {"Fn::Join": [",", [intrinsic_base_2, ",e", ",f,", intrinsic_base_2]]}} + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual( result, - "VVZaa1ZFbEZUbk5pTTFaclVtMDVlV0pYUmpCaFZ6bDEsLGUsLGYsLFVWWmtWRWxGVG5OaU0xWnJ" - "VbTA1ZVdKWFJqQmhWemwx", + "VVZaa1ZFbEZUbk5pTTFaclVtMDVlV0pYUmpCaFZ6bDEsLGUsLGYsLFVWWmtWRWxGVG5OaU0xWnJ" "VbTA1ZVdKWFJqQmhWemwx", ) @parameterized.expand( [ - ( - "Fn::Base64 must have a value that resolves to a string: {}".format( - item - ), - item, - ) + ("Fn::Base64 must have a value that resolves to a string: {}".format(item), item) for item in [True, False, {}, 42, None] ] ) def test_fn_base64_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Base64": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Base64": intrinsic}, True) class TestIntrinsicFnSelectResolver(TestCase): @@ -211,73 +157,52 @@ def setUp(self): def test_basic_fn_select(self): intrinsic = {"Fn::Select": [2, ["a", "b", "c", "d"]]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "c") def test_nested_fn_select(self): intrinsic_base_1 = {"Fn::Select": [0, ["a", "b", "c", "d"]]} intrinsic_base_2 = {"Fn::Join": [";", ["g", "h", "i", intrinsic_base_1]]} intrinsic = {"Fn::Select": [3, [intrinsic_base_2, "e", "f", intrinsic_base_2]]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "g;h;i;a") @parameterized.expand( [ - ( - "Fn::Select should fail for values that are not lists: {}".format(item), - item, - ) + ("Fn::Select should fail for values that are not lists: {}".format(item), item) for item in [True, False, "Test", {}, 42, None] ] ) def test_fn_select_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Select": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Select": intrinsic}, True) @parameterized.expand( [ - ( - "Fn::Select should fail if the first argument does not resolve to a int: {}".format( - item - ), - item, - ) + ("Fn::Select should fail if the first argument does not resolve to a int: {}".format(item), item) for item in [True, False, {}, "3", None] ] ) def test_fn_select_index_invalid_index_type(self, name, index): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Select": [index, [0]]}) + self.resolver.intrinsic_property_resolver({"Fn::Select": [index, [0]]}, True) @parameterized.expand( - [ - ( - "Fn::Select should fail if the index is out of bounds: {}".format( - number - ), - number, - ) - for number in [-2, 7] - ] + [("Fn::Select should fail if the index is out of bounds: {}".format(number), number) for number in [-2, 7]] ) def 
test_fn_select_out_of_bounds(self, name, index): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Select": [index, []]}) + self.resolver.intrinsic_property_resolver({"Fn::Select": [index, []]}, True) @parameterized.expand( [ - ( - "Fn::Select should fail if the second argument does not resolve to a list: {}".format( - item - ), - item, - ) + ("Fn::Select should fail if the second argument does not resolve to a list: {}".format(item), item) for item in [True, False, {}, "3", 33, None] ] ) def test_fn_select_second_argument_invalid_type(self, name, argument): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Select": [0, argument]}) + self.resolver.intrinsic_property_resolver({"Fn::Select": [0, argument]}, True) class TestIntrinsicFnFindInMapResolver(TestCase): @@ -289,60 +214,43 @@ def setUp(self): "result": {"value": {"key": "final"}}, } } - self.resolver = IntrinsicResolver( - symbol_resolver=IntrinsicsSymbolTable(), template=template - ) + self.resolver = IntrinsicResolver(symbol_resolver=IntrinsicsSymbolTable(), template=template) def test_basic_find_in_map(self): intrinsic = {"Fn::FindInMap": ["Basic", "Test", "key"]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "value") def test_nested_find_in_map(self): intrinsic_base_1 = {"Fn::FindInMap": ["Basic", "Test", "key"]} intrinsic_base_2 = {"Fn::FindInMap": [intrinsic_base_1, "anotherkey", "key"]} intrinsic = {"Fn::FindInMap": [intrinsic_base_2, intrinsic_base_1, "key"]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "final") @parameterized.expand( [ - ( - "Fn::FindInMap should fail if the list does not resolve to a string: {}".format( - item - ), - item, - ) + ("Fn::FindInMap should fail if the list does not resolve to a string: {}".format(item), item) for item in [True, False, "Test", {}, 42, None] ] ) def test_fn_find_in_map_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::FindInMap": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::FindInMap": intrinsic}, True) @parameterized.expand( [ - ( - "Fn::FindInMap should fail if there isn't 3 arguments in the list: {}".format( - item - ), - item, - ) + ("Fn::FindInMap should fail if there isn't 3 arguments in the list: {}".format(item), item) for item in [[""] * i for i in [0, 1, 2, 4, 5, 6, 7, 8, 9, 10]] ] ) def test_fn_find_in_map_invalid_number_arguments(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::FindInMap": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::FindInMap": intrinsic}, True) @parameterized.expand( [ - ( - "The arguments in Fn::FindInMap must fail if the arguments are not in the mappings".format( - item - ), - item, - ) + ("The arguments in Fn::FindInMap must fail if the arguments are not in the mappings".format(item), item) for item in [ ["", "Test", "key"], ["Basic", "", "key"], @@ -352,47 +260,24 @@ def test_fn_find_in_map_invalid_number_arguments(self, name, intrinsic): ) def test_fn_find_in_map_invalid_key_entries(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - 
self.resolver.intrinsic_property_resolver({"Fn::FindInMap": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::FindInMap": intrinsic}, True) class TestIntrinsicFnAzsResolver(TestCase): def setUp(self): logical_id_translator = {"AWS::Region": "us-east-1"} self.resolver = IntrinsicResolver( - template={}, - symbol_resolver=IntrinsicsSymbolTable( - logical_id_translator=logical_id_translator - ) + template={}, symbol_resolver=IntrinsicsSymbolTable(logical_id_translator=logical_id_translator) ) def test_basic_azs(self): intrinsic = {"Ref": "AWS::Region"} - result = self.resolver.intrinsic_property_resolver({"Fn::GetAZs": intrinsic}) - self.assertEqual( - result, - [ - "us-east-1a", - "us-east-1b", - "us-east-1c", - "us-east-1d", - "us-east-1e", - "us-east-1f", - ], - ) + result = self.resolver.intrinsic_property_resolver({"Fn::GetAZs": intrinsic}, True) + self.assertEqual(result, ["us-east-1a", "us-east-1b", "us-east-1c", "us-east-1d", "us-east-1e", "us-east-1f"]) def test_default_get_azs(self): - result = self.resolver.intrinsic_property_resolver({"Fn::GetAZs": ""}) - self.assertEqual( - result, - [ - "us-east-1a", - "us-east-1b", - "us-east-1c", - "us-east-1d", - "us-east-1e", - "us-east-1f", - ], - ) + result = self.resolver.intrinsic_property_resolver({"Fn::GetAZs": ""}, True) + self.assertEqual(result, ["us-east-1a", "us-east-1b", "us-east-1c", "us-east-1d", "us-east-1e", "us-east-1f"]) @parameterized.expand( [ @@ -402,56 +287,49 @@ def test_default_get_azs(self): ) def test_fn_azs_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::GetAZs": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::GetAZs": intrinsic}, True) def test_fn_azs_invalid_region(self): intrinsic = "UNKOWN REGION" with self.assertRaises(InvalidIntrinsicException, msg="FN::GetAzs should fail for unknown region"): - self.resolver.intrinsic_property_resolver({"Fn::GetAZs": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::GetAZs": intrinsic}, True) class TestFnTransform(TestCase): def setUp(self): logical_id_translator = {"AWS::Region": "us-east-1"} self.resolver = IntrinsicResolver( - template={}, - symbol_resolver=IntrinsicsSymbolTable( - logical_id_translator=logical_id_translator - ) + template={}, symbol_resolver=IntrinsicsSymbolTable(logical_id_translator=logical_id_translator) ) def test_basic_fn_transform(self): intrinsic = {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "test"}}} - self.resolver.intrinsic_property_resolver(intrinsic) + self.resolver.intrinsic_property_resolver(intrinsic, True) def test_fn_transform_unsupported_macro(self): intrinsic = {"Fn::Transform": {"Name": "UNKNOWN", "Parameters": {"Location": "test"}}} with self.assertRaises(InvalidIntrinsicException, msg="FN::Transform should fail for unknown region"): - self.resolver.intrinsic_property_resolver(intrinsic) + self.resolver.intrinsic_property_resolver(intrinsic, True) class TestIntrinsicFnRefResolver(TestCase): def setUp(self): - logical_id_translator = { - "RestApi": {"Ref": "NewRestApi"}, - "AWS::StackId": "12301230123", - } + logical_id_translator = {"RestApi": {"Ref": "NewRestApi"}, "AWS::StackId": "12301230123"} resources = {"RestApi": {"Type": "AWS::ApiGateway::RestApi", "Properties": {}}} template = {"Resources": resources} self.resolver = IntrinsicResolver( - symbol_resolver=IntrinsicsSymbolTable( - logical_id_translator=logical_id_translator, template=template - ), 
template=template + symbol_resolver=IntrinsicsSymbolTable(logical_id_translator=logical_id_translator, template=template), + template=template, ) def test_basic_ref_translation(self): intrinsic = {"Ref": "RestApi"} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "NewRestApi") def test_default_ref_translation(self): intrinsic = {"Ref": "UnknownApi"} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "UnknownApi") @parameterized.expand( @@ -462,7 +340,7 @@ def test_default_ref_translation(self): ) def test_ref_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Ref": intrinsic}) + self.resolver.intrinsic_property_resolver({"Ref": intrinsic}, True) class TestIntrinsicFnGetAttResolver(TestCase): @@ -471,7 +349,7 @@ def setUp(self): "RestApi": {"Ref": "NewRestApi"}, "LambdaFunction": { "Arn": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east" - "-1:123456789012:LambdaFunction/invocations" + "-1:123456789012:LambdaFunction/invocations" }, "AWS::StackId": "12301230123", "AWS::Region": "us-east-1", @@ -479,10 +357,7 @@ def setUp(self): } resources = { "RestApi": {"Type": "AWS::ApiGateway::RestApi", "Properties": {}}, - "HelloHandler2E4FBA4D": { - "Type": "AWS::Lambda::Function", - "Properties": {"handler": "main.handle"}, - }, + "HelloHandler2E4FBA4D": {"Type": "AWS::Lambda::Function", "Properties": {"handler": "main.handle"}}, "LambdaFunction": { "Type": "AWS::Lambda::Function", "Properties": { @@ -504,22 +379,18 @@ def setUp(self): }, } template = {"Resources": resources} - symbol_resolver = IntrinsicsSymbolTable( - template=template, logical_id_translator=logical_id_translator - ) + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator=logical_id_translator) self.resources = resources - self.resolver = IntrinsicResolver( - template=template, symbol_resolver=symbol_resolver - ) + self.resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) def test_fn_getatt_basic_translation(self): intrinsic = {"Fn::GetAtt": ["RestApi", "RootResourceId"]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual(result, "/") def test_fn_getatt_logical_id_translated(self): intrinsic = {"Fn::GetAtt": ["LambdaFunction", "Arn"]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual( result, "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east" @@ -528,7 +399,7 @@ def test_fn_getatt_logical_id_translated(self): def test_fn_getatt_with_fn_join(self): intrinsic = self.resources.get("LambdaFunction").get("Properties").get("Uri") - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual( result, "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us" @@ -537,93 +408,60 @@ def test_fn_getatt_with_fn_join(self): @parameterized.expand( [ - ( - "Fn::GetAtt must fail if the argument does not resolve to a list: {}".format( - item - ), - item, - ) + ("Fn::GetAtt must fail if the argument 
does not resolve to a list: {}".format(item), item) for item in [True, False, {}, "test", 42, None] ] ) def test_fn_getatt_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::GetAtt": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::GetAtt": intrinsic}, True) @parameterized.expand( [ - ( - "Fn::GetAtt should fail if it doesn't have exactly 2 arguments: {}".format( - item - ), - item, - ) + ("Fn::GetAtt should fail if it doesn't have exactly 2 arguments: {}".format(item), item) for item in [[""] * i for i in [0, 1, 3, 4, 5, 6, 7, 8, 9, 10]] ] ) def test_fn_getatt_invalid_number_arguments(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::GetAtt": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::GetAtt": intrinsic}, True) @parameterized.expand( [ - ( - "Fn::GetAtt first argument must resolve to a valid string: {}".format( - item - ), - item, - ) + ("Fn::GetAtt first argument must resolve to a valid string: {}".format(item), item) for item in [True, False, {}, [], 42, None] ] ) def test_fn_getatt_first_arguments_invalid(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::GetAtt": [intrinsic, IntrinsicResolver.REF]} - ) + self.resolver.intrinsic_property_resolver({"Fn::GetAtt": [intrinsic, IntrinsicResolver.REF]}, True) @parameterized.expand( [ - ( - "Fn::GetAtt second argument must resolve to a string:{}".format(item), - item, - ) + ("Fn::GetAtt second argument must resolve to a string:{}".format(item), item) for item in [True, False, {}, [], 42, None] ] ) def test_fn_getatt_second_arguments_invalid(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::GetAtt": ["some logical Id", intrinsic]} - ) + self.resolver.intrinsic_property_resolver({"Fn::GetAtt": ["some logical Id", intrinsic]}, True) class TestIntrinsicFnSubResolver(TestCase): def setUp(self): - logical_id_translator = { - "AWS::Region": "us-east-1", - "AWS::AccountId": "123456789012", - } - resources = { - "LambdaFunction": { - "Type": "AWS::ApiGateway::RestApi", - "Properties": {"Uri": "test"}, - } - } + logical_id_translator = {"AWS::Region": "us-east-1", "AWS::AccountId": "123456789012"} + resources = {"LambdaFunction": {"Type": "AWS::ApiGateway::RestApi", "Properties": {"Uri": "test"}}} template = {"Resources": resources} self.resolver = IntrinsicResolver( template=template, - symbol_resolver=IntrinsicsSymbolTable( - logical_id_translator=logical_id_translator, template=template - ) + symbol_resolver=IntrinsicsSymbolTable(logical_id_translator=logical_id_translator, template=template), ) def test_fn_sub_basic_uri(self): intrinsic = { - "Fn::Sub": - "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" + "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual( result, "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1" @@ -637,7 +475,7 @@ def test_fn_sub_uri_arguments(self): {"MyItem": {"Ref": "AWS::Region"}, "MyOtherItem": "LambdaFunction.Arn"}, ] } - 
result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertEqual( result, "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east" @@ -646,50 +484,33 @@ def test_fn_sub_uri_arguments(self): @parameterized.expand( [ - ( - "Fn::Sub arguments must either resolve to a string or a list".format( - item - ), - item, - ) + ("Fn::Sub arguments must either resolve to a string or a list".format(item), item) for item in [True, False, {}, 42, None] ] ) def test_fn_sub_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Sub": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Sub": intrinsic}, True) @parameterized.expand( [ - ( - "If Fn::Sub is a list, first argument must resolve to a string: {}".format( - item - ), - item, - ) + ("If Fn::Sub is a list, first argument must resolve to a string: {}".format(item), item) for item in [True, False, {}, 42, None] ] ) def test_fn_sub_first_argument_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Sub": [intrinsic, {}]}) + self.resolver.intrinsic_property_resolver({"Fn::Sub": [intrinsic, {}]}, True) @parameterized.expand( [ - ( - "If Fn::Sub is a list, second argument must resolve to a dictionary".format( - item - ), - item, - ) + ("If Fn::Sub is a list, second argument must resolve to a dictionary".format(item), item) for item in [True, False, "Another str", [], 42, None] ] ) def test_fn_sub_second_argument_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::Sub": ["some str", intrinsic]} - ) + self.resolver.intrinsic_property_resolver({"Fn::Sub": ["some str", intrinsic]}, True) @parameterized.expand( [ @@ -699,7 +520,7 @@ def test_fn_sub_second_argument_invalid_formats(self, name, intrinsic): ) def test_fn_sub_invalid_number_arguments(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Sub": ["test"] + intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Sub": ["test"] + intrinsic}, True) class TestIntrinsicFnImportValueResolver(TestCase): @@ -707,33 +528,25 @@ def setUp(self): self.resolver = IntrinsicResolver(template={}, symbol_resolver=IntrinsicsSymbolTable()) def test_fn_import_value_unsupported(self): - with self.assertRaises( - InvalidIntrinsicException, msg="Fn::ImportValue should be unsupported" - ): - self.resolver.intrinsic_property_resolver({"Fn::ImportValue": ""}) + with self.assertRaises(InvalidIntrinsicException, msg="Fn::ImportValue should be unsupported"): + self.resolver.intrinsic_property_resolver({"Fn::ImportValue": ""}, True) class TestIntrinsicFnEqualsResolver(TestCase): def setUp(self): - logical_id_translator = { - "EnvironmentType": "prod", - "AWS::AccountId": "123456789012", - } + logical_id_translator = {"EnvironmentType": "prod", "AWS::AccountId": "123456789012"} self.resolver = IntrinsicResolver( - template={}, - symbol_resolver=IntrinsicsSymbolTable( - logical_id_translator=logical_id_translator - ) + template={}, symbol_resolver=IntrinsicsSymbolTable(logical_id_translator=logical_id_translator) ) def test_fn_equals_basic_true(self): intrinsic = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} - result = 
self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_equals_basic_false(self): intrinsic = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "NotProd"]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) def test_fn_equals_nested_true(self): @@ -741,33 +554,26 @@ def test_fn_equals_nested_true(self): intrinsic_base_2 = {"Fn::Equals": [{"Ref": "AWS::AccountId"}, "123456789012"]} intrinsic = {"Fn::Equals": [intrinsic_base_1, intrinsic_base_2]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_equals_nested_false(self): intrinsic_base_1 = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} - intrinsic_base_2 = { - "Fn::Equals": [{"Ref": "AWS::AccountId"}, "NOT_A_VALID_ACCOUNT_ID"] - } + intrinsic_base_2 = {"Fn::Equals": [{"Ref": "AWS::AccountId"}, "NOT_A_VALID_ACCOUNT_ID"]} intrinsic = {"Fn::Equals": [intrinsic_base_1, intrinsic_base_2]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) @parameterized.expand( [ - ( - "Fn::Equals must have arguments that resolve to a string: {}".format( - item - ), - item, - ) + ("Fn::Equals must have arguments that resolve to a string: {}".format(item), item) for item in [True, False, {}, 42, None, "test"] ] ) def test_fn_equals_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Equals": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Equals": intrinsic}, True) @parameterized.expand( [ @@ -777,98 +583,75 @@ def test_fn_equals_arguments_invalid_formats(self, name, intrinsic): ) def test_fn_equals_invalid_number_arguments(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Equals": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Equals": intrinsic}, True) class TestIntrinsicFnNotResolver(TestCase): def setUp(self): - logical_id_translator = { - "EnvironmentType": "prod", - "AWS::AccountId": "123456789012", - } + logical_id_translator = {"EnvironmentType": "prod", "AWS::AccountId": "123456789012"} conditions = { "TestCondition": {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]}, "NotTestCondition": {"Fn::Not": [{"Condition": "TestCondition"}]}, } template = {"Conditions": conditions} - symbol_resolver = IntrinsicsSymbolTable( - template=template, logical_id_translator=logical_id_translator - ) - self.resolver = IntrinsicResolver( - template=template, symbol_resolver=symbol_resolver - ) + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator=logical_id_translator) + self.resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) def test_fn_not_basic_false(self): intrinsic = {"Fn::Not": [{"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]}]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) def test_fn_not_basic_true(self): - intrinsic = { - "Fn::Not": [{"Fn::Equals": [{"Ref": "EnvironmentType"}, "NotProd"]}] - } - result = 
self.resolver.intrinsic_property_resolver(intrinsic) + intrinsic = {"Fn::Not": [{"Fn::Equals": [{"Ref": "EnvironmentType"}, "NotProd"]}]} + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_not_nested_true(self): - intrinsic_base_1 = { - "Fn::Not": [{"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]}] - } + intrinsic_base_1 = {"Fn::Not": [{"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]}]} intrinsic_base_2 = {"Fn::Equals": [{"Ref": "AWS::AccountId"}, "123456789012"]} # !(True && True) intrinsic = {"Fn::Not": [{"Fn::Equals": [intrinsic_base_1, intrinsic_base_2]}]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_not_nested_false(self): - intrinsic_base_1 = { - "Fn::Not": [{"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]}] - } - intrinsic_base_2 = { - "Fn::Not": [{"Fn::Equals": [{"Ref": "AWS::AccountId"}, "123456789012"]}] - } + intrinsic_base_1 = {"Fn::Not": [{"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]}]} + intrinsic_base_2 = {"Fn::Not": [{"Fn::Equals": [{"Ref": "AWS::AccountId"}, "123456789012"]}]} intrinsic = {"Fn::Not": [{"Fn::Equals": [intrinsic_base_1, intrinsic_base_2]}]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) def test_fn_not_condition_false(self): intrinsic = {"Fn::Not": [{"Condition": "TestCondition"}]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) def test_fn_not_condition_true(self): intrinsic = {"Fn::Not": [{"Condition": "NotTestCondition"}]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) @parameterized.expand( [ - ( - "Fn::Not must have an argument that resolves to a list: {}".format( - item - ), - item, - ) + ("Fn::Not must have an argument that resolves to a list: {}".format(item), item) for item in [True, False, {}, 42, None, "test"] ] ) def test_fn_not_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Not": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Not": intrinsic}, True) @parameterized.expand( [ - ( - "Fn::Not items in the list must resolve to booleans: {}".format(item), - item, - ) + ("Fn::Not items in the list must resolve to booleans: {}".format(item), item) for item in [{}, 42, None, "test"] ] ) def test_fn_not_first_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Not": [intrinsic]}) + self.resolver.intrinsic_property_resolver({"Fn::Not": [intrinsic]}, True) @parameterized.expand( [ @@ -878,75 +661,50 @@ def test_fn_not_first_arguments_invalid_formats(self, name, intrinsic): ) def test_fn_not_invalid_number_arguments(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Not": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Not": intrinsic}, True) def test_fn_not_invalid_condition(self): with self.assertRaises(InvalidIntrinsicException, msg="Invalid Condition"): - self.resolver.intrinsic_property_resolver( - 
{"Fn::Not": [{"Condition": "NOT_VALID_CONDITION"}]} - ) + self.resolver.intrinsic_property_resolver({"Fn::Not": [{"Condition": "NOT_VALID_CONDITION"}]}, True) class TestIntrinsicFnAndResolver(TestCase): def setUp(self): - logical_id_translator = { - "EnvironmentType": "prod", - "AWS::AccountId": "123456789012", - } + logical_id_translator = {"EnvironmentType": "prod", "AWS::AccountId": "123456789012"} conditions = { "TestCondition": {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]}, "NotTestCondition": {"Fn::Not": [{"Condition": "TestCondition"}]}, } template = {"Conditions": conditions} - symbol_resolver = IntrinsicsSymbolTable( - template=template, logical_id_translator=logical_id_translator - ) - self.resolver = IntrinsicResolver( - template=template, symbol_resolver=symbol_resolver - ) + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator=logical_id_translator) + self.resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) def test_fn_and_basic_true(self): prod_fn_equals = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} - intrinsic = { - "Fn::And": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals] - } - result = self.resolver.intrinsic_property_resolver(intrinsic) + intrinsic = {"Fn::And": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals]} + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_and_basic_false(self): prod_fn_equals = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} - intrinsic = { - "Fn::And": [ - prod_fn_equals, - {"Condition": "NotTestCondition"}, - prod_fn_equals, - ] - } - result = self.resolver.intrinsic_property_resolver(intrinsic) + intrinsic = {"Fn::And": [prod_fn_equals, {"Condition": "NotTestCondition"}, prod_fn_equals]} + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) def test_fn_and_nested_true(self): prod_fn_equals = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} - intrinsic_base = { - "Fn::And": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals] - } + intrinsic_base = {"Fn::And": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals]} fn_not_intrinsic = {"Fn::Not": [{"Condition": "NotTestCondition"}]} intrinsic = {"Fn::And": [intrinsic_base, fn_not_intrinsic, prod_fn_equals]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_and_nested_false(self): prod_fn_equals = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} prod_fn_not_equals = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "NOT_EQUAL"]} - intrinsic_base = { - "Fn::And": [ - prod_fn_equals, - {"Condition": "NotTestCondition"}, - prod_fn_equals, - ] - } + intrinsic_base = {"Fn::And": [prod_fn_equals, {"Condition": "NotTestCondition"}, prod_fn_equals]} intrinsic = {"Fn::And": [{"Fn::Not": [intrinsic_base]}, prod_fn_not_equals]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) @parameterized.expand( @@ -957,178 +715,122 @@ def test_fn_and_nested_false(self): ) def test_fn_and_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::And": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::And": intrinsic}, True) @parameterized.expand( [ - ( - 
"Fn:And must have all arguments that resolves to booleans".format(item), - item, - ) + ("Fn:And must have all arguments that resolves to booleans".format(item), item) for item in [{}, 42, None, "test"] ] ) def test_fn_and_all_arguments_bool(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::And": [intrinsic, intrinsic, intrinsic]} - ) + self.resolver.intrinsic_property_resolver({"Fn::And": [intrinsic, intrinsic, intrinsic]}, True) def test_fn_and_invalid_condition(self): with self.assertRaises(InvalidIntrinsicException, msg="Invalid Condition"): - self.resolver.intrinsic_property_resolver( - {"Fn::And": [{"Condition": "NOT_VALID_CONDITION"}]} - ) + self.resolver.intrinsic_property_resolver({"Fn::And": [{"Condition": "NOT_VALID_CONDITION"}]}, True) class TestIntrinsicFnOrResolver(TestCase): def setUp(self): - logical_id_translator = { - "EnvironmentType": "prod", - "AWS::AccountId": "123456789012", - } + logical_id_translator = {"EnvironmentType": "prod", "AWS::AccountId": "123456789012"} conditions = { "TestCondition": {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]}, "NotTestCondition": {"Fn::Not": [{"Condition": "TestCondition"}]}, } template = {"Conditions": conditions} - symbol_resolver = IntrinsicsSymbolTable( - template=template, logical_id_translator=logical_id_translator - ) - self.resolver = IntrinsicResolver( - template=template, symbol_resolver=symbol_resolver - ) + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator=logical_id_translator) + self.resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) def test_fn_or_basic_true(self): prod_fn_equals = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} - intrinsic = { - "Fn::Or": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals] - } - result = self.resolver.intrinsic_property_resolver(intrinsic) + intrinsic = {"Fn::Or": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals]} + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_or_basic_single_true(self): intrinsic = {"Fn::Or": [False, False, {"Condition": "TestCondition"}, False]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_or_basic_false(self): prod_fn_equals = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} intrinsic = { - "Fn::Or": [ - {"Fn::Not": [prod_fn_equals]}, - {"Condition": "NotTestCondition"}, - {"Fn::Not": [prod_fn_equals]}, - ] + "Fn::Or": [{"Fn::Not": [prod_fn_equals]}, {"Condition": "NotTestCondition"}, {"Fn::Not": [prod_fn_equals]}] } - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) def test_fn_or_nested_true(self): prod_fn_equals = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} failed_intrinsic_or = { - "Fn::Or": [ - {"Fn::Not": [prod_fn_equals]}, - {"Condition": "NotTestCondition"}, - {"Fn::Not": [prod_fn_equals]}, - ] - } - intrinsic_base = { - "Fn::Or": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals] + "Fn::Or": [{"Fn::Not": [prod_fn_equals]}, {"Condition": "NotTestCondition"}, {"Fn::Not": [prod_fn_equals]}] } + intrinsic_base = {"Fn::Or": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals]} fn_not_intrinsic = {"Fn::Not": [{"Condition": "NotTestCondition"}]} - 
intrinsic = { - "Fn::Or": [ - failed_intrinsic_or, - intrinsic_base, - fn_not_intrinsic, - fn_not_intrinsic, - ] - } - result = self.resolver.intrinsic_property_resolver(intrinsic) + intrinsic = {"Fn::Or": [failed_intrinsic_or, intrinsic_base, fn_not_intrinsic, fn_not_intrinsic]} + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_or_nested_false(self): prod_fn_equals = {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]} failed_intrinsic_or = { - "Fn::Or": [ - {"Fn::Not": [prod_fn_equals]}, - {"Condition": "NotTestCondition"}, - {"Fn::Not": [prod_fn_equals]}, - ] - } - intrinsic_base = { - "Fn::Or": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals] + "Fn::Or": [{"Fn::Not": [prod_fn_equals]}, {"Condition": "NotTestCondition"}, {"Fn::Not": [prod_fn_equals]}] } + intrinsic_base = {"Fn::Or": [prod_fn_equals, {"Condition": "TestCondition"}, prod_fn_equals]} intrinsic = {"Fn::Or": [failed_intrinsic_or, {"Fn::Not": [intrinsic_base]}]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) @parameterized.expand( [ - ( - "Fn::Or must have an argument that resolves to a list: {}".format(item), - item, - ) + ("Fn::Or must have an argument that resolves to a list: {}".format(item), item) for item in [True, False, {}, 42, None, "test"] ] ) def test_fn_or_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::Or": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::Or": intrinsic}, True) @parameterized.expand( [ - ( - "Fn::Or must have all arguments resolve to booleans: {}".format(item), - item, - ) + ("Fn::Or must have all arguments resolve to booleans: {}".format(item), item) for item in [{}, 42, None, "test"] ] ) def test_fn_or_all_arguments_bool(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::Or": [intrinsic, intrinsic, intrinsic]} - ) + self.resolver.intrinsic_property_resolver({"Fn::Or": [intrinsic, intrinsic, intrinsic]}, True) def test_fn_or_invalid_condition(self): with self.assertRaises(InvalidIntrinsicException, msg="Invalid Condition"): - self.resolver.intrinsic_property_resolver( - {"Fn::Or": [{"Condition": "NOT_VALID_CONDITION"}]} - ) + self.resolver.intrinsic_property_resolver({"Fn::Or": [{"Condition": "NOT_VALID_CONDITION"}]}, True) class TestIntrinsicFnIfResolver(TestCase): def setUp(self): - logical_id_translator = { - "EnvironmentType": "prod", - "AWS::AccountId": "123456789012", - } + logical_id_translator = {"EnvironmentType": "prod", "AWS::AccountId": "123456789012"} conditions = { "TestCondition": {"Fn::Equals": [{"Ref": "EnvironmentType"}, "prod"]}, "NotTestCondition": {"Fn::Not": [{"Condition": "TestCondition"}]}, "InvalidCondition": ["random items"], } template = {"Conditions": conditions} - symbol_resolver = IntrinsicsSymbolTable( - template=template, logical_id_translator=logical_id_translator - ) - self.resolver = IntrinsicResolver( - template=template, symbol_resolver=symbol_resolver - ) + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator=logical_id_translator) + self.resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) def test_fn_if_basic_true(self): intrinsic = {"Fn::If": ["TestCondition", True, False]} - result = 
self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_fn_if_basic_false(self): intrinsic = {"Fn::If": ["NotTestCondition", True, False]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) def test_nested_fn_if_true(self): @@ -1136,7 +838,7 @@ def test_nested_fn_if_true(self): intrinsic_base_2 = {"Fn::If": ["TestCondition", True, False]} intrinsic = {"Fn::If": ["TestCondition", intrinsic_base_2, intrinsic_base_1]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertTrue(result) def test_nested_fn_if_false(self): @@ -1144,7 +846,7 @@ def test_nested_fn_if_false(self): intrinsic_base_2 = {"Fn::If": ["TestCondition", True, False]} intrinsic = {"Fn::If": ["TestCondition", intrinsic_base_1, intrinsic_base_2]} - result = self.resolver.intrinsic_property_resolver(intrinsic) + result = self.resolver.intrinsic_property_resolver(intrinsic, True) self.assertFalse(result) @parameterized.expand( @@ -1155,7 +857,7 @@ def test_nested_fn_if_false(self): ) def test_fn_if_arguments_invalid_formats(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver({"Fn::If": intrinsic}) + self.resolver.intrinsic_property_resolver({"Fn::If": intrinsic}, True) @parameterized.expand( [ @@ -1165,15 +867,11 @@ def test_fn_if_arguments_invalid_formats(self, name, intrinsic): ) def test_fn_if_condition_arguments_invalid_type(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::If": [intrinsic, True, False]} - ) + self.resolver.intrinsic_property_resolver({"Fn::If": [intrinsic, True, False]}, True) def test_fn_if_invalid_condition(self): with self.assertRaises(InvalidIntrinsicException, msg="Invalid Condition"): - self.resolver.intrinsic_property_resolver( - {"Fn::If": ["NOT_VALID_CONDITION", "test", "test"]} - ) + self.resolver.intrinsic_property_resolver({"Fn::If": ["NOT_VALID_CONDITION", "test", "test"]}, True) @parameterized.expand( [ @@ -1183,15 +881,11 @@ def test_fn_if_invalid_condition(self): ) def test_fn_if_invalid_number_arguments(self, name, intrinsic): with self.assertRaises(InvalidIntrinsicException, msg=name): - self.resolver.intrinsic_property_resolver( - {"Fn::Not": ["TestCondition"] + intrinsic} - ) + self.resolver.intrinsic_property_resolver({"Fn::Not": ["TestCondition"] + intrinsic}, True) def test_fn_if_condition_not_bool_fail(self): with self.assertRaises(InvalidIntrinsicException, msg="Invalid Condition"): - self.resolver.intrinsic_property_resolver( - {"Fn::If": ["InvalidCondition", "test", "test"]} - ) + self.resolver.intrinsic_property_resolver({"Fn::If": ["InvalidCondition", "test", "test"]}, True) class TestIntrinsicAttribteResolution(TestCase): @@ -1201,7 +895,7 @@ def setUp(self): "RestApi": "NewRestApi", "LambdaFunction": { "Arn": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east" - "-1:123456789012:LambdaFunction/invocations" + "-1:123456789012:LambdaFunction/invocations" }, "AWS::StackId": "12301230123", "AWS::Region": "us-east-1", @@ -1211,7 +905,8 @@ def setUp(self): self.logical_id_translator = logical_id_translator integration_path = str( - Path(__file__).resolve().parents[0].joinpath('test_data', 
'inputs/test_intrinsic_template_resolution.json')) + Path(__file__).resolve().parents[0].joinpath("test_data", "inputs/test_intrinsic_template_resolution.json") + ) with open(integration_path) as f: template = json.load(f) @@ -1223,22 +918,17 @@ def setUp(self): symbol_resolver = IntrinsicsSymbolTable( template=self.template, logical_id_translator=self.logical_id_translator ) - self.resolver = IntrinsicResolver( - template=self.template, symbol_resolver=symbol_resolver - ) + self.resolver = IntrinsicResolver(template=self.template, symbol_resolver=symbol_resolver) def test_basic_attribte_resolution(self): resolved_template = self.resolver.resolve_attribute(self.resources, ignore_errors=False) expected_resources = { - "HelloHandler2E4FBA4D": { - "Properties": {"handler": "main.handle"}, - "Type": "AWS::Lambda::Function", - }, + "HelloHandler2E4FBA4D": {"Properties": {"handler": "main.handle"}, "Type": "AWS::Lambda::Function"}, "LambdaFunction": { "Properties": { "Uri": "arn:aws:apigateway:us-east-1a:lambda:path/2015-03-31/functions/arn:aws" - ":lambda:us-east-1:406033500479:function:HelloHandler2E4FBA4D/invocations" + ":lambda:us-east-1:406033500479:function:HelloHandler2E4FBA4D/invocations" }, "Type": "AWS::Lambda::Function", }, @@ -1247,119 +937,60 @@ def test_basic_attribte_resolution(self): "Handler": "layer-main.custom_layer_handler", "Runtime": "python3.6", "CodeUri": ".", - "Layers": [{"Ref": "MyCustomLambdaLayer"}] + "Layers": [{"Ref": "MyCustomLambdaLayer"}], }, "Type": "AWS::Serverless::Function", }, - "MyCustomLambdaLayer": { - "Type": "AWS::Lambda::LayerVersion", - "Properties": { - "Content": "custom_layer/" - } - }, + "MyCustomLambdaLayer": {"Type": "AWS::Lambda::LayerVersion", "Properties": {"Content": "custom_layer/"}}, "RestApi": { - "Properties": { - "Body": "YTtlO2Y7ZA==", - "BodyS3Location": "https://s3location/", - }, + "Properties": {"Body": "YTtlO2Y7ZA==", "BodyS3Location": "https://s3location/"}, "Type": "AWS::ApiGateway::RestApi", }, - "RestApiResource": { - "Properties": { - "PathPart": "{proxy+}", - "RestApiId": "RestApi", - "parentId": "/", - } - }, + "RestApiResource": {"Properties": {"PathPart": "{proxy+}", "RestApiId": "RestApi", "parentId": "/"}}, } self.assertEqual(dict(resolved_template), expected_resources) def test_template_fail_errors(self): resources = deepcopy(self.resources) - resources["RestApi.Deployment"]["Properties"]["BodyS3Location"] = { - "Fn::FindInMap": [] - } - template = { - "Mappings": self.mappings, - "Conditions": self.conditions, - "Resources": resources, - } - symbol_resolver = IntrinsicsSymbolTable( - template=template, logical_id_translator=self.logical_id_translator - ) + resources["RestApi.Deployment"]["Properties"]["BodyS3Location"] = {"Fn::FindInMap": []} + template = {"Mappings": self.mappings, "Conditions": self.conditions, "Resources": resources} + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator=self.logical_id_translator) resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) with self.assertRaises(InvalidIntrinsicException, msg="Invalid Find In Map"): resolver.resolve_attribute(resources, ignore_errors=False) def test_template_ignore_errors(self): resources = deepcopy(self.resources) - resources["RestApi.Deployment"]["Properties"]["BodyS3Location"] = { - "Fn::FindInMap": [] - } - template = { - "Mappings": self.mappings, - "Conditions": self.conditions, - "Resources": resources, - } - symbol_resolver = IntrinsicsSymbolTable( - template=template, 
logical_id_translator=self.logical_id_translator - ) + resources["RestApi.Deployment"]["Properties"]["BodyS3Location"] = {"Fn::FindInMap": []} + template = {"Mappings": self.mappings, "Conditions": self.conditions, "Resources": resources} + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator=self.logical_id_translator) resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) result = resolver.resolve_attribute(resources, ignore_errors=True) expected_template = { - "HelloHandler2E4FBA4D": { - "Properties": {"handler": "main.handle"}, - "Type": "AWS::Lambda::Function", - }, + "HelloHandler2E4FBA4D": {"Properties": {"handler": "main.handle"}, "Type": "AWS::Lambda::Function"}, "ReferenceLambdaLayerVersionLambdaFunction": { "Properties": { "Handler": "layer-main.custom_layer_handler", "Runtime": "python3.6", "CodeUri": ".", - "Layers": [{"Ref": "MyCustomLambdaLayer"}] + "Layers": [{"Ref": "MyCustomLambdaLayer"}], }, "Type": "AWS::Serverless::Function", }, - "MyCustomLambdaLayer": { - "Type": "AWS::Lambda::LayerVersion", - "Properties": { - "Content": "custom_layer/" - } - }, + "MyCustomLambdaLayer": {"Type": "AWS::Lambda::LayerVersion", "Properties": {"Content": "custom_layer/"}}, "LambdaFunction": { "Properties": { "Uri": "arn:aws:apigateway:us-east-1a:lambda:path/2015-03-31" - "/functions/arn:aws:lambda:us-east-1:406033500479" - ":function:HelloHandler2E4FBA4D/invocations" + "/functions/arn:aws:lambda:us-east-1:406033500479" + ":function:HelloHandler2E4FBA4D/invocations" }, "Type": "AWS::Lambda::Function", }, - "RestApi.Deployment": { - "Properties": { - "Body": { - "Fn::Base64": { - "Fn::Join": [ - ";", # NOQA - { - "Fn::Split": [ - ",", - {"Fn::Join": [",", ["a", "e", "f", "d"]]}, - ] - }, - ] - } - }, - "BodyS3Location": {"Fn::FindInMap": []}, - }, + "RestApi": { + "Properties": {"Body": "YTtlO2Y7ZA==", "BodyS3Location": {"Fn::FindInMap": []}}, "Type": "AWS::ApiGateway::RestApi", }, - "RestApiResource": { - "Properties": { - "PathPart": "{proxy+}", - "RestApiId": "RestApi", - "parentId": "/", - } - } + "RestApiResource": {"Properties": {"PathPart": "{proxy+}", "RestApiId": "RestApi", "parentId": "/"}}, } self.assertEqual(expected_template, dict(result)) @@ -1367,162 +998,85 @@ def test_template_ignore_errors(self): class TestResolveTemplate(TestCase): def test_parameter_not_resolved(self): template = { - "Parameters": { - "TestStageName": { - "Default": "test", - "Type": "string" - } - }, + "Parameters": {"TestStageName": {"Default": "test", "Type": "string"}}, "Resources": { - "Test": { - "Type": "AWS::ApiGateway::RestApi", - "Parameters": { - "StageName": { - "Ref": "TestStageName" - } - } - } - } + "Test": {"Type": "AWS::ApiGateway::RestApi", "Parameters": {"StageName": {"Ref": "TestStageName"}}} + }, } expected_template = { - "Parameters": { - "TestStageName": { - "Default": "test", - "Type": "string" - }, - }, - "Resources": OrderedDict({ - "Test": { - "Type": "AWS::ApiGateway::RestApi", - "Parameters": { - "StageName": "test" - } - } - }) + "Parameters": {"TestStageName": {"Default": "test", "Type": "string"}}, + "Resources": OrderedDict( + {"Test": {"Type": "AWS::ApiGateway::RestApi", "Parameters": {"StageName": "test"}}} + ), } - symbol_resolver = IntrinsicsSymbolTable( - template=template, logical_id_translator={} - ) + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator={}) resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) self.assertEqual(resolver.resolve_template(), 
expected_template) def test_mappings_directory_resolved(self): template = { - "Mappings": { - "TestStageName": { - "TestKey": { - "key": "StageName" - } - } - }, + "Mappings": {"TestStageName": {"TestKey": {"key": "StageName"}}}, "Resources": { "Test": { "Type": "AWS::ApiGateway::RestApi", - "Parameters": { - "StageName": { - "Fn::FindInMap": ["TestStageName", "TestKey", "key"] - } - } + "Parameters": {"StageName": {"Fn::FindInMap": ["TestStageName", "TestKey", "key"]}}, } - } + }, } expected_template = { - "Mappings": { - "TestStageName": { - "TestKey": { - "key": "StageName" - } - } - }, - "Resources": OrderedDict({ - "Test": { - "Type": "AWS::ApiGateway::RestApi", - "Parameters": { - "StageName": "StageName" - } - } - }) + "Mappings": {"TestStageName": {"TestKey": {"key": "StageName"}}}, + "Resources": OrderedDict( + {"Test": {"Type": "AWS::ApiGateway::RestApi", "Parameters": {"StageName": "StageName"}}} + ), } - symbol_resolver = IntrinsicsSymbolTable( - template=template, logical_id_translator={} - ) + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator={}) resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) self.assertEqual(resolver.resolve_template(), expected_template) def test_output_resolved(self): template = { - "Parameters": { - "StageRef": { - "Default": "StageName" - } - }, - "Outputs": { - "TestStageName": { - "Ref": "Test" - }, - "ParameterRef": { - "Ref": "StageRef" - } - }, + "Parameters": {"StageRef": {"Default": "StageName"}}, + "Outputs": {"TestStageName": {"Ref": "Test"}, "ParameterRef": {"Ref": "StageRef"}}, "Resources": { - "Test": { - "Type": "AWS::ApiGateway::RestApi", - "Parameters": { - "StageName": { - "Ref": "StageRef" - } - } - } - } + "Test": {"Type": "AWS::ApiGateway::RestApi", "Parameters": {"StageName": {"Ref": "StageRef"}}} + }, } expected_template = { - "Parameters": { - "StageRef": { - "Default": "StageName" - } - }, - "Resources": OrderedDict({ - "Test": { - "Type": "AWS::ApiGateway::RestApi", - "Parameters": { - "StageName": "StageName" - } - } - }), - "Outputs": OrderedDict({ - "TestStageName": "Test", - "ParameterRef": "StageName" - }) + "Parameters": {"StageRef": {"Default": "StageName"}}, + "Resources": OrderedDict( + {"Test": {"Type": "AWS::ApiGateway::RestApi", "Parameters": {"StageName": "StageName"}}} + ), + "Outputs": OrderedDict({"TestStageName": "Test", "ParameterRef": "StageName"}), } - symbol_resolver = IntrinsicsSymbolTable( - template=template, logical_id_translator={} - ) + symbol_resolver = IntrinsicsSymbolTable(template=template, logical_id_translator={}) resolver = IntrinsicResolver(template=template, symbol_resolver=symbol_resolver) self.assertEqual(resolver.resolve_template(), expected_template) def load_test_data(self, template_path): - integration_path = str( - Path(__file__).resolve().parents[0].joinpath('test_data', template_path)) + integration_path = str(Path(__file__).resolve().parents[0].joinpath("test_data", template_path)) with open(integration_path) as f: template = json.load(f) return template - @parameterized.expand([ - ('inputs/test_intrinsic_template_resolution.json', 'outputs/output_test_intrinsic_template_resolution.json'), - ('inputs/test_layers_resolution.json', 'outputs/outputs_test_layers_resolution.json'), - ('inputs/test_methods_resource_resolution.json', 'outputs/outputs_methods_resource_resolution.json'), - ]) + @parameterized.expand( + [ + ( + "inputs/test_intrinsic_template_resolution.json", + 
"outputs/output_test_intrinsic_template_resolution.json", + ), + ("inputs/test_layers_resolution.json", "outputs/outputs_test_layers_resolution.json"), + ("inputs/test_methods_resource_resolution.json", "outputs/outputs_methods_resource_resolution.json"), + ] + ) def test_intrinsic_sample_inputs_outputs(self, input, output): input_template = self.load_test_data(input) - symbol_resolver = IntrinsicsSymbolTable( - template=input_template, logical_id_translator={} - ) + symbol_resolver = IntrinsicsSymbolTable(template=input_template, logical_id_translator={}) resolver = IntrinsicResolver(template=input_template, symbol_resolver=symbol_resolver) processed_template = resolver.resolve_template() processed_template = json.loads(json.dumps(processed_template)) # Removes formatting of ordered dicts diff --git a/tests/unit/lib/intrinsic_resolver/test_intrinsics_symbol_table.py b/tests/unit/lib/intrinsic_resolver/test_intrinsics_symbol_table.py index 6887b54e74..6d03cb16ed 100644 --- a/tests/unit/lib/intrinsic_resolver/test_intrinsics_symbol_table.py +++ b/tests/unit/lib/intrinsic_resolver/test_intrinsics_symbol_table.py @@ -12,43 +12,41 @@ def setUp(self): self.symbol_table = IntrinsicsSymbolTable(template={}) def test_handle_account_id_default(self): - self.assertEquals(self.symbol_table.handle_pseudo_account_id(), "123456789012") + self.assertEqual(self.symbol_table.handle_pseudo_account_id(), "123456789012") def test_pseudo_partition(self): - self.assertEquals(self.symbol_table.handle_pseudo_partition(), "aws") + self.assertEqual(self.symbol_table.handle_pseudo_partition(), "aws") @patch("samcli.lib.intrinsic_resolver.intrinsics_symbol_table.os") def test_pseudo_partition_gov(self, mock_os): mock_os.getenv.return_value = "us-west-gov-1" - self.assertEquals(self.symbol_table.handle_pseudo_partition(), "aws-us-gov") + self.assertEqual(self.symbol_table.handle_pseudo_partition(), "aws-us-gov") @patch("samcli.lib.intrinsic_resolver.intrinsics_symbol_table.os") def test_pseudo_partition_china(self, mock_os): mock_os.getenv.return_value = "cn-west-1" - self.assertEquals(self.symbol_table.handle_pseudo_partition(), "aws-cn") + self.assertEqual(self.symbol_table.handle_pseudo_partition(), "aws-cn") @patch("samcli.lib.intrinsic_resolver.intrinsics_symbol_table.os") def test_pseudo_region_environ(self, mock_os): mock_os.getenv.return_value = "mytemp" - self.assertEquals(self.symbol_table.handle_pseudo_region(), "mytemp") + self.assertEqual(self.symbol_table.handle_pseudo_region(), "mytemp") @patch("samcli.lib.intrinsic_resolver.intrinsics_symbol_table.os") def test_pseudo_default_region(self, mock_os): mock_os.getenv.return_value = None - self.assertEquals(self.symbol_table.handle_pseudo_region(), "us-east-1") + self.assertEqual(self.symbol_table.handle_pseudo_region(), "us-east-1") def test_pseudo_no_value(self): self.assertIsNone(self.symbol_table.handle_pseudo_no_value()) def test_pseudo_url_prefix_default(self): - self.assertEquals(self.symbol_table.handle_pseudo_url_prefix(), "amazonaws.com") + self.assertEqual(self.symbol_table.handle_pseudo_url_prefix(), "amazonaws.com") @patch("samcli.lib.intrinsic_resolver.intrinsics_symbol_table.os") def test_pseudo_url_prefix_china(self, mock_os): mock_os.getenv.return_value = "cn-west-1" - self.assertEquals( - self.symbol_table.handle_pseudo_url_prefix(), "amazonaws.com.cn" - ) + self.assertEqual(self.symbol_table.handle_pseudo_url_prefix(), "amazonaws.com.cn") def test_get_availability_zone(self): res = IntrinsicsSymbolTable.get_availability_zone("us-east-1") 
@@ -64,59 +62,35 @@ def test_handle_pseudo_stack_name(self): def test_handle_pseudo_stack_id(self): res = IntrinsicsSymbolTable.handle_pseudo_stack_id() - self.assertEqual(res, "arn:aws:cloudformation:us-east-1:123456789012:stack/" - "local/51af3dc0-da77-11e4-872e-1234567db123") + self.assertEqual( + res, "arn:aws:cloudformation:us-east-1:123456789012:stack/" "local/51af3dc0-da77-11e4-872e-1234567db123" + ) class TestSymbolResolution(TestCase): def test_parameter_symbols(self): - template = { - "Resources": {}, - "Parameters": { - "Test": { - "Default": "data" - } - } - } + template = {"Resources": {}, "Parameters": {"Test": {"Default": "data"}}} symbol_resolver = IntrinsicsSymbolTable(template=template) result = symbol_resolver.resolve_symbols("Test", IntrinsicResolver.REF) - self.assertEquals(result, "data") + self.assertEqual(result, "data") def test_default_type_resolver_function(self): - template = { - "Resources": { - "MyApi": { - "Type": "AWS::ApiGateway::RestApi" - } - }, - } - default_type_resolver = { - "AWS::ApiGateway::RestApi": { - "RootResourceId": lambda logical_id: logical_id - } - } + template = {"Resources": {"MyApi": {"Type": "AWS::ApiGateway::RestApi"}}} + default_type_resolver = {"AWS::ApiGateway::RestApi": {"RootResourceId": lambda logical_id: logical_id}} symbol_resolver = IntrinsicsSymbolTable(template=template, default_type_resolver=default_type_resolver) result = symbol_resolver.resolve_symbols("MyApi", "RootResourceId") - self.assertEquals(result, "MyApi") + self.assertEqual(result, "MyApi") def test_custom_attribute_resolver(self): - template = { - "Resources": { - "MyApi": { - "Type": "AWS::ApiGateway::RestApi" - } - }, - } - common_attribute_resolver = { - "Arn": "test" - } + template = {"Resources": {"MyApi": {"Type": "AWS::ApiGateway::RestApi"}}} + common_attribute_resolver = {"Arn": "test"} symbol_resolver = IntrinsicsSymbolTable(template=template, common_attribute_resolver=common_attribute_resolver) result = symbol_resolver.resolve_symbols("MyApi", "Arn") - self.assertEquals(result, "test") + self.assertEqual(result, "test") def test_unknown_symbol_translation(self): symbol_resolver = IntrinsicsSymbolTable(template={}) @@ -134,19 +108,19 @@ def test_basic_unknown_translated_string_translation(self): self.assertEqual(res, None) def test_arn_resolver_lambda(self): - res = IntrinsicsSymbolTable().arn_resolver('test') - self.assertEquals(res, "arn:aws:lambda:us-east-1:123456789012:function:test") + res = IntrinsicsSymbolTable().arn_resolver("test") + self.assertEqual(res, "arn:aws:lambda:us-east-1:123456789012:function:test") def test_arn_resolver(self): - res = IntrinsicsSymbolTable().arn_resolver('test', service_name="sns") - self.assertEquals(res, "arn:aws:sns:us-east-1:123456789012:test") + res = IntrinsicsSymbolTable().arn_resolver("test", service_name="sns") + self.assertEqual(res, "arn:aws:sns:us-east-1:123456789012:test") def test_resolver_ignore_errors(self): resolver = IntrinsicsSymbolTable() - res = resolver.resolve_symbols('UNKNOWN', "SOME UNKNOWN RESOURCE PROPERTY", ignore_errors=True) + res = resolver.resolve_symbols("UNKNOWN", "SOME UNKNOWN RESOURCE PROPERTY", ignore_errors=True) self.assertEqual(res, "$UNKNOWN.SOME UNKNOWN RESOURCE PROPERTY") def test_symbol_resolver_unknown_fail(self): resolver = IntrinsicsSymbolTable() with self.assertRaises(InvalidSymbolException): - resolver.resolve_symbols('UNKNOWN', "SOME UNKNOWN RESOURCE PROPERTY") + resolver.resolve_symbols("UNKNOWN", "SOME UNKNOWN RESOURCE PROPERTY") diff --git 
a/tests/unit/lib/logs/test_event.py b/tests/unit/lib/logs/test_event.py index 694df0b902..c093edf0e2 100644 --- a/tests/unit/lib/logs/test_event.py +++ b/tests/unit/lib/logs/test_event.py @@ -1,11 +1,9 @@ - from unittest import TestCase from samcli.lib.logs.event import LogEvent class TestLogEvent(TestCase): - def setUp(self): self.group_name = "log group name" self.stream_name = "stream name" @@ -14,52 +12,44 @@ def setUp(self): self.timestamp_str = "2018-07-06T13:09:54" def test_must_extract_fields_from_event(self): - event = LogEvent(self.group_name, { - "timestamp": self.timestamp, - "logStreamName": self.stream_name, - "message": self.message - }) + event = LogEvent( + self.group_name, {"timestamp": self.timestamp, "logStreamName": self.stream_name, "message": self.message} + ) - self.assertEquals(event.log_group_name, self.group_name) - self.assertEquals(event.log_stream_name, self.stream_name) - self.assertEquals(event.message, self.message) - self.assertEquals(self.timestamp_str, event.timestamp) + self.assertEqual(event.log_group_name, self.group_name) + self.assertEqual(event.log_stream_name, self.stream_name) + self.assertEqual(event.message, self.message) + self.assertEqual(self.timestamp_str, event.timestamp) def test_must_ignore_if_some_fields_are_empty(self): - event = LogEvent(self.group_name, { - "logStreamName": "stream name" - }) + event = LogEvent(self.group_name, {"logStreamName": "stream name"}) - self.assertEquals(event.log_group_name, self.group_name) - self.assertEquals(event.log_stream_name, self.stream_name) - self.assertEquals(event.message, '') + self.assertEqual(event.log_group_name, self.group_name) + self.assertEqual(event.log_stream_name, self.stream_name) + self.assertEqual(event.message, "") self.assertIsNone(event.timestamp) def test_must_ignore_if_event_is_empty(self): event = LogEvent(self.group_name, {}) - self.assertEquals(event.log_group_name, self.group_name) + self.assertEqual(event.log_group_name, self.group_name) self.assertIsNone(event.log_stream_name) self.assertIsNone(event.message) self.assertIsNone(event.timestamp) def test_check_for_equality(self): - event = LogEvent(self.group_name, { - "timestamp": self.timestamp, - "logStreamName": self.stream_name, - "message": self.message - }) + event = LogEvent( + self.group_name, {"timestamp": self.timestamp, "logStreamName": self.stream_name, "message": self.message} + ) - other = LogEvent(self.group_name, { - "timestamp": self.timestamp, - "logStreamName": self.stream_name, - "message": self.message - }) + other = LogEvent( + self.group_name, {"timestamp": self.timestamp, "logStreamName": self.stream_name, "message": self.message} + ) - self.assertEquals(event, other) + self.assertEqual(event, other) def test_check_for_equality_with_other_data_types(self): event = LogEvent(self.group_name, {}) other = "this is not an event" - self.assertNotEquals(event, other) + self.assertNotEqual(event, other) diff --git a/tests/unit/lib/logs/test_fetcher.py b/tests/unit/lib/logs/test_fetcher.py index 0dd5518fec..96fb69d2b5 100644 --- a/tests/unit/lib/logs/test_fetcher.py +++ b/tests/unit/lib/logs/test_fetcher.py @@ -1,4 +1,3 @@ - import copy import datetime import botocore.session @@ -13,10 +12,9 @@ class TestLogsFetcher_fetch(TestCase): - def setUp(self): - real_client = botocore.session.get_session().create_client('logs', region_name="us-east-1") + real_client = botocore.session.get_session().create_client("logs", region_name="us-east-1") self.client_stubber = Stubber(real_client) self.fetcher = 
LogsFetcher(real_client) @@ -31,52 +29,53 @@ def setUp(self): "ingestionTime": 0, "logStreamName": self.stream_name, "message": "message 1", - "timestamp": self.timestamp + "timestamp": self.timestamp, }, - { "eventId": "id2", "ingestionTime": 0, "logStreamName": self.stream_name, "message": "message 2", - "timestamp": self.timestamp - } + "timestamp": self.timestamp, + }, ] } self.expected_events = [ - LogEvent(self.log_group_name, { + LogEvent( + self.log_group_name, + { "eventId": "id1", "ingestionTime": 0, "logStreamName": self.stream_name, "message": "message 1", - "timestamp": self.timestamp - }), - - LogEvent(self.log_group_name, { + "timestamp": self.timestamp, + }, + ), + LogEvent( + self.log_group_name, + { "eventId": "id2", "ingestionTime": 0, "logStreamName": self.stream_name, "message": "message 2", - "timestamp": self.timestamp - }) + "timestamp": self.timestamp, + }, + ), ] def test_must_fetch_logs_for_log_group(self): - expected_params = { - "logGroupName": self.log_group_name, - "interleaved": True - } + expected_params = {"logGroupName": self.log_group_name, "interleaved": True} # Configure the stubber to return the configured response. The stubber also verifies # that input params were provided as expected - self.client_stubber.add_response('filter_log_events', self.mock_api_response, expected_params) + self.client_stubber.add_response("filter_log_events", self.mock_api_response, expected_params) with self.client_stubber: events_iterable = self.fetcher.fetch(self.log_group_name) actual_result = list(events_iterable) - self.assertEquals(self.expected_events, actual_result) + self.assertEqual(self.expected_events, actual_result) def test_must_fetch_logs_with_all_params(self): pattern = "foobar" @@ -88,41 +87,34 @@ def test_must_fetch_logs_with_all_params(self): "interleaved": True, "startTime": to_timestamp(start), "endTime": to_timestamp(end), - "filterPattern": pattern + "filterPattern": pattern, } - self.client_stubber.add_response('filter_log_events', self.mock_api_response, expected_params) + self.client_stubber.add_response("filter_log_events", self.mock_api_response, expected_params) with self.client_stubber: events_iterable = self.fetcher.fetch(self.log_group_name, start=start, end=end, filter_pattern=pattern) actual_result = list(events_iterable) - self.assertEquals(self.expected_events, actual_result) + self.assertEqual(self.expected_events, actual_result) def test_must_paginate_using_next_token(self): """Make three API calls, first two returns a nextToken and last does not.""" token = "token" - expected_params = { - "logGroupName": self.log_group_name, - "interleaved": True - } - expected_params_with_token = { - "logGroupName": self.log_group_name, - "interleaved": True, - "nextToken": token - } + expected_params = {"logGroupName": self.log_group_name, "interleaved": True} + expected_params_with_token = {"logGroupName": self.log_group_name, "interleaved": True, "nextToken": token} mock_response_with_token = copy.deepcopy(self.mock_api_response) mock_response_with_token["nextToken"] = token # Call 1 returns a token. 
Also when first call is made, token is **not** passed as API params - self.client_stubber.add_response('filter_log_events', mock_response_with_token, expected_params) + self.client_stubber.add_response("filter_log_events", mock_response_with_token, expected_params) # Call 2 returns a token - self.client_stubber.add_response('filter_log_events', mock_response_with_token, expected_params_with_token) + self.client_stubber.add_response("filter_log_events", mock_response_with_token, expected_params_with_token) # Call 3 DOES NOT return a token. This will terminate the loop. - self.client_stubber.add_response('filter_log_events', self.mock_api_response, expected_params_with_token) + self.client_stubber.add_response("filter_log_events", self.mock_api_response, expected_params_with_token) # Same data was returned in each API call expected_events_result = self.expected_events + self.expected_events + self.expected_events @@ -131,11 +123,10 @@ def test_must_paginate_using_next_token(self): events_iterable = self.fetcher.fetch(self.log_group_name) actual_result = list(events_iterable) - self.assertEquals(expected_events_result, actual_result) + self.assertEqual(expected_events_result, actual_result) class TestLogsFetcher_tail(TestCase): - def setUp(self): self.fetcher = LogsFetcher(Mock()) @@ -164,7 +155,6 @@ def test_must_tail_logs_with_single_data_fetch(self, time_mock): self.fetcher.fetch.side_effect = [ self.mock_events1, - # Return empty data for `max_retries` number of polls self.mock_events_empty, self.mock_events_empty, @@ -174,7 +164,6 @@ def test_must_tail_logs_with_single_data_fetch(self, time_mock): expected_fetch_calls = [ # First fetch returns data call(ANY, start=self.start_time, filter_pattern=self.filter_pattern), - # Three empty fetches call(ANY, start=to_datetime(13), filter_pattern=self.filter_pattern), call(ANY, start=to_datetime(13), filter_pattern=self.filter_pattern), @@ -182,19 +171,19 @@ def test_must_tail_logs_with_single_data_fetch(self, time_mock): ] # One per poll - expected_sleep_calls = [ - call(self.poll_interval) for i in expected_fetch_calls - ] + expected_sleep_calls = [call(self.poll_interval) for i in expected_fetch_calls] - result_itr = self.fetcher.tail(self.log_group_name, - start=self.start_time, - filter_pattern=self.filter_pattern, - max_retries=self.max_retries, - poll_interval=self.poll_interval) + result_itr = self.fetcher.tail( + self.log_group_name, + start=self.start_time, + filter_pattern=self.filter_pattern, + max_retries=self.max_retries, + poll_interval=self.poll_interval, + ) - self.assertEquals(self.mock_events1, list(result_itr)) - self.assertEquals(expected_fetch_calls, self.fetcher.fetch.call_args_list) - self.assertEquals(expected_sleep_calls, time_mock.sleep.call_args_list) + self.assertEqual(self.mock_events1, list(result_itr)) + self.assertEqual(expected_fetch_calls, self.fetcher.fetch.call_args_list) + self.assertEqual(expected_sleep_calls, time_mock.sleep.call_args_list) @patch("samcli.lib.logs.fetcher.time") def test_must_tail_logs_with_multiple_data_fetches(self, time_mock): @@ -203,13 +192,10 @@ def test_must_tail_logs_with_multiple_data_fetches(self, time_mock): self.fetcher.fetch.side_effect = [ self.mock_events1, - # Just one empty fetch self.mock_events_empty, - # This fetch returns data self.mock_events2, - # Return empty data for `max_retries` number of polls self.mock_events_empty, self.mock_events_empty, @@ -219,13 +205,10 @@ def test_must_tail_logs_with_multiple_data_fetches(self, time_mock): expected_fetch_calls = [ # First 
fetch returns data call(ANY, start=self.start_time, filter_pattern=self.filter_pattern), - # This fetch was empty call(ANY, start=to_datetime(13), filter_pattern=self.filter_pattern), - # This fetch returned data call(ANY, start=to_datetime(13), filter_pattern=self.filter_pattern), - # Three empty fetches call(ANY, start=to_datetime(15), filter_pattern=self.filter_pattern), call(ANY, start=to_datetime(15), filter_pattern=self.filter_pattern), @@ -233,19 +216,19 @@ def test_must_tail_logs_with_multiple_data_fetches(self, time_mock): ] # One per poll - expected_sleep_calls = [ - call(self.poll_interval) for i in expected_fetch_calls - ] + expected_sleep_calls = [call(self.poll_interval) for i in expected_fetch_calls] - result_itr = self.fetcher.tail(self.log_group_name, - start=self.start_time, - filter_pattern=self.filter_pattern, - max_retries=self.max_retries, - poll_interval=self.poll_interval) + result_itr = self.fetcher.tail( + self.log_group_name, + start=self.start_time, + filter_pattern=self.filter_pattern, + max_retries=self.max_retries, + poll_interval=self.poll_interval, + ) - self.assertEquals(self.mock_events1 + self.mock_events2, list(result_itr)) - self.assertEquals(expected_fetch_calls, self.fetcher.fetch.call_args_list) - self.assertEquals(expected_sleep_calls, time_mock.sleep.call_args_list) + self.assertEqual(self.mock_events1 + self.mock_events2, list(result_itr)) + self.assertEqual(expected_fetch_calls, self.fetcher.fetch.call_args_list) + self.assertEqual(expected_sleep_calls, time_mock.sleep.call_args_list) @patch("samcli.lib.logs.fetcher.time") def test_without_start_time(self, time_mock): @@ -261,10 +244,12 @@ def test_without_start_time(self, time_mock): call(ANY, start=to_datetime(0), filter_pattern=ANY), ] - result_itr = self.fetcher.tail(self.log_group_name, - filter_pattern=self.filter_pattern, - max_retries=self.max_retries, - poll_interval=self.poll_interval) + result_itr = self.fetcher.tail( + self.log_group_name, + filter_pattern=self.filter_pattern, + max_retries=self.max_retries, + poll_interval=self.poll_interval, + ) - self.assertEquals([], list(result_itr)) - self.assertEquals(expected_fetch_calls, self.fetcher.fetch.call_args_list) + self.assertEqual([], list(result_itr)) + self.assertEqual(expected_fetch_calls, self.fetcher.fetch.call_args_list) diff --git a/tests/unit/lib/logs/test_formatter.py b/tests/unit/lib/logs/test_formatter.py index 23095f1983..5fe1828b59 100644 --- a/tests/unit/lib/logs/test_formatter.py +++ b/tests/unit/lib/logs/test_formatter.py @@ -1,4 +1,3 @@ - import json from unittest import TestCase @@ -10,17 +9,12 @@ class TestLogsFormatter_pretty_print_event(TestCase): - def setUp(self): self.colored_mock = Mock() self.group_name = "group name" self.stream_name = "stream name" self.message = "message" - self.event_dict = { - "timestamp": 1, - "message": self.message, - "logStreamName": self.stream_name - } + self.event_dict = {"timestamp": 1, "message": self.message, "logStreamName": self.stream_name} def test_must_serialize_event(self): colored_timestamp = "colored timestamp" @@ -30,10 +24,10 @@ def test_must_serialize_event(self): event = LogEvent(self.group_name, self.event_dict) - expected = ' '.join([colored_stream_name, colored_timestamp, self.message]) + expected = " ".join([colored_stream_name, colored_timestamp, self.message]) result = LogsFormatter._pretty_print_event(event, self.colored_mock) - self.assertEquals(expected, result) + self.assertEqual(expected, result) self.colored_mock.yellow.has_calls() 
self.colored_mock.cyan.assert_called_with(self.stream_name) @@ -43,7 +37,6 @@ def _passthru_formatter(event, colored): class TestLogsFormatter_do_format(TestCase): - def setUp(self): self.colored_mock = Mock() @@ -62,13 +55,13 @@ def test_must_map_formatters_sequentially(self, pretty_print_mock): expected_call_order = [ call(1, colored=self.colored_mock), call(2, colored=self.colored_mock), - call(3, colored=self.colored_mock) + call(3, colored=self.colored_mock), ] formatter = LogsFormatter(self.colored_mock, self.formatter_chain) result_iterable = formatter.do_format(events_iterable) - self.assertEquals(list(result_iterable), expected_result) + self.assertEqual(list(result_iterable), expected_result) self.chain_method1.assert_has_calls(expected_call_order) self.chain_method2.assert_has_calls(expected_call_order) @@ -83,14 +76,14 @@ def test_must_work_without_formatter_chain(self, pretty_print_mock): expected_call_order = [ call(1, colored=self.colored_mock), call(2, colored=self.colored_mock), - call(3, colored=self.colored_mock) + call(3, colored=self.colored_mock), ] # No formatter chain. formatter = LogsFormatter(self.colored_mock) result_iterable = formatter.do_format(events_iterable) - self.assertEquals(list(result_iterable), expected_result) + self.assertEqual(list(result_iterable), expected_result) # Pretty Print is always called, even if there are no other formatters in the chain. pretty_print_mock.assert_has_calls(expected_call_order) @@ -100,12 +93,13 @@ def test_must_work_without_formatter_chain(self, pretty_print_mock): class TestLambdaLogMsgFormatters_colorize_crashes(TestCase): - - @parameterized.expand([ - "Task timed out", - "Something happened. Task timed out. Something else happend", - "Process exited before completing request" - ]) + @parameterized.expand( + [ + "Task timed out", + "Something happened. Task timed out. 
Something else happend", + "Process exited before completing request", + ] + ) def test_must_color_crash_messages(self, input_msg): color_result = "colored messaage" colored = Mock() @@ -113,7 +107,7 @@ def test_must_color_crash_messages(self, input_msg): event = LogEvent("group_name", {"message": input_msg}) result = LambdaLogMsgFormatters.colorize_errors(event, colored) - self.assertEquals(result.message, color_result) + self.assertEqual(result.message, color_result) colored.red.assert_called_with(input_msg) def test_must_ignore_other_messages(self): @@ -121,12 +115,11 @@ def test_must_ignore_other_messages(self): event = LogEvent("group_name", {"message": "some msg"}) result = LambdaLogMsgFormatters.colorize_errors(event, colored) - self.assertEquals(result.message, "some msg") + self.assertEqual(result.message, "some msg") colored.red.assert_not_called() class TestKeywordHighlight_highlight_keyword(TestCase): - def test_must_highlight_all_keywords(self): input_msg = "this keyword some keyword other keyword" keyword = "keyword" @@ -138,7 +131,7 @@ def test_must_highlight_all_keywords(self): event = LogEvent("group_name", {"message": input_msg}) result = KeywordHighlighter(keyword).highlight_keywords(event, colored) - self.assertEquals(result.message, expected_msg) + self.assertEqual(result.message, expected_msg) colored.underline.assert_called_with(keyword) def test_must_ignore_if_keyword_is_absent(self): @@ -147,12 +140,11 @@ def test_must_ignore_if_keyword_is_absent(self): event = LogEvent("group_name", {"message": input_msg}) result = KeywordHighlighter().highlight_keywords(event, colored) - self.assertEquals(result.message, input_msg) + self.assertEqual(result.message, input_msg) colored.underline.assert_not_called() class TestJSONMsgFormatter_format_json(TestCase): - def test_must_pretty_print_json(self): data = {"a": "b"} input_msg = '{"a": "b"}' @@ -161,15 +153,12 @@ def test_must_pretty_print_json(self): event = LogEvent("group_name", {"message": input_msg}) result = JSONMsgFormatter.format_json(event, None) - self.assertEquals(result.message, expected_msg) + self.assertEqual(result.message, expected_msg) - @parameterized.expand([ - "this is not json", - '{"not a valid json"}', - ]) + @parameterized.expand(["this is not json", '{"not a valid json"}']) def test_ignore_non_json(self, input_msg): event = LogEvent("group_name", {"message": input_msg}) result = JSONMsgFormatter.format_json(event, None) - self.assertEquals(result.message, input_msg) + self.assertEqual(result.message, input_msg) diff --git a/tests/unit/lib/logs/test_provider.py b/tests/unit/lib/logs/test_provider.py index 441b1b647c..59da01928c 100644 --- a/tests/unit/lib/logs/test_provider.py +++ b/tests/unit/lib/logs/test_provider.py @@ -1,13 +1,11 @@ - from unittest import TestCase from samcli.lib.logs.provider import LogGroupProvider class TestLogGroupProvider_for_lambda_function(TestCase): - def test_must_return_log_group_name(self): expected = "/aws/lambda/myfunctionname" result = LogGroupProvider.for_lambda_function("myfunctionname") - self.assertEquals(expected, result) + self.assertEqual(expected, result) diff --git a/tests/unit/lib/samlib/test_cloudformation_command.py b/tests/unit/lib/samlib/test_cloudformation_command.py index f6cd9f5889..d5ce254ebc 100644 --- a/tests/unit/lib/samlib/test_cloudformation_command.py +++ b/tests/unit/lib/samlib/test_cloudformation_command.py @@ -12,7 +12,6 @@ class TestExecuteCommand(TestCase): - def setUp(self): self.args = ("--arg1", "value1", "different args", "more") @@ 
-23,15 +22,18 @@ def test_must_add_template_file(self, find_executable_mock, check_call_mock): check_call_mock.return_value = True execute_command("command", self.args, "/path/to/template") - check_call_mock.assert_called_with(["mycmd", "cloudformation", "command"] + - ["--arg1", "value1", "different args", "more", - "--template-file", "/path/to/template"], env=ANY) + check_call_mock.assert_called_with( + ["mycmd", "cloudformation", "command"] + + ["--arg1", "value1", "different args", "more", "--template-file", "/path/to/template"], + env=ANY, + ) @patch("subprocess.check_call") @patch("samcli.lib.samlib.cloudformation_command.find_executable") @patch("samcli.lib.samlib.cloudformation_command.GlobalConfig") - def test_must_add_sam_cli_info_to_execution_env_var_if_telemetry_is_on(self, global_config_mock, - find_executable_mock, check_call_mock): + def test_must_add_sam_cli_info_to_execution_env_var_if_telemetry_is_on( + self, global_config_mock, find_executable_mock, check_call_mock + ): installation_id = "testtest" global_config_mock.return_value.installation_id = installation_id global_config_mock.return_value.telemetry_enabled = True @@ -46,7 +48,7 @@ def test_must_add_sam_cli_info_to_execution_env_var_if_telemetry_is_on(self, glo check_call_mock.assert_called() kwargs = check_call_mock.call_args[1] self.assertIn("env", kwargs) - self.assertEquals(kwargs["env"], expected_env) + self.assertEqual(kwargs["env"], expected_env) @patch("subprocess.check_call") @patch("samcli.lib.samlib.cloudformation_command.find_executable") @@ -64,7 +66,7 @@ def test_must_not_set_exec_env(self, global_config_mock, find_executable_mock, c check_call_mock.assert_called() kwargs = check_call_mock.call_args[1] self.assertIn("env", kwargs) - self.assertEquals(kwargs["env"], expected_env) + self.assertEqual(kwargs["env"], expected_env) @patch("sys.exit") @patch("subprocess.check_call") @@ -78,7 +80,6 @@ def test_command_must_exit_with_status_code(self, find_executable_mock, check_ca class TestFindExecutable(TestCase): - @patch("subprocess.Popen") @patch("platform.system") def test_must_use_raw_name(self, platform_system_mock, popen_mock): @@ -87,9 +88,7 @@ def test_must_use_raw_name(self, platform_system_mock, popen_mock): find_executable(execname) - self.assertEquals(popen_mock.mock_calls, [ - call([execname], stdout=PIPE, stderr=PIPE) - ]) + self.assertEqual(popen_mock.mock_calls, [call([execname], stdout=PIPE, stderr=PIPE)]) @patch("subprocess.Popen") @patch("platform.system") @@ -99,11 +98,9 @@ def test_must_use_name_with_cmd_extension_on_windows(self, platform_system_mock, expected = "foo.cmd" result = find_executable(execname) - self.assertEquals(result, expected) + self.assertEqual(result, expected) - self.assertEquals(popen_mock.mock_calls, [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE) - ]) + self.assertEqual(popen_mock.mock_calls, [call(["foo.cmd"], stdout=PIPE, stderr=PIPE)]) @patch("subprocess.Popen") @patch("platform.system") @@ -115,12 +112,12 @@ def test_must_use_name_with_exe_extension_on_windows(self, platform_system_mock, popen_mock.side_effect = [OSError, "success"] # fail on .cmd extension result = find_executable(execname) - self.assertEquals(result, expected) + self.assertEqual(result, expected) - self.assertEquals(popen_mock.mock_calls, [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE), - call(["foo.exe"], stdout=PIPE, stderr=PIPE) - ]) + self.assertEqual( + popen_mock.mock_calls, + [call(["foo.cmd"], stdout=PIPE, stderr=PIPE), call(["foo.exe"], stdout=PIPE, stderr=PIPE)], + ) 
@patch("subprocess.Popen") @patch("platform.system") @@ -132,13 +129,16 @@ def test_must_use_name_with_no_extension_on_windows(self, platform_system_mock, popen_mock.side_effect = [OSError, OSError, "success"] # fail on .cmd and .exe extension result = find_executable(execname) - self.assertEquals(result, expected) + self.assertEqual(result, expected) - self.assertEquals(popen_mock.mock_calls, [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE), - call(["foo.exe"], stdout=PIPE, stderr=PIPE), - call(["foo"], stdout=PIPE, stderr=PIPE), - ]) + self.assertEqual( + popen_mock.mock_calls, + [ + call(["foo.cmd"], stdout=PIPE, stderr=PIPE), + call(["foo.exe"], stdout=PIPE, stderr=PIPE), + call(["foo"], stdout=PIPE, stderr=PIPE), + ], + ) @patch("subprocess.Popen") @patch("platform.system") @@ -152,11 +152,15 @@ def test_must_raise_error_if_executable_not_found(self, platform_system_mock, po find_executable(execname) expected = "Cannot find AWS CLI installation, was looking at executables with names: {}".format( - ["foo.cmd", "foo.exe", "foo"]) - self.assertEquals(expected, str(ctx.exception)) - - self.assertEquals(popen_mock.mock_calls, [ - call(["foo.cmd"], stdout=PIPE, stderr=PIPE), - call(["foo.exe"], stdout=PIPE, stderr=PIPE), - call(["foo"], stdout=PIPE, stderr=PIPE), - ]) + ["foo.cmd", "foo.exe", "foo"] + ) + self.assertEqual(expected, str(ctx.exception)) + + self.assertEqual( + popen_mock.mock_calls, + [ + call(["foo.cmd"], stdout=PIPE, stderr=PIPE), + call(["foo.exe"], stdout=PIPE, stderr=PIPE), + call(["foo"], stdout=PIPE, stderr=PIPE), + ], + ) diff --git a/tests/unit/lib/samlib/test_resource_metadata_normalizer.py b/tests/unit/lib/samlib/test_resource_metadata_normalizer.py index bd4a9caffa..e87d9d31d7 100644 --- a/tests/unit/lib/samlib/test_resource_metadata_normalizer.py +++ b/tests/unit/lib/samlib/test_resource_metadata_normalizer.py @@ -4,135 +4,85 @@ class TestResourceMeatadataNormalizer(TestCase): - def test_replace_property_with_path(self): template_data = { "Resources": { "Function1": { - "Properties": { - "Code": "some value" - }, - "Metadata": { - "aws:asset:path": "new path", - "aws:asset:property": "Code" - } + "Properties": {"Code": "some value"}, + "Metadata": {"aws:asset:path": "new path", "aws:asset:property": "Code"}, } } } ResourceMetadataNormalizer.normalize(template_data) - self.assertEqual("new path", template_data['Resources']['Function1']['Properties']['Code']) + self.assertEqual("new path", template_data["Resources"]["Function1"]["Properties"]["Code"]) def test_replace_all_resources_that_contain_metadata(self): template_data = { "Resources": { "Function1": { - "Properties": { - "Code": "some value" - }, - "Metadata": { - "aws:asset:path": "new path", - "aws:asset:property": "Code" - } + "Properties": {"Code": "some value"}, + "Metadata": {"aws:asset:path": "new path", "aws:asset:property": "Code"}, }, "Resource2": { - "Properties": { - "SomeRandomProperty": "some value" - }, - "Metadata": { - "aws:asset:path": "super cool path", - "aws:asset:property": "SomeRandomProperty" - } - } + "Properties": {"SomeRandomProperty": "some value"}, + "Metadata": {"aws:asset:path": "super cool path", "aws:asset:property": "SomeRandomProperty"}, + }, } } ResourceMetadataNormalizer.normalize(template_data) - self.assertEqual("new path", template_data['Resources']['Function1']['Properties']['Code']) - self.assertEqual("super cool path", template_data['Resources']['Resource2']['Properties']['SomeRandomProperty']) + self.assertEqual("new path", 
template_data["Resources"]["Function1"]["Properties"]["Code"]) + self.assertEqual("super cool path", template_data["Resources"]["Resource2"]["Properties"]["SomeRandomProperty"]) def test_tempate_without_metadata(self): - template_data = { - "Resources": { - "Function1": { - "Properties": { - "Code": "some value" - } - } - } - } + template_data = {"Resources": {"Function1": {"Properties": {"Code": "some value"}}}} ResourceMetadataNormalizer.normalize(template_data) - self.assertEqual("some value", template_data['Resources']['Function1']['Properties']['Code']) + self.assertEqual("some value", template_data["Resources"]["Function1"]["Properties"]["Code"]) def test_template_without_asset_property(self): template_data = { "Resources": { - "Function1": { - "Properties": { - "Code": "some value" - }, - "Metadata": { - "aws:asset:path": "new path", - } - } + "Function1": {"Properties": {"Code": "some value"}, "Metadata": {"aws:asset:path": "new path"}} } } ResourceMetadataNormalizer.normalize(template_data) - self.assertEqual("some value", template_data['Resources']['Function1']['Properties']['Code']) + self.assertEqual("some value", template_data["Resources"]["Function1"]["Properties"]["Code"]) def test_tempalte_without_asset_path(self): template_data = { "Resources": { - "Function1": { - "Properties": { - "Code": "some value" - }, - "Metadata": { - "aws:asset:property": "Code" - } - } + "Function1": {"Properties": {"Code": "some value"}, "Metadata": {"aws:asset:property": "Code"}} } } ResourceMetadataNormalizer.normalize(template_data) - self.assertEqual("some value", template_data['Resources']['Function1']['Properties']['Code']) + self.assertEqual("some value", template_data["Resources"]["Function1"]["Properties"]["Code"]) def test_template_with_empty_metadata(self): - template_data = { - "Resources": { - "Function1": { - "Properties": { - "Code": "some value" - }, - "Metadata": {} - } - } - } + template_data = {"Resources": {"Function1": {"Properties": {"Code": "some value"}, "Metadata": {}}}} ResourceMetadataNormalizer.normalize(template_data) - self.assertEqual("some value", template_data['Resources']['Function1']['Properties']['Code']) + self.assertEqual("some value", template_data["Resources"]["Function1"]["Properties"]["Code"]) def test_replace_of_property_that_does_not_exist(self): template_data = { "Resources": { "Function1": { "Properties": {}, - "Metadata": { - "aws:asset:path": "new path", - "aws:asset:property": "Code" - } + "Metadata": {"aws:asset:path": "new path", "aws:asset:property": "Code"}, } } } ResourceMetadataNormalizer.normalize(template_data) - self.assertEqual("new path", template_data['Resources']['Function1']['Properties']['Code']) + self.assertEqual("new path", template_data["Resources"]["Function1"]["Properties"]["Code"]) diff --git a/tests/unit/lib/telemetry/test_metrics.py b/tests/unit/lib/telemetry/test_metrics.py index 9ae585c9c7..3ea872014c 100644 --- a/tests/unit/lib/telemetry/test_metrics.py +++ b/tests/unit/lib/telemetry/test_metrics.py @@ -9,7 +9,6 @@ class TestSendInstalledMetric(TestCase): - def setUp(self): self.gc_mock = Mock() self.global_config_patcher = patch("samcli.lib.telemetry.metrics.GlobalConfig", self.gc_mock) @@ -25,14 +24,12 @@ def test_must_send_installed_metric_with_attributes(self, TelemetryClassMock): self.gc_mock.return_value.telemetry_enabled = False send_installed_metric() - telemetry_mock.emit.assert_called_with("installed", { - "osPlatform": platform.system(), - "telemetryEnabled": False - }) + 
telemetry_mock.emit.assert_called_with( + "installed", {"osPlatform": platform.system(), "telemetryEnabled": False} + ) class TestTrackCommand(TestCase): - def setUp(self): TelemetryClassMock = Mock() GlobalConfigClassMock = Mock() @@ -66,9 +63,9 @@ def real_fn(): track_command(real_fn)() - self.assertEquals(self.telemetry_instance.emit.mock_calls, [ - call("commandRun", ANY), - ], "The one command metric must be sent") + self.assertEqual( + self.telemetry_instance.emit.mock_calls, [call("commandRun", ANY)], "The one command metric must be sent" + ) @patch("samcli.lib.telemetry.metrics.Context") def test_must_emit_command_run_metric(self, ContextMock): @@ -84,14 +81,11 @@ def real_fn(): "debugFlagProvided": False, "region": "myregion", "commandName": "fakesam local invoke", - "duration": ANY, "exitReason": "success", - "exitCode": 0 + "exitCode": 0, } - self.telemetry_instance.emit.assert_has_calls([ - call("commandRun", expected_attrs) - ]) + self.telemetry_instance.emit.assert_has_calls([call("commandRun", expected_attrs)]) @patch("samcli.lib.telemetry.metrics.Context") def test_must_emit_command_run_metric_with_sanitized_profile_value(self, ContextMock): @@ -103,12 +97,8 @@ def real_fn(): track_command(real_fn)() - expected_attrs = _cmd_run_attrs({ - "awsProfileProvided": True - }) - self.telemetry_instance.emit.assert_has_calls([ - call("commandRun", expected_attrs) - ]) + expected_attrs = _cmd_run_attrs({"awsProfileProvided": True}) + self.telemetry_instance.emit.assert_has_calls([call("commandRun", expected_attrs)]) @patch("samcli.lib.telemetry.metrics.Context") def test_must_record_function_duration(self, ContextMock): @@ -124,11 +114,12 @@ def real_fn(): # And grab the second argument passed to this call, which are the attributes args, kwargs = self.telemetry_instance.emit.call_args_list[0] metric_name, actual_attrs = args - self.assertEquals("commandRun", metric_name) - self.assertGreaterEqual(actual_attrs["duration"], - sleep_duration, - "Measured duration must be in milliseconds and " - "greater than equal to the sleep duration") + self.assertEqual("commandRun", metric_name) + self.assertGreaterEqual( + actual_attrs["duration"], + sleep_duration, + "Measured duration must be in milliseconds and " "greater than equal to the sleep duration", + ) @patch("samcli.lib.telemetry.metrics.Context") def test_must_record_user_exception(self, ContextMock): @@ -141,16 +132,14 @@ def real_fn(): with self.assertRaises(UserException) as context: track_command(real_fn)() - self.assertEquals(context.exception, expected_exception, "Must re-raise the original exception object " - "without modification") + self.assertEqual( + context.exception, + expected_exception, + "Must re-raise the original exception object " "without modification", + ) - expected_attrs = _cmd_run_attrs({ - "exitReason": "UserException", - "exitCode": 1235 - }) - self.telemetry_instance.emit.assert_has_calls([ - call("commandRun", expected_attrs) - ]) + expected_attrs = _cmd_run_attrs({"exitReason": "UserException", "exitCode": 1235}) + self.telemetry_instance.emit.assert_has_calls([call("commandRun", expected_attrs)]) @patch("samcli.lib.telemetry.metrics.Context") def test_must_record_any_exceptions(self, ContextMock): @@ -162,16 +151,16 @@ def real_fn(): with self.assertRaises(KeyError) as context: track_command(real_fn)() - self.assertEquals(context.exception, expected_exception, "Must re-raise the original exception object " - "without modification") + self.assertEqual( + context.exception, + expected_exception, + "Must 
re-raise the original exception object " "without modification", + ) - expected_attrs = _cmd_run_attrs({ - "exitReason": "KeyError", - "exitCode": 255 # Unhandled exceptions always use exit code 255 - }) - self.telemetry_instance.emit.assert_has_calls([ - call("commandRun", expected_attrs) - ]) + expected_attrs = _cmd_run_attrs( + {"exitReason": "KeyError", "exitCode": 255} # Unhandled exceptions always use exit code 255 + ) + self.telemetry_instance.emit.assert_has_calls([call("commandRun", expected_attrs)]) @patch("samcli.lib.telemetry.metrics.Context") def test_must_return_value_from_decorated_function(self, ContextMock): @@ -181,35 +170,34 @@ def real_fn(): return expected_value actual = track_command(real_fn)() - self.assertEquals(actual, "some return value") + self.assertEqual(actual, "some return value") @patch("samcli.lib.telemetry.metrics.Context") def test_must_pass_all_arguments_to_wrapped_function(self, ContextMock): - def real_fn(*args, **kwargs): # simply return the arguments to be able to examine & assert return args, kwargs actual_args, actual_kwargs = track_command(real_fn)(1, 2, 3, a=1, b=2, c=3) - self.assertEquals(actual_args, (1, 2, 3)) - self.assertEquals(actual_kwargs, {"a": 1, "b": 2, "c": 3}) + self.assertEqual(actual_args, (1, 2, 3)) + self.assertEqual(actual_kwargs, {"a": 1, "b": 2, "c": 3}) @patch("samcli.lib.telemetry.metrics.Context") def test_must_decorate_functions(self, ContextMock): - @track_command def real_fn(a, b=None): return "{} {}".format(a, b) actual = real_fn("hello", b="world") - self.assertEquals(actual, "hello world") + self.assertEqual(actual, "hello world") - self.assertEquals(self.telemetry_instance.emit.mock_calls, [ - call("commandRun", ANY), - ], "The command metrics be emitted when used as a decorator") + self.assertEqual( + self.telemetry_instance.emit.mock_calls, + [call("commandRun", ANY)], + "The command metrics be emitted when used as a decorator", + ) def test_must_return_immediately_if_telemetry_is_disabled(self): - def real_fn(): return "hello" @@ -217,13 +205,20 @@ def real_fn(): self.gc_instance_mock.telemetry_enabled = False result = track_command(real_fn)() - self.assertEquals(result, "hello") + self.assertEqual(result, "hello") self.telemetry_instance.emit.assert_not_called() def _cmd_run_attrs(data): - common_attrs = ["awsProfileProvided", "debugFlagProvided", "region", "commandName", - "duration", "exitReason", "exitCode"] + common_attrs = [ + "awsProfileProvided", + "debugFlagProvided", + "region", + "commandName", + "duration", + "exitReason", + "exitCode", + ] return _ignore_other_attrs(data, common_attrs) diff --git a/tests/unit/lib/telemetry/test_telemetry.py b/tests/unit/lib/telemetry/test_telemetry.py index 46c32ed13a..9d57eafde0 100644 --- a/tests/unit/lib/telemetry/test_telemetry.py +++ b/tests/unit/lib/telemetry/test_telemetry.py @@ -9,7 +9,6 @@ class TestTelemetry(TestCase): - def setUp(self): self.test_session_id = "TestSessionId" self.test_installation_id = "TestInstallationId" @@ -46,36 +45,32 @@ def test_must_add_metric_with_attributes_to_registry(self, requests_mock): telemetry.emit(metric_name, attrs) expected = { - "metrics": [{ - metric_name: { - "a": 1, - "b": 2, - "requestId": ANY, - "installationId": self.test_installation_id, - "sessionId": self.test_session_id, - "executionEnvironment": "CLI", - "pyversion": platform.python_version(), - "samcliVersion": samcli_version + "metrics": [ + { + metric_name: { + "a": 1, + "b": 2, + "requestId": ANY, + "installationId": self.test_installation_id, + 
"sessionId": self.test_session_id, + "executionEnvironment": "CLI", + "pyversion": platform.python_version(), + "samcliVersion": samcli_version, + } } - }] + ] } requests_mock.post.assert_called_once_with(ANY, json=expected, timeout=ANY) @patch("samcli.lib.telemetry.telemetry.requests") - @patch('samcli.lib.telemetry.telemetry.uuid') + @patch("samcli.lib.telemetry.telemetry.uuid") def test_must_add_request_id_as_uuid_v4(self, uuid_mock, requests_mock): fake_uuid = uuid_mock.uuid4.return_value = "fake uuid" telemetry = Telemetry(url=self.url) telemetry.emit("metric_name", {}) - expected = { - "metrics": [{ - "metric_name": _ignore_other_attrs({ - "requestId": fake_uuid, - }) - }] - } + expected = {"metrics": [{"metric_name": _ignore_other_attrs({"requestId": fake_uuid})}]} requests_mock.post.assert_called_once_with(ANY, json=expected, timeout=ANY) @patch("samcli.lib.telemetry.telemetry.requests") @@ -87,11 +82,7 @@ def test_execution_environment_should_be_identified(self, requests_mock): expected_execution_environment = "CLI" expected = { - "metrics": [{ - "metric_name": _ignore_other_attrs({ - "executionEnvironment": expected_execution_environment - }) - }] + "metrics": [{"metric_name": _ignore_other_attrs({"executionEnvironment": expected_execution_environment})}] } requests_mock.post.assert_called_once_with(ANY, json=expected, timeout=ANY) @@ -100,7 +91,7 @@ def test_default_request_should_be_fire_and_forget(self, requests_mock): telemetry = Telemetry(url=self.url) telemetry.emit("metric_name", {}) - requests_mock.post.assert_called_once_with(ANY, json=ANY, timeout=(2, 0.1)) # 100ms response timeout + requests_mock.post.assert_called_once_with(ANY, json=ANY, timeout=(2, 0.1)) # 100ms response timeout @patch("samcli.lib.telemetry.telemetry.requests") def test_request_must_wait_for_2_seconds_for_response(self, requests_mock): @@ -147,11 +138,11 @@ def test_must_raise_on_other_requests_exception(self, requests_mock): with self.assertRaises(IOError): telemetry.emit("metric_name", {}) - @patch('samcli.lib.telemetry.telemetry.DEFAULT_ENDPOINT_URL') + @patch("samcli.lib.telemetry.telemetry.DEFAULT_ENDPOINT_URL") def test_must_use_default_endpoint_url_if_not_customized(self, default_endpoint_url_mock): telemetry = Telemetry() - self.assertEquals(telemetry._url, default_endpoint_url_mock) + self.assertEqual(telemetry._url, default_endpoint_url_mock) def _ignore_other_attrs(data): diff --git a/tests/unit/lib/utils/test_codeuri.py b/tests/unit/lib/utils/test_codeuri.py index 03652909b1..45052da423 100644 --- a/tests/unit/lib/utils/test_codeuri.py +++ b/tests/unit/lib/utils/test_codeuri.py @@ -11,29 +11,22 @@ class TestLocalLambda_get_code_path(TestCase): - def setUp(self): self.cwd = "/my/current/working/directory" self.relative_codeuri = "./my/path" self.absolute_codeuri = "/home/foo/bar" # Some absolute path to use self.os_cwd = os.getcwd() - @parameterized.expand([ - ("."), - ("") - ]) + @parameterized.expand([("."), ("")]) def test_must_resolve_present_cwd(self, cwd_path): codeuri = self.relative_codeuri expected = os.path.normpath(os.path.join(self.os_cwd, codeuri)) actual = resolve_code_path(cwd_path, codeuri) - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) self.assertTrue(os.path.isabs(actual), "Result must be an absolute path") - @parameterized.expand([ - ("var/task"), - ("some/dir") - ]) + @parameterized.expand([("var/task"), ("some/dir")]) def test_must_resolve_relative_cwd(self, cwd_path): codeuri = self.relative_codeuri @@ -42,32 +35,23 @@ def 
test_must_resolve_relative_cwd(self, cwd_path): expected = os.path.normpath(os.path.join(abs_cwd, codeuri)) actual = resolve_code_path(cwd_path, codeuri) - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) self.assertTrue(os.path.isabs(actual), "Result must be an absolute path") - @parameterized.expand([ - (""), - ("."), - ("hello"), - ("a/b/c/d"), - ("../../c/d/e") - ]) + @parameterized.expand([(""), ("."), ("hello"), ("a/b/c/d"), ("../../c/d/e")]) def test_must_resolve_relative_codeuri(self, codeuri): expected = os.path.normpath(os.path.join(self.cwd, codeuri)) actual = resolve_code_path(self.cwd, codeuri) - self.assertEquals(str(Path(expected).resolve()), actual) + self.assertEqual(str(Path(expected).resolve()), actual) self.assertTrue(os.path.isabs(actual), "Result must be an absolute path") - @parameterized.expand([ - ("/a/b/c"), - ("/") - ]) + @parameterized.expand([("/a/b/c"), ("/")]) def test_must_resolve_absolute_codeuri(self, codeuri): expected = codeuri # CodeUri must be return as is for absolute paths actual = resolve_code_path(None, codeuri) - self.assertEquals(expected, actual) + self.assertEqual(expected, actual) self.assertTrue(os.path.isabs(actual), "Result must be an absolute path") diff --git a/tests/unit/lib/utils/test_colors.py b/tests/unit/lib/utils/test_colors.py index 30475159c4..bb6264a699 100644 --- a/tests/unit/lib/utils/test_colors.py +++ b/tests/unit/lib/utils/test_colors.py @@ -1,4 +1,3 @@ - from unittest import TestCase from nose_parameterized import parameterized, param @@ -6,23 +5,24 @@ class TestColored(TestCase): - def setUp(self): - self.msg = 'message' - - @parameterized.expand([ - param('red', '\x1b[31m'), - param('green', '\x1b[32m'), - param('cyan', '\x1b[36m'), - param('white', '\x1b[37m'), - param('yellow', '\x1b[33m'), - param('underline', '\x1b[4m') - ]) + self.msg = "message" + + @parameterized.expand( + [ + param("red", "\x1b[31m"), + param("green", "\x1b[32m"), + param("cyan", "\x1b[36m"), + param("white", "\x1b[37m"), + param("yellow", "\x1b[33m"), + param("underline", "\x1b[4m"), + ] + ) def test_various_decorations(self, decoration_name, ansi_prefix): expected = ansi_prefix + self.msg + "\x1b[0m" with_color = Colored() without_color = Colored(colorize=False) - self.assertEquals(expected, getattr(with_color, decoration_name)(self.msg)) - self.assertEquals(self.msg, getattr(without_color, decoration_name)(self.msg)) + self.assertEqual(expected, getattr(with_color, decoration_name)(self.msg)) + self.assertEqual(self.msg, getattr(without_color, decoration_name)(self.msg)) diff --git a/tests/unit/lib/utils/test_osutils.py b/tests/unit/lib/utils/test_osutils.py index 10fb14132f..7e51a5950e 100644 --- a/tests/unit/lib/utils/test_osutils.py +++ b/tests/unit/lib/utils/test_osutils.py @@ -10,7 +10,6 @@ class Test_mkdir_temp(TestCase): - def test_must_return_temp_dir(self): with osutils.mkdir_temp() as tempdir: @@ -27,7 +26,6 @@ def test_must_delete_temp_dir_after_use(self): class Test_stderr(TestCase): - def test_must_return_sys_stderr(self): expected_stderr = sys.stderr @@ -35,11 +33,10 @@ def test_must_return_sys_stderr(self): if sys.version_info.major > 2: expected_stderr = sys.stderr.buffer - self.assertEquals(expected_stderr, osutils.stderr()) + self.assertEqual(expected_stderr, osutils.stderr()) class Test_stdout(TestCase): - def test_must_return_sys_stdout(self): expected_stdout = sys.stdout @@ -47,4 +44,4 @@ def test_must_return_sys_stdout(self): if sys.version_info.major > 2: expected_stdout = sys.stdout.buffer - 
self.assertEquals(expected_stdout, osutils.stdout()) + self.assertEqual(expected_stdout, osutils.stdout()) diff --git a/tests/unit/lib/utils/test_progressbar.py b/tests/unit/lib/utils/test_progressbar.py index 00a30073a5..d082d770d8 100644 --- a/tests/unit/lib/utils/test_progressbar.py +++ b/tests/unit/lib/utils/test_progressbar.py @@ -5,14 +5,13 @@ class TestProgressBar(TestCase): - - @patch('samcli.lib.utils.progressbar.click') + @patch("samcli.lib.utils.progressbar.click") def test_creating_progressbar(self, click_patch): progressbar_mock = Mock() click_patch.progressbar.return_value = progressbar_mock - actual = progressbar(100, 'this is a label') + actual = progressbar(100, "this is a label") - self.assertEquals(actual, progressbar_mock) + self.assertEqual(actual, progressbar_mock) - click_patch.progressbar.assert_called_with(length=100, label='this is a label', show_pos=True) + click_patch.progressbar.assert_called_with(length=100, label="this is a label", show_pos=True) diff --git a/tests/unit/lib/utils/test_sam_logging.py b/tests/unit/lib/utils/test_sam_logging.py index c05f4b17ca..44fb2a63b2 100644 --- a/tests/unit/lib/utils/test_sam_logging.py +++ b/tests/unit/lib/utils/test_sam_logging.py @@ -5,7 +5,6 @@ class TestSamCliLogger(TestCase): - @patch("samcli.lib.utils.sam_logging.logging") def test_configure_samcli_logger(self, logging_patch): formatter_mock = Mock() diff --git a/tests/unit/lib/utils/test_stream_writer.py b/tests/unit/lib/utils/test_stream_writer.py index 67a57b0996..d882857488 100644 --- a/tests/unit/lib/utils/test_stream_writer.py +++ b/tests/unit/lib/utils/test_stream_writer.py @@ -10,7 +10,6 @@ class TestStreamWriter(TestCase): - def test_must_write_to_stream(self): buffer = "something" stream_mock = Mock() diff --git a/tests/unit/lib/utils/test_tar.py b/tests/unit/lib/utils/test_tar.py index d429c8cc37..2781362106 100644 --- a/tests/unit/lib/utils/test_tar.py +++ b/tests/unit/lib/utils/test_tar.py @@ -5,7 +5,6 @@ class TestTar(TestCase): - @patch("samcli.lib.utils.tar.tarfile.open") @patch("samcli.lib.utils.tar.TemporaryFile") def test_generating_tarball(self, temporary_file_patch, tarfile_open_patch): @@ -16,13 +15,15 @@ def test_generating_tarball(self, temporary_file_patch, tarfile_open_patch): tarfile_open_patch.return_value.__enter__.return_value = tarfile_file_mock with create_tarball({"/some/path": "/layer1", "/some/dockerfile/path": "/Dockerfile"}) as acutal: - self.assertEquals(acutal, temp_file_mock) + self.assertEqual(acutal, temp_file_mock) tarfile_file_mock.add.assert_called() - tarfile_file_mock.add.assert_has_calls([call("/some/path", arcname="/layer1"), - call("/some/dockerfile/path", arcname="/Dockerfile")], any_order=True) + tarfile_file_mock.add.assert_has_calls( + [call("/some/path", arcname="/layer1"), call("/some/dockerfile/path", arcname="/Dockerfile")], + any_order=True, + ) temp_file_mock.flush.assert_called_once() temp_file_mock.seek.assert_called_once_with(0) temp_file_mock.close.assert_called_once() - tarfile_open_patch.assert_called_once_with(fileobj=temp_file_mock, mode='w') + tarfile_open_patch.assert_called_once_with(fileobj=temp_file_mock, mode="w") diff --git a/tests/unit/lib/utils/test_time.py b/tests/unit/lib/utils/test_time.py index d4aede3304..14b879b0e7 100644 --- a/tests/unit/lib/utils/test_time.py +++ b/tests/unit/lib/utils/test_time.py @@ -1,4 +1,3 @@ - import datetime from unittest import TestCase @@ -7,38 +6,35 @@ class TestTimestampToIso(TestCase): - def test_must_work_on_timestamp_with_milliseconds(self): timestamp 
= 1530882594123 expected = "2018-07-06T13:09:54.123000" - self.assertEquals(expected, timestamp_to_iso(timestamp)) + self.assertEqual(expected, timestamp_to_iso(timestamp)) def test_must_ignore_float_microseconds(self): timestamp = 1530882594123.9876 expected = "2018-07-06T13:09:54.123000" - self.assertEquals(expected, timestamp_to_iso(timestamp)) + self.assertEqual(expected, timestamp_to_iso(timestamp)) class TestToTimestamp(TestCase): - def test_must_convert_to_timestamp(self): date = datetime.datetime.utcfromtimestamp(1530882594.123) expected = 1530882594123 - self.assertEquals(expected, to_timestamp(date)) + self.assertEqual(expected, to_timestamp(date)) class TestToUtc(TestCase): - def test_with_timezone(self): date = parse_date("2018-07-06 13:09:54 PDT") expected = datetime.datetime(2018, 7, 6, 20, 9, 54) result = to_utc(date) - self.assertEquals(expected, result) + self.assertEqual(expected, result) def test_with_utc_timezone(self): @@ -46,7 +42,7 @@ def test_with_utc_timezone(self): expected = datetime.datetime(2018, 7, 6, 13, 9, 54) result = to_utc(date) - self.assertEquals(expected, result) + self.assertEqual(expected, result) def test_without_timezone(self): @@ -54,16 +50,15 @@ def test_without_timezone(self): expected = datetime.datetime(2018, 7, 6, 13, 9, 54) result = to_utc(date) - self.assertEquals(expected, result) + self.assertEqual(expected, result) class TestParseDate(TestCase): - def test_must_parse_date(self): date_str = "2018-07-06T13:09:54" expected = datetime.datetime(2018, 7, 6, 13, 9, 54) - self.assertEquals(expected, parse_date(date_str)) + self.assertEqual(expected, parse_date(date_str)) def test_must_parse_relative_time_in_utc(self): now = datetime.datetime.utcnow() @@ -73,4 +68,4 @@ def test_must_parse_relative_time_in_utc(self): expected = (now - datetime.timedelta(hours=1)).replace(microsecond=0, second=0) result = parse_date(date_str).replace(microsecond=0, second=0) - self.assertEquals(expected, result) + self.assertEqual(expected, result) diff --git a/tests/unit/local/apigw/test_local_apigw_service.py b/tests/unit/local/apigw/test_local_apigw_service.py index 6ff3ee025d..dfb3003c78 100644 --- a/tests/unit/local/apigw/test_local_apigw_service.py +++ b/tests/unit/local/apigw/test_local_apigw_service.py @@ -14,10 +14,9 @@ class TestApiGatewayService(TestCase): - def setUp(self): self.function_name = Mock() - self.api_gateway_route = Route(methods=['GET'], function_name=self.function_name, path='/') + self.api_gateway_route = Route(methods=["GET"], function_name=self.function_name, path="/") self.list_of_routes = [self.api_gateway_route] self.lambda_runner = Mock() @@ -25,11 +24,7 @@ def setUp(self): self.stderr = Mock() self.api = Api(routes=self.list_of_routes) - self.service = LocalApigwService(self.api, - self.lambda_runner, - port=3000, - host='127.0.0.1', - stderr=self.stderr) + self.service = LocalApigwService(self.api, self.lambda_runner, port=3000, host="127.0.0.1", stderr=self.stderr) @patch.object(LocalApigwService, "get_request_methods_endpoints") def test_request_must_invoke_lambda(self, request_mock): @@ -48,21 +43,18 @@ def test_request_must_invoke_lambda(self, request_mock): service_response_mock.return_value = make_response_mock self.service.service_response = service_response_mock - request_mock.return_value = ('test', 'test') + request_mock.return_value = ("test", "test") result = self.service._request_handler() - self.assertEquals(result, make_response_mock) - self.lambda_runner.invoke.assert_called_with(ANY, - ANY, - stdout=ANY, - 
stderr=self.stderr) + self.assertEqual(result, make_response_mock) + self.lambda_runner.invoke.assert_called_with(ANY, ANY, stdout=ANY, stderr=self.stderr) @patch.object(LocalApigwService, "get_request_methods_endpoints") - @patch('samcli.local.apigw.local_apigw_service.LambdaOutputParser') + @patch("samcli.local.apigw.local_apigw_service.LambdaOutputParser") def test_request_handler_returns_process_stdout_when_making_response(self, lambda_output_parser_mock, request_mock): make_response_mock = Mock() - request_mock.return_value = ('test', 'test') + request_mock.return_value = ("test", "test") self.service.service_response = make_response_mock self.service._get_current_route = MagicMock() self.service._get_current_route.methods = [] @@ -83,7 +75,7 @@ def test_request_handler_returns_process_stdout_when_making_response(self, lambd result = self.service._request_handler() - self.assertEquals(result, make_response_mock) + self.assertEqual(result, make_response_mock) lambda_output_parser_mock.get_lambda_output.assert_called_with(ANY) # Make sure the parse method is called only on the returned response and not on the raw data from stdout @@ -108,16 +100,16 @@ def test_request_handler_returns_make_response(self, request_mock): service_response_mock.return_value = make_response_mock self.service.service_response = service_response_mock - request_mock.return_value = ('test', 'test') + request_mock.return_value = ("test", "test") result = self.service._request_handler() - self.assertEquals(result, make_response_mock) + self.assertEqual(result, make_response_mock) def test_create_creates_dict_of_routes(self): function_name_1 = Mock() function_name_2 = Mock() - api_gateway_route_1 = Route(methods=["GET"], function_name=function_name_1, path='/') - api_gateway_route_2 = Route(methods=["POST"], function_name=function_name_2, path='/') + api_gateway_route_1 = Route(methods=["GET"], function_name=function_name_1, path="/") + api_gateway_route_2 = Route(methods=["POST"], function_name=function_name_2, path="/") list_of_routes = [api_gateway_route_1, api_gateway_route_2] @@ -128,11 +120,9 @@ def test_create_creates_dict_of_routes(self): service.create() - self.assertEquals(service._dict_of_routes, {'/:GET': api_gateway_route_1, - '/:POST': api_gateway_route_2 - }) + self.assertEqual(service._dict_of_routes, {"/:GET": api_gateway_route_1, "/:POST": api_gateway_route_2}) - @patch('samcli.local.apigw.local_apigw_service.Flask') + @patch("samcli.local.apigw.local_apigw_service.Flask") def test_create_creates_flask_app_with_url_rules(self, flask): app_mock = Mock() flask.return_value = app_mock @@ -141,30 +131,28 @@ def test_create_creates_flask_app_with_url_rules(self, flask): self.service.create() - app_mock.add_url_rule.assert_called_once_with('/', - endpoint='/', - view_func=self.service._request_handler, - methods=['GET'], - provide_automatic_options=False) + app_mock.add_url_rule.assert_called_once_with( + "/", endpoint="/", view_func=self.service._request_handler, methods=["GET"], provide_automatic_options=False + ) def test_initalize_creates_default_values(self): - self.assertEquals(self.service.port, 3000) - self.assertEquals(self.service.host, '127.0.0.1') - self.assertEquals(self.service.api.routes, self.list_of_routes) + self.assertEqual(self.service.port, 3000) + self.assertEqual(self.service.host, "127.0.0.1") + self.assertEqual(self.service.api.routes, self.list_of_routes) self.assertIsNone(self.service.static_dir) - self.assertEquals(self.service.lambda_runner, self.lambda_runner) + 
self.assertEqual(self.service.lambda_runner, self.lambda_runner) def test_initalize_with_values(self): lambda_runner = Mock() - local_service = LocalApigwService(Api(), lambda_runner, static_dir='dir/static', port=5000, host='129.0.0.0') - self.assertEquals(local_service.port, 5000) - self.assertEquals(local_service.host, '129.0.0.0') - self.assertEquals(local_service.api.routes, []) - self.assertEquals(local_service.static_dir, 'dir/static') - self.assertEquals(local_service.lambda_runner, lambda_runner) + local_service = LocalApigwService(Api(), lambda_runner, static_dir="dir/static", port=5000, host="129.0.0.0") + self.assertEqual(local_service.port, 5000) + self.assertEqual(local_service.host, "129.0.0.0") + self.assertEqual(local_service.api.routes, []) + self.assertEqual(local_service.static_dir, "dir/static") + self.assertEqual(local_service.lambda_runner, lambda_runner) @patch.object(LocalApigwService, "get_request_methods_endpoints") - @patch('samcli.local.apigw.local_apigw_service.ServiceErrorResponses') + @patch("samcli.local.apigw.local_apigw_service.ServiceErrorResponses") def test_request_handles_error_when_invoke_cant_find_function(self, service_error_responses_patch, request_mock): not_found_response_mock = Mock() self.service._construct_event = Mock() @@ -174,10 +162,10 @@ def test_request_handles_error_when_invoke_cant_find_function(self, service_erro service_error_responses_patch.lambda_not_found_response.return_value = not_found_response_mock self.lambda_runner.invoke.side_effect = FunctionNotFound() - request_mock.return_value = ('test', 'test') + request_mock.return_value = ("test", "test") response = self.service._request_handler() - self.assertEquals(response, not_found_response_mock) + self.assertEqual(response, not_found_response_mock) @patch.object(LocalApigwService, "get_request_methods_endpoints") def test_request_throws_when_invoke_fails(self, request_mock): @@ -185,15 +173,16 @@ def test_request_throws_when_invoke_fails(self, request_mock): self.service._construct_event = Mock() self.service._get_current_route = Mock() - request_mock.return_value = ('test', 'test') + request_mock.return_value = ("test", "test") with self.assertRaises(Exception): self.service._request_handler() @patch.object(LocalApigwService, "get_request_methods_endpoints") - @patch('samcli.local.apigw.local_apigw_service.ServiceErrorResponses') - def test_request_handler_errors_when_parse_lambda_output_raises_keyerror(self, service_error_responses_patch, - request_mock): + @patch("samcli.local.apigw.local_apigw_service.ServiceErrorResponses") + def test_request_handler_errors_when_parse_lambda_output_raises_keyerror( + self, service_error_responses_patch, request_mock + ): parse_output_mock = Mock() parse_output_mock.side_effect = KeyError() self.service._parse_lambda_output = parse_output_mock @@ -206,12 +195,12 @@ def test_request_handler_errors_when_parse_lambda_output_raises_keyerror(self, s self.service._get_current_route = MagicMock() self.service._get_current_route.methods = [] - request_mock.return_value = ('test', 'test') + request_mock.return_value = ("test", "test") result = self.service._request_handler() - self.assertEquals(result, failure_response_mock) + self.assertEqual(result, failure_response_mock) - @patch('samcli.local.apigw.local_apigw_service.ServiceErrorResponses') + @patch("samcli.local.apigw.local_apigw_service.ServiceErrorResponses") def test_request_handler_errors_when_get_current_route_fails(self, service_error_responses_patch): get_current_route = Mock() 
get_current_route.side_effect = KeyError() @@ -221,7 +210,7 @@ def test_request_handler_errors_when_get_current_route_fails(self, service_error self.service._request_handler() @patch.object(LocalApigwService, "get_request_methods_endpoints") - @patch('samcli.local.apigw.local_apigw_service.ServiceErrorResponses') + @patch("samcli.local.apigw.local_apigw_service.ServiceErrorResponses") def test_request_handler_errors_when_unable_to_read_binary_data(self, service_error_responses_patch, request_mock): _construct_event = Mock() _construct_event.side_effect = UnicodeDecodeError("utf8", b"obj", 1, 2, "reason") @@ -233,11 +222,11 @@ def test_request_handler_errors_when_unable_to_read_binary_data(self, service_er failure_mock = Mock() service_error_responses_patch.lambda_failure_response.return_value = failure_mock - request_mock.return_value = ('test', 'test') + request_mock.return_value = ("test", "test") result = self.service._request_handler() - self.assertEquals(result, failure_mock) + self.assertEqual(result, failure_mock) - @patch('samcli.local.apigw.local_apigw_service.request') + @patch("samcli.local.apigw.local_apigw_service.request") def test_get_current_route(self, request_patch): request_mock = Mock() request_mock.endpoint = "path" @@ -250,9 +239,9 @@ def test_get_current_route(self, request_patch): self.service._route_key = route_key_method_mock self.service._dict_of_routes = {"method:path": "function"} - self.assertEquals(self.service._get_current_route(request_mock), "function") + self.assertEqual(self.service._get_current_route(request_mock), "function") - @patch('samcli.local.apigw.local_apigw_service.request') + @patch("samcli.local.apigw.local_apigw_service.request") def test_get_current_route_keyerror(self, request_patch): """ When the a HTTP request for given method+path combination is allowed by Flask but not in the list of routes, @@ -275,26 +264,24 @@ def test_get_current_route_keyerror(self, request_patch): class TestApiGatewayModel(TestCase): - def setUp(self): self.function_name = "name" self.api_gateway = Route(function_name=self.function_name, methods=["Post"], path="/") def test_class_initialization(self): - self.assertEquals(self.api_gateway.methods, ['POST']) - self.assertEquals(self.api_gateway.function_name, self.function_name) - self.assertEquals(self.api_gateway.path, '/') + self.assertEqual(self.api_gateway.methods, ["POST"]) + self.assertEqual(self.api_gateway.function_name, self.function_name) + self.assertEqual(self.api_gateway.path, "/") class TestLambdaHeaderDictionaryMerge(TestCase): - def test_empty_dictionaries_produce_empty_result(self): headers = {} multi_value_headers = {} result = LocalApigwService._merge_response_headers(headers, multi_value_headers) - self.assertEquals(result, Headers({})) + self.assertEqual(result, Headers({})) def test_headers_are_merged(self): headers = {"h1": "value1", "h2": "value2", "h3": "value3"} @@ -305,9 +292,9 @@ def test_headers_are_merged(self): self.assertIn("h1", result) self.assertIn("h2", result) self.assertIn("h3", result) - self.assertEquals(result["h1"], "value1") - self.assertEquals(result["h2"], "value2") - self.assertEquals(result.get_all("h3"), ["value4", "value3"]) + self.assertEqual(result["h1"], "value1") + self.assertEqual(result["h2"], "value2") + self.assertEqual(result.get_all("h3"), ["value4", "value3"]) def test_merge_does_not_duplicate_values(self): headers = {"h1": "ValueB"} @@ -316,170 +303,182 @@ def test_merge_does_not_duplicate_values(self): result = 
LocalApigwService._merge_response_headers(headers, multi_value_headers) self.assertIn("h1", result) - self.assertEquals(result.get_all("h1"), ["ValueA", "ValueB", "ValueC"]) + self.assertEqual(result.get_all("h1"), ["ValueA", "ValueB", "ValueC"]) class TestServiceParsingLambdaOutput(TestCase): - def test_default_content_type_header_added_with_no_headers(self): - lambda_output = '{"statusCode": 200, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' \ - '"isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": 200, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' '"isBase64Encoded": false}' + ) (_, headers, _) = LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) self.assertIn("Content-Type", headers) - self.assertEquals(headers["Content-Type"], "application/json") + self.assertEqual(headers["Content-Type"], "application/json") def test_default_content_type_header_added_with_empty_headers(self): - lambda_output = '{"statusCode": 200, "headers":{}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' \ - '"isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": 200, "headers":{}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' + '"isBase64Encoded": false}' + ) (_, headers, _) = LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) self.assertIn("Content-Type", headers) - self.assertEquals(headers["Content-Type"], "application/json") + self.assertEqual(headers["Content-Type"], "application/json") def test_custom_content_type_header_is_not_modified(self): - lambda_output = '{"statusCode": 200, "headers":{"Content-Type": "text/xml"}, "body": "{}", ' \ - '"isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": 200, "headers":{"Content-Type": "text/xml"}, "body": "{}", ' '"isBase64Encoded": false}' + ) (_, headers, _) = LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) self.assertIn("Content-Type", headers) - self.assertEquals(headers["Content-Type"], "text/xml") + self.assertEqual(headers["Content-Type"], "text/xml") def test_custom_content_type_multivalue_header_is_not_modified(self): - lambda_output = '{"statusCode": 200, "multiValueHeaders":{"Content-Type": ["text/xml"]}, "body": "{}", ' \ - '"isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": 200, "multiValueHeaders":{"Content-Type": ["text/xml"]}, "body": "{}", ' + '"isBase64Encoded": false}' + ) (_, headers, _) = LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) self.assertIn("Content-Type", headers) - self.assertEquals(headers["Content-Type"], "text/xml") + self.assertEqual(headers["Content-Type"], "text/xml") def test_multivalue_headers(self): - lambda_output = '{"statusCode": 200, "multiValueHeaders":{"X-Foo": ["bar", "42"]}, ' \ - '"body": "{\\"message\\":\\"Hello from Lambda\\"}", "isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": 200, "multiValueHeaders":{"X-Foo": ["bar", "42"]}, ' + '"body": "{\\"message\\":\\"Hello from Lambda\\"}", "isBase64Encoded": false}' + ) (_, headers, _) = LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) - self.assertEquals(headers, Headers({"Content-Type": "application/json", "X-Foo": ["bar", "42"]})) + self.assertEqual(headers, Headers({"Content-Type": "application/json", "X-Foo": ["bar", "42"]})) def test_single_and_multivalue_headers(self): - lambda_output = '{"statusCode": 200, "headers":{"X-Foo": "foo", "X-Bar": "bar"}, ' \ - 
'"multiValueHeaders":{"X-Foo": ["bar", "42"]}, ' \ - '"body": "{\\"message\\":\\"Hello from Lambda\\"}", "isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": 200, "headers":{"X-Foo": "foo", "X-Bar": "bar"}, ' + '"multiValueHeaders":{"X-Foo": ["bar", "42"]}, ' + '"body": "{\\"message\\":\\"Hello from Lambda\\"}", "isBase64Encoded": false}' + ) (_, headers, _) = LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) - self.assertEquals( - headers, Headers({"Content-Type": "application/json", "X-Bar": "bar", "X-Foo": ["bar", "42", "foo"]})) + self.assertEqual( + headers, Headers({"Content-Type": "application/json", "X-Bar": "bar", "X-Foo": ["bar", "42", "foo"]}) + ) def test_extra_values_raise(self): - lambda_output = '{"statusCode": 200, "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' \ - '"isBase64Encoded": false, "another_key": "some value"}' + lambda_output = ( + '{"statusCode": 200, "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' + '"isBase64Encoded": false, "another_key": "some value"}' + ) with self.assertRaises(ValueError): - LocalApigwService._parse_lambda_output(lambda_output, - binary_types=[], - flask_request=Mock()) + LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) def test_parse_returns_correct_tuple(self): - lambda_output = '{"statusCode": 200, "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' \ - '"isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": 200, "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' + '"isBase64Encoded": false}' + ) - (status_code, headers, body) = LocalApigwService._parse_lambda_output(lambda_output, - binary_types=[], - flask_request=Mock()) + (status_code, headers, body) = LocalApigwService._parse_lambda_output( + lambda_output, binary_types=[], flask_request=Mock() + ) - self.assertEquals(status_code, 200) - self.assertEquals(headers, Headers({"Content-Type": "application/json"})) - self.assertEquals(body, '{"message":"Hello from Lambda"}') + self.assertEqual(status_code, 200) + self.assertEqual(headers, Headers({"Content-Type": "application/json"})) + self.assertEqual(body, '{"message":"Hello from Lambda"}') - @patch('samcli.local.apigw.local_apigw_service.LocalApigwService._should_base64_decode_body') + @patch("samcli.local.apigw.local_apigw_service.LocalApigwService._should_base64_decode_body") def test_parse_returns_decodes_base64_to_binary(self, should_decode_body_patch): should_decode_body_patch.return_value = True binary_body = b"011000100110100101101110011000010111001001111001" # binary in binary - base64_body = base64.b64encode(binary_body).decode('utf-8') - lambda_output = {"statusCode": 200, - "headers": {"Content-Type": "application/octet-stream"}, - "body": base64_body, - "isBase64Encoded": False} - - (status_code, headers, body) = LocalApigwService._parse_lambda_output(json.dumps(lambda_output), - binary_types=['*/*'], - flask_request=Mock()) - - self.assertEquals(status_code, 200) - self.assertEquals(headers, Headers({"Content-Type": "application/octet-stream"})) - self.assertEquals(body, binary_body) + base64_body = base64.b64encode(binary_body).decode("utf-8") + lambda_output = { + "statusCode": 200, + "headers": {"Content-Type": "application/octet-stream"}, + "body": base64_body, + "isBase64Encoded": False, + } + + (status_code, headers, body) = LocalApigwService._parse_lambda_output( + json.dumps(lambda_output), binary_types=["*/*"], flask_request=Mock() 
+ ) + + self.assertEqual(status_code, 200) + self.assertEqual(headers, Headers({"Content-Type": "application/octet-stream"})) + self.assertEqual(body, binary_body) def test_status_code_not_int(self): - lambda_output = '{"statusCode": "str", "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' \ - '"isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": "str", "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' + '"isBase64Encoded": false}' + ) with self.assertRaises(TypeError): - LocalApigwService._parse_lambda_output(lambda_output, - binary_types=[], - flask_request=Mock()) + LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) def test_status_code_int_str(self): - lambda_output = '{"statusCode": "200", "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' \ - '"isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": "200", "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' + '"isBase64Encoded": false}' + ) - (status_code, _, _) = LocalApigwService._parse_lambda_output(lambda_output, - binary_types=[], - flask_request=Mock()) - self.assertEquals(status_code, 200) + (status_code, _, _) = LocalApigwService._parse_lambda_output( + lambda_output, binary_types=[], flask_request=Mock() + ) + self.assertEqual(status_code, 200) def test_status_code_negative_int(self): - lambda_output = '{"statusCode": -1, "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' \ - '"isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": -1, "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' + '"isBase64Encoded": false}' + ) with self.assertRaises(TypeError): - LocalApigwService._parse_lambda_output(lambda_output, - binary_types=[], - flask_request=Mock()) + LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) def test_status_code_negative_int_str(self): - lambda_output = '{"statusCode": "-1", "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' \ - '"isBase64Encoded": false}' + lambda_output = ( + '{"statusCode": "-1", "headers": {}, "body": "{\\"message\\":\\"Hello from Lambda\\"}", ' + '"isBase64Encoded": false}' + ) with self.assertRaises(TypeError): - LocalApigwService._parse_lambda_output(lambda_output, - binary_types=[], - flask_request=Mock()) + LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) def test_lambda_output_list_not_dict(self): - lambda_output = '[]' + lambda_output = "[]" with self.assertRaises(TypeError): - LocalApigwService._parse_lambda_output(lambda_output, - binary_types=[], - flask_request=Mock()) + LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) def test_lambda_output_not_json_serializable(self): - lambda_output = 'some str' + lambda_output = "some str" with self.assertRaises(ValueError): LocalApigwService._parse_lambda_output(lambda_output, binary_types=[], flask_request=Mock()) def test_properties_are_null(self): - lambda_output = '{"statusCode": 0, "headers": null, "body": null, ' \ - '"isBase64Encoded": null}' + lambda_output = '{"statusCode": 0, "headers": null, "body": null, ' '"isBase64Encoded": null}' - (status_code, headers, body) = LocalApigwService._parse_lambda_output(lambda_output, - binary_types=[], - flask_request=Mock()) + (status_code, headers, body) = LocalApigwService._parse_lambda_output( + lambda_output, binary_types=[], flask_request=Mock() + ) - 
self.assertEquals(status_code, 200) - self.assertEquals(headers, Headers({"Content-Type": "application/json"})) - self.assertEquals(body, "no data") + self.assertEqual(status_code, 200) + self.assertEqual(headers, Headers({"Content-Type": "application/json"})) + self.assertEqual(body, "no data") class TestService_construct_event(TestCase): - def setUp(self): self.request_mock = Mock() self.request_mock.endpoint = "endpoint" @@ -498,41 +497,43 @@ def setUp(self): self.request_mock.view_args = {"path": "params"} self.request_mock.scheme = "http" - expected = '{"body": "DATA!!!!", "httpMethod": "GET", ' \ - '"multiValueQueryStringParameters": {"query": ["params"]}, ' \ - '"queryStringParameters": {"query": "params"}, "resource": ' \ - '"endpoint", "requestContext": {"httpMethod": "GET", "requestId": ' \ - '"c6af9ac6-7b61-11e6-9a41-93e8deadbeef", "path": "endpoint", "extendedRequestId": null, ' \ - '"resourceId": "123456", "apiId": "1234567890", "stage": null, "resourcePath": "endpoint", ' \ - '"identity": {"accountId": null, "apiKey": null, "userArn": null, ' \ - '"cognitoAuthenticationProvider": null, "cognitoIdentityPoolId": null, "userAgent": ' \ - '"Custom User Agent String", "caller": null, "cognitoAuthenticationType": null, "sourceIp": ' \ - '"190.0.0.0", "user": null}, "accountId": "123456789012"}, "headers": {"Content-Type": ' \ - '"application/json", "X-Test": "Value", "X-Forwarded-Port": "3000", "X-Forwarded-Proto": "http"}, ' \ - '"multiValueHeaders": {"Content-Type": ["application/json"], "X-Test": ["Value"], ' \ - '"X-Forwarded-Port": ["3000"], "X-Forwarded-Proto": ["http"]}, ' \ - '"stageVariables": null, "path": "path", "pathParameters": {"path": "params"}, ' \ - '"isBase64Encoded": false}' + expected = ( + '{"body": "DATA!!!!", "httpMethod": "GET", ' + '"multiValueQueryStringParameters": {"query": ["params"]}, ' + '"queryStringParameters": {"query": "params"}, "resource": ' + '"endpoint", "requestContext": {"httpMethod": "GET", "requestId": ' + '"c6af9ac6-7b61-11e6-9a41-93e8deadbeef", "path": "endpoint", "extendedRequestId": null, ' + '"resourceId": "123456", "apiId": "1234567890", "stage": null, "resourcePath": "endpoint", ' + '"identity": {"accountId": null, "apiKey": null, "userArn": null, ' + '"cognitoAuthenticationProvider": null, "cognitoIdentityPoolId": null, "userAgent": ' + '"Custom User Agent String", "caller": null, "cognitoAuthenticationType": null, "sourceIp": ' + '"190.0.0.0", "user": null}, "accountId": "123456789012"}, "headers": {"Content-Type": ' + '"application/json", "X-Test": "Value", "X-Forwarded-Port": "3000", "X-Forwarded-Proto": "http"}, ' + '"multiValueHeaders": {"Content-Type": ["application/json"], "X-Test": ["Value"], ' + '"X-Forwarded-Port": ["3000"], "X-Forwarded-Proto": ["http"]}, ' + '"stageVariables": null, "path": "path", "pathParameters": {"path": "params"}, ' + '"isBase64Encoded": false}' + ) self.expected_dict = json.loads(expected) def test_construct_event_with_data(self): actual_event_str = LocalApigwService._construct_event(self.request_mock, 3000, binary_types=[]) - self.assertEquals(json.loads(actual_event_str), self.expected_dict) + self.assertEqual(json.loads(actual_event_str), self.expected_dict) def test_construct_event_no_data(self): self.request_mock.get_data.return_value = None self.expected_dict["body"] = None actual_event_str = LocalApigwService._construct_event(self.request_mock, 3000, binary_types=[]) - self.assertEquals(json.loads(actual_event_str), self.expected_dict) + self.assertEqual(json.loads(actual_event_str), 
self.expected_dict) - @patch('samcli.local.apigw.local_apigw_service.LocalApigwService._should_base64_encode') + @patch("samcli.local.apigw.local_apigw_service.LocalApigwService._should_base64_encode") def test_construct_event_with_binary_data(self, should_base64_encode_patch): should_base64_encode_patch.return_value = True binary_body = b"011000100110100101101110011000010111001001111001" # binary in binary - base64_body = base64.b64encode(binary_body).decode('utf-8') + base64_body = base64.b64encode(binary_body).decode("utf-8") self.request_mock.get_data.return_value = binary_body self.expected_dict["body"] = base64_body @@ -540,7 +541,7 @@ def test_construct_event_with_binary_data(self, should_base64_encode_patch): self.maxDiff = None actual_event_str = LocalApigwService._construct_event(self.request_mock, 3000, binary_types=[]) - self.assertEquals(json.loads(actual_event_str), self.expected_dict) + self.assertEqual(json.loads(actual_event_str), self.expected_dict) def test_event_headers_with_empty_list(self): request_mock = Mock() @@ -550,8 +551,13 @@ def test_event_headers_with_empty_list(self): request_mock.scheme = "http" actual_query_string = LocalApigwService._event_headers(request_mock, "3000") - self.assertEquals(actual_query_string, ({"X-Forwarded-Proto": "http", "X-Forwarded-Port": "3000"}, - {"X-Forwarded-Proto": ["http"], "X-Forwarded-Port": ["3000"]})) + self.assertEqual( + actual_query_string, + ( + {"X-Forwarded-Proto": "http", "X-Forwarded-Port": "3000"}, + {"X-Forwarded-Proto": ["http"], "X-Forwarded-Port": ["3000"]}, + ), + ) def test_event_headers_with_non_empty_list(self): request_mock = Mock() @@ -563,10 +569,23 @@ def test_event_headers_with_non_empty_list(self): request_mock.scheme = "http" actual_query_string = LocalApigwService._event_headers(request_mock, "3000") - self.assertEquals(actual_query_string, ({"Content-Type": "application/json", "X-Test": "Value", - "X-Forwarded-Proto": "http", "X-Forwarded-Port": "3000"}, - {"Content-Type": ["application/json"], "X-Test": ["Value"], - "X-Forwarded-Proto": ["http"], "X-Forwarded-Port": ["3000"]})) + self.assertEqual( + actual_query_string, + ( + { + "Content-Type": "application/json", + "X-Test": "Value", + "X-Forwarded-Proto": "http", + "X-Forwarded-Port": "3000", + }, + { + "Content-Type": ["application/json"], + "X-Test": ["Value"], + "X-Forwarded-Proto": ["http"], + "X-Forwarded-Port": ["3000"], + }, + ), + ) def test_query_string_params_with_empty_params(self): request_mock = Mock() @@ -575,7 +594,7 @@ def test_query_string_params_with_empty_params(self): request_mock.args = query_param_args_mock actual_query_string = LocalApigwService._query_string_params(request_mock) - self.assertEquals(actual_query_string, ({}, {})) + self.assertEqual(actual_query_string, ({}, {})) def test_query_string_params_with_param_value_being_empty_list(self): request_mock = Mock() @@ -584,7 +603,7 @@ def test_query_string_params_with_param_value_being_empty_list(self): request_mock.args = query_param_args_mock actual_query_string = LocalApigwService._query_string_params(request_mock) - self.assertEquals(actual_query_string, ({"param": ""}, {"param": [""]})) + self.assertEqual(actual_query_string, ({"param": ""}, {"param": [""]})) def test_query_string_params_with_param_value_being_non_empty_list(self): request_mock = Mock() @@ -593,46 +612,53 @@ def test_query_string_params_with_param_value_being_non_empty_list(self): request_mock.args = query_param_args_mock actual_query_string = LocalApigwService._query_string_params(request_mock) 
- self.assertEquals(actual_query_string, ({"param": "b"}, {"param": ["a", "b"]})) + self.assertEqual(actual_query_string, ({"param": "b"}, {"param": ["a", "b"]})) class TestService_should_base64_encode(TestCase): - - @parameterized.expand([ - param("Mimeyype is in binary types", ['image/gif'], 'image/gif'), - param("Mimetype defined and binary types has */*", ['*/*'], 'image/gif'), - param("*/* is in binary types with no mimetype defined", ['*/*'], None) - ]) + @parameterized.expand( + [ + param("Mimeyype is in binary types", ["image/gif"], "image/gif"), + param("Mimetype defined and binary types has */*", ["*/*"], "image/gif"), + param("*/* is in binary types with no mimetype defined", ["*/*"], None), + ] + ) def test_should_base64_encode_returns_true(self, test_case_name, binary_types, mimetype): self.assertTrue(LocalApigwService._should_base64_encode(binary_types, mimetype)) - @parameterized.expand([ - param("Mimetype is not in binary types", ['image/gif'], "application/octet-stream") - ]) + @parameterized.expand([param("Mimetype is not in binary types", ["image/gif"], "application/octet-stream")]) def test_should_base64_encode_returns_false(self, test_case_name, binary_types, mimetype): self.assertFalse(LocalApigwService._should_base64_encode(binary_types, mimetype)) class TestServiceCorsToHeaders(TestCase): def test_basic_conversion(self): - cors = Cors(allow_origin="*", allow_methods=','.join(["POST", "OPTIONS"]), allow_headers="UPGRADE-HEADER", - max_age=6) + cors = Cors( + allow_origin="*", allow_methods=",".join(["POST", "OPTIONS"]), allow_headers="UPGRADE-HEADER", max_age=6 + ) headers = Cors.cors_to_headers(cors) - self.assertEquals(headers, {'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'POST,OPTIONS', - 'Access-Control-Allow-Headers': 'UPGRADE-HEADER', 'Access-Control-Max-Age': 6}) + self.assertEqual( + headers, + { + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "POST,OPTIONS", + "Access-Control-Allow-Headers": "UPGRADE-HEADER", + "Access-Control-Max-Age": 6, + }, + ) def test_empty_elements(self): - cors = Cors(allow_origin="www.domain.com", allow_methods=','.join(["GET", "POST", "OPTIONS"])) + cors = Cors(allow_origin="www.domain.com", allow_methods=",".join(["GET", "POST", "OPTIONS"])) headers = Cors.cors_to_headers(cors) - self.assertEquals(headers, - {'Access-Control-Allow-Origin': 'www.domain.com', - 'Access-Control-Allow-Methods': 'GET,POST,OPTIONS'}) + self.assertEqual( + headers, + {"Access-Control-Allow-Origin": "www.domain.com", "Access-Control-Allow-Methods": "GET,POST,OPTIONS"}, + ) class TestRouteEqualsHash(TestCase): - def test_route_in_list(self): route = Route(function_name="test", path="/test", methods=["POST"]) routes = [route] @@ -641,16 +667,16 @@ def test_route_in_list(self): def test_route_method_order_equals(self): route1 = Route(function_name="test", path="/test", methods=["POST", "GET"]) route2 = Route(function_name="test", path="/test", methods=["GET", "POST"]) - self.assertEquals(route1, route2) + self.assertEqual(route1, route2) def test_route_hash(self): route1 = Route(function_name="test", path="/test", methods=["POST", "GET"]) dic = {route1: "test"} - self.assertEquals(dic[route1], "test") + self.assertEqual(dic[route1], "test") def test_route_object_equals(self): route1 = Route(function_name="test", path="/test", methods=["POST", "GET"]) - route2 = type('obj', (object,), {'function_name': 'test', "path": "/test", "methods": ["GET", "POST"]}) + route2 = type("obj", (object,), {"function_name": 
"test", "path": "/test", "methods": ["GET", "POST"]}) self.assertNotEqual(route1, route2) @@ -666,7 +692,7 @@ def test_route_different_path_equals(self): def test_same_object_equals(self): route1 = Route(function_name="test", path="/test", methods=["POST", "GET"]) - self.assertEquals(route1, copy.deepcopy(route1)) + self.assertEqual(route1, copy.deepcopy(route1)) def test_route_function_name_hash(self): route1 = Route(function_name="test1", path="/test", methods=["GET", "POST"]) @@ -680,9 +706,9 @@ def test_route_different_path_hash(self): def test_same_object_hash(self): route1 = Route(function_name="test", path="/test", methods=["POST", "GET"]) - self.assertEquals(route1.__hash__(), copy.deepcopy(route1).__hash__()) + self.assertEqual(route1.__hash__(), copy.deepcopy(route1).__hash__()) def test_route_method_order_hash(self): route1 = Route(function_name="test", path="/test", methods=["POST", "GET"]) route2 = Route(function_name="test", path="/test", methods=["GET", "POST"]) - self.assertEquals(route1.__hash__(), route2.__hash__()) + self.assertEqual(route1.__hash__(), route2.__hash__()) diff --git a/tests/unit/local/apigw/test_path_converter.py b/tests/unit/local/apigw/test_path_converter.py index cba612bb8e..7d237f774e 100644 --- a/tests/unit/local/apigw/test_path_converter.py +++ b/tests/unit/local/apigw/test_path_converter.py @@ -6,98 +6,102 @@ class TestPathConverter_toFlask(TestCase): - def test_single_path_param(self): path = "/{id}" flask_path = PathConverter.convert_path_to_flask(path) - self.assertEquals(flask_path, "/") + self.assertEqual(flask_path, "/") def test_proxy_path(self): path = "/{proxy+}" flask_path = PathConverter.convert_path_to_flask(path) - self.assertEquals(flask_path, "/") - - @parameterized.expand([("/{resource+}", "/"), - ("/a/{id}/b/{resource+}", "/a//b/"), - ("/a/b/{proxy}/{resource+}", "/a/b//"), - ("/{id}/{something+}", "//"), - ("/{a}/{b}/{c}/{d+}", "////") - ]) + self.assertEqual(flask_path, "/") + + @parameterized.expand( + [ + ("/{resource+}", "/"), + ("/a/{id}/b/{resource+}", "/a//b/"), + ("/a/b/{proxy}/{resource+}", "/a/b//"), + ("/{id}/{something+}", "//"), + ("/{a}/{b}/{c}/{d+}", "////"), + ] + ) def test_proxy_path_with_different_name(self, path, expected_result): flask_path = PathConverter.convert_path_to_flask(path) - self.assertEquals(flask_path, expected_result) + self.assertEqual(flask_path, expected_result) def test_proxy_with_path_param(self): path = "/id/{id}/user/{proxy+}" flask_path = PathConverter.convert_path_to_flask(path) - self.assertEquals(flask_path, "/id//user/") + self.assertEqual(flask_path, "/id//user/") def test_multiple_path_params(self): path = "/id/{id}/user/{user}" flask_path = PathConverter.convert_path_to_flask(path) - self.assertEquals(flask_path, "/id//user/") + self.assertEqual(flask_path, "/id//user/") def test_no_changes_to_path(self): path = "/id/user" flask_path = PathConverter.convert_path_to_flask(path) - self.assertEquals(flask_path, "/id/user") + self.assertEqual(flask_path, "/id/user") class TestPathConverter_toApiGateway(TestCase): - def test_single_path_param(self): path = "/" flask_path = PathConverter.convert_path_to_api_gateway(path) - self.assertEquals(flask_path, "/{id}") + self.assertEqual(flask_path, "/{id}") def test_proxy_path(self): path = "/" flask_path = PathConverter.convert_path_to_api_gateway(path) - self.assertEquals(flask_path, "/{proxy+}") - - @parameterized.expand([("/", "/{resource+}"), - ("/a//b/", "/a/{id}/b/{resource+}"), - ("/a/b//", "/a/b/{proxy}/{resource+}"), - ("//", 
"/{id}/{something+}"), - ("////", "/{a}/{b}/{c}/{d+}") - ]) + self.assertEqual(flask_path, "/{proxy+}") + + @parameterized.expand( + [ + ("/", "/{resource+}"), + ("/a//b/", "/a/{id}/b/{resource+}"), + ("/a/b//", "/a/b/{proxy}/{resource+}"), + ("//", "/{id}/{something+}"), + ("////", "/{a}/{b}/{c}/{d+}"), + ] + ) def test_proxy_path_with_different_name(self, path, expected_result): flask_path = PathConverter.convert_path_to_api_gateway(path) - self.assertEquals(flask_path, expected_result) + self.assertEqual(flask_path, expected_result) def test_proxy_with_path_param(self): path = "/id//user/" flask_path = PathConverter.convert_path_to_api_gateway(path) - self.assertEquals(flask_path, "/id/{id}/user/{proxy+}") + self.assertEqual(flask_path, "/id/{id}/user/{proxy+}") def test_multiple_path_params(self): path = "/id//user/" flask_path = PathConverter.convert_path_to_api_gateway(path) - self.assertEquals(flask_path, "/id/{id}/user/{user}") + self.assertEqual(flask_path, "/id/{id}/user/{user}") def test_no_changes_to_path(self): path = "/id/user" flask_path = PathConverter.convert_path_to_api_gateway(path) - self.assertEquals(flask_path, "/id/user") + self.assertEqual(flask_path, "/id/user") diff --git a/tests/unit/local/apigw/test_service_error_responses.py b/tests/unit/local/apigw/test_service_error_responses.py index 5716edd147..433ff30241 100644 --- a/tests/unit/local/apigw/test_service_error_responses.py +++ b/tests/unit/local/apigw/test_service_error_responses.py @@ -5,22 +5,21 @@ class TestServiceErrorResponses(TestCase): - - @patch('samcli.local.apigw.service_error_responses.make_response') - @patch('samcli.local.apigw.service_error_responses.jsonify') + @patch("samcli.local.apigw.service_error_responses.make_response") + @patch("samcli.local.apigw.service_error_responses.jsonify") def test_lambda_failure_response(self, jsonify_patch, make_response_patch): jsonify_patch.return_value = {"json": "Response"} make_response_patch.return_value = {"Some Response"} response = ServiceErrorResponses.lambda_failure_response() - self.assertEquals(response, {"Some Response"}) + self.assertEqual(response, {"Some Response"}) jsonify_patch.assert_called_with({"message": "Internal server error"}) make_response_patch.assert_called_with({"json": "Response"}, 502) - @patch('samcli.local.apigw.service_error_responses.make_response') - @patch('samcli.local.apigw.service_error_responses.jsonify') + @patch("samcli.local.apigw.service_error_responses.make_response") + @patch("samcli.local.apigw.service_error_responses.jsonify") def test_lambda_not_found_response(self, jsonify_patch, make_response_patch): jsonify_patch.return_value = {"json": "Response"} make_response_patch.return_value = {"Some Response"} @@ -28,13 +27,13 @@ def test_lambda_not_found_response(self, jsonify_patch, make_response_patch): response = ServiceErrorResponses.lambda_not_found_response(error_mock) - self.assertEquals(response, {"Some Response"}) + self.assertEqual(response, {"Some Response"}) jsonify_patch.assert_called_with({"message": "No function defined for resource method"}) make_response_patch.assert_called_with({"json": "Response"}, 502) - @patch('samcli.local.apigw.service_error_responses.make_response') - @patch('samcli.local.apigw.service_error_responses.jsonify') + @patch("samcli.local.apigw.service_error_responses.make_response") + @patch("samcli.local.apigw.service_error_responses.jsonify") def test_route_not_found(self, jsonify_patch, make_response_patch): jsonify_patch.return_value = {"json": "Response"} 
make_response_patch.return_value = {"Some Response"} @@ -42,7 +41,7 @@ def test_route_not_found(self, jsonify_patch, make_response_patch): response = ServiceErrorResponses.route_not_found(error_mock) - self.assertEquals(response, {"Some Response"}) + self.assertEqual(response, {"Some Response"}) jsonify_patch.assert_called_with({"message": "Missing Authentication Token"}) make_response_patch.assert_called_with({"json": "Response"}, 403) diff --git a/tests/unit/local/docker/test_container.py b/tests/unit/local/docker/test_container.py index 2b8fe440b7..ce07dc7abf 100644 --- a/tests/unit/local/docker/test_container.py +++ b/tests/unit/local/docker/test_container.py @@ -9,7 +9,6 @@ class TestContainer_init(TestCase): - def setUp(self): self.image = "image" self.cmd = "cmd" @@ -24,31 +23,32 @@ def setUp(self): def test_init_must_store_all_values(self): - container = Container(self.image, - self.cmd, - self.working_dir, - self.host_dir, - self.memory_mb, - self.exposed_ports, - self.entrypoint, - self.env_vars, - self.mock_docker_client) - - self.assertEquals(self.image, container._image) - self.assertEquals(self.cmd, container._cmd) - self.assertEquals(self.working_dir, container._working_dir) - self.assertEquals(self.host_dir, container._host_dir) - self.assertEquals(self.exposed_ports, container._exposed_ports) - self.assertEquals(self.entrypoint, container._entrypoint) - self.assertEquals(self.env_vars, container._env_vars) - self.assertEquals(self.memory_mb, container._memory_limit_mb) - self.assertEquals(None, container._network_id) - self.assertEquals(None, container.id) - self.assertEquals(self.mock_docker_client, container.docker_client) + container = Container( + self.image, + self.cmd, + self.working_dir, + self.host_dir, + self.memory_mb, + self.exposed_ports, + self.entrypoint, + self.env_vars, + self.mock_docker_client, + ) + + self.assertEqual(self.image, container._image) + self.assertEqual(self.cmd, container._cmd) + self.assertEqual(self.working_dir, container._working_dir) + self.assertEqual(self.host_dir, container._host_dir) + self.assertEqual(self.exposed_ports, container._exposed_ports) + self.assertEqual(self.entrypoint, container._entrypoint) + self.assertEqual(self.env_vars, container._env_vars) + self.assertEqual(self.memory_mb, container._memory_limit_mb) + self.assertEqual(None, container._network_id) + self.assertEqual(None, container.id) + self.assertEqual(self.mock_docker_client, container.docker_client) class TestContainer_create(TestCase): - def setUp(self): self.image = "image" self.cmd = "cmd" @@ -59,7 +59,7 @@ def setUp(self): self.entrypoint = ["a", "b", "c"] self.env_vars = {"key": "value"} self.container_opts = {"container": "opts"} - self.additional_volumes = {'/somepath': {"blah": "blah value"}} + self.additional_volumes = {"/somepath": {"blah": "blah value"}} self.mock_docker_client = Mock() self.mock_docker_client.containers = Mock() @@ -73,32 +73,27 @@ def test_must_create_container_with_required_values(self): :return: """ - expected_volumes = { - self.host_dir: { - "bind": self.working_dir, - "mode": "ro,delegated" - } - } + expected_volumes = {self.host_dir: {"bind": self.working_dir, "mode": "ro,delegated"}} generated_id = "fooobar" self.mock_docker_client.containers.create.return_value = Mock() self.mock_docker_client.containers.create.return_value.id = generated_id - container = Container(self.image, - self.cmd, - self.working_dir, - self.host_dir, - docker_client=self.mock_docker_client) + container = Container( + self.image, self.cmd, 
self.working_dir, self.host_dir, docker_client=self.mock_docker_client + ) container_id = container.create() - self.assertEquals(container_id, generated_id) - self.assertEquals(container.id, generated_id) - - self.mock_docker_client.containers.create.assert_called_with(self.image, - command=self.cmd, - working_dir=self.working_dir, - volumes=expected_volumes, - tty=False, - use_config_proxy=True) + self.assertEqual(container_id, generated_id) + self.assertEqual(container.id, generated_id) + + self.mock_docker_client.containers.create.assert_called_with( + self.image, + command=self.cmd, + working_dir=self.working_dir, + volumes=expected_volumes, + tty=False, + use_config_proxy=True, + ) self.mock_docker_client.networks.get.assert_not_called() def test_must_create_container_including_all_optional_values(self): @@ -108,11 +103,8 @@ def test_must_create_container_including_all_optional_values(self): """ expected_volumes = { - self.host_dir: { - "bind": self.working_dir, - "mode": "ro,delegated" - }, - '/somepath': {"blah": "blah value"} + self.host_dir: {"bind": self.working_dir, "mode": "ro,delegated"}, + "/somepath": {"blah": "blah value"}, } expected_memory = "{}m".format(self.memory_mb) @@ -120,35 +112,37 @@ def test_must_create_container_including_all_optional_values(self): self.mock_docker_client.containers.create.return_value = Mock() self.mock_docker_client.containers.create.return_value.id = generated_id - container = Container(self.image, - self.cmd, - self.working_dir, - self.host_dir, - memory_limit_mb=self.memory_mb, - exposed_ports=self.exposed_ports, - entrypoint=self.entrypoint, - env_vars=self.env_vars, - docker_client=self.mock_docker_client, - container_opts=self.container_opts, - additional_volumes=self.additional_volumes - ) + container = Container( + self.image, + self.cmd, + self.working_dir, + self.host_dir, + memory_limit_mb=self.memory_mb, + exposed_ports=self.exposed_ports, + entrypoint=self.entrypoint, + env_vars=self.env_vars, + docker_client=self.mock_docker_client, + container_opts=self.container_opts, + additional_volumes=self.additional_volumes, + ) container_id = container.create() - self.assertEquals(container_id, generated_id) - self.assertEquals(container.id, generated_id) - - self.mock_docker_client.containers.create.assert_called_with(self.image, - command=self.cmd, - working_dir=self.working_dir, - volumes=expected_volumes, - tty=False, - use_config_proxy=True, - environment=self.env_vars, - ports=self.exposed_ports, - entrypoint=self.entrypoint, - mem_limit=expected_memory, - container='opts' - ) + self.assertEqual(container_id, generated_id) + self.assertEqual(container.id, generated_id) + + self.mock_docker_client.containers.create.assert_called_with( + self.image, + command=self.cmd, + working_dir=self.working_dir, + volumes=expected_volumes, + tty=False, + use_config_proxy=True, + environment=self.env_vars, + ports=self.exposed_ports, + entrypoint=self.entrypoint, + mem_limit=expected_memory, + container="opts", + ) self.mock_docker_client.networks.get.assert_not_called() @patch("samcli.local.docker.utils.os") @@ -160,24 +154,13 @@ def test_must_create_container_translate_volume_path(self, os_mock): os_mock.name = "nt" host_dir = "C:\\Users\\Username\\AppData\\Local\\Temp\\tmp1337" - additional_volumes = { - "C:\\Users\\Username\\AppData\\Local\\Temp\\tmp1338": { - "blah": "blah value" - } - } + additional_volumes = {"C:\\Users\\Username\\AppData\\Local\\Temp\\tmp1338": {"blah": "blah value"}} translated_volumes = { - 
"/c/Users/Username/AppData/Local/Temp/tmp1337": { - "bind": self.working_dir, - "mode": "ro,delegated" - } + "/c/Users/Username/AppData/Local/Temp/tmp1337": {"bind": self.working_dir, "mode": "ro,delegated"} } - translated_additional_volumes = { - "/c/Users/Username/AppData/Local/Temp/tmp1338": { - "blah": "blah value" - } - } + translated_additional_volumes = {"/c/Users/Username/AppData/Local/Temp/tmp1338": {"blah": "blah value"}} translated_volumes.update(translated_additional_volumes) expected_memory = "{}m".format(self.memory_mb) @@ -186,35 +169,37 @@ def test_must_create_container_translate_volume_path(self, os_mock): self.mock_docker_client.containers.create.return_value = Mock() self.mock_docker_client.containers.create.return_value.id = generated_id - container = Container(self.image, - self.cmd, - self.working_dir, - host_dir, - memory_limit_mb=self.memory_mb, - exposed_ports=self.exposed_ports, - entrypoint=self.entrypoint, - env_vars=self.env_vars, - docker_client=self.mock_docker_client, - container_opts=self.container_opts, - additional_volumes=additional_volumes - ) + container = Container( + self.image, + self.cmd, + self.working_dir, + host_dir, + memory_limit_mb=self.memory_mb, + exposed_ports=self.exposed_ports, + entrypoint=self.entrypoint, + env_vars=self.env_vars, + docker_client=self.mock_docker_client, + container_opts=self.container_opts, + additional_volumes=additional_volumes, + ) container_id = container.create() - self.assertEquals(container_id, generated_id) - self.assertEquals(container.id, generated_id) - - self.mock_docker_client.containers.create.assert_called_with(self.image, - command=self.cmd, - working_dir=self.working_dir, - volumes=translated_volumes, - tty=False, - use_config_proxy=True, - environment=self.env_vars, - ports=self.exposed_ports, - entrypoint=self.entrypoint, - mem_limit=expected_memory, - container='opts' - ) + self.assertEqual(container_id, generated_id) + self.assertEqual(container.id, generated_id) + + self.mock_docker_client.containers.create.assert_called_with( + self.image, + command=self.cmd, + working_dir=self.working_dir, + volumes=translated_volumes, + tty=False, + use_config_proxy=True, + environment=self.env_vars, + ports=self.exposed_ports, + entrypoint=self.entrypoint, + mem_limit=expected_memory, + container="opts", + ) self.mock_docker_client.networks.get.assert_not_called() def test_must_connect_to_network_on_create(self): @@ -222,12 +207,7 @@ def test_must_connect_to_network_on_create(self): Create a container with only required values. 
Optional values are not provided :return: """ - expected_volumes = { - self.host_dir: { - "bind": self.working_dir, - "mode": "ro,delegated" - } - } + expected_volumes = {self.host_dir: {"bind": self.working_dir, "mode": "ro,delegated"}} network_id = "some id" generated_id = "fooobar" @@ -238,24 +218,23 @@ def test_must_connect_to_network_on_create(self): self.mock_docker_client.networks.get.return_value = network_mock network_mock.connect = Mock() - container = Container(self.image, - self.cmd, - self.working_dir, - self.host_dir, - docker_client=self.mock_docker_client) + container = Container( + self.image, self.cmd, self.working_dir, self.host_dir, docker_client=self.mock_docker_client + ) container.network_id = network_id container_id = container.create() - self.assertEquals(container_id, generated_id) + self.assertEqual(container_id, generated_id) - self.mock_docker_client.containers.create.assert_called_with(self.image, - command=self.cmd, - working_dir=self.working_dir, - tty=False, - use_config_proxy=True, - volumes=expected_volumes - ) + self.mock_docker_client.containers.create.assert_called_with( + self.image, + command=self.cmd, + working_dir=self.working_dir, + tty=False, + use_config_proxy=True, + volumes=expected_volumes, + ) self.mock_docker_client.networks.get.assert_called_with(network_id) network_mock.connect.assert_called_with(container_id) @@ -265,12 +244,7 @@ def test_must_connect_to_host_network_on_create(self): Create a container with only required values. Optional values are not provided :return: """ - expected_volumes = { - self.host_dir: { - "bind": self.working_dir, - "mode": "ro,delegated" - } - } + expected_volumes = {self.host_dir: {"bind": self.working_dir, "mode": "ro,delegated"}} network_id = "host" generated_id = "fooobar" @@ -281,35 +255,32 @@ def test_must_connect_to_host_network_on_create(self): self.mock_docker_client.networks.get.return_value = network_mock network_mock.connect = Mock() - container = Container(self.image, - self.cmd, - self.working_dir, - self.host_dir, - docker_client=self.mock_docker_client) + container = Container( + self.image, self.cmd, self.working_dir, self.host_dir, docker_client=self.mock_docker_client + ) container.network_id = network_id container_id = container.create() - self.assertEquals(container_id, generated_id) - - self.mock_docker_client.containers.create.assert_called_with(self.image, - command=self.cmd, - working_dir=self.working_dir, - tty=False, - use_config_proxy=True, - volumes=expected_volumes, - network_mode='host' - ) + self.assertEqual(container_id, generated_id) + + self.mock_docker_client.containers.create.assert_called_with( + self.image, + command=self.cmd, + working_dir=self.working_dir, + tty=False, + use_config_proxy=True, + volumes=expected_volumes, + network_mode="host", + ) self.mock_docker_client.networks.get.assert_not_called() def test_must_fail_if_already_created(self): - container = Container(self.image, - self.cmd, - self.working_dir, - self.host_dir, - docker_client=self.mock_docker_client) + container = Container( + self.image, self.cmd, self.working_dir, self.host_dir, docker_client=self.mock_docker_client + ) container.is_created = Mock() container.is_created.return_value = True @@ -319,7 +290,6 @@ def test_must_fail_if_already_created(self): class TestContainer_delete(TestCase): - def setUp(self): self.image = "image" self.cmd = "cmd" @@ -330,11 +300,9 @@ def setUp(self): self.mock_docker_client.containers = Mock() self.mock_docker_client.containers.get = Mock() - self.container = 
Container(self.image, - self.cmd, - self.working_dir, - self.host_dir, - docker_client=self.mock_docker_client) + self.container = Container( + self.image, self.cmd, self.working_dir, self.host_dir, docker_client=self.mock_docker_client + ) self.container.id = "someid" self.container.is_created = Mock() @@ -404,7 +372,6 @@ def test_must_skip_if_container_is_not_created(self): class TestContainer_start(TestCase): - def setUp(self): self.image = "image" self.cmd = "cmd" @@ -415,11 +382,9 @@ def setUp(self): self.mock_docker_client.containers = Mock() self.mock_docker_client.containers.get = Mock() - self.container = Container(self.image, - self.cmd, - self.working_dir, - self.host_dir, - docker_client=self.mock_docker_client) + self.container = Container( + self.image, self.cmd, self.working_dir, self.host_dir, docker_client=self.mock_docker_client + ) self.container.id = "someid" self.container.is_created = Mock() @@ -453,7 +418,6 @@ def test_must_not_support_input_data(self): class TestContainer_wait_for_logs(TestCase): - def setUp(self): self.image = "image" self.cmd = ["cmd"] @@ -464,11 +428,9 @@ def setUp(self): self.mock_docker_client.containers = Mock() self.mock_docker_client.containers.get = Mock() - self.container = Container(self.image, - self.cmd, - self.working_dir, - self.host_dir, - docker_client=self.mock_docker_client) + self.container = Container( + self.image, self.cmd, self.working_dir, self.host_dir, docker_client=self.mock_docker_client + ) self.container.id = "someid" self.container.is_created = Mock() @@ -490,8 +452,9 @@ def test_must_fetch_stdout_and_stderr_data(self, attach_mock): self.container.wait_for_logs(stdout=stdout_mock, stderr=stderr_mock) - attach_mock.assert_called_with(self.mock_docker_client, container=real_container_mock, - stdout=True, stderr=True, logs=True) + attach_mock.assert_called_with( + self.mock_docker_client, container=real_container_mock, stdout=True, stderr=True, logs=True + ) self.container._write_container_output.assert_called_with(output_itr, stdout=stdout_mock, stderr=stderr_mock) def test_must_skip_if_no_stdout_and_stderr(self): @@ -508,17 +471,14 @@ def test_must_raise_if_container_is_not_created(self): class TestContainer_write_container_output(TestCase): - def setUp(self): self.output_itr = [ (Container._STDOUT_FRAME_TYPE, b"stdout1"), (Container._STDERR_FRAME_TYPE, b"stderr1"), (30, b"invalid1"), - (Container._STDOUT_FRAME_TYPE, b"stdout2"), (Container._STDERR_FRAME_TYPE, b"stderr2"), (30, b"invalid2"), - (Container._STDOUT_FRAME_TYPE, b"stdout3"), (Container._STDERR_FRAME_TYPE, b"stderr3"), (30, b"invalid3"), @@ -532,21 +492,15 @@ def test_must_write_stdout_and_stderr_data(self): Container._write_container_output(self.output_itr, stdout=self.stdout_mock, stderr=self.stderr_mock) - self.stdout_mock.write.assert_has_calls([ - call(b"stdout1"), call(b"stdout2"), call(b"stdout3") - ]) + self.stdout_mock.write.assert_has_calls([call(b"stdout1"), call(b"stdout2"), call(b"stdout3")]) - self.stderr_mock.write.assert_has_calls([ - call(b"stderr1"), call(b"stderr2"), call(b"stderr3") - ]) + self.stderr_mock.write.assert_has_calls([call(b"stderr1"), call(b"stderr2"), call(b"stderr3")]) def test_must_write_only_stdout(self): Container._write_container_output(self.output_itr, stdout=self.stdout_mock, stderr=None) - self.stdout_mock.write.assert_has_calls([ - call(b"stdout1"), call(b"stdout2"), call(b"stdout3") - ]) + self.stdout_mock.write.assert_has_calls([call(b"stdout1"), call(b"stdout2"), call(b"stdout3")]) 
self.stderr_mock.write.assert_not_called() # stderr must never be called @@ -557,29 +511,25 @@ def test_must_write_only_stderr(self): self.stdout_mock.write.assert_not_called() - self.stderr_mock.write.assert_has_calls([ - call(b"stderr1"), call(b"stderr2"), call(b"stderr3") - ]) + self.stderr_mock.write.assert_has_calls([call(b"stderr1"), call(b"stderr2"), call(b"stderr3")]) class TestContainer_image(TestCase): - def test_must_return_image_value(self): image = "myimage" container = Container(image, "cmd", "dir", "dir") - self.assertEquals(image, container.image) + self.assertEqual(image, container.image) class TestContainer_copy(TestCase): - def setUp(self): self.mock_client = Mock() self.container = Container("image", "cmd", "dir", "dir", docker_client=self.mock_client) self.container.id = "containerid" - @patch('samcli.local.docker.container.tempfile') - @patch('samcli.local.docker.container.tarfile') + @patch("samcli.local.docker.container.tempfile") + @patch("samcli.local.docker.container.tarfile") def test_must_copy_files_from_container(self, tarfile_mock, tempfile_mock): source = "source" dest = "dest" @@ -604,5 +554,5 @@ def test_must_copy_files_from_container(self, tarfile_mock, tempfile_mock): fp_mock.write.assert_has_calls([call(x) for x in tar_stream], any_order=False) # Make sure we open the tarfile right and extract to right location - tarfile_mock.open.assert_called_with(fileobj=fp_mock, mode='r') + tarfile_mock.open.assert_called_with(fileobj=fp_mock, mode="r") tar_mock.extractall(path=dest) diff --git a/tests/unit/local/docker/test_lambda_build_container.py b/tests/unit/local/docker/test_lambda_build_container.py index f51e8589be..037d1450d3 100644 --- a/tests/unit/local/docker/test_lambda_build_container.py +++ b/tests/unit/local/docker/test_lambda_build_container.py @@ -3,6 +3,7 @@ """ import json + try: import pathlib except ImportError: @@ -17,16 +18,11 @@ class TestLambdaBuildContainer_init(TestCase): - @patch.object(LambdaBuildContainer, "_make_request") @patch.object(LambdaBuildContainer, "_get_image") @patch.object(LambdaBuildContainer, "_get_entrypoint") @patch.object(LambdaBuildContainer, "_get_container_dirs") - def test_must_init_class(self, - get_container_dirs_mock, - get_entrypoint_mock, - get_image_mock, - make_request_mock): + def test_must_init_class(self, get_container_dirs_mock, get_entrypoint_mock, get_image_mock, make_request_mock): request = make_request_mock.return_value = "somerequest" entry = get_entrypoint_mock.return_value = "entrypoint" @@ -38,106 +34,113 @@ def test_must_init_class(self, "scratch_dir": "/myscratch", } - container = LambdaBuildContainer("protocol", - "language", - "dependency", - "application", - "/foo/source", - "/bar/manifest.txt", - "runtime", - optimizations="optimizations", - options="options", - log_level="log-level", - mode="mode") - - self.assertEquals(container.image, image) - self.assertEquals(container.executable_name, "lambda-builders") - self.assertEquals(container._entrypoint, entry) - self.assertEquals(container._cmd, []) - self.assertEquals(container._working_dir, container_dirs["source_dir"]) - self.assertEquals(container._host_dir, str(pathlib.Path("/foo/source").resolve())) - self.assertEquals(container._env_vars, {"LAMBDA_BUILDERS_LOG_LEVEL": "log-level"}) - self.assertEquals(container._additional_volumes, { - str(pathlib.Path("/bar").resolve()): { - "bind": container_dirs["manifest_dir"], - "mode": "ro" - } - }) - - self.assertEquals(container._exposed_ports, None) - 
self.assertEquals(container._memory_limit_mb, None) - self.assertEquals(container._network_id, None) - self.assertEquals(container._container_opts, None) + container = LambdaBuildContainer( + "protocol", + "language", + "dependency", + "application", + "/foo/source", + "/bar/manifest.txt", + "runtime", + optimizations="optimizations", + options="options", + log_level="log-level", + mode="mode", + ) + + self.assertEqual(container.image, image) + self.assertEqual(container.executable_name, "lambda-builders") + self.assertEqual(container._entrypoint, entry) + self.assertEqual(container._cmd, []) + self.assertEqual(container._working_dir, container_dirs["source_dir"]) + self.assertEqual(container._host_dir, str(pathlib.Path("/foo/source").resolve())) + self.assertEqual(container._env_vars, {"LAMBDA_BUILDERS_LOG_LEVEL": "log-level"}) + self.assertEqual( + container._additional_volumes, + {str(pathlib.Path("/bar").resolve()): {"bind": container_dirs["manifest_dir"], "mode": "ro"}}, + ) + + self.assertEqual(container._exposed_ports, None) + self.assertEqual(container._memory_limit_mb, None) + self.assertEqual(container._network_id, None) + self.assertEqual(container._container_opts, None) make_request_mock.assert_called_once() get_entrypoint_mock.assert_called_once_with(request) get_image_mock.assert_called_once_with("runtime") - get_container_dirs_mock.assert_called_once_with(str(pathlib.Path("/foo/source").resolve()), - str(pathlib.Path("/bar").resolve())) + get_container_dirs_mock.assert_called_once_with( + str(pathlib.Path("/foo/source").resolve()), str(pathlib.Path("/bar").resolve()) + ) class TestLambdaBuildContainer_make_request(TestCase): - def test_must_make_request_object_string(self): container_dirs = { "source_dir": "source_dir", "artifacts_dir": "artifacts_dir", "scratch_dir": "scratch_dir", - "manifest_dir": "manifest_dir" + "manifest_dir": "manifest_dir", } - result = LambdaBuildContainer._make_request("protocol", - "language", - "dependency", - "application", - container_dirs, - "manifest_file_name", - "runtime", - "optimizations", - "options", - "executable_search_paths", - "mode") + result = LambdaBuildContainer._make_request( + "protocol", + "language", + "dependency", + "application", + container_dirs, + "manifest_file_name", + "runtime", + "optimizations", + "options", + "executable_search_paths", + "mode", + ) self.maxDiff = None # Print whole json diff - self.assertEqual(json.loads(result), { - "jsonschema": "2.0", - "id": 1, - "method": "LambdaBuilder.build", - "params": { - "__protocol_version": "protocol", - "capability": { - "language": "language", - "dependency_manager": "dependency", - "application_framework": "application" + self.assertEqual( + json.loads(result), + { + "jsonschema": "2.0", + "id": 1, + "method": "LambdaBuilder.build", + "params": { + "__protocol_version": "protocol", + "capability": { + "language": "language", + "dependency_manager": "dependency", + "application_framework": "application", + }, + "source_dir": "source_dir", + "artifacts_dir": "artifacts_dir", + "scratch_dir": "scratch_dir", + "manifest_path": "manifest_dir/manifest_file_name", + "runtime": "runtime", + "optimizations": "optimizations", + "options": "options", + "executable_search_paths": "executable_search_paths", + "mode": "mode", }, - "source_dir": "source_dir", - "artifacts_dir": "artifacts_dir", - "scratch_dir": "scratch_dir", - "manifest_path": "manifest_dir/manifest_file_name", - "runtime": "runtime", - "optimizations": "optimizations", - "options": "options", - 
"executable_search_paths": "executable_search_paths", - "mode": "mode" - } - }) + }, + ) class TestLambdaBuildContainer_get_container_dirs(TestCase): - def test_must_return_dirs(self): source_dir = "source" manifest_dir = "manifest" result = LambdaBuildContainer._get_container_dirs(source_dir, manifest_dir) - self.assertEquals(result, { - "source_dir": "/tmp/samcli/source", - "manifest_dir": "/tmp/samcli/manifest", - "artifacts_dir": "/tmp/samcli/artifacts", - "scratch_dir": "/tmp/samcli/scratch", - }) + self.assertEqual( + result, + { + "source_dir": "/tmp/samcli/source", + "manifest_dir": "/tmp/samcli/manifest", + "artifacts_dir": "/tmp/samcli/artifacts", + "scratch_dir": "/tmp/samcli/scratch", + }, + ) def test_must_override_manifest_if_equal_to_source(self): source_dir = "/home/source" @@ -145,70 +148,58 @@ def test_must_override_manifest_if_equal_to_source(self): result = LambdaBuildContainer._get_container_dirs(source_dir, manifest_dir) - self.assertEquals(result, { - - # When source & manifest directories are the same, manifest_dir must be equal to source - "source_dir": "/tmp/samcli/source", - "manifest_dir": "/tmp/samcli/source", - - "artifacts_dir": "/tmp/samcli/artifacts", - "scratch_dir": "/tmp/samcli/scratch", - }) + self.assertEqual( + result, + { + # When source & manifest directories are the same, manifest_dir must be equal to source + "source_dir": "/tmp/samcli/source", + "manifest_dir": "/tmp/samcli/source", + "artifacts_dir": "/tmp/samcli/artifacts", + "scratch_dir": "/tmp/samcli/scratch", + }, + ) class TestLambdaBuildContainer_get_image(TestCase): - - @parameterized.expand([ - ("myruntime", "lambci/lambda:build-myruntime"), - ("nodejs10.x", "amazon/lambda-build-node10.x") - ]) + @parameterized.expand( + [("myruntime", "lambci/lambda:build-myruntime"), ("nodejs10.x", "amazon/lambda-build-node10.x")] + ) def test_must_get_image_name(self, runtime, expected_image_name): - self.assertEquals(expected_image_name, LambdaBuildContainer._get_image(runtime)) + self.assertEqual(expected_image_name, LambdaBuildContainer._get_image(runtime)) class TestLambdaBuildContainer_get_entrypoint(TestCase): - def test_must_get_entrypoint(self): - self.assertEquals(["lambda-builders", "requestjson"], - LambdaBuildContainer._get_entrypoint("requestjson")) + self.assertEqual(["lambda-builders", "requestjson"], LambdaBuildContainer._get_entrypoint("requestjson")) class TestLambdaBuildContainer_convert_to_container_dirs(TestCase): - def test_must_work_on_abs_and_relative_paths(self): input = [".", "../foo", "/some/abs/path"] - mapping = { - str(pathlib.Path(".").resolve()): "/first", - "../foo": "/second", - "/some/abs/path": "/third" - } + mapping = {str(pathlib.Path(".").resolve()): "/first", "../foo": "/second", "/some/abs/path": "/third"} expected = ["/first", "/second", "/third"] result = LambdaBuildContainer._convert_to_container_dirs(input, mapping) - self.assertEquals(result, expected) + self.assertEqual(result, expected) def test_must_skip_unknown_paths(self): input = ["/known/path", "/unknown/path"] - mapping = { - "/known/path": "/first" - } + mapping = {"/known/path": "/first"} expected = ["/first", "/unknown/path"] result = LambdaBuildContainer._convert_to_container_dirs(input, mapping) - self.assertEquals(result, expected) + self.assertEqual(result, expected) def test_must_skip_on_empty_input(self): input = None - mapping = { - "/known/path": "/first" - } + mapping = {"/known/path": "/first"} expected = None result = LambdaBuildContainer._convert_to_container_dirs(input, mapping) - 
self.assertEquals(result, expected) + self.assertEqual(result, expected) diff --git a/tests/unit/local/docker/test_lambda_container.py b/tests/unit/local/docker/test_lambda_container.py index 963e268a0b..cd7afdcd33 100644 --- a/tests/unit/local/docker/test_lambda_container.py +++ b/tests/unit/local/docker/test_lambda_container.py @@ -10,26 +10,26 @@ from samcli.local.docker.lambda_container import LambdaContainer, Runtime from samcli.local.docker.lambda_debug_entrypoint import DebuggingNotSupported -RUNTIMES_WITH_ENTRYPOINT = [Runtime.java8.value, - Runtime.dotnetcore20.value, - Runtime.dotnetcore21.value, - Runtime.go1x.value, - Runtime.nodejs.value, - Runtime.nodejs43.value, - Runtime.nodejs610.value, - Runtime.nodejs810.value, - Runtime.python36.value, - Runtime.python27.value] - -RUNTIMES_WITH_BOOTSTRAP_ENTRYPOINT = [Runtime.nodejs10x.value, - Runtime.python37.value] +RUNTIMES_WITH_ENTRYPOINT = [ + Runtime.java8.value, + Runtime.dotnetcore20.value, + Runtime.dotnetcore21.value, + Runtime.go1x.value, + Runtime.nodejs.value, + Runtime.nodejs43.value, + Runtime.nodejs610.value, + Runtime.nodejs810.value, + Runtime.python36.value, + Runtime.python27.value, +] + +RUNTIMES_WITH_BOOTSTRAP_ENTRYPOINT = [Runtime.nodejs10x.value, Runtime.python37.value] ALL_RUNTIMES = [r for r in Runtime] class TestLambdaContainer_init(TestCase): - def setUp(self): self.runtime = "nodejs4.3" self.handler = "handler" @@ -43,12 +43,14 @@ def setUp(self): @patch.object(LambdaContainer, "_get_entry_point") @patch.object(LambdaContainer, "_get_additional_options") @patch.object(LambdaContainer, "_get_additional_volumes") - def test_must_configure_container_properly(self, - get_additional_volumes_mock, - get_additional_options_mock, - get_entry_point_mock, - get_exposed_ports_mock, - get_image_mock): + def test_must_configure_container_properly( + self, + get_additional_volumes_mock, + get_additional_options_mock, + get_entry_point_mock, + get_exposed_ports_mock, + get_image_mock, + ): image = "image" ports = {"a": "b"} @@ -65,23 +67,25 @@ def test_must_configure_container_properly(self, image_builder_mock = Mock() - container = LambdaContainer(self.runtime, - self.handler, - self.code_dir, - layers=[], - image_builder=image_builder_mock, - env_vars=self.env_var, - memory_mb=self.memory_mb, - debug_options=self.debug_options) - - self.assertEquals(image, container._image) - self.assertEquals(expected_cmd, container._cmd) - self.assertEquals("/var/task", container._working_dir) - self.assertEquals(self.code_dir, container._host_dir) - self.assertEquals(ports, container._exposed_ports) - self.assertEquals(entry, container._entrypoint) - self.assertEquals(self.env_var, container._env_vars) - self.assertEquals(self.memory_mb, container._memory_limit_mb) + container = LambdaContainer( + self.runtime, + self.handler, + self.code_dir, + layers=[], + image_builder=image_builder_mock, + env_vars=self.env_var, + memory_mb=self.memory_mb, + debug_options=self.debug_options, + ) + + self.assertEqual(image, container._image) + self.assertEqual(expected_cmd, container._cmd) + self.assertEqual("/var/task", container._working_dir) + self.assertEqual(self.code_dir, container._host_dir) + self.assertEqual(ports, container._exposed_ports) + self.assertEqual(entry, container._entrypoint) + self.assertEqual(self.env_var, container._env_vars) + self.assertEqual(self.memory_mb, container._memory_limit_mb) get_image_mock.assert_called_with(image_builder_mock, self.runtime, []) get_exposed_ports_mock.assert_called_with(self.debug_options) @@ 
-98,18 +102,17 @@ def test_must_fail_for_unsupported_runtime(self): with self.assertRaises(ValueError) as context: LambdaContainer(runtime, self.handler, self.code_dir, [], image_builder_mock) - self.assertEquals(str(context.exception), "Unsupported Lambda runtime foo") + self.assertEqual(str(context.exception), "Unsupported Lambda runtime foo") class TestLambdaContainer_get_exposed_ports(TestCase): - def test_must_map_same_port_on_host_and_container(self): debug_options = DebugContext(debug_port=12345) expected = {debug_options.debug_port: debug_options.debug_port} result = LambdaContainer._get_exposed_ports(debug_options) - self.assertEquals(expected, result) + self.assertEqual(expected, result) def test_must_skip_if_port_is_not_given(self): @@ -117,7 +120,6 @@ def test_must_skip_if_port_is_not_given(self): class TestLambdaContainer_get_image(TestCase): - def test_must_return_lambci_image(self): expected = "lambci/lambda:foo" @@ -125,11 +127,10 @@ def test_must_return_lambci_image(self): image_builder = Mock() image_builder.build.return_value = expected - self.assertEquals(LambdaContainer._get_image(image_builder, 'foo', []), expected) + self.assertEqual(LambdaContainer._get_image(image_builder, "foo", []), expected) class TestLambdaContainer_get_entry_point(TestCase): - def setUp(self): self.debug_port = 1235 @@ -137,8 +138,9 @@ def setUp(self): self.debug_options = DebugContext(debug_port=1235, debug_args="a=b c=d e=f") def test_must_skip_if_debug_port_is_not_specified(self): - self.assertIsNone(LambdaContainer._get_entry_point("runtime", None), - "Must not provide entrypoint if debug port is not given") + self.assertIsNone( + LambdaContainer._get_entry_point("runtime", None), "Must not provide entrypoint if debug port is not given" + ) @parameterized.expand([param(r) for r in ALL_RUNTIMES]) def test_must_provide_entrypoint_for_certain_runtimes_only(self, runtime): @@ -159,7 +161,7 @@ def test_debug_arg_must_be_split_by_spaces_and_appended_to_entrypoint(self, runt result = LambdaContainer._get_entry_point(runtime, self.debug_options) actual = result[1:4] - self.assertEquals(actual, expected_debug_args) + self.assertEqual(actual, expected_debug_args) @parameterized.expand([param(r) for r in set(RUNTIMES_WITH_BOOTSTRAP_ENTRYPOINT)]) def test_debug_arg_must_be_split_by_spaces_and_appended_to_bootstrap_based_entrypoint(self, runtime): @@ -181,32 +183,30 @@ def test_must_provide_entrypoint_even_without_debug_args(self, runtime): class TestLambdaContainer_get_additional_options(TestCase): - def test_no_additional_options_when_debug_options_is_none(self): debug_options = DebugContext(debug_port=None) - result = LambdaContainer._get_additional_options('runtime', debug_options) + result = LambdaContainer._get_additional_options("runtime", debug_options) self.assertIsNone(result) - @parameterized.expand([param(r) for r in RUNTIMES_WITH_ENTRYPOINT if not r.startswith('go')]) + @parameterized.expand([param(r) for r in RUNTIMES_WITH_ENTRYPOINT if not r.startswith("go")]) def test_default_value_returned_for_non_go_runtimes(self, runtime): debug_options = DebugContext(debug_port=1235) result = LambdaContainer._get_additional_options(runtime, debug_options) - self.assertEquals(result, {}) + self.assertEqual(result, {}) - @parameterized.expand([param(r) for r in RUNTIMES_WITH_ENTRYPOINT if r.startswith('go')]) + @parameterized.expand([param(r) for r in RUNTIMES_WITH_ENTRYPOINT if r.startswith("go")]) def test_go_runtime_returns_additional_options(self, runtime): expected = {"security_opt": 
["seccomp:unconfined"], "cap_add": ["SYS_PTRACE"]} debug_options = DebugContext(debug_port=1235) result = LambdaContainer._get_additional_options(runtime, debug_options) - self.assertEquals(result, expected) + self.assertEqual(result, expected) class TestLambdaContainer_get_additional_volumes(TestCase): - def test_no_additional_volumes_when_debug_options_is_none(self): debug_options = DebugContext(debug_port=None) @@ -220,9 +220,9 @@ def test_no_additional_volumes_when_debuggr_path_is_none(self): self.assertIsNone(result) def test_additional_volumes_returns_volume_with_debugger_path_is_set(self): - expected = {'/somepath': {"bind": "/tmp/lambci_debug_files", "mode": "ro"}} + expected = {"/somepath": {"bind": "/tmp/lambci_debug_files", "mode": "ro"}} - debug_options = DebugContext(debug_port=1234, debugger_path='/somepath') + debug_options = DebugContext(debug_port=1234, debugger_path="/somepath") result = LambdaContainer._get_additional_volumes(debug_options) - self.assertEquals(result, expected) + self.assertEqual(result, expected) diff --git a/tests/unit/local/docker/test_lambda_image.py b/tests/unit/local/docker/test_lambda_image.py index 86934ab524..a9d3972c6d 100644 --- a/tests/unit/local/docker/test_lambda_image.py +++ b/tests/unit/local/docker/test_lambda_image.py @@ -8,14 +8,13 @@ class TestLambdaImage(TestCase): - def test_initialization_without_defaults(self): lambda_image = LambdaImage("layer_downloader", False, False, docker_client="docker_client") - self.assertEquals(lambda_image.layer_downloader, "layer_downloader") + self.assertEqual(lambda_image.layer_downloader, "layer_downloader") self.assertFalse(lambda_image.skip_pull_image) self.assertFalse(lambda_image.force_image_build) - self.assertEquals(lambda_image.docker_client, "docker_client") + self.assertEqual(lambda_image.docker_client, "docker_client") @patch("samcli.local.docker.lambda_image.docker") def test_initialization_with_defaults(self, docker_patch): @@ -24,23 +23,21 @@ def test_initialization_with_defaults(self, docker_patch): lambda_image = LambdaImage("layer_downloader", False, False) - self.assertEquals(lambda_image.layer_downloader, "layer_downloader") + self.assertEqual(lambda_image.layer_downloader, "layer_downloader") self.assertFalse(lambda_image.skip_pull_image) self.assertFalse(lambda_image.force_image_build) - self.assertEquals(lambda_image.docker_client, docker_client_mock) + self.assertEqual(lambda_image.docker_client, docker_client_mock) def test_building_image_with_no_layers(self): docker_client_mock = Mock() lambda_image = LambdaImage("layer_downloader", False, False, docker_client=docker_client_mock) - self.assertEquals(lambda_image.build("python3.6", []), "lambci/lambda:python3.6") + self.assertEqual(lambda_image.build("python3.6", []), "lambci/lambda:python3.6") @patch("samcli.local.docker.lambda_image.LambdaImage._build_image") @patch("samcli.local.docker.lambda_image.LambdaImage._generate_docker_image_version") - def test_not_building_image_that_already_exists(self, - generate_docker_image_version_patch, - build_image_patch): + def test_not_building_image_that_already_exists(self, generate_docker_image_version_patch, build_image_patch): layer_downloader_mock = Mock() layer_mock = Mock() layer_mock.name = "layers1" @@ -55,7 +52,7 @@ def test_not_building_image_that_already_exists(self, lambda_image = LambdaImage(layer_downloader_mock, False, False, docker_client=docker_client_mock) actual_image_id = lambda_image.build("python3.6", [layer_mock]) - self.assertEquals(actual_image_id, 
"samcli/lambda:image-version") + self.assertEqual(actual_image_id, "samcli/lambda:image-version") layer_downloader_mock.download_all.assert_called_once_with([layer_mock], False) generate_docker_image_version_patch.assert_called_once_with([layer_mock], "python3.6") @@ -64,9 +61,9 @@ def test_not_building_image_that_already_exists(self, @patch("samcli.local.docker.lambda_image.LambdaImage._build_image") @patch("samcli.local.docker.lambda_image.LambdaImage._generate_docker_image_version") - def test_force_building_image_that_doesnt_already_exists(self, - generate_docker_image_version_patch, - build_image_patch): + def test_force_building_image_that_doesnt_already_exists( + self, generate_docker_image_version_patch, build_image_patch + ): layer_downloader_mock = Mock() layer_downloader_mock.download_all.return_value = ["layers1"] @@ -78,7 +75,7 @@ def test_force_building_image_that_doesnt_already_exists(self, lambda_image = LambdaImage(layer_downloader_mock, False, True, docker_client=docker_client_mock) actual_image_id = lambda_image.build("python3.6", ["layers1"]) - self.assertEquals(actual_image_id, "samcli/lambda:image-version") + self.assertEqual(actual_image_id, "samcli/lambda:image-version") layer_downloader_mock.download_all.assert_called_once_with(["layers1"], True) generate_docker_image_version_patch.assert_called_once_with(["layers1"], "python3.6") @@ -87,9 +84,9 @@ def test_force_building_image_that_doesnt_already_exists(self, @patch("samcli.local.docker.lambda_image.LambdaImage._build_image") @patch("samcli.local.docker.lambda_image.LambdaImage._generate_docker_image_version") - def test_not_force_building_image_that_doesnt_already_exists(self, - generate_docker_image_version_patch, - build_image_patch): + def test_not_force_building_image_that_doesnt_already_exists( + self, generate_docker_image_version_patch, build_image_patch + ): layer_downloader_mock = Mock() layer_downloader_mock.download_all.return_value = ["layers1"] @@ -101,7 +98,7 @@ def test_not_force_building_image_that_doesnt_already_exists(self, lambda_image = LambdaImage(layer_downloader_mock, False, False, docker_client=docker_client_mock) actual_image_id = lambda_image.build("python3.6", ["layers1"]) - self.assertEquals(actual_image_id, "samcli/lambda:image-version") + self.assertEqual(actual_image_id, "samcli/lambda:image-version") layer_downloader_mock.download_all.assert_called_once_with(["layers1"], False) generate_docker_image_version_patch.assert_called_once_with(["layers1"], "python3.6") @@ -115,13 +112,13 @@ def test_generate_docker_image_version(self, hashlib_patch): haslib_sha256_mock.hexdigest.return_value = "thisisahexdigestofshahash" layer_mock = Mock() - layer_mock.name = 'layer1' + layer_mock.name = "layer1" - image_version = LambdaImage._generate_docker_image_version([layer_mock], 'runtime') + image_version = LambdaImage._generate_docker_image_version([layer_mock], "runtime") - self.assertEquals(image_version, "runtime-thisisahexdigestofshahash") + self.assertEqual(image_version, "runtime-thisisahexdigestofshahash") - hashlib_patch.sha256.assert_called_once_with(b'layer1') + hashlib_patch.sha256.assert_called_once_with(b"layer1") @patch("samcli.local.docker.lambda_image.docker") def test_generate_dockerfile(self, docker_patch): @@ -133,7 +130,7 @@ def test_generate_dockerfile(self, docker_patch): layer_mock = Mock() layer_mock.name = "layer1" - self.assertEquals(LambdaImage._generate_dockerfile("python", [layer_mock]), expected_docker_file) + 
self.assertEqual(LambdaImage._generate_dockerfile("python", [layer_mock]), expected_docker_file) @patch("samcli.local.docker.lambda_image.create_tarball") @patch("samcli.local.docker.lambda_image.uuid") @@ -161,17 +158,16 @@ def test_build_image(self, generate_dockerfile_patch, path_patch, uuid_patch, cr dockerfile_mock = Mock() m = mock_open(dockerfile_mock) with patch("samcli.local.docker.lambda_image.open", m): - LambdaImage(layer_downloader_mock, True, False, docker_client=docker_client_mock)\ - ._build_image("base_image", "docker_tag", [layer_version1]) + LambdaImage(layer_downloader_mock, True, False, docker_client=docker_client_mock)._build_image( + "base_image", "docker_tag", [layer_version1] + ) handle = m() handle.write.assert_called_with("Dockerfile content") path_patch.assert_called_once_with("cached layers", "dockerfile_uuid") - docker_client_mock.images.build.assert_called_once_with(fileobj=tarball_fileobj, - rm=True, - tag="docker_tag", - pull=False, - custom_context=True) + docker_client_mock.images.build.assert_called_once_with( + fileobj=tarball_fileobj, rm=True, tag="docker_tag", pull=False, custom_context=True + ) docker_full_path_mock.unlink.assert_called_once() @@ -179,11 +175,9 @@ def test_build_image(self, generate_dockerfile_patch, path_patch, uuid_patch, cr @patch("samcli.local.docker.lambda_image.uuid") @patch("samcli.local.docker.lambda_image.Path") @patch("samcli.local.docker.lambda_image.LambdaImage._generate_dockerfile") - def test_build_image_fails_with_BuildError(self, - generate_dockerfile_patch, - path_patch, - uuid_patch, - create_tarball_patch): + def test_build_image_fails_with_BuildError( + self, generate_dockerfile_patch, path_patch, uuid_patch, create_tarball_patch + ): uuid_patch.uuid4.return_value = "uuid" generate_dockerfile_patch.return_value = "Dockerfile content" @@ -207,17 +201,16 @@ def test_build_image_fails_with_BuildError(self, m = mock_open(dockerfile_mock) with patch("samcli.local.docker.lambda_image.open", m): with self.assertRaises(ImageBuildException): - LambdaImage(layer_downloader_mock, True, False, docker_client=docker_client_mock) \ - ._build_image("base_image", "docker_tag", [layer_version1]) + LambdaImage(layer_downloader_mock, True, False, docker_client=docker_client_mock)._build_image( + "base_image", "docker_tag", [layer_version1] + ) handle = m() handle.write.assert_called_with("Dockerfile content") path_patch.assert_called_once_with("cached layers", "dockerfile_uuid") - docker_client_mock.images.build.assert_called_once_with(fileobj=tarball_fileobj, - rm=True, - tag="docker_tag", - pull=False, - custom_context=True) + docker_client_mock.images.build.assert_called_once_with( + fileobj=tarball_fileobj, rm=True, tag="docker_tag", pull=False, custom_context=True + ) docker_full_path_mock.unlink.assert_not_called() @@ -225,11 +218,9 @@ def test_build_image_fails_with_BuildError(self, @patch("samcli.local.docker.lambda_image.uuid") @patch("samcli.local.docker.lambda_image.Path") @patch("samcli.local.docker.lambda_image.LambdaImage._generate_dockerfile") - def test_build_image_fails_with_ApiError(self, - generate_dockerfile_patch, - path_patch, - uuid_patch, - create_tarball_patch): + def test_build_image_fails_with_ApiError( + self, generate_dockerfile_patch, path_patch, uuid_patch, create_tarball_patch + ): uuid_patch.uuid4.return_value = "uuid" generate_dockerfile_patch.return_value = "Dockerfile content" @@ -252,15 +243,14 @@ def test_build_image_fails_with_ApiError(self, m = mock_open(dockerfile_mock) with 
patch("samcli.local.docker.lambda_image.open", m): with self.assertRaises(ImageBuildException): - LambdaImage(layer_downloader_mock, True, False, docker_client=docker_client_mock) \ - ._build_image("base_image", "docker_tag", [layer_version1]) + LambdaImage(layer_downloader_mock, True, False, docker_client=docker_client_mock)._build_image( + "base_image", "docker_tag", [layer_version1] + ) handle = m() handle.write.assert_called_with("Dockerfile content") path_patch.assert_called_once_with("cached layers", "dockerfile_uuid") - docker_client_mock.images.build.assert_called_once_with(fileobj=tarball_fileobj, - rm=True, - tag="docker_tag", - pull=False, - custom_context=True) + docker_client_mock.images.build.assert_called_once_with( + fileobj=tarball_fileobj, rm=True, tag="docker_tag", pull=False, custom_context=True + ) docker_full_path_mock.unlink.assert_called_once() diff --git a/tests/unit/local/docker/test_manager.py b/tests/unit/local/docker/test_manager.py index e9347d8994..835631300f 100644 --- a/tests/unit/local/docker/test_manager.py +++ b/tests/unit/local/docker/test_manager.py @@ -13,7 +13,6 @@ class TestContainerManager_init(TestCase): - def test_must_initialize_with_default_value(self): manager = ContainerManager() @@ -21,7 +20,6 @@ def test_must_initialize_with_default_value(self): class TestContainerManager_run(TestCase): - def setUp(self): self.mock_docker_client = Mock() self.manager = ContainerManager(docker_client=self.mock_docker_client) @@ -170,7 +168,6 @@ def test_must_not_create_container_if_it_already_exists(self): class TestContainerManager_pull_image(TestCase): - def setUp(self): self.image_name = "image name" @@ -186,16 +183,13 @@ def test_must_pull_and_print_progress_dots(self): pull_result = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0] self.mock_docker_client.api.pull.return_value = pull_result expected_stream_output = "\nFetching {} Docker container image...{}\n".format( - self.image_name, - '.' * len(pull_result) # Progress bar will print one dot per response from pull API + self.image_name, "." 
* len(pull_result) # Progress bar will print one dot per response from pull API ) self.manager.pull_image(self.image_name, stream=stream) - self.mock_docker_client.api.pull.assert_called_with(self.image_name, - stream=True, - decode=True) - self.assertEquals(stream.getvalue(), expected_stream_output) + self.mock_docker_client.api.pull.assert_called_with(self.image_name, stream=True, decode=True) + self.assertEqual(stream.getvalue(), expected_stream_output) def test_must_raise_if_image_not_found(self): msg = "some error" @@ -205,11 +199,10 @@ def test_must_raise_if_image_not_found(self): self.manager.pull_image("imagename") ex = context.exception - self.assertEquals(str(ex), msg) + self.assertEqual(str(ex), msg) class TestContainerManager_is_docker_reachable(TestCase): - def setUp(self): self.ping_mock = Mock() @@ -244,7 +237,6 @@ def test_must_return_false_if_ping_raises_connection_error(self): class TestContainerManager_has_image(TestCase): - def setUp(self): self.image_name = "image name" @@ -265,7 +257,6 @@ def test_must_not_find_image(self): class TestContainerManager_stop(TestCase): - def test_must_call_delete_on_container(self): manager = ContainerManager() diff --git a/tests/unit/local/docker/test_utils.py b/tests/unit/local/docker/test_utils.py index 3597e4979e..2e4af7ca02 100644 --- a/tests/unit/local/docker/test_utils.py +++ b/tests/unit/local/docker/test_utils.py @@ -11,7 +11,6 @@ class TestUtils(TestCase): - def setUp(self): self.ntpath = "C:\\Users\\UserName\\AppData\\Local\\Temp\\temp1337" self.posixpath = "/c/Users/UserName/AppData/Local/Temp/temp1337" @@ -20,9 +19,9 @@ def setUp(self): @patch("samcli.local.docker.utils.os") def test_convert_posix_path_if_windows_style_path(self, mock_os): mock_os.name = "nt" - self.assertEquals(self.posixpath, to_posix_path(self.ntpath)) + self.assertEqual(self.posixpath, to_posix_path(self.ntpath)) @patch("samcli.local.docker.utils.os") def test_do_not_convert_posix_path(self, mock_os): mock_os.name = "posix" - self.assertEquals(self.current_working_dir, to_posix_path(self.current_working_dir)) + self.assertEqual(self.current_working_dir, to_posix_path(self.current_working_dir)) diff --git a/tests/unit/local/events/test_api_event.py b/tests/unit/local/events/test_api_event.py index ddbc553890..c2c425dab9 100644 --- a/tests/unit/local/events/test_api_event.py +++ b/tests/unit/local/events/test_api_event.py @@ -5,326 +5,341 @@ class TestContextIdentity(TestCase): - def test_class_initialized(self): - identity = ContextIdentity('api_key', - 'user_arn', - 'cognito_authentication_type', - 'caller', - 'user_agent', - 'user', - 'cognito_identity_pool_id', - 'cognito_authentication_provider', - 'source_ip', - 'account_id' - ) - - self.assertEquals(identity.api_key, 'api_key') - self.assertEquals(identity.user_arn, 'user_arn') - self.assertEquals(identity.cognito_authentication_type, 'cognito_authentication_type') - self.assertEquals(identity.caller, 'caller') - self.assertEquals(identity.user_agent, 'user_agent') - self.assertEquals(identity.user, 'user') - self.assertEquals(identity.cognito_identity_pool_id, 'cognito_identity_pool_id') - self.assertEquals(identity.cognito_authentication_provider, 'cognito_authentication_provider') - self.assertEquals(identity.source_ip, 'source_ip') - self.assertEquals(identity.account_id, 'account_id') + identity = ContextIdentity( + "api_key", + "user_arn", + "cognito_authentication_type", + "caller", + "user_agent", + "user", + "cognito_identity_pool_id", + "cognito_authentication_provider", + "source_ip", + 
"account_id", + ) + + self.assertEqual(identity.api_key, "api_key") + self.assertEqual(identity.user_arn, "user_arn") + self.assertEqual(identity.cognito_authentication_type, "cognito_authentication_type") + self.assertEqual(identity.caller, "caller") + self.assertEqual(identity.user_agent, "user_agent") + self.assertEqual(identity.user, "user") + self.assertEqual(identity.cognito_identity_pool_id, "cognito_identity_pool_id") + self.assertEqual(identity.cognito_authentication_provider, "cognito_authentication_provider") + self.assertEqual(identity.source_ip, "source_ip") + self.assertEqual(identity.account_id, "account_id") def test_to_dict(self): - identity = ContextIdentity('api_key', - 'user_arn', - 'cognito_authentication_type', - 'caller', - 'user_agent', - 'user', - 'cognito_identity_pool_id', - 'cognito_authentication_provider', - 'source_ip', - 'account_id' - ) - - expected = {"apiKey": "api_key", - "userArn": "user_arn", - "cognitoAuthenticationType": "cognito_authentication_type", - "caller": "caller", - "userAgent": "user_agent", - "user": "user", - "cognitoIdentityPoolId": "cognito_identity_pool_id", - "cognitoAuthenticationProvider": "cognito_authentication_provider", - "sourceIp": "source_ip", - "accountId": "account_id" - } - - self.assertEquals(identity.to_dict(), expected) + identity = ContextIdentity( + "api_key", + "user_arn", + "cognito_authentication_type", + "caller", + "user_agent", + "user", + "cognito_identity_pool_id", + "cognito_authentication_provider", + "source_ip", + "account_id", + ) + + expected = { + "apiKey": "api_key", + "userArn": "user_arn", + "cognitoAuthenticationType": "cognito_authentication_type", + "caller": "caller", + "userAgent": "user_agent", + "user": "user", + "cognitoIdentityPoolId": "cognito_identity_pool_id", + "cognitoAuthenticationProvider": "cognito_authentication_provider", + "sourceIp": "source_ip", + "accountId": "account_id", + } + + self.assertEqual(identity.to_dict(), expected) def test_to_dict_with_defaults(self): identity = ContextIdentity() - expected = {"apiKey": None, - "userArn": None, - "cognitoAuthenticationType": None, - "caller": None, - "userAgent": "Custom User Agent String", - "user": None, - "cognitoIdentityPoolId": None, - "cognitoAuthenticationProvider": None, - "sourceIp": "127.0.0.1", - "accountId": None - } + expected = { + "apiKey": None, + "userArn": None, + "cognitoAuthenticationType": None, + "caller": None, + "userAgent": "Custom User Agent String", + "user": None, + "cognitoIdentityPoolId": None, + "cognitoAuthenticationProvider": None, + "sourceIp": "127.0.0.1", + "accountId": None, + } - self.assertEquals(identity.to_dict(), expected) + self.assertEqual(identity.to_dict(), expected) class TestRequestContext(TestCase): - def test_class_initialized(self): identity_mock = Mock() - request_context = RequestContext('resource_id', - 'api_id', - 'request_path', - 'request_method', - 'request_id', - 'account_id', - 'prod', - identity_mock, - 'extended_request_id', - 'path' - ) - - self.assertEquals(request_context.resource_id, 'resource_id') - self.assertEquals(request_context.api_id, 'api_id') - self.assertEquals(request_context.resource_path, 'request_path') - self.assertEquals(request_context.http_method, 'request_method') - self.assertEquals(request_context.request_id, 'request_id') - self.assertEquals(request_context.account_id, 'account_id') - self.assertEquals(request_context.stage, 'prod') - self.assertEquals(request_context.identity, identity_mock) - 
self.assertEquals(request_context.extended_request_id, 'extended_request_id') - self.assertEquals(request_context.path, 'path') + request_context = RequestContext( + "resource_id", + "api_id", + "request_path", + "request_method", + "request_id", + "account_id", + "prod", + identity_mock, + "extended_request_id", + "path", + ) + + self.assertEqual(request_context.resource_id, "resource_id") + self.assertEqual(request_context.api_id, "api_id") + self.assertEqual(request_context.resource_path, "request_path") + self.assertEqual(request_context.http_method, "request_method") + self.assertEqual(request_context.request_id, "request_id") + self.assertEqual(request_context.account_id, "account_id") + self.assertEqual(request_context.stage, "prod") + self.assertEqual(request_context.identity, identity_mock) + self.assertEqual(request_context.extended_request_id, "extended_request_id") + self.assertEqual(request_context.path, "path") def test_to_dict(self): identity_mock = Mock() identity_mock.to_dict.return_value = {"identity": "the identity"} - request_context = RequestContext('resource_id', - 'api_id', - 'request_path', - 'request_method', - 'request_id', - 'account_id', - 'prod', - identity_mock, - 'extended_request_id', - 'path' - ) - - expected = {"resourceId": "resource_id", - "apiId": "api_id", - "resourcePath": "request_path", - "httpMethod": "request_method", - "requestId": "request_id", - "accountId": "account_id", - "stage": "prod", - "identity": {"identity": "the identity"}, - "extendedRequestId": "extended_request_id", - "path": "path" - } - - self.assertEquals(request_context.to_dict(), expected) + request_context = RequestContext( + "resource_id", + "api_id", + "request_path", + "request_method", + "request_id", + "account_id", + "prod", + identity_mock, + "extended_request_id", + "path", + ) + + expected = { + "resourceId": "resource_id", + "apiId": "api_id", + "resourcePath": "request_path", + "httpMethod": "request_method", + "requestId": "request_id", + "accountId": "account_id", + "stage": "prod", + "identity": {"identity": "the identity"}, + "extendedRequestId": "extended_request_id", + "path": "path", + } + + self.assertEqual(request_context.to_dict(), expected) def test_to_dict_with_defaults(self): request_context = RequestContext() - expected = {"resourceId": "123456", - "apiId": "1234567890", - "resourcePath": None, - "httpMethod": None, - "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", - "accountId": "123456789012", - "stage": None, - "identity": {}, - "extendedRequestId": None, - "path": None - } + expected = { + "resourceId": "123456", + "apiId": "1234567890", + "resourcePath": None, + "httpMethod": None, + "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", + "accountId": "123456789012", + "stage": None, + "identity": {}, + "extendedRequestId": None, + "path": None, + } - self.assertEquals(request_context.to_dict(), expected) + self.assertEqual(request_context.to_dict(), expected) class TestApiGatewayLambdaEvent(TestCase): - def test_class_initialized(self): - event = ApiGatewayLambdaEvent('request_method', - 'request_data', - 'resource', - 'request_context', - {"query": "some query"}, - {"query": ["some query"]}, - {"header_key": "value"}, - {"header_key": ["value"]}, - {"param": "some param"}, - {"stage_vars": "some vars"}, - 'request_path', - False - ) - - self.assertEquals(event.http_method, 'request_method') - self.assertEquals(event.body, 'request_data') - self.assertEquals(event.resource, 'resource') - self.assertEquals(event.request_context, 
'request_context') - self.assertEquals(event.query_string_params, {"query": "some query"}) - self.assertEquals(event.headers, {"header_key": "value"}) - self.assertEquals(event.path_parameters, {"param": "some param"}) - self.assertEquals(event.stage_variables, {"stage_vars": "some vars"}) - self.assertEquals(event.path, 'request_path') - self.assertEquals(event.is_base_64_encoded, False) + event = ApiGatewayLambdaEvent( + "request_method", + "request_data", + "resource", + "request_context", + {"query": "some query"}, + {"query": ["some query"]}, + {"header_key": "value"}, + {"header_key": ["value"]}, + {"param": "some param"}, + {"stage_vars": "some vars"}, + "request_path", + False, + ) + + self.assertEqual(event.http_method, "request_method") + self.assertEqual(event.body, "request_data") + self.assertEqual(event.resource, "resource") + self.assertEqual(event.request_context, "request_context") + self.assertEqual(event.query_string_params, {"query": "some query"}) + self.assertEqual(event.headers, {"header_key": "value"}) + self.assertEqual(event.path_parameters, {"param": "some param"}) + self.assertEqual(event.stage_variables, {"stage_vars": "some vars"}) + self.assertEqual(event.path, "request_path") + self.assertEqual(event.is_base_64_encoded, False) def test_to_dict(self): request_context_mock = Mock() request_context_mock.to_dict.return_value = {"request_context": "the request context"} - event = ApiGatewayLambdaEvent('request_method', - 'request_data', - 'resource', - request_context_mock, - {"query": "some query"}, - {"query": ["first query", "some query"]}, - {"header_key": "value"}, - {"header_key": ["value"]}, - {"param": "some param"}, - {"stagevars": "some vars"}, - 'request_path', - False - ) - - expected = {"httpMethod": "request_method", - "body": "request_data", - "resource": "resource", - "requestContext": {"request_context": "the request context"}, - "queryStringParameters": {"query": "some query"}, - "multiValueQueryStringParameters": {"query": ["first query", "some query"]}, - "headers": {"header_key": "value"}, - "multiValueHeaders": {"header_key": ["value"]}, - "pathParameters": {"param": "some param"}, - "stageVariables": {"stagevars": "some vars"}, - "path": "request_path", - "isBase64Encoded": False - } - - self.assertEquals(event.to_dict(), expected) + event = ApiGatewayLambdaEvent( + "request_method", + "request_data", + "resource", + request_context_mock, + {"query": "some query"}, + {"query": ["first query", "some query"]}, + {"header_key": "value"}, + {"header_key": ["value"]}, + {"param": "some param"}, + {"stagevars": "some vars"}, + "request_path", + False, + ) + + expected = { + "httpMethod": "request_method", + "body": "request_data", + "resource": "resource", + "requestContext": {"request_context": "the request context"}, + "queryStringParameters": {"query": "some query"}, + "multiValueQueryStringParameters": {"query": ["first query", "some query"]}, + "headers": {"header_key": "value"}, + "multiValueHeaders": {"header_key": ["value"]}, + "pathParameters": {"param": "some param"}, + "stageVariables": {"stagevars": "some vars"}, + "path": "request_path", + "isBase64Encoded": False, + } + + self.assertEqual(event.to_dict(), expected) def test_to_dict_with_defaults(self): event = ApiGatewayLambdaEvent() - expected = {"httpMethod": None, - "body": None, - "resource": None, - "requestContext": {}, - "queryStringParameters": None, - "multiValueQueryStringParameters": None, - "headers": None, - "multiValueHeaders": None, - "pathParameters": None, - 
"stageVariables": None, - "path": None, - "isBase64Encoded": False - } - - self.assertEquals(event.to_dict(), expected) + expected = { + "httpMethod": None, + "body": None, + "resource": None, + "requestContext": {}, + "queryStringParameters": None, + "multiValueQueryStringParameters": None, + "headers": None, + "multiValueHeaders": None, + "pathParameters": None, + "stageVariables": None, + "path": None, + "isBase64Encoded": False, + } + + self.assertEqual(event.to_dict(), expected) def test_init_with_invalid_query_string_params(self): with self.assertRaises(TypeError): - ApiGatewayLambdaEvent('request_method', - 'request_data', - 'resource', - 'request_context', - "not a dict", - {"query": ["first query", "some query"]}, - {"header_key": "value"}, - {"header_key": ["value"]}, - {"param": "some param"}, - {"stage_vars": "some vars"}, - 'request_path', - False - ) + ApiGatewayLambdaEvent( + "request_method", + "request_data", + "resource", + "request_context", + "not a dict", + {"query": ["first query", "some query"]}, + {"header_key": "value"}, + {"header_key": ["value"]}, + {"param": "some param"}, + {"stage_vars": "some vars"}, + "request_path", + False, + ) def test_init_with_invalid_multi_value_query_string_params(self): with self.assertRaises(TypeError): - ApiGatewayLambdaEvent('request_method', - 'request_data', - 'resource', - 'request_context', - {"query": "some query"}, - "not a dict", - {"header_key": "value"}, - {"header_key": ["value"]}, - {"param": "some param"}, - {"stage_vars": "some vars"}, - 'request_path', - False - ) + ApiGatewayLambdaEvent( + "request_method", + "request_data", + "resource", + "request_context", + {"query": "some query"}, + "not a dict", + {"header_key": "value"}, + {"header_key": ["value"]}, + {"param": "some param"}, + {"stage_vars": "some vars"}, + "request_path", + False, + ) def test_init_with_invalid_headers(self): with self.assertRaises(TypeError): - ApiGatewayLambdaEvent('request_method', - 'request_data', - 'resource', - 'request_context', - {"query": "some query"}, - {"query": ["first query", "some query"]}, - "not EnvironHeaders", - {"header_key": ["value"]}, - {"param": "some param"}, - {"stage_vars": "some vars"}, - 'request_path', - False - ) + ApiGatewayLambdaEvent( + "request_method", + "request_data", + "resource", + "request_context", + {"query": "some query"}, + {"query": ["first query", "some query"]}, + "not EnvironHeaders", + {"header_key": ["value"]}, + {"param": "some param"}, + {"stage_vars": "some vars"}, + "request_path", + False, + ) def test_init_with_invalid_multi_value_headers(self): with self.assertRaises(TypeError): - ApiGatewayLambdaEvent('request_method', - 'request_data', - 'resource', - 'request_context', - {"query": "some query"}, - {"query": ["first query", "some query"]}, - {"header_key": "value"}, - "not EnvironHeaders", - {"param": "some param"}, - {"stage_vars": "some vars"}, - 'request_path', - False - ) + ApiGatewayLambdaEvent( + "request_method", + "request_data", + "resource", + "request_context", + {"query": "some query"}, + {"query": ["first query", "some query"]}, + {"header_key": "value"}, + "not EnvironHeaders", + {"param": "some param"}, + {"stage_vars": "some vars"}, + "request_path", + False, + ) def test_init_with_invalid_path_parameters(self): with self.assertRaises(TypeError): - ApiGatewayLambdaEvent('request_method', - 'request_data', - 'resource', - 'request_context', - {"query": "some query"}, - {"query": ["first query", "some query"]}, - {"header_key": "value"}, - {"header_key": ["value"]}, 
- "Not a dict", - {"stage_vars": "some vars"}, - 'request_path', - False - ) + ApiGatewayLambdaEvent( + "request_method", + "request_data", + "resource", + "request_context", + {"query": "some query"}, + {"query": ["first query", "some query"]}, + {"header_key": "value"}, + {"header_key": ["value"]}, + "Not a dict", + {"stage_vars": "some vars"}, + "request_path", + False, + ) def test_init_with_invalid_stage_variables(self): with self.assertRaises(TypeError): - ApiGatewayLambdaEvent('request_method', - 'request_data', - 'resource', - 'request_context', - {"query": "some query"}, - {"query": ["first query", "some query"]}, - {"header_key": "value"}, - {"header_key": ["value"]}, - {"param": "some param"}, - "Not a dict", - 'request_path', - False - ) + ApiGatewayLambdaEvent( + "request_method", + "request_data", + "resource", + "request_context", + {"query": "some query"}, + {"query": ["first query", "some query"]}, + {"header_key": "value"}, + {"header_key": ["value"]}, + {"param": "some param"}, + "Not a dict", + "request_path", + False, + ) diff --git a/tests/unit/local/init/test_init.py b/tests/unit/local/init/test_init.py index 8ecb941a85..b4d29c3979 100644 --- a/tests/unit/local/init/test_init.py +++ b/tests/unit/local/init/test_init.py @@ -8,7 +8,6 @@ class TestInit(TestCase): - def setUp(self): self.location = None self.runtime = "python3.6" @@ -16,7 +15,7 @@ def setUp(self): self.output_dir = "." self.name = "testing project" self.no_input = True - self.extra_context = {'project_name': 'testing project', "runtime": self.runtime} + self.extra_context = {"project_name": "testing project", "runtime": self.runtime} self.template = RUNTIME_DEP_TEMPLATE_MAPPING["python"][0]["init_location"] @patch("samcli.local.init.cookiecutter") @@ -24,35 +23,51 @@ def test_init_successful(self, cookiecutter_patch): # GIVEN generate_project successfully created a project # WHEN a project name has been passed generate_project( - location=self.location, runtime=self.runtime, dependency_manager=self.dependency_manager, + location=self.location, + runtime=self.runtime, + dependency_manager=self.dependency_manager, output_dir=self.output_dir, - name=self.name, no_input=self.no_input) + name=self.name, + no_input=self.no_input, + ) # THEN we should receive no errors cookiecutter_patch.assert_called_once_with( - extra_context=self.extra_context, no_input=self.no_input, - output_dir=self.output_dir, template=self.template) + extra_context=self.extra_context, no_input=self.no_input, output_dir=self.output_dir, template=self.template + ) @patch("samcli.local.init.cookiecutter") def test_init_successful_with_no_dep_manager(self, cookiecutter_patch): generate_project( - location=self.location, runtime=self.runtime, dependency_manager=None, + location=self.location, + runtime=self.runtime, + dependency_manager=None, output_dir=self.output_dir, - name=self.name, no_input=self.no_input) + name=self.name, + no_input=self.no_input, + ) # THEN we should receive no errors cookiecutter_patch.assert_called_once_with( - extra_context=self.extra_context, no_input=self.no_input, - output_dir=self.output_dir, template=self.template) + extra_context=self.extra_context, no_input=self.no_input, output_dir=self.output_dir, template=self.template + ) def test_init_error_with_non_compatible_dependency_manager(self): with self.assertRaises(GenerateProjectFailedError) as ctx: generate_project( - location=self.location, runtime=self.runtime, dependency_manager="gradle", - output_dir=self.output_dir, name=self.name, 
no_input=self.no_input) - self.assertEquals("An error occurred while generating this " - "testing project: Lambda Runtime python3.6 " - "does not support dependency manager: gradle", str(ctx.exception)) + location=self.location, + runtime=self.runtime, + dependency_manager="gradle", + output_dir=self.output_dir, + name=self.name, + no_input=self.no_input, + ) + self.assertEqual( + "An error occurred while generating this " + "testing project: Lambda Runtime python3.6 " + "does not support dependency manager: gradle", + str(ctx.exception), + ) @patch("samcli.local.init.cookiecutter") def test_when_generate_project_returns_error(self, cookiecutter_patch): @@ -67,34 +82,32 @@ def test_when_generate_project_returns_error(self, cookiecutter_patch): # THEN we should receive a GenerateProjectFailedError Exception with self.assertRaises(GenerateProjectFailedError) as ctx: generate_project( - location=self.location, runtime=self.runtime, dependency_manager=self.dependency_manager, - output_dir=self.output_dir, name=self.name, no_input=self.no_input) + location=self.location, + runtime=self.runtime, + dependency_manager=self.dependency_manager, + output_dir=self.output_dir, + name=self.name, + no_input=self.no_input, + ) - self.assertEquals(expected_msg, str(ctx.exception)) + self.assertEqual(expected_msg, str(ctx.exception)) @patch("samcli.local.init.cookiecutter") def test_must_not_set_name_when_location_is_given(self, cookiecutter_patch): - generate_project(runtime=self.runtime, output_dir=self.output_dir, - name=self.name, no_input=False) + generate_project(runtime=self.runtime, output_dir=self.output_dir, name=self.name, no_input=False) - expected_extra_content = { - "project_name": self.name, - "runtime": self.runtime - } + expected_extra_content = {"project_name": self.name, "runtime": self.runtime} # THEN we should receive no errors cookiecutter_patch.assert_called_once_with( - template=self.template, - extra_context=expected_extra_content, no_input=True, - output_dir=self.output_dir) + template=self.template, extra_context=expected_extra_content, no_input=True, output_dir=self.output_dir + ) @patch("samcli.local.init.cookiecutter") def test_must_not_set_extra_content(self, cookiecutter_patch): custom_location = "mylocation" - generate_project(location=custom_location, - runtime=self.runtime, output_dir=self.output_dir, - name=self.name, no_input=False) + generate_project( + location=custom_location, runtime=self.runtime, output_dir=self.output_dir, name=self.name, no_input=False + ) # THEN we should receive no errors - cookiecutter_patch.assert_called_once_with( - template=custom_location, no_input=False, - output_dir=self.output_dir) + cookiecutter_patch.assert_called_once_with(template=custom_location, no_input=False, output_dir=self.output_dir) diff --git a/tests/unit/local/lambda_service/test_lambda_error_responses.py b/tests/unit/local/lambda_service/test_lambda_error_responses.py index 3248cb4e4a..699e70371f 100644 --- a/tests/unit/local/lambda_service/test_lambda_error_responses.py +++ b/tests/unit/local/lambda_service/test_lambda_error_responses.py @@ -5,88 +5,94 @@ class TestLambdaErrorResponses(TestCase): - - @patch('samcli.local.services.base_local_service.BaseLocalService.service_response') + @patch("samcli.local.services.base_local_service.BaseLocalService.service_response") def test_resource_not_found(self, service_response_mock): service_response_mock.return_value = "ResourceNotFound" - response = LambdaErrorResponses.resource_not_found('HelloFunction') + response = 
LambdaErrorResponses.resource_not_found("HelloFunction") - self.assertEquals(response, 'ResourceNotFound') + self.assertEqual(response, "ResourceNotFound") service_response_mock.assert_called_once_with( '{"Type": "User", "Message": "Function not found: ' 'arn:aws:lambda:us-west-2:012345678901:function:HelloFunction"}', - {'x-amzn-errortype': 'ResourceNotFound', 'Content-Type': 'application/json'}, - 404) + {"x-amzn-errortype": "ResourceNotFound", "Content-Type": "application/json"}, + 404, + ) - @patch('samcli.local.services.base_local_service.BaseLocalService.service_response') + @patch("samcli.local.services.base_local_service.BaseLocalService.service_response") def test_invalid_request_content(self, service_response_mock): service_response_mock.return_value = "InvalidRequestContent" - response = LambdaErrorResponses.invalid_request_content('InvalidRequestContent') + response = LambdaErrorResponses.invalid_request_content("InvalidRequestContent") - self.assertEquals(response, 'InvalidRequestContent') + self.assertEqual(response, "InvalidRequestContent") service_response_mock.assert_called_once_with( '{"Type": "User", "Message": "InvalidRequestContent"}', - {'x-amzn-errortype': 'InvalidRequestContent', 'Content-Type': 'application/json'}, - 400) + {"x-amzn-errortype": "InvalidRequestContent", "Content-Type": "application/json"}, + 400, + ) - @patch('samcli.local.services.base_local_service.BaseLocalService.service_response') + @patch("samcli.local.services.base_local_service.BaseLocalService.service_response") def test_unsupported_media_type(self, service_response_mock): service_response_mock.return_value = "UnsupportedMediaType" - response = LambdaErrorResponses.unsupported_media_type('UnsupportedMediaType') + response = LambdaErrorResponses.unsupported_media_type("UnsupportedMediaType") - self.assertEquals(response, 'UnsupportedMediaType') + self.assertEqual(response, "UnsupportedMediaType") service_response_mock.assert_called_once_with( '{"Type": "User", "Message": "Unsupported content type: UnsupportedMediaType"}', - {'x-amzn-errortype': 'UnsupportedMediaType', 'Content-Type': 'application/json'}, - 415) + {"x-amzn-errortype": "UnsupportedMediaType", "Content-Type": "application/json"}, + 415, + ) - @patch('samcli.local.services.base_local_service.BaseLocalService.service_response') + @patch("samcli.local.services.base_local_service.BaseLocalService.service_response") def test_generic_service_exception(self, service_response_mock): service_response_mock.return_value = "GenericServiceException" - response = LambdaErrorResponses.generic_service_exception('GenericServiceException') + response = LambdaErrorResponses.generic_service_exception("GenericServiceException") - self.assertEquals(response, 'GenericServiceException') + self.assertEqual(response, "GenericServiceException") service_response_mock.assert_called_once_with( '{"Type": "Service", "Message": "ServiceException"}', - {'x-amzn-errortype': 'Service', 'Content-Type': 'application/json'}, - 500) + {"x-amzn-errortype": "Service", "Content-Type": "application/json"}, + 500, + ) - @patch('samcli.local.services.base_local_service.BaseLocalService.service_response') + @patch("samcli.local.services.base_local_service.BaseLocalService.service_response") def test_not_implemented_locally(self, service_response_mock): service_response_mock.return_value = "NotImplementedLocally" - response = LambdaErrorResponses.not_implemented_locally('NotImplementedLocally') + response = 
LambdaErrorResponses.not_implemented_locally("NotImplementedLocally") - self.assertEquals(response, 'NotImplementedLocally') + self.assertEqual(response, "NotImplementedLocally") service_response_mock.assert_called_once_with( '{"Type": "LocalService", "Message": "NotImplementedLocally"}', - {'x-amzn-errortype': 'NotImplemented', 'Content-Type': 'application/json'}, - 501) + {"x-amzn-errortype": "NotImplemented", "Content-Type": "application/json"}, + 501, + ) - @patch('samcli.local.services.base_local_service.BaseLocalService.service_response') + @patch("samcli.local.services.base_local_service.BaseLocalService.service_response") def test_generic_path_not_found(self, service_response_mock): service_response_mock.return_value = "GenericPathNotFound" - response = LambdaErrorResponses.generic_path_not_found('GenericPathNotFound') + response = LambdaErrorResponses.generic_path_not_found("GenericPathNotFound") - self.assertEquals(response, 'GenericPathNotFound') + self.assertEqual(response, "GenericPathNotFound") service_response_mock.assert_called_once_with( '{"Type": "LocalService", "Message": "PathNotFoundException"}', - {'x-amzn-errortype': 'PathNotFoundLocally', 'Content-Type': 'application/json'}, - 404) + {"x-amzn-errortype": "PathNotFoundLocally", "Content-Type": "application/json"}, + 404, + ) - @patch('samcli.local.services.base_local_service.BaseLocalService.service_response') + @patch("samcli.local.services.base_local_service.BaseLocalService.service_response") def test_generic_method_not_allowed(self, service_response_mock): service_response_mock.return_value = "GenericMethodNotAllowed" - response = LambdaErrorResponses.generic_method_not_allowed('GenericMethodNotAllowed') + response = LambdaErrorResponses.generic_method_not_allowed("GenericMethodNotAllowed") - self.assertEquals(response, 'GenericMethodNotAllowed') + self.assertEqual(response, "GenericMethodNotAllowed") service_response_mock.assert_called_once_with( '{"Type": "LocalService", "Message": "MethodNotAllowedException"}', - {'x-amzn-errortype': 'MethodNotAllowedLocally', 'Content-Type': 'application/json'}, - 405) + {"x-amzn-errortype": "MethodNotAllowedLocally", "Content-Type": "application/json"}, + 405, + ) diff --git a/tests/unit/local/lambda_service/test_local_lambda_invoke_service.py b/tests/unit/local/lambda_service/test_local_lambda_invoke_service.py index 4ca10d65b5..ae92926c4c 100644 --- a/tests/unit/local/lambda_service/test_local_lambda_invoke_service.py +++ b/tests/unit/local/lambda_service/test_local_lambda_invoke_service.py @@ -6,26 +6,25 @@ class TestLocalLambdaService(TestCase): - def test_initalize_creates_default_values(self): lambda_runner_mock = Mock() - service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3001, host='127.0.0.1') - self.assertEquals(service.port, 3001) - self.assertEquals(service.host, '127.0.0.1') - self.assertEquals(service.lambda_runner, lambda_runner_mock) + service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3001, host="127.0.0.1") + self.assertEqual(service.port, 3001) + self.assertEqual(service.host, "127.0.0.1") + self.assertEqual(service.lambda_runner, lambda_runner_mock) self.assertIsNone(service.stderr) def test_initalize_with_values(self): lambda_runner_mock = Mock() stderr_mock = Mock() - local_service = LocalLambdaInvokeService(lambda_runner_mock, port=5000, host='129.0.0.0', stderr=stderr_mock) - self.assertEquals(local_service.port, 5000) - self.assertEquals(local_service.host, '129.0.0.0') - 
self.assertEquals(local_service.stderr, stderr_mock) - self.assertEquals(local_service.lambda_runner, lambda_runner_mock) - - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService._construct_error_handling') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.Flask') + local_service = LocalLambdaInvokeService(lambda_runner_mock, port=5000, host="129.0.0.0", stderr=stderr_mock) + self.assertEqual(local_service.port, 5000) + self.assertEqual(local_service.host, "129.0.0.0") + self.assertEqual(local_service.stderr, stderr_mock) + self.assertEqual(local_service.lambda_runner, lambda_runner_mock) + + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService._construct_error_handling") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.Flask") def test_create_service_endpoints(self, flask_mock, error_handling_mock): app_mock = Mock() flask_mock.return_value = app_mock @@ -33,217 +32,219 @@ def test_create_service_endpoints(self, flask_mock, error_handling_mock): error_handling_mock.return_value = Mock() lambda_runner_mock = Mock() - service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host='localhost') + service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host="localhost") service.create() - app_mock.add_url_rule.assert_called_once_with('/2015-03-31/functions//invocations', - endpoint='/2015-03-31/functions//invocations', - view_func=service._invoke_request_handler, - methods=['POST'], - provide_automatic_options=False) - - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService.service_response') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaOutputParser') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.request') + app_mock.add_url_rule.assert_called_once_with( + "/2015-03-31/functions//invocations", + endpoint="/2015-03-31/functions//invocations", + view_func=service._invoke_request_handler, + methods=["POST"], + provide_automatic_options=False, + ) + + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService.service_response") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaOutputParser") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") def test_invoke_request_handler(self, request_mock, lambda_output_parser_mock, service_response_mock): - lambda_output_parser_mock.get_lambda_output.return_value = 'hello world', None, False - service_response_mock.return_value = 'request response' - request_mock.get_data.return_value = b'{}' + lambda_output_parser_mock.get_lambda_output.return_value = "hello world", None, False + service_response_mock.return_value = "request response" + request_mock.get_data.return_value = b"{}" lambda_runner_mock = Mock() - service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host='localhost') + service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host="localhost") - response = service._invoke_request_handler(function_name='HelloWorld') + response = service._invoke_request_handler(function_name="HelloWorld") - self.assertEquals(response, 'request response') + self.assertEqual(response, "request response") - lambda_runner_mock.invoke.assert_called_once_with('HelloWorld', '{}', stdout=ANY, stderr=None) - service_response_mock.assert_called_once_with('hello world', {'Content-Type': 'application/json'}, 200) + 
lambda_runner_mock.invoke.assert_called_once_with("HelloWorld", "{}", stdout=ANY, stderr=None) + service_response_mock.assert_called_once_with("hello world", {"Content-Type": "application/json"}, 200) - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.request') + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") def test_invoke_request_handler_on_incorrect_path(self, request_mock, lambda_error_responses_mock): - request_mock.get_data.return_value = b'{}' + request_mock.get_data.return_value = b"{}" lambda_runner_mock = Mock() lambda_runner_mock.invoke.side_effect = FunctionNotFound lambda_error_responses_mock.resource_not_found.return_value = "Couldn't find Lambda" - service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host='localhost') + service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host="localhost") - response = service._invoke_request_handler(function_name='NotFound') + response = service._invoke_request_handler(function_name="NotFound") - self.assertEquals(response, "Couldn't find Lambda") + self.assertEqual(response, "Couldn't find Lambda") - lambda_runner_mock.invoke.assert_called_once_with('NotFound', '{}', stdout=ANY, stderr=None) + lambda_runner_mock.invoke.assert_called_once_with("NotFound", "{}", stdout=ANY, stderr=None) - lambda_error_responses_mock.resource_not_found.assert_called_once_with('NotFound') + lambda_error_responses_mock.resource_not_found.assert_called_once_with("NotFound") - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService.service_response') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaOutputParser') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.request') - def test_request_handler_returns_process_stdout_when_making_response(self, request_mock, lambda_output_parser_mock, - service_response_mock): - request_mock.get_data.return_value = b'{}' + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService.service_response") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaOutputParser") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") + def test_request_handler_returns_process_stdout_when_making_response( + self, request_mock, lambda_output_parser_mock, service_response_mock + ): + request_mock.get_data.return_value = b"{}" lambda_logs = "logs" lambda_response = "response" is_customer_error = False lambda_output_parser_mock.get_lambda_output.return_value = lambda_response, lambda_logs, is_customer_error - service_response_mock.return_value = 'request response' + service_response_mock.return_value = "request response" lambda_runner_mock = Mock() stderr_mock = Mock() - service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, - port=3000, - host='localhost', - stderr=stderr_mock) + service = LocalLambdaInvokeService( + lambda_runner=lambda_runner_mock, port=3000, host="localhost", stderr=stderr_mock + ) - result = service._invoke_request_handler(function_name='HelloWorld') + result = service._invoke_request_handler(function_name="HelloWorld") - self.assertEquals(result, 'request response') + self.assertEqual(result, "request response") lambda_output_parser_mock.get_lambda_output.assert_called_with(ANY) # Make sure the logs 
are written to stderr stderr_mock.write.assert_called_with(lambda_logs) - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses') + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses") def test_construct_error_handling(self, lambda_error_response_mock): - service = LocalLambdaInvokeService(lambda_runner=Mock(), - port=3000, - host='localhost', - stderr=Mock()) + service = LocalLambdaInvokeService(lambda_runner=Mock(), port=3000, host="localhost", stderr=Mock()) flask_app_mock = Mock() service._app = flask_app_mock service._construct_error_handling() - flask_app_mock.register_error_handler.assert_has_calls([ - call(500, lambda_error_response_mock.generic_service_exception), - call(404, lambda_error_response_mock.generic_path_not_found), - call(405, lambda_error_response_mock.generic_method_not_allowed)]) - - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService.service_response') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaOutputParser') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.request') - def test_invoke_request_handler_with_lambda_that_errors(self, - request_mock, - lambda_output_parser_mock, - service_response_mock): - lambda_output_parser_mock.get_lambda_output.return_value = 'hello world', None, True - service_response_mock.return_value = 'request response' - request_mock.get_data.return_value = b'{}' + flask_app_mock.register_error_handler.assert_has_calls( + [ + call(500, lambda_error_response_mock.generic_service_exception), + call(404, lambda_error_response_mock.generic_path_not_found), + call(405, lambda_error_response_mock.generic_method_not_allowed), + ] + ) + + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService.service_response") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaOutputParser") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") + def test_invoke_request_handler_with_lambda_that_errors( + self, request_mock, lambda_output_parser_mock, service_response_mock + ): + lambda_output_parser_mock.get_lambda_output.return_value = "hello world", None, True + service_response_mock.return_value = "request response" + request_mock.get_data.return_value = b"{}" lambda_runner_mock = Mock() - service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host='localhost') + service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host="localhost") - response = service._invoke_request_handler(function_name='HelloWorld') + response = service._invoke_request_handler(function_name="HelloWorld") - self.assertEquals(response, 'request response') + self.assertEqual(response, "request response") - lambda_runner_mock.invoke.assert_called_once_with('HelloWorld', '{}', stdout=ANY, stderr=None) - service_response_mock.assert_called_once_with('hello world', - {'Content-Type': 'application/json', - 'x-amz-function-error': 'Unhandled'}, - 200) + lambda_runner_mock.invoke.assert_called_once_with("HelloWorld", "{}", stdout=ANY, stderr=None) + service_response_mock.assert_called_once_with( + "hello world", {"Content-Type": "application/json", "x-amz-function-error": "Unhandled"}, 200 + ) - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService.service_response') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaOutputParser') - 
@patch('samcli.local.lambda_service.local_lambda_invoke_service.request') + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LocalLambdaInvokeService.service_response") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaOutputParser") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") def test_invoke_request_handler_with_no_data(self, request_mock, lambda_output_parser_mock, service_response_mock): - lambda_output_parser_mock.get_lambda_output.return_value = 'hello world', None, False - service_response_mock.return_value = 'request response' + lambda_output_parser_mock.get_lambda_output.return_value = "hello world", None, False + service_response_mock.return_value = "request response" request_mock.get_data.return_value = None lambda_runner_mock = Mock() - service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host='localhost') + service = LocalLambdaInvokeService(lambda_runner=lambda_runner_mock, port=3000, host="localhost") - response = service._invoke_request_handler(function_name='HelloWorld') + response = service._invoke_request_handler(function_name="HelloWorld") - self.assertEquals(response, 'request response') + self.assertEqual(response, "request response") - lambda_runner_mock.invoke.assert_called_once_with('HelloWorld', '{}', stdout=ANY, stderr=None) - service_response_mock.assert_called_once_with('hello world', {'Content-Type': 'application/json'}, 200) + lambda_runner_mock.invoke.assert_called_once_with("HelloWorld", "{}", stdout=ANY, stderr=None) + service_response_mock.assert_called_once_with("hello world", {"Content-Type": "application/json"}, 200) class TestValidateRequestHandling(TestCase): - - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.request') + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") def test_request_with_non_json_data(self, flask_request, lambda_error_responses_mock): - flask_request.get_data.return_value = b'notat:asdfasdf' + flask_request.get_data.return_value = b"notat:asdfasdf" flask_request.headers = {} - flask_request.content_type = 'application/json' + flask_request.content_type = "application/json" flask_request.args = {} lambda_error_responses_mock.invalid_request_content.return_value = "InvalidRequestContent" response = LocalLambdaInvokeService.validate_request() - self.assertEquals(response, "InvalidRequestContent") + self.assertEqual(response, "InvalidRequestContent") expected_called_with = "Could not parse request body into json: No JSON object could be decoded" lambda_error_responses_mock.invalid_request_content.assert_called_once_with(expected_called_with) - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.request') + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") def test_request_with_query_strings(self, flask_request, lambda_error_responses_mock): flask_request.get_data.return_value = None flask_request.headers = {} - flask_request.content_type = 'application/json' + flask_request.content_type = "application/json" flask_request.args = {"key": "value"} lambda_error_responses_mock.invalid_request_content.return_value = 
"InvalidRequestContent" response = LocalLambdaInvokeService.validate_request() - self.assertEquals(response, "InvalidRequestContent") + self.assertEqual(response, "InvalidRequestContent") lambda_error_responses_mock.invalid_request_content.assert_called_once_with( - "Query Parameters are not supported") + "Query Parameters are not supported" + ) - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.request') + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") def test_request_log_type_not_None(self, flask_request, lambda_error_responses_mock): flask_request.get_data.return_value = None - flask_request.headers = {'X-Amz-Log-Type': 'Tail'} - flask_request.content_type = 'application/json' + flask_request.headers = {"X-Amz-Log-Type": "Tail"} + flask_request.content_type = "application/json" flask_request.args = {} lambda_error_responses_mock.not_implemented_locally.return_value = "NotImplementedLocally" response = LocalLambdaInvokeService.validate_request() - self.assertEquals(response, "NotImplementedLocally") + self.assertEqual(response, "NotImplementedLocally") lambda_error_responses_mock.not_implemented_locally.assert_called_once_with( - "log-type: Tail is not supported. None is only supported.") + "log-type: Tail is not supported. None is only supported." + ) - @patch('samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses') - @patch('samcli.local.lambda_service.local_lambda_invoke_service.request') + @patch("samcli.local.lambda_service.local_lambda_invoke_service.LambdaErrorResponses") + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") def test_request_invocation_type_not_ResponseRequest(self, flask_request, lambda_error_responses_mock): flask_request.get_data.return_value = None - flask_request.headers = {'X-Amz-Invocation-Type': 'DryRun'} - flask_request.content_type = 'application/json' + flask_request.headers = {"X-Amz-Invocation-Type": "DryRun"} + flask_request.content_type = "application/json" flask_request.args = {} lambda_error_responses_mock.not_implemented_locally.return_value = "NotImplementedLocally" response = LocalLambdaInvokeService.validate_request() - self.assertEquals(response, "NotImplementedLocally") + self.assertEqual(response, "NotImplementedLocally") lambda_error_responses_mock.not_implemented_locally.assert_called_once_with( - "invocation-type: DryRun is not supported. RequestResponse is only supported.") + "invocation-type: DryRun is not supported. RequestResponse is only supported." 
+ ) - @patch('samcli.local.lambda_service.local_lambda_invoke_service.request') + @patch("samcli.local.lambda_service.local_lambda_invoke_service.request") def test_request_with_no_data(self, flask_request): flask_request.get_data.return_value = None flask_request.headers = {} - flask_request.content_type = 'application/json' + flask_request.content_type = "application/json" flask_request.args = {} response = LocalLambdaInvokeService.validate_request() diff --git a/tests/unit/local/lambdafn/test_config.py b/tests/unit/local/lambdafn/test_config.py index 4f62ffa81a..d53e7d3ee3 100644 --- a/tests/unit/local/lambdafn/test_config.py +++ b/tests/unit/local/lambdafn/test_config.py @@ -1,4 +1,3 @@ - from unittest import TestCase from mock import Mock @@ -18,37 +17,45 @@ def setUp(self): self.memory = 1234 self.timeout = 34 self.env_vars_mock = Mock() - self.layers = ['layer1'] + self.layers = ["layer1"] def test_init_with_env_vars(self): - config = FunctionConfig(self.name, self.runtime, self.handler, self.code_path, self.layers, - memory=self.memory, timeout=self.timeout, env_vars=self.env_vars_mock) - - self.assertEquals(config.name, self.name) - self.assertEquals(config.runtime, self.runtime) - self.assertEquals(config.handler, self.handler) - self.assertEquals(config.code_abs_path, self.code_path) - self.assertEquals(config.layers, self.layers) - self.assertEquals(config.memory, self.memory) - self.assertEquals(config.timeout, self.timeout) - self.assertEquals(config.env_vars, self.env_vars_mock) - - self.assertEquals(self.env_vars_mock.handler, self.handler) - self.assertEquals(self.env_vars_mock.memory, self.memory) - self.assertEquals(self.env_vars_mock.timeout, self.timeout) + config = FunctionConfig( + self.name, + self.runtime, + self.handler, + self.code_path, + self.layers, + memory=self.memory, + timeout=self.timeout, + env_vars=self.env_vars_mock, + ) + + self.assertEqual(config.name, self.name) + self.assertEqual(config.runtime, self.runtime) + self.assertEqual(config.handler, self.handler) + self.assertEqual(config.code_abs_path, self.code_path) + self.assertEqual(config.layers, self.layers) + self.assertEqual(config.memory, self.memory) + self.assertEqual(config.timeout, self.timeout) + self.assertEqual(config.env_vars, self.env_vars_mock) + + self.assertEqual(self.env_vars_mock.handler, self.handler) + self.assertEqual(self.env_vars_mock.memory, self.memory) + self.assertEqual(self.env_vars_mock.timeout, self.timeout) def test_init_without_optional_values(self): config = FunctionConfig(self.name, self.runtime, self.handler, self.code_path, self.layers) - self.assertEquals(config.name, self.name) - self.assertEquals(config.runtime, self.runtime) - self.assertEquals(config.handler, self.handler) - self.assertEquals(config.code_abs_path, self.code_path) - self.assertEquals(config.layers, self.layers) - self.assertEquals(config.memory, self.DEFAULT_MEMORY) - self.assertEquals(config.timeout, self.DEFAULT_TIMEOUT) + self.assertEqual(config.name, self.name) + self.assertEqual(config.runtime, self.runtime) + self.assertEqual(config.handler, self.handler) + self.assertEqual(config.code_abs_path, self.code_path) + self.assertEqual(config.layers, self.layers) + self.assertEqual(config.memory, self.DEFAULT_MEMORY) + self.assertEqual(config.timeout, self.DEFAULT_TIMEOUT) self.assertIsNotNone(config.env_vars) - self.assertEquals(config.env_vars.handler, self.handler) - self.assertEquals(config.env_vars.memory, self.DEFAULT_MEMORY) - self.assertEquals(config.env_vars.timeout, 
self.DEFAULT_TIMEOUT) + self.assertEqual(config.env_vars.handler, self.handler) + self.assertEqual(config.env_vars.memory, self.DEFAULT_MEMORY) + self.assertEqual(config.env_vars.timeout, self.DEFAULT_TIMEOUT) diff --git a/tests/unit/local/lambdafn/test_env_vars.py b/tests/unit/local/lambdafn/test_env_vars.py index 6b34ad22e9..3e319fc682 100644 --- a/tests/unit/local/lambdafn/test_env_vars.py +++ b/tests/unit/local/lambdafn/test_env_vars.py @@ -8,7 +8,6 @@ class TestEnvironmentVariables_init(TestCase): - def test_must_initialize_with_empty_values(self): memory = 123 @@ -20,9 +19,9 @@ def test_must_initialize_with_empty_values(self): environ.timeout = timeout environ.handler = handler - self.assertEquals(environ.memory, memory) - self.assertEquals(environ.timeout, timeout) - self.assertEquals(environ.handler, handler) + self.assertEqual(environ.memory, memory) + self.assertEqual(environ.timeout, timeout) + self.assertEqual(environ.handler, handler) def test_must_initialize_values_with_required_values(self): memory = 123 @@ -30,13 +29,13 @@ def test_must_initialize_values_with_required_values(self): handler = "handler" environ = EnvironmentVariables(memory, timeout, handler) - self.assertEquals(environ.memory, memory) - self.assertEquals(environ.timeout, timeout) - self.assertEquals(environ.handler, handler) - self.assertEquals(environ.variables, {}) - self.assertEquals(environ.shell_env_values, {}) - self.assertEquals(environ.override_values, {}) - self.assertEquals(environ.aws_creds, {}) + self.assertEqual(environ.memory, memory) + self.assertEqual(environ.timeout, timeout) + self.assertEqual(environ.handler, handler) + self.assertEqual(environ.variables, {}) + self.assertEqual(environ.shell_env_values, {}) + self.assertEqual(environ.override_values, {}) + self.assertEqual(environ.aws_creds, {}) def test_must_initialize_with_optional_values(self): memory = 123 @@ -47,20 +46,23 @@ def test_must_initialize_with_optional_values(self): overrides = {"e": "f"} aws_creds = {"g": "h"} - environ = EnvironmentVariables(memory, timeout, handler, - variables=variables, - shell_env_values=shell_values, - override_values=overrides, - aws_creds=aws_creds) + environ = EnvironmentVariables( + memory, + timeout, + handler, + variables=variables, + shell_env_values=shell_values, + override_values=overrides, + aws_creds=aws_creds, + ) - self.assertEquals(environ.variables, {"a": "b"}) - self.assertEquals(environ.shell_env_values, {"c": "d"}) - self.assertEquals(environ.override_values, {"e": "f"}) - self.assertEquals(environ.aws_creds, {"g": "h"}) + self.assertEqual(environ.variables, {"a": "b"}) + self.assertEqual(environ.shell_env_values, {"c": "d"}) + self.assertEqual(environ.override_values, {"e": "f"}) + self.assertEqual(environ.aws_creds, {"g": "h"}) class TestEnvironmentVariables_resolve(TestCase): - def setUp(self): self.memory = 1024 self.timeout = 123 @@ -70,36 +72,32 @@ def setUp(self): "region": "some region", "key": "some key", "secret": "some other secret", - "sessiontoken": "some other token" + "sessiontoken": "some other token", } self.variables = { "variable1": 1, "variable2": "mystring", - "list_var": [1, 2, 3], "dict_var": {"a": {"b": "c"}}, "none_var": None, "true_var": True, "false_var": False, - # We should be able to override AWS_* values - "AWS_DEFAULT_REGION": "user-specified-region" + "AWS_DEFAULT_REGION": "user-specified-region", } self.shell_env = { # This variable is not defined in self.variables. 
So won't show up in resutlt "myothervar": "somevalue", - - "variable1": "variable1 value from shell_env" + "variable1": "variable1 value from shell_env", } self.override = { # This variable is not defined in self.variables. So won't show up in resutlt "unknown_var": "newvalue", - "variable1": "variable1 value from overrides", - "list_var": "list value coming from overrides" + "list_var": "list value coming from overrides", } def test_with_no_additional_variables(self): @@ -116,7 +114,7 @@ def test_with_no_additional_variables(self): "AWS_DEFAULT_REGION": "some region", "AWS_ACCESS_KEY_ID": "some key", "AWS_SECRET_ACCESS_KEY": "some other secret", - "AWS_SESSION_TOKEN": "some other token" + "AWS_SESSION_TOKEN": "some other token", } environ = EnvironmentVariables(self.memory, self.timeout, self.handler, aws_creds=self.aws_creds) @@ -124,7 +122,7 @@ def test_with_no_additional_variables(self): result = environ.resolve() # With no additional environment variables, resolve() should just return all AWS variables - self.assertEquals(result, expected) + self.assertEqual(result, expected) def test_with_only_default_values_for_variables(self): """ @@ -139,23 +137,20 @@ def test_with_only_default_values_for_variables(self): "AWS_REGION": "us-east-1", "AWS_ACCESS_KEY_ID": "defaultkey", "AWS_SECRET_ACCESS_KEY": "defaultsecret", - # This value is coming from user passed environment variable "AWS_DEFAULT_REGION": "user-specified-region", "variable1": "1", "variable2": "mystring", - "list_var": "", "dict_var": "", "none_var": "", "true_var": "true", - "false_var": "false" + "false_var": "false", } - environ = EnvironmentVariables(self.memory, self.timeout, self.handler, - variables=self.variables) + environ = EnvironmentVariables(self.memory, self.timeout, self.handler, variables=self.variables) - self.assertEquals(environ.resolve(), expected) + self.assertEqual(environ.resolve(), expected) def test_with_shell_env_value(self): """ @@ -170,26 +165,23 @@ def test_with_shell_env_value(self): "AWS_REGION": "us-east-1", "AWS_ACCESS_KEY_ID": "defaultkey", "AWS_SECRET_ACCESS_KEY": "defaultsecret", - # This value is coming from user passed environment variable "AWS_DEFAULT_REGION": "user-specified-region", - # Value coming from the shell "variable1": "variable1 value from shell_env", "variable2": "mystring", - "list_var": "", "dict_var": "", "none_var": "", "true_var": "true", - "false_var": "false" + "false_var": "false", } - environ = EnvironmentVariables(self.memory, self.timeout, self.handler, - variables=self.variables, - shell_env_values=self.shell_env) + environ = EnvironmentVariables( + self.memory, self.timeout, self.handler, variables=self.variables, shell_env_values=self.shell_env + ) - self.assertEquals(environ.resolve(), expected) + self.assertEqual(environ.resolve(), expected) def test_with_overrides_value(self): """ @@ -204,32 +196,31 @@ def test_with_overrides_value(self): "AWS_REGION": "us-east-1", "AWS_ACCESS_KEY_ID": "defaultkey", "AWS_SECRET_ACCESS_KEY": "defaultsecret", - # This value is coming from user passed environment variable "AWS_DEFAULT_REGION": "user-specified-region", - "variable2": "mystring", - # Value coming from the overrides "variable1": "variable1 value from overrides", "list_var": "list value coming from overrides", - "dict_var": "", "none_var": "", "true_var": "true", - "false_var": "false" + "false_var": "false", } - environ = EnvironmentVariables(self.memory, self.timeout, self.handler, - variables=self.variables, - shell_env_values=self.shell_env, - 
override_values=self.override) + environ = EnvironmentVariables( + self.memory, + self.timeout, + self.handler, + variables=self.variables, + shell_env_values=self.shell_env, + override_values=self.override, + ) - self.assertEquals(environ.resolve(), expected) + self.assertEqual(environ.resolve(), expected) class TestEnvironmentVariables_get_aws_variables(TestCase): - def setUp(self): self.memory = 1024 self.timeout = 123 @@ -239,7 +230,7 @@ def setUp(self): "region": "some region", "key": "some key", "secret": "some other secret", - "sessiontoken": "some other token" + "sessiontoken": "some other token", } def test_must_work_with_overridden_aws_creds(self): @@ -253,12 +244,12 @@ def test_must_work_with_overridden_aws_creds(self): "AWS_DEFAULT_REGION": "some region", "AWS_ACCESS_KEY_ID": "some key", "AWS_SECRET_ACCESS_KEY": "some other secret", - "AWS_SESSION_TOKEN": "some other token" + "AWS_SESSION_TOKEN": "some other token", } environ = EnvironmentVariables(self.memory, self.timeout, self.handler, aws_creds=self.aws_creds) - self.assertEquals(expected, environ._get_aws_variables()) + self.assertEqual(expected, environ._get_aws_variables()) def test_must_work_without_any_aws_creds(self): @@ -267,7 +258,6 @@ def test_must_work_without_any_aws_creds(self): "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "1024", "AWS_LAMBDA_FUNCTION_TIMEOUT": "123", "AWS_LAMBDA_FUNCTION_HANDLER": "handler", - # Default values assigned to these variables "AWS_REGION": "us-east-1", "AWS_DEFAULT_REGION": "us-east-1", @@ -276,64 +266,54 @@ def test_must_work_without_any_aws_creds(self): } environ = EnvironmentVariables(self.memory, self.timeout, self.handler) - self.assertEquals(expected, environ._get_aws_variables()) + self.assertEqual(expected, environ._get_aws_variables()) def test_must_work_with_partial_aws_creds(self): - creds = { - "region": "some other region", - "sessiontoken": "my awesome token" - } + creds = {"region": "some other region", "sessiontoken": "my awesome token"} expected = { "AWS_SAM_LOCAL": "true", "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "1024", "AWS_LAMBDA_FUNCTION_TIMEOUT": "123", "AWS_LAMBDA_FUNCTION_HANDLER": "handler", - # Values from the input creds "AWS_REGION": "some other region", "AWS_DEFAULT_REGION": "some other region", "AWS_SESSION_TOKEN": "my awesome token", - # These variables still get the default value "AWS_ACCESS_KEY_ID": "defaultkey", "AWS_SECRET_ACCESS_KEY": "defaultsecret", } environ = EnvironmentVariables(self.memory, self.timeout, self.handler, aws_creds=creds) - self.assertEquals(expected, environ._get_aws_variables()) + self.assertEqual(expected, environ._get_aws_variables()) class TestEnvironmentVariables_stringify_value(TestCase): - def setUp(self): self.environ = EnvironmentVariables(1024, 10, "handler") - @parameterized.expand([ - param([1, 2, 3]), - param({"a": {"b": "c"}}), - param(("this", "is", "tuple")), - param(None) - ]) + @parameterized.expand([param([1, 2, 3]), param({"a": {"b": "c"}}), param(("this", "is", "tuple")), param(None)]) def test_must_replace_non_scalar_with_blank_values(self, input): - self.assertEquals("", self.environ._stringify_value(input)) - - @parameterized.expand([ - (True, "true"), - (False, "false"), - (1234, "1234"), - (3.14, "3.14"), - (u"mystring\xe0", u"mystring\xe0"), - ("mystring", "mystring"), - ]) + self.assertEqual("", self.environ._stringify_value(input)) + + @parameterized.expand( + [ + (True, "true"), + (False, "false"), + (1234, "1234"), + (3.14, "3.14"), + (u"mystring\xe0", u"mystring\xe0"), + ("mystring", "mystring"), + ] + ) def 
test_must_stringify(self, input, expected): - self.assertEquals(expected, self.environ._stringify_value(input)) + self.assertEqual(expected, self.environ._stringify_value(input)) class TestEnvironmentVariables_add_lambda_event_body(TestCase): - def test_must_add_proper_variable(self): value = "foobar" @@ -341,4 +321,4 @@ def test_must_add_proper_variable(self): environ = EnvironmentVariables() environ.add_lambda_event_body(value) - self.assertEquals(environ.variables.get("AWS_LAMBDA_EVENT_BODY"), value) + self.assertEqual(environ.variables.get("AWS_LAMBDA_EVENT_BODY"), value) diff --git a/tests/unit/local/lambdafn/test_runtime.py b/tests/unit/local/lambdafn/test_runtime.py index f3079ecb10..688219cd61 100644 --- a/tests/unit/local/lambdafn/test_runtime.py +++ b/tests/unit/local/lambdafn/test_runtime.py @@ -54,11 +54,7 @@ def test_must_run_container_and_wait_for_logs(self, LambdaContainerMock): LambdaContainerMock.return_value = container - self.runtime.invoke(self.func_config, - event, - debug_context=debug_options, - stdout=stdout, - stderr=stderr) + self.runtime.invoke(self.func_config, event, debug_context=debug_options, stdout=stdout, stderr=stderr) # Verify if Lambda Event data is set self.env_vars.add_lambda_event_body.assert_called_with(event) @@ -70,9 +66,16 @@ def test_must_run_container_and_wait_for_logs(self, LambdaContainerMock): self.runtime._get_code_dir.assert_called_with(self.code_path) # Make sure the container is created with proper values - LambdaContainerMock.assert_called_with(self.lang, self.handler, code_dir, self.layers, lambda_image_mock, - memory_mb=self.DEFAULT_MEMORY, env_vars=self.env_var_value, - debug_options=debug_options) + LambdaContainerMock.assert_called_with( + self.lang, + self.handler, + code_dir, + self.layers, + lambda_image_mock, + memory_mb=self.DEFAULT_MEMORY, + env_vars=self.env_var_value, + debug_options=debug_options, + ) # Run the container and get results self.manager_mock.run.assert_called_with(container) @@ -106,11 +109,7 @@ def test_exception_from_run_must_trigger_cleanup(self, LambdaContainerMock): self.manager_mock.run.side_effect = ValueError("some exception") with self.assertRaises(ValueError): - self.runtime.invoke(self.func_config, - event, - debug_context=None, - stdout=stdout, - stderr=stderr) + self.runtime.invoke(self.func_config, event, debug_context=None, stdout=stdout, stderr=stderr) # Run the container and get results self.manager_mock.run.assert_called_with(container) @@ -147,11 +146,7 @@ def test_exception_from_wait_for_logs_must_trigger_cleanup(self, LambdaContainer container.wait_for_logs.side_effect = ValueError("some exception") with self.assertRaises(ValueError): - self.runtime.invoke(self.func_config, - event, - debug_context=debug_options, - stdout=stdout, - stderr=stderr) + self.runtime.invoke(self.func_config, event, debug_context=debug_options, stdout=stdout, stderr=stderr) # Run the container and get results self.manager_mock.run.assert_called_with(container) @@ -184,10 +179,7 @@ def test_keyboard_interrupt_must_not_raise(self, LambdaContainerMock): self.manager_mock.run.side_effect = KeyboardInterrupt("some exception") - self.runtime.invoke(self.func_config, - event, - stdout=stdout, - stderr=stderr) + self.runtime.invoke(self.func_config, event, stdout=stdout, stderr=stderr) # Run the container and get results self.manager_mock.run.assert_called_with(container) @@ -199,7 +191,6 @@ def test_keyboard_interrupt_must_not_raise(self, LambdaContainerMock): class TestLambdaRuntime_configure_interrupt(TestCase): - def 
setUp(self): self.name = "name" self.timeout = 123 @@ -218,7 +209,7 @@ def test_must_setup_timer(self, SignalMock, ThreadingMock): result = self.runtime._configure_interrupt(self.name, self.timeout, self.container, is_debugging) - self.assertEquals(result, timer_obj) + self.assertEqual(result, timer_obj) ThreadingMock.Timer.assert_called_with(self.timeout, ANY, ()) timer_obj.start.assert_called_with() @@ -277,18 +268,12 @@ def fake_timer(timeout, handler, args): class TestLambdaRuntime_get_code_dir(TestCase): - def setUp(self): self.manager_mock = Mock() self.layer_downloader = Mock() self.runtime = LambdaRuntime(self.manager_mock, self.layer_downloader) - @parameterized.expand([ - (".zip"), - (".ZIP"), - (".JAR"), - (".jar") - ]) + @parameterized.expand([(".zip"), (".ZIP"), (".JAR"), (".jar")]) @patch("samcli.local.lambdafn.runtime.os") @patch("samcli.local.lambdafn.runtime.shutil") @patch("samcli.local.lambdafn.runtime._unzip_file") @@ -300,7 +285,7 @@ def test_must_uncompress_zip_files(self, extension, unzip_file_mock, shutil_mock os_mock.path.isfile.return_value = True with self.runtime._get_code_dir(code_path) as result: - self.assertEquals(result, decompressed_dir) + self.assertEqual(result, decompressed_dir) unzip_file_mock.assert_called_with(code_path) os_mock.path.isfile.assert_called_with(code_path) @@ -321,7 +306,7 @@ def test_must_return_a_valid_file(self, unzip_file_mock, shutil_mock, os_mock): with self.runtime._get_code_dir(code_path) as result: # code path must be returned. No decompression - self.assertEquals(result, code_path) + self.assertEqual(result, code_path) unzip_file_mock.assert_not_called() # Unzip must not be called os_mock.path.isfile.assert_called_with(code_path) @@ -331,7 +316,6 @@ def test_must_return_a_valid_file(self, unzip_file_mock, shutil_mock, os_mock): class TestUnzipFile(TestCase): - @patch("samcli.local.lambdafn.runtime.tempfile") @patch("samcli.local.lambdafn.runtime.unzip") @patch("samcli.local.lambdafn.runtime.os") @@ -342,10 +326,10 @@ def test_must_unzip_not_posix(self, os_mock, unzip_mock, tempfile_mock): tempfile_mock.mkdtemp.return_value = tmpdir os_mock.path.realpath.return_value = realpath - os_mock.name = 'not-posix' + os_mock.name = "not-posix" output = _unzip_file(inputpath) - self.assertEquals(output, realpath) + self.assertEqual(output, realpath) tempfile_mock.mkdtemp.assert_called_with() unzip_mock.assert_called_with(inputpath, tmpdir) # unzip files to temporary directory @@ -362,10 +346,10 @@ def test_must_unzip_posix(self, os_mock, unzip_mock, tempfile_mock): tempfile_mock.mkdtemp.return_value = tmpdir os_mock.path.realpath.return_value = realpath - os_mock.name = 'posix' + os_mock.name = "posix" output = _unzip_file(inputpath) - self.assertEquals(output, realpath) + self.assertEqual(output, realpath) tempfile_mock.mkdtemp.assert_called_with() unzip_mock.assert_called_with(inputpath, tmpdir) # unzip files to temporary directory diff --git a/tests/unit/local/lambdafn/test_zip.py b/tests/unit/local/lambdafn/test_zip.py index ce0feea079..5f36b6023f 100644 --- a/tests/unit/local/lambdafn/test_zip.py +++ b/tests/unit/local/lambdafn/test_zip.py @@ -14,7 +14,7 @@ from samcli.local.lambdafn.zip import unzip, unzip_from_uri, _override_permissions # On Windows, permissions do not match 1:1 with permissions on Unix systems. 
-SKIP_UNZIP_PERMISSION_TESTS = platform.system() == 'Windows' +SKIP_UNZIP_PERMISSION_TESTS = platform.system() == "Windows" @skipIf(SKIP_UNZIP_PERMISSION_TESTS, "Skip UnZip Permissions tests in Windows only") @@ -23,7 +23,7 @@ class TestUnzipWithPermissions(TestCase): "folder1/1.txt": 0o644, "folder1/2.txt": 0o777, "folder2/subdir/1.txt": 0o666, - "folder2/subdir/2.txt": 0o400 + "folder2/subdir/2.txt": 0o400, } @parameterized.expand([param(True), param(False)]) @@ -44,15 +44,15 @@ def test_must_unzip(self, check_permissions): self.assertIn(key, self.files_with_permissions) if check_permissions: - self.assertEquals(expected_permission, - perm, - "File {} has wrong permission {}".format(key, perm)) + self.assertEqual( + expected_permission, perm, "File {} has wrong permission {}".format(key, perm) + ) @contextmanager def _create_zip(self, files_with_permissions, add_permissions=True): zipfilename = None - data = b'hello world' + data = b"hello world" try: zipfilename = NamedTemporaryFile(mode="w+b").name @@ -85,23 +85,18 @@ def _temp_dir(self): class TestUnzipFromUri(TestCase): - - @patch('samcli.local.lambdafn.zip.unzip') - @patch('samcli.local.lambdafn.zip.Path') - @patch('samcli.local.lambdafn.zip.progressbar') - @patch('samcli.local.lambdafn.zip.requests') - @patch('samcli.local.lambdafn.zip.open') - @patch('samcli.local.lambdafn.zip.os') - def test_successfully_unzip_from_uri(self, - os_patch, - open_patch, - requests_patch, - progressbar_patch, - path_patch, - unzip_patch): + @patch("samcli.local.lambdafn.zip.unzip") + @patch("samcli.local.lambdafn.zip.Path") + @patch("samcli.local.lambdafn.zip.progressbar") + @patch("samcli.local.lambdafn.zip.requests") + @patch("samcli.local.lambdafn.zip.open") + @patch("samcli.local.lambdafn.zip.os") + def test_successfully_unzip_from_uri( + self, os_patch, open_patch, requests_patch, progressbar_patch, path_patch, unzip_patch + ): get_request_mock = Mock() get_request_mock.headers = {"Content-length": "200"} - get_request_mock.iter_content.return_value = [b'data1'] + get_request_mock.iter_content.return_value = [b"data1"] requests_patch.get.return_value = get_request_mock file_mock = Mock() @@ -116,34 +111,30 @@ def test_successfully_unzip_from_uri(self, os_patch.environ.get.return_value = True - unzip_from_uri('uri', 'layer_zip_path', 'output_zip_dir', 'layer_arn') + unzip_from_uri("uri", "layer_zip_path", "output_zip_dir", "layer_arn") - requests_patch.get.assert_called_with('uri', stream=True, verify=True) + requests_patch.get.assert_called_with("uri", stream=True, verify=True) get_request_mock.iter_content.assert_called_with(chunk_size=None) - open_patch.assert_called_with('layer_zip_path', 'wb') - file_mock.write.assert_called_with(b'data1') + open_patch.assert_called_with("layer_zip_path", "wb") + file_mock.write.assert_called_with(b"data1") progressbar_mock.update.assert_called_with(5) - path_patch.assert_called_with('layer_zip_path') + path_patch.assert_called_with("layer_zip_path") path_mock.unlink.assert_called() - unzip_patch.assert_called_with('layer_zip_path', 'output_zip_dir', permission=0o700) - os_patch.environ.get.assert_called_with('AWS_CA_BUNDLE', True) - - @patch('samcli.local.lambdafn.zip.unzip') - @patch('samcli.local.lambdafn.zip.Path') - @patch('samcli.local.lambdafn.zip.progressbar') - @patch('samcli.local.lambdafn.zip.requests') - @patch('samcli.local.lambdafn.zip.open') - @patch('samcli.local.lambdafn.zip.os') - def test_not_unlink_file_when_file_doesnt_exist(self, - os_patch, - open_patch, - requests_patch, - 
progressbar_patch, - path_patch, - unzip_patch): + unzip_patch.assert_called_with("layer_zip_path", "output_zip_dir", permission=0o700) + os_patch.environ.get.assert_called_with("AWS_CA_BUNDLE", True) + + @patch("samcli.local.lambdafn.zip.unzip") + @patch("samcli.local.lambdafn.zip.Path") + @patch("samcli.local.lambdafn.zip.progressbar") + @patch("samcli.local.lambdafn.zip.requests") + @patch("samcli.local.lambdafn.zip.open") + @patch("samcli.local.lambdafn.zip.os") + def test_not_unlink_file_when_file_doesnt_exist( + self, os_patch, open_patch, requests_patch, progressbar_patch, path_patch, unzip_patch + ): get_request_mock = Mock() get_request_mock.headers = {"Content-length": "200"} - get_request_mock.iter_content.return_value = [b'data1'] + get_request_mock.iter_content.return_value = [b"data1"] requests_patch.get.return_value = get_request_mock file_mock = Mock() @@ -158,33 +149,30 @@ def test_not_unlink_file_when_file_doesnt_exist(self, os_patch.environ.get.return_value = True - unzip_from_uri('uri', 'layer_zip_path', 'output_zip_dir', 'layer_arn') + unzip_from_uri("uri", "layer_zip_path", "output_zip_dir", "layer_arn") - requests_patch.get.assert_called_with('uri', stream=True, verify=True) + requests_patch.get.assert_called_with("uri", stream=True, verify=True) get_request_mock.iter_content.assert_called_with(chunk_size=None) - open_patch.assert_called_with('layer_zip_path', 'wb') - file_mock.write.assert_called_with(b'data1') + open_patch.assert_called_with("layer_zip_path", "wb") + file_mock.write.assert_called_with(b"data1") progressbar_mock.update.assert_called_with(5) - path_patch.assert_called_with('layer_zip_path') + path_patch.assert_called_with("layer_zip_path") path_mock.unlink.assert_not_called() - unzip_patch.assert_called_with('layer_zip_path', 'output_zip_dir', permission=0o700) - os_patch.environ.get.assert_called_with('AWS_CA_BUNDLE', True) - - @patch('samcli.local.lambdafn.zip.unzip') - @patch('samcli.local.lambdafn.zip.Path') - @patch('samcli.local.lambdafn.zip.progressbar') - @patch('samcli.local.lambdafn.zip.requests') - @patch('samcli.local.lambdafn.zip.open') - @patch('samcli.local.lambdafn.zip.os') - def test_unzip_from_uri_reads_AWS_CA_BUNDLE_env_var(self, os_patch, - open_patch, - requests_patch, - progressbar_patch, - path_patch, - unzip_patch): + unzip_patch.assert_called_with("layer_zip_path", "output_zip_dir", permission=0o700) + os_patch.environ.get.assert_called_with("AWS_CA_BUNDLE", True) + + @patch("samcli.local.lambdafn.zip.unzip") + @patch("samcli.local.lambdafn.zip.Path") + @patch("samcli.local.lambdafn.zip.progressbar") + @patch("samcli.local.lambdafn.zip.requests") + @patch("samcli.local.lambdafn.zip.open") + @patch("samcli.local.lambdafn.zip.os") + def test_unzip_from_uri_reads_AWS_CA_BUNDLE_env_var( + self, os_patch, open_patch, requests_patch, progressbar_patch, path_patch, unzip_patch + ): get_request_mock = Mock() get_request_mock.headers = {"Content-length": "200"} - get_request_mock.iter_content.return_value = [b'data1'] + get_request_mock.iter_content.return_value = [b"data1"] requests_patch.get.return_value = get_request_mock file_mock = Mock() @@ -197,30 +185,29 @@ def test_unzip_from_uri_reads_AWS_CA_BUNDLE_env_var(self, os_patch, path_mock.exists.return_value = True path_patch.return_value = path_mock - os_patch.environ.get.return_value = '/some/path/on/the/system' + os_patch.environ.get.return_value = "/some/path/on/the/system" - unzip_from_uri('uri', 'layer_zip_path', 'output_zip_dir', 'layer_arn') + unzip_from_uri("uri", 
"layer_zip_path", "output_zip_dir", "layer_arn") - requests_patch.get.assert_called_with('uri', stream=True, verify='/some/path/on/the/system') + requests_patch.get.assert_called_with("uri", stream=True, verify="/some/path/on/the/system") get_request_mock.iter_content.assert_called_with(chunk_size=None) - open_patch.assert_called_with('layer_zip_path', 'wb') - file_mock.write.assert_called_with(b'data1') + open_patch.assert_called_with("layer_zip_path", "wb") + file_mock.write.assert_called_with(b"data1") progressbar_mock.update.assert_called_with(5) - path_patch.assert_called_with('layer_zip_path') + path_patch.assert_called_with("layer_zip_path") path_mock.unlink.assert_called() - unzip_patch.assert_called_with('layer_zip_path', 'output_zip_dir', permission=0o700) - os_patch.environ.get.assert_called_with('AWS_CA_BUNDLE', True) + unzip_patch.assert_called_with("layer_zip_path", "output_zip_dir", permission=0o700) + os_patch.environ.get.assert_called_with("AWS_CA_BUNDLE", True) class TestOverridePermissions(TestCase): - - @patch('samcli.local.lambdafn.zip.os') + @patch("samcli.local.lambdafn.zip.os") def test_must_override_permissions(self, os_patch): _override_permissions(path="./home", permission=0o700) os_patch.chmod.assert_called_once_with("./home", 0o700) - @patch('samcli.local.lambdafn.zip.os') + @patch("samcli.local.lambdafn.zip.os") def test_must_not_override_permissions(self, os_patch): _override_permissions(path="./home", permission=None) diff --git a/tests/unit/local/layers/test_download_layers.py b/tests/unit/local/layers/test_download_layers.py index 8abe57dcb4..288252bfbe 100644 --- a/tests/unit/local/layers/test_download_layers.py +++ b/tests/unit/local/layers/test_download_layers.py @@ -2,6 +2,7 @@ from mock import patch, Mock, call from botocore.exceptions import NoCredentialsError, ClientError + try: from pathlib import Path except ImportError: @@ -13,39 +14,38 @@ class TestDownloadLayers(TestCase): - @patch("samcli.local.layers.layer_downloader.LayerDownloader._create_cache") def test_initialization(self, create_cache_patch): create_cache_patch.return_value = None download_layers = LayerDownloader("/some/path", ".") - self.assertEquals(download_layers.layer_cache, "/some/path") + self.assertEqual(download_layers.layer_cache, "/some/path") create_cache_patch.assert_called_with("/some/path") @patch("samcli.local.layers.layer_downloader.LayerDownloader.download") def test_download_all_without_force(self, download_patch): - download_patch.side_effect = ['/home/layer1', '/home/layer2'] + download_patch.side_effect = ["/home/layer1", "/home/layer2"] download_layers = LayerDownloader("/home", ".") - acutal_results = download_layers.download_all(['layer1', 'layer2']) + acutal_results = download_layers.download_all(["layer1", "layer2"]) - self.assertEquals(acutal_results, ['/home/layer1', '/home/layer2']) + self.assertEqual(acutal_results, ["/home/layer1", "/home/layer2"]) - download_patch.assert_has_calls([call('layer1', False), call("layer2", False)]) + download_patch.assert_has_calls([call("layer1", False), call("layer2", False)]) @patch("samcli.local.layers.layer_downloader.LayerDownloader.download") def test_download_all_with_force(self, download_patch): - download_patch.side_effect = ['/home/layer1', '/home/layer2'] + download_patch.side_effect = ["/home/layer1", "/home/layer2"] download_layers = LayerDownloader("/home", ".") - acutal_results = download_layers.download_all(['layer1', 'layer2'], force=True) + acutal_results = download_layers.download_all(["layer1", 
"layer2"], force=True) - self.assertEquals(acutal_results, ['/home/layer1', '/home/layer2']) + self.assertEqual(acutal_results, ["/home/layer1", "/home/layer2"]) - download_patch.assert_has_calls([call('layer1', True), call("layer2", True)]) + download_patch.assert_has_calls([call("layer1", True), call("layer2", True)]) @patch("samcli.local.layers.layer_downloader.LayerDownloader._create_cache") @patch("samcli.local.layers.layer_downloader.LayerDownloader._is_layer_cached") @@ -60,7 +60,7 @@ def test_download_layer_that_is_cached(self, is_layer_cached_patch, create_cache actual = download_layers.download(layer_mock) - self.assertEquals(actual.codeuri, str(Path('/home/layer1').resolve())) + self.assertEqual(actual.codeuri, str(Path("/home/layer1").resolve())) create_cache_patch.assert_called_once_with("/home") @@ -75,11 +75,11 @@ def test_download_layer_that_was_template_defined(self, create_cache_patch, reso layer_mock.name = "layer1" layer_mock.codeuri = "/some/custom/path" - resolve_code_path_patch.return_value = './some/custom/path' + resolve_code_path_patch.return_value = "./some/custom/path" actual = download_layers.download(layer_mock) - self.assertEquals(actual.codeuri, './some/custom/path') + self.assertEqual(actual.codeuri, "./some/custom/path") create_cache_patch.assert_not_called() resolve_code_path_patch.assert_called_once_with(".", "/some/custom/path") @@ -88,8 +88,9 @@ def test_download_layer_that_was_template_defined(self, create_cache_patch, reso @patch("samcli.local.layers.layer_downloader.LayerDownloader._fetch_layer_uri") @patch("samcli.local.layers.layer_downloader.LayerDownloader._create_cache") @patch("samcli.local.layers.layer_downloader.LayerDownloader._is_layer_cached") - def test_download_layer(self, is_layer_cached_patch, create_cache_patch, - fetch_layer_uri_patch, unzip_from_uri_patch): + def test_download_layer( + self, is_layer_cached_patch, create_cache_patch, fetch_layer_uri_patch, unzip_from_uri_patch + ): is_layer_cached_patch.return_value = False download_layers = LayerDownloader("/home", ".") @@ -104,14 +105,16 @@ def test_download_layer(self, is_layer_cached_patch, create_cache_patch, actual = download_layers.download(layer_mock) - self.assertEquals(actual.codeuri, str(Path("/home/layer1").resolve())) + self.assertEqual(actual.codeuri, str(Path("/home/layer1").resolve())) create_cache_patch.assert_called_once_with("/home") fetch_layer_uri_patch.assert_called_once_with(layer_mock) - unzip_from_uri_patch.assert_called_once_with("layer/uri", - str(Path('/home/layer1.zip').resolve()), - unzip_output_dir=str(Path('/home/layer1').resolve()), - progressbar_label="Downloading arn:layer:layer1") + unzip_from_uri_patch.assert_called_once_with( + "layer/uri", + str(Path("/home/layer1.zip").resolve()), + unzip_output_dir=str(Path("/home/layer1").resolve()), + progressbar_label="Downloading arn:layer:layer1", + ) def test_layer_is_cached(self): download_layers = LayerDownloader("/", ".") @@ -141,7 +144,6 @@ def test_create_cache(self, path_patch): class TestLayerDownloader_fetch_layer_uri(TestCase): - def test_fetch_layer_uri_is_successful(self): lambda_client_mock = Mock() lambda_client_mock.get_layer_version.return_value = {"Content": {"Location": "some/uri"}} @@ -152,7 +154,7 @@ def test_fetch_layer_uri_is_successful(self): layer.version = 1 actual_uri = download_layers._fetch_layer_uri(layer=layer) - self.assertEquals(actual_uri, "some/uri") + self.assertEqual(actual_uri, "some/uri") def test_fetch_layer_uri_fails_with_no_creds(self): lambda_client_mock = 
Mock() @@ -169,7 +171,8 @@ def test_fetch_layer_uri_fails_with_no_creds(self): def test_fetch_layer_uri_fails_with_AccessDeniedException(self): lambda_client_mock = Mock() lambda_client_mock.get_layer_version.side_effect = ClientError( - error_response={'Error': {'Code': 'AccessDeniedException'}}, operation_name="lambda") + error_response={"Error": {"Code": "AccessDeniedException"}}, operation_name="lambda" + ) download_layers = LayerDownloader("/", ".", lambda_client_mock) layer = Mock() @@ -182,7 +185,8 @@ def test_fetch_layer_uri_fails_with_AccessDeniedException(self): def test_fetch_layer_uri_fails_with_ResourceNotFoundException(self): lambda_client_mock = Mock() lambda_client_mock.get_layer_version.side_effect = ClientError( - error_response={'Error': {'Code': 'ResourceNotFoundException'}}, operation_name="lambda") + error_response={"Error": {"Code": "ResourceNotFoundException"}}, operation_name="lambda" + ) download_layers = LayerDownloader("/", ".", lambda_client_mock) layer = Mock() @@ -195,7 +199,8 @@ def test_fetch_layer_uri_fails_with_ResourceNotFoundException(self): def test_fetch_layer_uri_re_raises_client_error(self): lambda_client_mock = Mock() lambda_client_mock.get_layer_version.side_effect = ClientError( - error_response={'Error': {'Code': 'Unknown'}}, operation_name="lambda") + error_response={"Error": {"Code": "Unknown"}}, operation_name="lambda" + ) download_layers = LayerDownloader("/", ".", lambda_client_mock) layer = Mock() diff --git a/tests/unit/local/services/test_base_local_service.py b/tests/unit/local/services/test_base_local_service.py index a37c5e21fb..adb53989fc 100644 --- a/tests/unit/local/services/test_base_local_service.py +++ b/tests/unit/local/services/test_base_local_service.py @@ -7,17 +7,16 @@ class TestLocalHostRunner(TestCase): - def test_runtime_error_raised_when_app_not_created(self): is_debugging = False - service = BaseLocalService(is_debugging=is_debugging, port=3000, host='127.0.0.1') + service = BaseLocalService(is_debugging=is_debugging, port=3000, host="127.0.0.1") with self.assertRaises(RuntimeError): service.run() def test_run_starts_service_multithreaded(self): is_debugging = False # multithreaded - service = BaseLocalService(is_debugging=is_debugging, port=3000, host='127.0.0.1') + service = BaseLocalService(is_debugging=is_debugging, port=3000, host="127.0.0.1") service._app = Mock() app_run_mock = Mock() @@ -25,11 +24,11 @@ def test_run_starts_service_multithreaded(self): service.run() - app_run_mock.assert_called_once_with(threaded=True, host='127.0.0.1', port=3000) + app_run_mock.assert_called_once_with(threaded=True, host="127.0.0.1", port=3000) def test_run_starts_service_singlethreaded(self): is_debugging = True # singlethreaded - service = BaseLocalService(is_debugging=is_debugging, port=3000, host='127.0.0.1') + service = BaseLocalService(is_debugging=is_debugging, port=3000, host="127.0.0.1") service._app = Mock() app_run_mock = Mock() @@ -37,9 +36,9 @@ def test_run_starts_service_singlethreaded(self): service.run() - app_run_mock.assert_called_once_with(threaded=False, host='127.0.0.1', port=3000) + app_run_mock.assert_called_once_with(threaded=False, host="127.0.0.1", port=3000) - @patch('samcli.local.services.base_local_service.Response') + @patch("samcli.local.services.base_local_service.Response") def test_service_response(self, flask_response_patch): flask_response_mock = Mock() @@ -53,78 +52,64 @@ def test_service_response(self, flask_response_patch): flask_response_patch.assert_called_once_with("this is the body") - 
self.assertEquals(actual_response.status_code, 200) - self.assertEquals(actual_response.headers, {"Content-Type": "application/json"}) + self.assertEqual(actual_response.status_code, 200) + self.assertEqual(actual_response.headers, {"Content-Type": "application/json"}) def test_create_returns_not_implemented(self): is_debugging = False - service = BaseLocalService(is_debugging=is_debugging, port=3000, host='127.0.0.1') + service = BaseLocalService(is_debugging=is_debugging, port=3000, host="127.0.0.1") with self.assertRaises(NotImplementedError): service.create() class TestLambdaOutputParser(TestCase): - - @parameterized.expand([ - param( - "with both logs and response", - b'this\nis\nlog\ndata\n{"a": "b"}', b'this\nis\nlog\ndata', '{"a": "b"}' - ), - param( - "with response as string", - b"logs\nresponse", b"logs", "response" - ), - param( - "with response only", - b'{"a": "b"}', None, '{"a": "b"}' - ), - param( - "with one new line and response", - b'\n{"a": "b"}', b'', '{"a": "b"}' - ), - param( - "with response only as string", - b'this is the response line', None, 'this is the response line' - ), - param( - "with whitespaces", - b'log\ndata\n{"a": "b"} \n\n\n', b"log\ndata", '{"a": "b"}' - ), - param( - "with empty data", - b'', None, '' - ), - param( - "with just new lines", - b'\n\n', None, '' - ), - param( - "with no data but with whitespaces", - b'\n \n \n', b'\n ', '' # Log data with whitespaces will be in the output unchanged - ) - ]) + @parameterized.expand( + [ + param( + "with both logs and response", b'this\nis\nlog\ndata\n{"a": "b"}', b"this\nis\nlog\ndata", '{"a": "b"}' + ), + param("with response as string", b"logs\nresponse", b"logs", "response"), + param("with response only", b'{"a": "b"}', None, '{"a": "b"}'), + param("with one new line and response", b'\n{"a": "b"}', b"", '{"a": "b"}'), + param("with response only as string", b"this is the response line", None, "this is the response line"), + param("with whitespaces", b'log\ndata\n{"a": "b"} \n\n\n', b"log\ndata", '{"a": "b"}'), + param("with empty data", b"", None, ""), + param("with just new lines", b"\n\n", None, ""), + param( + "with no data but with whitespaces", + b"\n \n \n", + b"\n ", + "", # Log data with whitespaces will be in the output unchanged + ), + ] + ) def test_get_lambda_output_extracts_response(self, test_case_name, stdout_data, expected_logs, expected_response): stdout = Mock() stdout.getvalue.return_value = stdout_data response, logs, is_customer_error = LambdaOutputParser.get_lambda_output(stdout) - self.assertEquals(logs, expected_logs) - self.assertEquals(response, expected_response) + self.assertEqual(logs, expected_logs) + self.assertEqual(response, expected_response) self.assertFalse(is_customer_error) - @parameterized.expand([ - param('{"errorMessage": "has a message", "stackTrace": "has a stacktrace", "errorType": "has a type"}', - True), - param('{"error message": "has a message", "stack Trace": "has a stacktrace", "error Type": "has a type"}', - False), - param('{"errorMessage": "has a message", "stackTrace": "has a stacktrace", "errorType": "has a type", ' - '"hasextrakey": "value"}', - False), - param("notat:asdfasdf", - False), - param("errorMessage and stackTrace and errorType are in the string", - False) - ]) + @parameterized.expand( + [ + param( + '{"errorMessage": "has a message", "stackTrace": "has a stacktrace", "errorType": "has a type"}', True + ), + param( + '{"error message": "has a message", "stack Trace": "has a stacktrace", "error Type": "has a type"}', + False, + ), + 
param( + '{"errorMessage": "has a message", "stackTrace": "has a stacktrace", "errorType": "has a type", ' + '"hasextrakey": "value"}', + False, + ), + param("notat:asdfasdf", False), + param("errorMessage and stackTrace and errorType are in the string", False), + ] + ) def test_is_lambda_error_response(self, input, exected_result): - self.assertEquals(LambdaOutputParser.is_lambda_error_response(input), exected_result) + self.assertEqual(LambdaOutputParser.is_lambda_error_response(input), exected_result) diff --git a/tests/unit/test_yamlhelper.py b/tests/unit/test_yamlhelper.py index 32adb6c72a..c8f0aa46d5 100644 --- a/tests/unit/test_yamlhelper.py +++ b/tests/unit/test_yamlhelper.py @@ -1,6 +1,16 @@ -""" -Helper to be able to parse/dump YAML files -""" +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0e +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +from botocore.compat import OrderedDict from unittest import TestCase from samcli.yamlhelper import yaml_parse, yaml_dump @@ -20,61 +30,123 @@ class TestYaml(TestCase): parsed_yaml_dict = { "Resource": { - "Key1": { - "Ref": "Something" - }, - "Key2": { - "Fn::GetAtt": ["Another", "Arn"] - }, - "Key3": { - "Fn::FooBar": [ - {"Fn::Baz": "YetAnother"}, - "hello" - ] - }, - "Key4": { - "Fn::SomeTag": { - "a": "1" - } - }, - "Key5": { - "Fn::GetAtt": ["OneMore", "Outputs.Arn"] - }, - "Key6": { - "Condition": "OtherCondition" - } + "Key1": {"Ref": "Something"}, + "Key2": {"Fn::GetAtt": ["Another", "Arn"]}, + "Key3": {"Fn::FooBar": [{"Fn::Baz": "YetAnother"}, "hello"]}, + "Key4": {"Fn::SomeTag": {"a": "1"}}, + "Key5": {"Fn::GetAtt": ["OneMore", "Outputs.Arn"]}, + "Key6": {"Condition": "OtherCondition"}, } } def test_yaml_with_tags(self): output = yaml_parse(self.yaml_with_tags) - self.assertEquals(self.parsed_yaml_dict, output) + self.assertEqual(self.parsed_yaml_dict, output) # Make sure formatter and parser work well with each other formatted_str = yaml_dump(output) output_again = yaml_parse(formatted_str) - self.assertEquals(output, output_again) + self.assertEqual(output, output_again) def test_yaml_getatt(self): - # This is an invalid syntax for !GetAtt. But make sure the code does not crash when we encouter this syntax - # Let CloudFormation interpret this value at runtime - input = """ + # This is an invalid syntax for !GetAtt. But make sure the code does + # not crash when we encounter this syntax. 
Let CloudFormation + # interpret this value at runtime + yaml_input = """ Resource: Key: !GetAtt ["a", "b"] """ - output = { - "Resource": { - "Key": { - "Fn::GetAtt": ["a", "b"] - } - } - } + output = {"Resource": {"Key": {"Fn::GetAtt": ["a", "b"]}}} - actual_output = yaml_parse(input) - self.assertEquals(actual_output, output) + actual_output = yaml_parse(yaml_input) + self.assertEqual(actual_output, output) def test_parse_json_with_tabs(self): template = '{\n\t"foo": "bar"\n}' output = yaml_parse(template) - self.assertEqual(output, {'foo': 'bar'}) + self.assertEqual(output, {"foo": "bar"}) + + def test_parse_json_preserve_elements_order(self): + input_template = """ + { + "B_Resource": { + "Key2": { + "Name": "name2" + }, + "Key1": { + "Name": "name1" + } + }, + "A_Resource": { + "Key2": { + "Name": "name2" + }, + "Key1": { + "Name": "name1" + } + } + } + """ + expected_dict = OrderedDict( + [ + ("B_Resource", OrderedDict([("Key2", {"Name": "name2"}), ("Key1", {"Name": "name1"})])), + ("A_Resource", OrderedDict([("Key2", {"Name": "name2"}), ("Key1", {"Name": "name1"})])), + ] + ) + output_dict = yaml_parse(input_template) + self.assertEqual(expected_dict, output_dict) + + def test_parse_yaml_preserve_elements_order(self): + input_template = ( + "B_Resource:\n" + " Key2:\n" + " Name: name2\n" + " Key1:\n" + " Name: name1\n" + "A_Resource:\n" + " Key2:\n" + " Name: name2\n" + " Key1:\n" + " Name: name1\n" + ) + output_dict = yaml_parse(input_template) + expected_dict = OrderedDict( + [ + ("B_Resource", OrderedDict([("Key2", {"Name": "name2"}), ("Key1", {"Name": "name1"})])), + ("A_Resource", OrderedDict([("Key2", {"Name": "name2"}), ("Key1", {"Name": "name1"})])), + ] + ) + self.assertEqual(expected_dict, output_dict) + + output_template = yaml_dump(output_dict) + self.assertEqual(input_template, output_template) + + def test_yaml_merge_tag(self): + test_yaml = """ + base: &base + property: value + test: + <<: *base + """ + output = yaml_parse(test_yaml) + self.assertTrue(isinstance(output, OrderedDict)) + self.assertEqual(output.get("test").get("property"), "value") + + def test_unroll_yaml_anchors(self): + properties = {"Foo": "bar", "Spam": "eggs"} + template = {"Resources": {"Resource1": {"Properties": properties}, "Resource2": {"Properties": properties}}} + + expected = ( + "Resources:\n" + " Resource1:\n" + " Properties:\n" + " Foo: bar\n" + " Spam: eggs\n" + " Resource2:\n" + " Properties:\n" + " Foo: bar\n" + " Spam: eggs\n" + ) + actual = yaml_dump(template) + self.assertEqual(actual, expected)
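
Note (illustration, not part of the patch): the new yamlhelper tests above exercise three behaviours — CloudFormation short-form tags (`!Ref`, `!GetAtt`, `!Condition`, `!Whatever`) are expanded to their long form, mapping order is preserved and `<<` merge keys are resolved, and `yaml_dump` writes repeated structures out in full instead of emitting YAML anchors/aliases. The standalone PyYAML sketch below shows one way those behaviours can be obtained; it is only a sketch of the idea, not the `samcli.yamlhelper` implementation, and the `_OrderedLoader` / `_NoAliasDumper` / `_intrinsic_constructor` names are invented for the example. It assumes PyYAML is installed.

import yaml
from collections import OrderedDict


def _intrinsic_constructor(loader, tag_suffix, node):
    # "!Ref X" -> {"Ref": "X"}, '!GetAtt ["a", "b"]' -> {"Fn::GetAtt": ["a", "b"]}, etc.
    key = tag_suffix if tag_suffix in ("Ref", "Condition") else "Fn::" + tag_suffix
    if isinstance(node, yaml.ScalarNode):
        value = loader.construct_scalar(node)
    elif isinstance(node, yaml.SequenceNode):
        value = loader.construct_sequence(node, deep=True)
    else:
        value = loader.construct_mapping(node, deep=True)
    return {key: value}


def _ordered_mapping_constructor(loader, node):
    # flatten_mapping() resolves "<<" merge keys, the behaviour test_yaml_merge_tag checks
    loader.flatten_mapping(node)
    return OrderedDict(loader.construct_pairs(node))


class _OrderedLoader(yaml.SafeLoader):
    pass


_OrderedLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _ordered_mapping_constructor)
_OrderedLoader.add_multi_constructor("!", _intrinsic_constructor)


class _NoAliasDumper(yaml.Dumper):
    def ignore_aliases(self, data):
        # Never emit "&anchor"/"*alias"; repeated dicts are unrolled, as in test_unroll_yaml_anchors
        return True


template = yaml.load('Key: !GetAtt ["a", "b"]\nOther: !Ref Something\n', Loader=_OrderedLoader)
print(template)  # -> OrderedDict([('Key', {'Fn::GetAtt': ['a', 'b']}), ('Other', {'Ref': 'Something'})])
print(yaml.dump(dict(template), Dumper=_NoAliasDumper, default_flow_style=False))

Dumping through a plain dict here keeps the example short; the real helper registers its own representer so OrderedDict values round-trip directly, which is what test_parse_yaml_preserve_elements_order relies on.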