diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 199a7f494..1d510aa63 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -1,7 +1,7 @@ ## Context - + ## Expected Behavior diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 19115637a..dada7a22a 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -20,7 +20,7 @@ Before you submit this PR, please make sure that you meet these criteria: * Did you **make sure this code actually works on Lambda**, as well as locally? -* Did you test this code with all of **Python 3.6**, **Python 3.7** and **Python 3.8** ? +* Did you test this code with all of **Python 3.7**, **Python 3.8** and **Python 3.9** ? * Does this commit ONLY relate to the issue at hand and have your linter shit all over the code? diff --git a/.github/workflows/cd.yaml b/.github/workflows/cd.yaml new file mode 100644 index 000000000..e433735c1 --- /dev/null +++ b/.github/workflows/cd.yaml @@ -0,0 +1,29 @@ +name: CD + +on: # yamllint disable-line rule:truthy + # From documentation: + # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onpushbranchestagsbranches-ignoretags-ignore + # on.push.tags: If you define only tags/tags-ignore or only branches/branches-ignore, the workflow won't run for events affecting the undefined Git ref. + # + # This workflow will only run when a tag matching the criteria is pushed + push: + tags: ["v?[0-9]+.[0-9]+.[0-9]+"] + +jobs: + publish: + runs-on: ubuntu-20.04 + steps: + - name: Checkout Code Repository + uses: actions/checkout@v2 + - name: Set up Python 3.9 + uses: actions/setup-python@v2 + with: + python-version: 3.9 + - name: Install `pypa/build` + run: python -m pip install build + - name: Build sdist and wheel + run: python -m build --sdist --wheel --outdir ./dist/ + - name: Publish package to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 000000000..708f93098 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,53 @@ +name: CI + +on: # yamllint disable-line rule:truthy + pull_request: + branches: ["master"] + push: + branches: ["master"] + +jobs: + test: + runs-on: ubuntu-20.04 + strategy: + matrix: + python: [3.7, 3.8, 3.9] + steps: + - name: Checkout Code Repository + uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip${{ matrix.python }}-${{ hashFiles('Pipfile') }} + restore-keys: ${{ runner.os }}-pip${{ matrix.python }}- + - name: make sure we have version tags + run: git fetch --unshallow --tags + - name: Setup Virtualenv + run: python -m venv .venv + - name: Install + run: source .venv/bin/activate && make requirements && python setup.py install + - name: Lint + run: source .venv/bin/activate && make flake black-check isort-check + - name: Test + run: source .venv/bin/activate && make tests + - name: Upload Coverage + run: .venv/bin/coveralls --service=github + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COVERALLS_FLAG_NAME: py${{ matrix.python }} + COVERALLS_PARALLEL: true + + coverage: + needs: test + runs-on: ubuntu-20.04 + steps: + - name: Set up Python + uses: actions/setup-python@v2 + - name: Finalise Coverage + run: pip3 install --upgrade coveralls && 
coveralls --service=github --finish + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index a198169bc..3ba494941 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ *~ *.zip +Pipfile.lock # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index e605fe0a9..000000000 --- a/.travis.yml +++ /dev/null @@ -1,31 +0,0 @@ -language: python -python: - - "3.6" - - "3.7" - - "3.8" -dist: xenial -addons: - apt: - packages: - - cmake -# command to install dependencies -cache: - - pip -install: - - make requirements - - python setup.py install - -script: - - make flake - - make black-check - - make test-docs - - make test-handler - - make test-middleware - - make test-placebo - - make test-async - - make test-general - -after_success: - coveralls -notifications: - slack: zappateam:TTJ0mfHunDK0IBweKkEXjGpR diff --git a/CHANGELOG.md b/CHANGELOG.md index d3658d12f..b2a949025 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,14 +1,42 @@ # Zappa Changelog -## 0.53.0 +## 0.55.0 +* Fix "cd workflow fired event outside of the create tags event" (#1152) +* Remove 'futures' package requirement (#826,#808) +* Remove function invoke command does not need quotes around function (#900) +* Add python_requires to setup.py (#1111) +* Remove python 3.6 support (#1151) +* Update handler to be global if INSTANTIATE_LAMBDA_HANDLER_ON_IMPORT=True (#1096) +* Fix async invocation in Python 3 (#1006) +* Drastically reduce cold start times by calling LambdaHandler externally (#982) +* Support Newest Manylinux Version for dependencies (#1083) +* Decode zappa invoke output regardless of --no-color option (#1069) +* Convert project to using 'pipenv' to manage dependencies (#1077) +* Ensure unique scheduled event names (#1080) +* Check isort in CI (#1078) +* Use 'black' everywhere (#1076) +* Update setup.py (#1050) + +## 0.54.2 +* Update documentation to reflect python 3.9 support (#1137) + +## 0.54.1 +* Increase Lambda client read timeout to 15m (#1065) +* Unpin `Werkzeug` from `v0.x` (#1067) + +## 0.54.0 +* Pin troposphere version and update to 3.x (#1029) +* Relax stage name restrictions when not using apigateway (#993) +* Wait for lambda to become active during deploy/update (#992) +* add support for Python 3.9 (#1026) +## 0.53.0 * Deprecated ACME v1 for Lets Encrypt * Global black formatting * Update & deploy using a docker container * See [blog post](https://ianwhitestone.work/zappa-serverless-docker/) for more details on the current functionality and how it works * See [PR](https://github.com/zappa/Zappa/pull/967) or [original issue](https://github.com/Miserlou/Zappa/issues/2188) for discussion around what is currently supported & next steps - ## 0.52.0 * Remove dateutil version restriction * Fix failed downloads of wheel packages with non-alphanumeric characters diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..d19a4b960 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,48 @@ +# Contributing to Zappa +We love your input! We want to make contributing to this project as easy and transparent as possible, whether it's: + +- Reporting a bug +- Discussing the current state of the code +- Submitting a fix +- Proposing new features +- Becoming a maintainer + +## We Develop with Github +We use github to host code, to track issues and feature requests, as well as accept pull requests. +We actively welcome your pull requests: + +1. Fork the repo and create your branch from `master`. +2. 
If you've added code that should be tested, add tests. +3. If you've changed APIs, update the documentation. +4. Ensure the test suite passes. +5. Issue that pull request! + +## Any contributions you make will be under the MIT Software License +In short, when you submit code changes, your submissions are understood to be under the same [MIT License](http://choosealicense.com/licenses/mit/) that covers the project. Feel free to contact the maintainers if that's a concern. + +## Report bugs using Github's [issues](https://github.com/zappa/Zappa/issues) +We use GitHub issues to track public bugs. Report a bug by [opening a new issue](https://github.com/zappa/Zappa/issues/new); it's that easy! + +## Write bug reports with detail, background, and sample code +Here are two great examples: +- [http://stackoverflow.com/q/12488905/180626](http://stackoverflow.com/q/12488905/180626) +- [http://www.openradar.me/11905408](http://www.openradar.me/11905408) + +**Great Bug Reports** tend to have: + +- A quick summary and/or background +- Steps to reproduce + - Be specific! + - Give sample code if you can +- What you expected would happen +- What actually happens +- Notes (possibly including why you think this might be happening, or stuff you tried that didn't work) + +People *love* thorough bug reports, not even kidding. + +## License +By contributing, you agree that your contributions will be licensed under its MIT License. + +## References +This document was adapted from the basic template for contributing guideliens by @briandk +[Contributing to Transcriptase](https://gist.github.com/briandk/3d2e8b3ec8daf5a27a62). The template itself is based on the open-source contribution guidelines for [Facebook's Draft](https://github.com/facebook/draft-js/blob/a9316a723f9e918afde44dea68b5f9f39b7d9b00/CONTRIBUTING.md) \ No newline at end of file diff --git a/Makefile b/Makefile index 290c30eab..4dd514337 100644 --- a/Makefile +++ b/Makefile @@ -27,9 +27,9 @@ clean: rm -f .coverage requirements: - ./requirements.sh - pip install -r requirements.txt - pip install -r test_requirements.txt + pip install pipenv>2021.11.15 + pipenv lock + pipenv sync --dev build: clean requirements-install python setup.py sdist @@ -39,18 +39,21 @@ mypy: mypy --show-error-codes --pretty --ignore-missing-imports --strict zappa tests black: - black zappa tests + black --line-length 127 . black-check: - black zappa tests --check + black --line-length 127 . --check @echo "If this fails, simply run: make black" isort: - isort --recursive . + isort . --profile=black + +isort-check: + isort --check . 
--profile=black flake: flake8 zappa --count --select=E9,F63,F7,F82 --show-source --statistics - flake8 zappa --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + flake8 zappa --count --exit-zero --max-complexity=55 --max-line-length=127 --statistics --ignore F403,F405,E203,E231,E252,W503 test-docs: nosetests tests/tests_docs.py --with-coverage --cover-package=zappa --with-timer diff --git a/Pipfile b/Pipfile new file mode 100644 index 000000000..0b6cf396f --- /dev/null +++ b/Pipfile @@ -0,0 +1,48 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[dev-packages] +black = "*" +boto3-stubs = "*" +coveralls = "*" +Django = "<4" +django-stubs = "*" +flake8 = "*" +Flask = "*" +isort = "*" +mock = "*" +mypy = "*" +nose = "*" +nose-timer = "*" +pipenv = ">2021.11.15" +packaging = "*" + +[packages] +argcomplete = "*" +boto3 = ">=1.17.28" +durationpy = "*" +hjson = "*" +jmespath = "*" +kappa = "==0.6.0" +pip = ">=9.0.1" +# Workaround until tests are updated to work with 'placebo' 0.10 +# Move to 'dev-packages' when unpinned +placebo = "<0.10" +python-dateutil = "*" +python-slugify = "*" +PyYAML = "*" +# previous versions don't work with urllib3 1.24 +requests = ">=2.20.0" +six = "*" +toml = "*" +tqdm = "*" +troposphere = ">=3.0" +Werkzeug = "*" +wheel = "*" +wsgi-request-logger = "*" + +[pipenv] +# Required for 'black' since all of its release tags contain 'b' +allow_prereleases = true diff --git a/README.md b/README.md index ab5f971ab..911860667 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ## Zappa - Serverless Python -[![Build Status](https://travis-ci.org/zappa/Zappa.svg)](https://travis-ci.org/zappa/Zappa) +[![CI](https://github.com/zappa/Zappa/actions/workflows/ci.yaml/badge.svg?branch=master&event=push)](https://github.com/zappa/Zappa/actions/workflows/ci.yaml) [![Coverage](https://img.shields.io/coveralls/zappa/Zappa.svg)](https://coveralls.io/github/zappa/Zappa) [![PyPI](https://img.shields.io/pypi/v/Zappa.svg)](https://pypi.python.org/pypi/zappa) [![Slack](https://img.shields.io/badge/chat-slack-ff69b4.svg)](https://zappateam.slack.com/) @@ -79,6 +79,7 @@ - [Application Load Balancer Event Source](#application-load-balancer-event-source) - [Endpoint Configuration](#endpoint-configuration) - [Example Private API Gateway configuration](#example-private-api-gateway-configuration) + - [Cold Starts (Experimental)](#cold-starts-experimental) - [Zappa Guides](#zappa-guides) - [Zappa in the Press](#zappa-in-the-press) - [Sites Using Zappa](#sites-using-zappa) @@ -134,7 +135,7 @@ __Awesome!__ ## Installation and Configuration -_Before you begin, make sure you are running Python 3.6/3.7/3.8 and you have a valid AWS account and your [AWS credentials file](https://blogs.aws.amazon.com/security/post/Tx3D6U6WSFGOK2H/A-New-and-Standardized-Way-to-Manage-Credentials-in-the-AWS-SDKs) is properly installed._ +_Before you begin, make sure you are running Python 3.7/3.8/3.9 and you have a valid AWS account and your [AWS credentials file](https://blogs.aws.amazon.com/security/post/Tx3D6U6WSFGOK2H/A-New-and-Standardized-Way-to-Manage-Credentials-in-the-AWS-SDKs) is properly installed._ **Zappa** can easily be installed through pip, like so: @@ -393,11 +394,11 @@ You can execute any function in your application directly at any time by using t For instance, suppose you have a basic application in a file called "my_app.py", and you want to invoke a function in it called "my_function". 
Once your application is deployed, you can invoke that function at any time by calling: - $ zappa invoke production 'my_app.my_function' + $ zappa invoke production my_app.my_function Any remote print statements made and the value the function returned will then be printed to your local console. **Nifty!** -You can also invoke interpretable Python 3.6/3.7/3.8 strings directly by using `--raw`, like so: +You can also invoke interpretable Python 3.7/3.8/3.9 strings directly by using `--raw`, like so: $ zappa invoke production "print(1 + 2 + 3)" --raw @@ -420,8 +421,6 @@ Commands which require direct user input, such as `createsuperuser`, should be [ For more Django integration, take a look at the [zappa-django-utils](https://github.com/Miserlou/zappa-django-utils) project. -_(Please note that commands which take over 30 seconds to execute may time-out preventing output from being returned - but the command may continue to run. See [this related issue](https://github.com/Miserlou/Zappa/issues/205#issuecomment-236391248) for a work-around.)_ - ### SSL Certification Zappa can be deployed to custom domain names and subdomains with custom SSL certificates, Let's Encrypt certificates, and [AWS Certificate Manager](https://aws.amazon.com/certificate-manager/) (ACM) certificates. @@ -930,7 +929,7 @@ to change Zappa's behavior. Use these at your own risk! "role_name": "MyLambdaRole", // Name of Zappa execution role. Default --ZappaExecutionRole. To use a different, pre-existing policy, you must also set manage_roles to false. "role_arn": "arn:aws:iam::12345:role/app-ZappaLambdaExecutionRole", // ARN of Zappa execution role. Default to None. To use a different, pre-existing policy, you must also set manage_roles to false. This overrides role_name. Use with temporary credentials via GetFederationToken. "route53_enabled": true, // Have Zappa update your Route53 Hosted Zones when certifying with a custom domain. Default true. - "runtime": "python3.6", // Python runtime to use on Lambda. Can be one of "python3.6", "python3.7" or "python3.8". Defaults to whatever the current Python being used is. + "runtime": "python3.9", // Python runtime to use on Lambda. Can be one of "python3.7", "python3.8", or "python3.9". Defaults to whatever the current Python being used is. "s3_bucket": "dev-bucket", // Zappa zip bucket, "slim_handler": false, // Useful if project >50M. Set true to just upload a small handler to Lambda and load actual project from S3 at runtime. Default false. "settings_file": "~/Projects/MyApp/settings/dev_settings.py", // Server side settings file location, @@ -1445,6 +1444,10 @@ apigateway_resource_policy.json: } ``` +### Cold Starts (Experimental) + +Lambda may provide additional resources than provisioned during cold start initialization. Set `INSTANTIATE_LAMBDA_HANDLER_ON_IMPORT=True` to instantiate the lambda handler on import. This is an experimental feature - if startup time is critical, look into using Provisioned Concurrency. + ## Zappa Guides * [Django-Zappa tutorial (screencast)](https://www.youtube.com/watch?v=plUrbPN0xc8&feature=youtu.be). @@ -1535,6 +1538,7 @@ If you are adding a non-trivial amount of new code, please include a functioning Please include the GitHub issue or pull request URL that has discussion related to your changes as a comment in the code ([example](https://github.com/zappa/Zappa/blob/fae2925431b820eaedf088a632022e4120a29f89/zappa/zappa.py#L241-L243)). 
This greatly helps for project maintainability, as it allows us to trace back use cases and explain decision making. Similarly, please make sure that you meet all of the requirements listed in the [pull request template](https://raw.githubusercontent.com/zappa/Zappa/master/.github/PULL_REQUEST_TEMPLATE.md). Please feel free to work on any open ticket, especially any ticket marked with the "help-wanted" label. If you get stuck or want to discuss an issue further, please join [our Slack channel](https://zappateam.slack.com/), where you'll find a community of smart and interesting people working dilligently on hard problems. +[Zappa Slack Auto Invite](https://slackautoinviter.herokuapp.com) Zappa does not intend to conform to PEP8, isolate your commits so that changes to functionality with changes made by your linter. diff --git a/example/app.py b/example/app.py index 9335bef0f..4e62c0816 100644 --- a/example/app.py +++ b/example/app.py @@ -1,4 +1,5 @@ import logging + from flask import Flask app = Flask(__name__) @@ -6,11 +7,13 @@ logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -@app.route('/', methods=['GET', 'POST']) + +@app.route("/", methods=["GET", "POST"]) def lambda_handler(event=None, context=None): - logger.info('Lambda function invoked index()') + logger.info("Lambda function invoked index()") + + return "Flask says Hello!!" - return 'Flask says Hello!!' -if __name__ == '__main__': +if __name__ == "__main__": app.run(debug=True) diff --git a/example/authmodule.py b/example/authmodule.py index 9517e4b12..a5c8f47a9 100644 --- a/example/authmodule.py +++ b/example/authmodule.py @@ -4,15 +4,15 @@ http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" +import json +import pprint import re import time -import pprint -import json def lambda_handler(event, context): - print("Client token: " + event['authorizationToken']) - print("Method ARN: " + event['methodArn']) + print("Client token: " + event["authorizationToken"]) + print("Method ARN: " + event["methodArn"]) """validate the incoming token""" """and produce the principal user identifier associated with the token""" @@ -38,8 +38,8 @@ def lambda_handler(event, context): """made with the same token""" """the example policy below denies access to all resources in the RestApi""" - tmp = event['methodArn'].split(':') - apiGatewayArnTmp = tmp[5].split('/') + tmp = event["methodArn"].split(":") + apiGatewayArnTmp = tmp[5].split("/") awsAccountId = tmp[4] policy = AuthPolicy(principalId, awsAccountId) @@ -58,15 +58,17 @@ def lambda_handler(event, context): """finally, build the policy and exit the function using return""" return policy.build() + class HttpVerb: - GET = "GET" - POST = "POST" - PUT = "PUT" - PATCH = "PATCH" - HEAD = "HEAD" - DELETE = "DELETE" + GET = "GET" + POST = "POST" + PUT = "PUT" + PATCH = "PATCH" + HEAD = "HEAD" + DELETE = "DELETE" OPTIONS = "OPTIONS" - ALL = "*" + ALL = "*" + class AuthPolicy: awsAccountId = "" @@ -112,32 +114,33 @@ def _addMethod(self, effect, verb, resource, conditions): if resource[:1] == "/": resource = resource[1:] - resourceArn = ("arn:aws:execute-api:" + - self.region + ":" + - self.awsAccountId + ":" + - self.restApiId + "/" + - self.stage + "/" + - verb + "/" + - resource) + resourceArn = ( + "arn:aws:execute-api:" + + self.region + + ":" + + self.awsAccountId + + ":" + + self.restApiId + + "/" + + self.stage + + "/" + + verb + + "/" + + resource + ) if effect.lower() == "allow": - self.allowMethods.append({ - 'resourceArn' : resourceArn, - 'conditions' : conditions - }) + self.allowMethods.append({"resourceArn": resourceArn, "conditions": conditions}) elif effect.lower() == "deny": - self.denyMethods.append({ - 'resourceArn' : resourceArn, - 'conditions' : conditions - }) + self.denyMethods.append({"resourceArn": resourceArn, "conditions": conditions}) def _getEmptyStatement(self, effect): """Returns an empty statement object prepopulated with the correct action and the desired effect.""" statement = { - 'Action': 'execute-api:Invoke', - 'Effect': effect[:1].upper() + effect[1:].lower(), - 'Resource': [] + "Action": "execute-api:Invoke", + "Effect": effect[:1].upper() + effect[1:].lower(), + "Resource": [], } return statement @@ -151,12 +154,12 @@ def _getStatementForEffect(self, effect, methods): statement = self._getEmptyStatement(effect) for curMethod in methods: - if curMethod['conditions'] is None or len(curMethod['conditions']) == 0: - statement['Resource'].append(curMethod['resourceArn']) + if curMethod["conditions"] is None or len(curMethod["conditions"]) == 0: + statement["Resource"].append(curMethod["resourceArn"]) else: conditionalStatement = self._getEmptyStatement(effect) - conditionalStatement['Resource'].append(curMethod['resourceArn']) - conditionalStatement['Condition'] = curMethod['conditions'] + conditionalStatement["Resource"].append(curMethod["resourceArn"]) + conditionalStatement["Condition"] = curMethod["conditions"] statements.append(conditionalStatement) statements.append(statement) @@ -198,19 +201,17 @@ def build(self): conditions. This will generate a policy with two main statements for the effect: one statement for Allow and one statement for Deny. 
Methods that includes conditions will have their own statement in the policy.""" - if ((self.allowMethods is None or len(self.allowMethods) == 0) and - (self.denyMethods is None or len(self.denyMethods) == 0)): + if (self.allowMethods is None or len(self.allowMethods) == 0) and ( + self.denyMethods is None or len(self.denyMethods) == 0 + ): raise NameError("No statements defined for the policy") policy = { - 'principalId' : self.principalId, - 'policyDocument' : { - 'Version' : self.version, - 'Statement' : [] - } + "principalId": self.principalId, + "policyDocument": {"Version": self.version, "Statement": []}, } - policy['policyDocument']['Statement'].extend(self._getStatementForEffect("Allow", self.allowMethods)) - policy['policyDocument']['Statement'].extend(self._getStatementForEffect("Deny", self.denyMethods)) + policy["policyDocument"]["Statement"].extend(self._getStatementForEffect("Allow", self.allowMethods)) + policy["policyDocument"]["Statement"].extend(self._getStatementForEffect("Deny", self.denyMethods)) return policy diff --git a/example/mymodule.py b/example/mymodule.py index 14b08aabc..ad326efbd 100644 --- a/example/mymodule.py +++ b/example/mymodule.py @@ -1,8 +1,8 @@ def myfunc(): - print('Running my function in a schedule!') + print("Running my function in a schedule!") def myfunc_with_events(event, context): - print('Event time was', event['time']) - print('This log is', context.log_group_name, context.log_stream_name) - print('Time left for execution:', context.get_remaining_time_in_millis()) + print("Event time was", event["time"]) + print("This log is", context.log_group_name, context.log_stream_name) + print("Time left for execution:", context.get_remaining_time_in_millis()) diff --git a/requirements.in b/requirements.in deleted file mode 100644 index 1e3b5a2b8..000000000 --- a/requirements.in +++ /dev/null @@ -1,22 +0,0 @@ -argcomplete -boto3>=1.17.28 -durationpy -hjson -jmespath -kappa==0.6.0 -pip>=9.0.1 -python-dateutil -python-slugify -PyYAML -future -# previous version don't work with urllib3 1.24 -requests>=2.20.0 -six -toml -tqdm -troposphere -# See https://github.com/Miserlou/Zappa/issues/2036 -Werkzeug<1.0 -wheel -wsgi-request-logger -pip-tools \ No newline at end of file diff --git a/requirements.sh b/requirements.sh deleted file mode 100755 index 195992c2d..000000000 --- a/requirements.sh +++ /dev/null @@ -1,13 +0,0 @@ -#! 
/bin/bash - -set -e - -ARGS="" -if [ "$1" == "--upgrade" ]; then - ARGS="-U" -fi - -pip install -U pip-tools -pip-compile ${ARGS} -o test_requirements.txt requirements.in test_requirements.in -cp test_requirements.txt requirements.txt -pip-compile -o requirements.txt requirements.in diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 9d123ebef..000000000 --- a/requirements.txt +++ /dev/null @@ -1,91 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements.txt requirements.in -# -argcomplete==1.12.2 - # via -r requirements.in -boto3==1.17.44 - # via - # -r requirements.in - # kappa -botocore==1.20.44 - # via - # boto3 - # s3transfer -certifi==2020.12.5 - # via requests -cfn-flip==1.2.3 - # via troposphere -chardet==4.0.0 - # via requests -click==7.1.2 - # via - # cfn-flip - # kappa - # pip-tools -durationpy==0.5 - # via -r requirements.in -future==0.18.2 - # via -r requirements.in -hjson==3.0.2 - # via -r requirements.in -idna==2.10 - # via requests -jmespath==0.10.0 - # via - # -r requirements.in - # boto3 - # botocore -kappa==0.6.0 - # via -r requirements.in -pep517==0.10.0 - # via pip-tools -pip-tools==6.0.1 - # via -r requirements.in -placebo==0.9.0 - # via kappa -python-dateutil==2.8.1 - # via - # -r requirements.in - # botocore -python-slugify==4.0.1 - # via -r requirements.in -pyyaml==5.4.1 - # via - # -r requirements.in - # cfn-flip - # kappa -requests==2.25.1 - # via -r requirements.in -s3transfer==0.3.6 - # via boto3 -six==1.15.0 - # via - # -r requirements.in - # cfn-flip - # python-dateutil -text-unidecode==1.3 - # via python-slugify -toml==0.10.2 - # via - # -r requirements.in - # pep517 -tqdm==4.59.0 - # via -r requirements.in -troposphere==2.7.0 - # via -r requirements.in -urllib3==1.26.4 - # via - # botocore - # requests -werkzeug==0.16.1 - # via -r requirements.in -wheel==0.36.2 - # via -r requirements.in -wsgi-request-logger==0.4.6 - # via -r requirements.in - -# The following packages are considered to be unsafe in a requirements file: -# pip diff --git a/setup.py b/setup.py index 8ea8be741..af89dea10 100755 --- a/setup.py +++ b/setup.py @@ -1,52 +1,58 @@ -import os -import sys -from setuptools import setup +from configparser import ConfigParser from io import open +from pathlib import Path + +from setuptools import setup + from zappa import __version__ -with open('README.md') as readme_file: +with open("README.md", encoding="utf-8") as readme_file: long_description = readme_file.read() -with open(os.path.join(os.path.dirname(__file__), 'requirements.in')) as f: - required = f.read().splitlines() - -with open(os.path.join(os.path.dirname(__file__), 'test_requirements.in')) as f: - test_required = f.read().splitlines() +pipfile = ConfigParser() +pipfile.read(Path(__file__).parent.resolve() / "Pipfile") +required = [ + "{}{}".format(name, version.strip('"')) if version != '"*"' else name for name, version in pipfile["packages"].items() +] +test_required = [ + "{}{}".format(name, version.strip('"')) if version != '"*"' else name for name, version in pipfile["dev-packages"].items() +] setup( - name='zappa', + name="zappa", version=__version__, - packages=['zappa'], + packages=["zappa"], install_requires=required, + python_requires=">=3.7, <3.10", tests_require=test_required, - test_suite='nose.collector', + test_suite="nose.collector", include_package_data=True, - license='MIT License', - description='Server-less Python Web Services for AWS Lambda and API Gateway', + license="MIT 
License", + description="Server-less Python Web Services for AWS Lambda and API Gateway", long_description=long_description, - long_description_content_type='text/markdown', - url='https://github.com/zappa/Zappa', - author='Rich Jones', - author_email='rich@openwatch.net', + long_description_content_type="text/markdown", + url="https://github.com/zappa/Zappa", + author="Rich Jones", + author_email="rich@openwatch.net", entry_points={ - 'console_scripts': [ - 'zappa=zappa.cli:handle', - 'z=zappa.cli:handle', + "console_scripts": [ + "zappa=zappa.cli:handle", + "z=zappa.cli:handle", ] }, classifiers=[ - 'Environment :: Console', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Framework :: Django', - 'Framework :: Django :: 1.11', - 'Framework :: Django :: 2.0', - 'Framework :: Django :: 3.0', - 'Topic :: Internet :: WWW/HTTP', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', + "Environment :: Console", + "License :: OSI Approved :: Apache Software License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Framework :: Django", + "Framework :: Django :: 1.11", + "Framework :: Django :: 2.0", + "Framework :: Django :: 3.0", + "Topic :: Internet :: WWW/HTTP", + "Topic :: Internet :: WWW/HTTP :: Dynamic Content", ], ) diff --git a/test_requirements.in b/test_requirements.in deleted file mode 100644 index e0ec2d3de..000000000 --- a/test_requirements.in +++ /dev/null @@ -1,13 +0,0 @@ -black -boto3-stubs -coveralls -Django -django-stubs -flake8 -Flask -mock -mypy -nose -nose-timer -placebo -isort diff --git a/test_requirements.txt b/test_requirements.txt deleted file mode 100644 index 47a67ce65..000000000 --- a/test_requirements.txt +++ /dev/null @@ -1,171 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=test_requirements.txt requirements.in test_requirements.in -# -appdirs==1.4.4 - # via black -argcomplete==1.12.2 - # via -r requirements.in -asgiref==3.3.1 - # via django -black==21.5b1 - # via -r test_requirements.in -boto3-stubs==1.17.78 - # via -r test_requirements.in -boto3==1.17.44 - # via - # -r requirements.in - # kappa -botocore==1.20.44 - # via - # boto3 - # s3transfer -certifi==2020.12.5 - # via requests -cfn-flip==1.2.3 - # via troposphere -chardet==4.0.0 - # via requests -click==7.1.2 - # via - # black - # cfn-flip - # flask - # kappa - # pip-tools -coverage==5.5 - # via coveralls -coveralls==3.0.1 - # via -r test_requirements.in -django-stubs-ext==0.2.0 - # via django-stubs -django-stubs==1.8.0 - # via -r test_requirements.in -django==3.1.7 - # via - # -r test_requirements.in - # django-stubs - # django-stubs-ext -docopt==0.6.2 - # via coveralls -durationpy==0.5 - # via -r requirements.in -flake8==3.9.0 - # via -r test_requirements.in -flask==1.1.2 - # via -r test_requirements.in -future==0.18.2 - # via -r requirements.in -hjson==3.0.2 - # via -r requirements.in -idna==2.10 - # via requests -isort==5.8.0 - # via -r test_requirements.in -itsdangerous==1.1.0 - # via flask -jinja2==2.11.3 - # via flask -jmespath==0.10.0 - # via - # -r requirements.in - # boto3 - # botocore -kappa==0.6.0 - # via -r requirements.in -markupsafe==1.1.1 - # via jinja2 
-mccabe==0.6.1 - # via flake8 -mock==4.0.3 - # via -r test_requirements.in -mypy-extensions==0.4.3 - # via - # black - # mypy -mypy==0.812 - # via - # -r test_requirements.in - # django-stubs -nose-timer==1.0.1 - # via -r test_requirements.in -nose==1.3.7 - # via - # -r test_requirements.in - # nose-timer -pathspec==0.8.1 - # via black -pep517==0.10.0 - # via pip-tools -pip-tools==6.0.1 - # via -r requirements.in -placebo==0.9.0 - # via - # -r test_requirements.in - # kappa -pycodestyle==2.7.0 - # via flake8 -pyflakes==2.3.1 - # via flake8 -python-dateutil==2.8.1 - # via - # -r requirements.in - # botocore -python-slugify==4.0.1 - # via -r requirements.in -pytz==2021.1 - # via django -pyyaml==5.4.1 - # via - # -r requirements.in - # cfn-flip - # kappa -regex==2021.4.4 - # via black -requests==2.25.1 - # via - # -r requirements.in - # coveralls -s3transfer==0.3.6 - # via boto3 -six==1.15.0 - # via - # -r requirements.in - # cfn-flip - # python-dateutil -sqlparse==0.4.1 - # via django -text-unidecode==1.3 - # via python-slugify -toml==0.10.2 - # via - # -r requirements.in - # black - # pep517 -tqdm==4.59.0 - # via -r requirements.in -troposphere==2.7.0 - # via -r requirements.in -typed-ast==1.4.3 - # via mypy -typing-extensions==3.10.0.0 - # via - # django-stubs - # mypy -urllib3==1.26.4 - # via - # botocore - # requests -werkzeug==0.16.1 - # via - # -r requirements.in - # flask -wheel==0.36.2 - # via -r requirements.in -wsgi-request-logger==0.4.6 - # via -r requirements.in - -# The following packages are considered to be unsafe in a requirements file: -# pip diff --git a/test_settings.py b/test_settings.py index 4c9950e22..533309f75 100644 --- a/test_settings.py +++ b/test_settings.py @@ -1,31 +1,31 @@ -APP_MODULE = 'tests.test_app' -APP_FUNCTION = 'hello_world' +APP_MODULE = "tests.test_app" +APP_FUNCTION = "hello_world" DJANGO_SETTINGS = None -DEBUG = 'True' -LOG_LEVEL = 'DEBUG' -SCRIPT_NAME = 'hello_world' +DEBUG = "True" +LOG_LEVEL = "DEBUG" +SCRIPT_NAME = "hello_world" DOMAIN = None -API_STAGE = 'ttt888' -PROJECT_NAME = 'ttt888' +API_STAGE = "ttt888" +PROJECT_NAME = "ttt888" -REMOTE_ENV='s3://lmbda/test_env.json' +REMOTE_ENV = "s3://lmbda/test_env.json" ## test_env.json -#{ +# { # "hello": "world" -#} +# } # AWS_EVENT_MAPPING = { - 'arn:aws:s3:1': 'test_settings.aws_s3_event', - 'arn:aws:sns:1': 'test_settings.aws_sns_event', - 'arn:aws:dynamodb:1': 'test_settings.aws_dynamodb_event', - 'arn:aws:kinesis:1': 'test_settings.aws_kinesis_event', - 'arn:aws:sqs:1': 'test_settings.aws_sqs_event' + "arn:aws:s3:1": "test_settings.aws_s3_event", + "arn:aws:sns:1": "test_settings.aws_sns_event", + "arn:aws:dynamodb:1": "test_settings.aws_dynamodb_event", + "arn:aws:kinesis:1": "test_settings.aws_kinesis_event", + "arn:aws:sqs:1": "test_settings.aws_sqs_event", } -ENVIRONMENT_VARIABLES={'testenv': 'envtest'} +ENVIRONMENT_VARIABLES = {"testenv": "envtest"} -AUTHORIZER_FUNCTION='test_settings.authorizer_event' +AUTHORIZER_FUNCTION = "test_settings.authorizer_event" def prebuild_me(): @@ -43,6 +43,7 @@ def aws_s3_event(event, content): def aws_sns_event(event, content): return "AWS SNS EVENT" + def aws_async_sns_event(arg1, arg2, arg3): return "AWS ASYNC SNS EVENT" diff --git a/tests/data/test1.py b/tests/data/test1.py index eec2c37ac..d4a378658 100644 --- a/tests/data/test1.py +++ b/tests/data/test1.py @@ -2,6 +2,4 @@ def greet(): - print( - "There is more stupidity than hydrogen in the universe, and it has a longer shelf life." 
- ) + print("There is more stupidity than hydrogen in the universe, and it has a longer shelf life.") diff --git a/tests/placebo/TestZappa.test_cli_aws/lambda.GetFunctionConfiguration_1.json b/tests/placebo/TestZappa.test_cli_aws/lambda.GetFunctionConfiguration_1.json index 1d826bbf1..d3f362b2b 100644 --- a/tests/placebo/TestZappa.test_cli_aws/lambda.GetFunctionConfiguration_1.json +++ b/tests/placebo/TestZappa.test_cli_aws/lambda.GetFunctionConfiguration_1.json @@ -1,33 +1,7 @@ { "status_code": 200, "data": { - "ResponseMetadata": { - "RequestId": "8cd3e440-6d74-11e7-9ebd-693a5cc9b121", - "HTTPStatusCode": 200, - "HTTPHeaders": { - "content-type": "application/json", - "date": "Thu, 20 Jul 2017 17:54:59 GMT", - "x-amzn-requestid": "8cd3e440-6d74-11e7-9ebd-693a5cc9b121", - "content-length": "609", - "connection": "keep-alive" - }, - "RetryAttempts": 0 - }, - "FunctionName": "zappa-ttt888", - "FunctionArn": "arn:aws:lambda:us-east-1:004396165043:function:zappa-ttt888", - "Runtime": "python3.6", - "Role": "arn:aws:iam::004396165043:role/zappa-ttt888-ZappaLambdaExecutionRole", - "Handler": "handler.lambda_handler", - "CodeSize": 16975308, - "Description": "Zappa Deployment", - "Timeout": 30, - "MemorySize": 512, - "LastModified": "2017-07-20T17:54:59.235+0000", - "CodeSha256": "0pOcmP7sDoO6mLbZKmtH5z0XyjT8wuu/1VenHLgu/MU=", - "Version": "$LATEST", - "Environment": { - "Variables": {} - }, - "PackageType": "Zip" + "State": "Active", + "LastUpdateStatus": "Successful" } -} \ No newline at end of file +} diff --git a/tests/placebo/TestZappa.test_cli_aws/lambda.GetFunctionConfiguration_2.json b/tests/placebo/TestZappa.test_cli_aws/lambda.GetFunctionConfiguration_2.json new file mode 100644 index 000000000..1d826bbf1 --- /dev/null +++ b/tests/placebo/TestZappa.test_cli_aws/lambda.GetFunctionConfiguration_2.json @@ -0,0 +1,33 @@ +{ + "status_code": 200, + "data": { + "ResponseMetadata": { + "RequestId": "8cd3e440-6d74-11e7-9ebd-693a5cc9b121", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "content-type": "application/json", + "date": "Thu, 20 Jul 2017 17:54:59 GMT", + "x-amzn-requestid": "8cd3e440-6d74-11e7-9ebd-693a5cc9b121", + "content-length": "609", + "connection": "keep-alive" + }, + "RetryAttempts": 0 + }, + "FunctionName": "zappa-ttt888", + "FunctionArn": "arn:aws:lambda:us-east-1:004396165043:function:zappa-ttt888", + "Runtime": "python3.6", + "Role": "arn:aws:iam::004396165043:role/zappa-ttt888-ZappaLambdaExecutionRole", + "Handler": "handler.lambda_handler", + "CodeSize": 16975308, + "Description": "Zappa Deployment", + "Timeout": 30, + "MemorySize": 512, + "LastModified": "2017-07-20T17:54:59.235+0000", + "CodeSha256": "0pOcmP7sDoO6mLbZKmtH5z0XyjT8wuu/1VenHLgu/MU=", + "Version": "$LATEST", + "Environment": { + "Variables": {} + }, + "PackageType": "Zip" + } +} \ No newline at end of file diff --git a/tests/placebo/TestZappa.test_create_lambda_function_docker/lambda.GetFunctionConfiguration_1.json b/tests/placebo/TestZappa.test_create_lambda_function_docker/lambda.GetFunctionConfiguration_1.json new file mode 100644 index 000000000..d3f362b2b --- /dev/null +++ b/tests/placebo/TestZappa.test_create_lambda_function_docker/lambda.GetFunctionConfiguration_1.json @@ -0,0 +1,7 @@ +{ + "status_code": 200, + "data": { + "State": "Active", + "LastUpdateStatus": "Successful" + } +} diff --git a/tests/placebo/TestZappa.test_create_lambda_function_local/lambda.GetFunctionConfiguration_1.json b/tests/placebo/TestZappa.test_create_lambda_function_local/lambda.GetFunctionConfiguration_1.json new file 
mode 100644 index 000000000..d3f362b2b --- /dev/null +++ b/tests/placebo/TestZappa.test_create_lambda_function_local/lambda.GetFunctionConfiguration_1.json @@ -0,0 +1,7 @@ +{ + "status_code": 200, + "data": { + "State": "Active", + "LastUpdateStatus": "Successful" + } +} diff --git a/tests/placebo/TestZappa.test_create_lambda_function_s3/lambda.GetFunctionConfiguration_1.json b/tests/placebo/TestZappa.test_create_lambda_function_s3/lambda.GetFunctionConfiguration_1.json new file mode 100644 index 000000000..d3f362b2b --- /dev/null +++ b/tests/placebo/TestZappa.test_create_lambda_function_s3/lambda.GetFunctionConfiguration_1.json @@ -0,0 +1,7 @@ +{ + "status_code": 200, + "data": { + "State": "Active", + "LastUpdateStatus": "Successful" + } +} diff --git a/tests/test_bad_stage_name_settings.json b/tests/test_bad_stage_name_settings.json index 65f1aa723..783c6acd8 100644 --- a/tests/test_bad_stage_name_settings.json +++ b/tests/test_bad_stage_name_settings.json @@ -1,5 +1,6 @@ { "ttt-888": { + "apigateway_enabled": true, "touch": false, "s3_bucket": "lmbda", "app_function": "tests.test_app.hello_world", @@ -29,6 +30,7 @@ ] }, "devor": { + "apigateway_enabled": true, "s3_bucket": "lmbda", "app_function": "tests.test_app.hello_world", "callbacks": { @@ -45,4 +47,4 @@ "expression": "rate(1 minute)" }] } -} +} \ No newline at end of file diff --git a/tests/test_bot_exception_handler_settings.py b/tests/test_bot_exception_handler_settings.py index 978f4e94c..6c69e0968 100644 --- a/tests/test_bot_exception_handler_settings.py +++ b/tests/test_bot_exception_handler_settings.py @@ -10,7 +10,5 @@ LOG_LEVEL = "DEBUG" PROJECT_NAME = "wsgi_script_name_settings" COGNITO_TRIGGER_MAPPING = {} -AWS_BOT_EVENT_MAPPING = { - "intent-name:DialogCodeHook": "tests.test_handler.raises_exception" -} +AWS_BOT_EVENT_MAPPING = {"intent-name:DialogCodeHook": "tests.test_handler.raises_exception"} EXCEPTION_HANDLER = "tests.test_handler.mocked_exception_handler" diff --git a/tests/test_bot_handler_being_triggered.py b/tests/test_bot_handler_being_triggered.py index ec546da45..90ec6bbdb 100644 --- a/tests/test_bot_handler_being_triggered.py +++ b/tests/test_bot_handler_being_triggered.py @@ -10,6 +10,4 @@ LOG_LEVEL = "DEBUG" PROJECT_NAME = "wsgi_script_name_settings" COGNITO_TRIGGER_MAPPING = {} -AWS_BOT_EVENT_MAPPING = { - "intent-name:DialogCodeHook": "tests.test_handler.handle_bot_intent" -} +AWS_BOT_EVENT_MAPPING = {"intent-name:DialogCodeHook": "tests.test_handler.handle_bot_intent"} diff --git a/tests/test_event_script_app.py b/tests/test_event_script_app.py index 4f64f0873..5ffeee4ce 100644 --- a/tests/test_event_script_app.py +++ b/tests/test_event_script_app.py @@ -1,6 +1,3 @@ -from __future__ import print_function - - def handler_for_events(event, context): print("Event:", event) return True diff --git a/tests/test_handler.py b/tests/test_handler.py index 1089ef798..cc0590128 100644 --- a/tests/test_handler.py +++ b/tests/test_handler.py @@ -1,6 +1,8 @@ -from mock import Mock import sys import unittest + +from mock import Mock + from zappa.handler import LambdaHandler from zappa.utilities import merge_headers @@ -54,9 +56,7 @@ def test_run_function(self): self.assertEqual(LambdaHandler.run_function(one_arg, "e", "c"), "e") self.assertEqual(LambdaHandler.run_function(two_args, "e", "c"), ("e", "c")) self.assertEqual(LambdaHandler.run_function(var_args, "e", "c"), ("e", "c")) - self.assertEqual( - LambdaHandler.run_function(var_args_with_one, "e", "c"), ("e", "c") - ) + 
self.assertEqual(LambdaHandler.run_function(var_args_with_one, "e", "c"), ("e", "c")) try: LambdaHandler.run_function(unsupported, "e", "c") diff --git a/tests/tests.py b/tests/tests.py index 8b4ef564c..bc0d73e21 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -1,58 +1,61 @@ # -*- coding: utf8 -*- +import base64 import collections +import hashlib import json - -from io import BytesIO -import botocore -import botocore.stub -import flask -import mock import os import random -import string -import zipfile -import unittest +import re import shutil +import string import sys import tempfile +import unittest import uuid +import zipfile +from io import BytesIO +from subprocess import check_output -from click.globals import resolve_color_default +import botocore +import botocore.stub +import flask +import mock from click.exceptions import ClickException +from click.globals import resolve_color_default +from packaging import version -from zappa.cli import ZappaCLI, shamelessly_promote, disable_click_colors -from zappa.core import ALB_LAMBDA_ALIAS +from zappa.cli import ZappaCLI, disable_click_colors, shamelessly_promote +from zappa.core import ALB_LAMBDA_ALIAS, ASSUME_POLICY, ATTACH_POLICY, Zappa from zappa.ext.django_zappa import get_django_wsgi from zappa.letsencrypt import ( - get_cert_and_update_domain, - create_domain_key, - create_domain_csr, - create_chained_certificate, cleanup, + create_chained_certificate, + create_domain_csr, + create_domain_key, + encode_certificate, + get_cert_and_update_domain, + gettempdir, parse_account_key, parse_csr, - sign_certificate, - encode_certificate, register_account, + sign_certificate, verify_challenge, - gettempdir, ) from zappa.utilities import ( + InvalidAwsLambdaName, conflicts_with_a_neighbouring_module, contains_python_files_or_subdirs, detect_django_settings, detect_flask_apps, get_venv_from_python_version, human_size, - InvalidAwsLambdaName, + is_valid_bucket_name, parse_s3_url, string_to_timestamp, titlecase_keys, - is_valid_bucket_name, validate_name, ) -from zappa.wsgi import create_wsgi_request, common_log -from zappa.core import Zappa, ASSUME_POLICY, ATTACH_POLICY +from zappa.wsgi import common_log, create_wsgi_request def random_string(length): @@ -101,9 +104,7 @@ def test_disable_click_colors(self): def test_copy_editable_packages(self, mock_remove, mock_find_packages): virtual_env = os.environ.get("VIRTUAL_ENV") if not virtual_env: - return self.skipTest( - "test_copy_editable_packages must be run in a virtualenv" - ) + return self.skipTest("test_copy_editable_packages must be run in a virtualenv") temp_package_dir = tempfile.mkdtemp() try: @@ -126,9 +127,9 @@ def test_copy_editable_packages(self, mock_remove, mock_find_packages): z = Zappa() mock_open = mock.mock_open(read_data=egg_path.encode("utf-8")) - with mock.patch("zappa.core.open", mock_open), mock.patch( - "glob.glob" - ) as mock_glob, mock.patch("zappa.core.copytree") as mock_copytree: + with mock.patch("zappa.core.open", mock_open), mock.patch("glob.glob") as mock_glob, mock.patch( + "zappa.core.copytree" + ) as mock_copytree: # we use glob.glob to get the egg-links in the temp packages # directory mock_glob.return_value = [temp_egg_link] @@ -161,23 +162,7 @@ def test_create_lambda_package(self): "zappa.core.Zappa.get_installed_packages", return_value=mock_installed_packages, ): - z = Zappa(runtime="python3.6") - path = z.create_lambda_zip(handler_file=os.path.realpath(__file__)) - self.assertTrue(os.path.isfile(path)) - os.remove(path) - - def 
test_get_manylinux_python36(self): - z = Zappa(runtime="python3.6") - self.assertIsNotNone(z.get_cached_manylinux_wheel("psycopg2", "2.7.1")) - self.assertIsNone(z.get_cached_manylinux_wheel("derpderpderpderp", "0.0")) - - # mock with a known manylinux wheel package so that code for downloading them gets invoked - mock_installed_packages = {"psycopg2": "2.7.1"} - with mock.patch( - "zappa.core.Zappa.get_installed_packages", - return_value=mock_installed_packages, - ): - z = Zappa(runtime="python3.6") + z = Zappa(runtime="python3.7") path = z.create_lambda_zip(handler_file=os.path.realpath(__file__)) self.assertTrue(os.path.isfile(path)) os.remove(path) @@ -225,35 +210,52 @@ def test_get_manylinux_python38(self): self.assertTrue(os.path.isfile(path)) os.remove(path) + def test_get_manylinux_python39(self): + z = Zappa(runtime="python3.9") + self.assertIsNotNone(z.get_cached_manylinux_wheel("psycopg2-binary", "2.9.1")) + self.assertIsNone(z.get_cached_manylinux_wheel("derp_no_such_thing", "0.0")) + + # mock with a known manylinux wheel package so that code for downloading them gets invoked + mock_installed_packages = {"psycopg2-binary": "2.9.1"} + with mock.patch( + "zappa.core.Zappa.get_installed_packages", + return_value=mock_installed_packages, + ): + z = Zappa(runtime="python3.9") + path = z.create_lambda_zip(handler_file=os.path.realpath(__file__)) + self.assertTrue(os.path.isfile(path)) + os.remove(path) + + # same, but with an ABI3 package + mock_installed_packages = {"cryptography": "2.8"} + with mock.patch( + "zappa.core.Zappa.get_installed_packages", + return_value=mock_installed_packages, + ): + z = Zappa(runtime="python3.9") + path = z.create_lambda_zip(handler_file=os.path.realpath(__file__)) + self.assertTrue(os.path.isfile(path)) + os.remove(path) + def test_getting_installed_packages(self, *args): - z = Zappa(runtime="python3.6") + z = Zappa(runtime="python3.7") # mock pkg_resources call to be same as what our mocked site packages dir has - mock_package = collections.namedtuple( - "mock_package", ["project_name", "version", "location"] - ) - mock_pip_installed_packages = [ - mock_package("super_package", "0.1", "/venv/site-packages") - ] + mock_package = collections.namedtuple("mock_package", ["project_name", "version", "location"]) + mock_pip_installed_packages = [mock_package("super_package", "0.1", "/venv/site-packages")] with mock.patch("os.path.isdir", return_value=True): with mock.patch("os.listdir", return_value=["super_package"]): import pkg_resources # this gets called in non-test Zappa mode - with mock.patch( - "pkg_resources.WorkingSet", return_value=mock_pip_installed_packages - ): - self.assertDictEqual( - z.get_installed_packages("", ""), {"super_package": "0.1"} - ) + with mock.patch("pkg_resources.WorkingSet", return_value=mock_pip_installed_packages): + self.assertDictEqual(z.get_installed_packages("", ""), {"super_package": "0.1"}) def test_getting_installed_packages_mixed_case_location(self, *args): - z = Zappa(runtime="python3.6") + z = Zappa(runtime="python3.7") # mock pip packages call to be same as what our mocked site packages dir has - mock_package = collections.namedtuple( - "mock_package", ["project_name", "version", "location"] - ) + mock_package = collections.namedtuple("mock_package", ["project_name", "version", "location"]) mock_pip_installed_packages = [ mock_package("SuperPackage", "0.1", "/Venv/site-packages"), mock_package("SuperPackage64", "0.1", "/Venv/site-packages64"), @@ -263,13 +265,9 @@ def 
test_getting_installed_packages_mixed_case_location(self, *args): with mock.patch("os.listdir", return_value=[]): import pkg_resources # this gets called in non-test Zappa mode - with mock.patch( - "pkg_resources.WorkingSet", return_value=mock_pip_installed_packages - ): + with mock.patch("pkg_resources.WorkingSet", return_value=mock_pip_installed_packages): self.assertDictEqual( - z.get_installed_packages( - "/venv/Site-packages", "/venv/site-packages64" - ), + z.get_installed_packages("/venv/Site-packages", "/venv/site-packages64"), { "superpackage": "0.1", "superpackage64": "0.1", @@ -277,26 +275,18 @@ def test_getting_installed_packages_mixed_case_location(self, *args): ) def test_getting_installed_packages_mixed_case(self, *args): - z = Zappa(runtime="python3.6") + z = Zappa(runtime="python3.7") # mock pkg_resources call to be same as what our mocked site packages dir has - mock_package = collections.namedtuple( - "mock_package", ["project_name", "version", "location"] - ) - mock_pip_installed_packages = [ - mock_package("SuperPackage", "0.1", "/venv/site-packages") - ] + mock_package = collections.namedtuple("mock_package", ["project_name", "version", "location"]) + mock_pip_installed_packages = [mock_package("SuperPackage", "0.1", "/venv/site-packages")] with mock.patch("os.path.isdir", return_value=True): with mock.patch("os.listdir", return_value=["superpackage"]): import pkg_resources # this gets called in non-test Zappa mode - with mock.patch( - "pkg_resources.WorkingSet", return_value=mock_pip_installed_packages - ): - self.assertDictEqual( - z.get_installed_packages("", ""), {"superpackage": "0.1"} - ) + with mock.patch("pkg_resources.WorkingSet", return_value=mock_pip_installed_packages): + self.assertDictEqual(z.get_installed_packages("", ""), {"superpackage": "0.1"}) def test_load_credentials(self): z = Zappa() @@ -387,55 +377,39 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): ) self.assertEqual( "NONE", - parsable_template["Resources"]["OPTIONS0"]["Properties"][ - "AuthorizationType" - ], + parsable_template["Resources"]["OPTIONS0"]["Properties"]["AuthorizationType"], ) self.assertEqual( "NONE", - parsable_template["Resources"]["OPTIONS1"]["Properties"][ - "AuthorizationType" - ], + parsable_template["Resources"]["OPTIONS1"]["Properties"]["AuthorizationType"], ) self.assertEqual( "MOCK", - parsable_template["Resources"]["OPTIONS0"]["Properties"]["Integration"][ - "Type" - ], + parsable_template["Resources"]["OPTIONS0"]["Properties"]["Integration"]["Type"], ) self.assertEqual( "MOCK", - parsable_template["Resources"]["OPTIONS1"]["Properties"]["Integration"][ - "Type" - ], + parsable_template["Resources"]["OPTIONS1"]["Properties"]["Integration"]["Type"], ) self.assertEqual( "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'", - parsable_template["Resources"]["OPTIONS0"]["Properties"]["Integration"][ - "IntegrationResponses" - ][0]["ResponseParameters"][ - "method.response.header.Access-Control-Allow-Headers" - ], + parsable_template["Resources"]["OPTIONS0"]["Properties"]["Integration"]["IntegrationResponses"][0][ + "ResponseParameters" + ]["method.response.header.Access-Control-Allow-Headers"], ) self.assertEqual( "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'", - parsable_template["Resources"]["OPTIONS1"]["Properties"]["Integration"][ - "IntegrationResponses" - ][0]["ResponseParameters"][ - "method.response.header.Access-Control-Allow-Headers" - ], + 
parsable_template["Resources"]["OPTIONS1"]["Properties"]["Integration"]["IntegrationResponses"][0][ + "ResponseParameters" + ]["method.response.header.Access-Control-Allow-Headers"], ) self.assertTrue( - parsable_template["Resources"]["OPTIONS0"]["Properties"]["MethodResponses"][ - 0 - ]["ResponseParameters"][ + parsable_template["Resources"]["OPTIONS0"]["Properties"]["MethodResponses"][0]["ResponseParameters"][ "method.response.header.Access-Control-Allow-Headers" ] ) self.assertTrue( - parsable_template["Resources"]["OPTIONS1"]["Properties"]["MethodResponses"][ - 0 - ]["ResponseParameters"][ + parsable_template["Resources"]["OPTIONS1"]["Properties"]["MethodResponses"][0]["ResponseParameters"][ "method.response.header.Access-Control-Allow-Headers" ] ) @@ -459,12 +433,8 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): "AWS_IAM", parsable_template["Resources"]["GET1"]["Properties"]["AuthorizationType"], ) - self.assertEqual( - True, parsable_template["Resources"]["GET0"]["Properties"]["ApiKeyRequired"] - ) - self.assertEqual( - True, parsable_template["Resources"]["GET1"]["Properties"]["ApiKeyRequired"] - ) + self.assertEqual(True, parsable_template["Resources"]["GET0"]["Properties"]["ApiKeyRequired"]) + self.assertEqual(True, parsable_template["Resources"]["GET1"]["Properties"]["ApiKeyRequired"]) # Authorizer and IAM authorizer = { @@ -487,11 +457,7 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): parsable_template["Resources"]["Authorizer"] # Authorizer with validation expression - invocations_uri = ( - "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/" - + lambda_arn - + "/invocations" - ) + invocations_uri = "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/" + lambda_arn + "/invocations" z.create_stack_template(lambda_arn, "helloworld", False, False, authorizer) parsable_template = json.loads(z.cf_template.to_json()) self.assertEqual( @@ -502,18 +468,14 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): "CUSTOM", parsable_template["Resources"]["GET1"]["Properties"]["AuthorizationType"], ) - self.assertEqual( - "TOKEN", parsable_template["Resources"]["Authorizer"]["Properties"]["Type"] - ) + self.assertEqual("TOKEN", parsable_template["Resources"]["Authorizer"]["Properties"]["Type"]) self.assertEqual( "ZappaAuthorizer", parsable_template["Resources"]["Authorizer"]["Properties"]["Name"], ) self.assertEqual( 300, - parsable_template["Resources"]["Authorizer"]["Properties"][ - "AuthorizerResultTtlInSeconds" - ], + parsable_template["Resources"]["Authorizer"]["Properties"]["AuthorizerResultTtlInSeconds"], ) self.assertEqual( invocations_uri, @@ -521,15 +483,11 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): ) self.assertEqual( z.credentials_arn, - parsable_template["Resources"]["Authorizer"]["Properties"][ - "AuthorizerCredentials" - ], + parsable_template["Resources"]["Authorizer"]["Properties"]["AuthorizerCredentials"], ) self.assertEqual( "xxx", - parsable_template["Resources"]["Authorizer"]["Properties"][ - "IdentityValidationExpression" - ], + parsable_template["Resources"]["Authorizer"]["Properties"]["IdentityValidationExpression"], ) # Authorizer without validation expression @@ -544,13 +502,9 @@ def test_create_api_gateway_routes_with_different_auth_methods(self): "CUSTOM", parsable_template["Resources"]["GET1"]["Properties"]["AuthorizationType"], ) - self.assertEqual( - "TOKEN", parsable_template["Resources"]["Authorizer"]["Properties"]["Type"] - ) + 
self.assertEqual("TOKEN", parsable_template["Resources"]["Authorizer"]["Properties"]["Type"]) with self.assertRaises(KeyError): - parsable_template["Resources"]["Authorizer"]["Properties"][ - "IdentityValidationExpression" - ] + parsable_template["Resources"]["Authorizer"]["Properties"]["IdentityValidationExpression"] # Authorizer with arn authorizer = { @@ -582,9 +536,7 @@ def test_update_aws_env_vars(self): # Simulate already having some AWS env vars remotely mock_client.get_function_configuration.return_value = { "PackageType": "Zip", - "Environment": { - "Variables": {"REMOTE_ONLY": "AAA", "CHANGED_REMOTE": "BBB"} - }, + "Environment": {"Variables": {"REMOTE_ONLY": "AAA", "CHANGED_REMOTE": "BBB"}}, } z.update_lambda_configuration( "test", @@ -606,9 +558,7 @@ def test_update_aws_env_vars(self): # Simulate already having some AWS env vars remotely but none set in aws_environment_variables mock_client.get_function_configuration.return_value = { "PackageType": "Zip", - "Environment": { - "Variables": {"REMOTE_ONLY_1": "AAA", "REMOTE_ONLY_2": "BBB"} - }, + "Environment": {"Variables": {"REMOTE_ONLY_1": "AAA", "REMOTE_ONLY_2": "BBB"}}, } z.update_lambda_configuration("test", "test", "test") end_result_should_be = {"REMOTE_ONLY_1": "AAA", "REMOTE_ONLY_2": "BBB"} @@ -623,9 +573,7 @@ def test_update_layers(self): with mock.patch.object(z, "lambda_client") as mock_client: mock_client.get_function_configuration.return_value = {"PackageType": "Zip"} - z.update_lambda_configuration( - "test", "test", "test", layers=["Layer1", "Layer2"] - ) + z.update_lambda_configuration("test", "test", "test", layers=["Layer1", "Layer2"]) self.assertEqual( mock_client.update_function_configuration.call_args[1]["Layers"], ["Layer1", "Layer2"], @@ -633,9 +581,7 @@ def test_update_layers(self): with mock.patch.object(z, "lambda_client") as mock_client: mock_client.get_function_configuration.return_value = {"PackageType": "Zip"} z.update_lambda_configuration("test", "test", "test") - self.assertEqual( - mock_client.update_function_configuration.call_args[1]["Layers"], [] - ) + self.assertEqual(mock_client.update_function_configuration.call_args[1]["Layers"], []) def test_update_empty_aws_env_hash(self): z = Zappa() @@ -1101,9 +1047,7 @@ def test_load_extended_settings(self): zappa_cli = ZappaCLI() zappa_cli.api_stage = "extendo2" zappa_cli.load_settings("test_settings.json") - self.assertEqual( - "lmbda2", zappa_cli.stage_config["s3_bucket"] - ) # Second Extension + self.assertEqual("lmbda2", zappa_cli.stage_config["s3_bucket"]) # Second Extension self.assertTrue(zappa_cli.stage_config["touch"]) # First Extension self.assertTrue(zappa_cli.stage_config["delete_local_zip"]) # The base @@ -1159,34 +1103,26 @@ def test_settings_extension(self): zappa_cli = ZappaCLI() # With all three, we should get the JSON file first. - self.assertEqual( - zappa_cli.get_json_or_yaml_settings(), "zappa_settings.json" - ) + self.assertEqual(zappa_cli.get_json_or_yaml_settings(), "zappa_settings.json") zappa_cli.load_settings_file() self.assertIn("lonely", zappa_cli.zappa_settings) os.unlink("zappa_settings.json") # Without the JSON file, we should get the TOML file. - self.assertEqual( - zappa_cli.get_json_or_yaml_settings(), "zappa_settings.toml" - ) + self.assertEqual(zappa_cli.get_json_or_yaml_settings(), "zappa_settings.toml") zappa_cli.load_settings_file() self.assertIn("ttt888", zappa_cli.zappa_settings) self.assertNotIn("devor", zappa_cli.zappa_settings) os.unlink("zappa_settings.toml") # With just the YAML file, we should get it. 
- self.assertEqual( - zappa_cli.get_json_or_yaml_settings(), "zappa_settings.yml" - ) + self.assertEqual(zappa_cli.get_json_or_yaml_settings(), "zappa_settings.yml") zappa_cli.load_settings_file() self.assertIn("ttt888", zappa_cli.zappa_settings) self.assertIn("devor", zappa_cli.zappa_settings) os.unlink("zappa_settings.yml") - self.assertEqual( - zappa_cli.get_json_or_yaml_settings(), "zappa_settings.yaml" - ) + self.assertEqual(zappa_cli.get_json_or_yaml_settings(), "zappa_settings.yaml") zappa_cli.load_settings_file() self.assertIn("ttt888", zappa_cli.zappa_settings) self.assertIn("devor", zappa_cli.zappa_settings) @@ -1234,9 +1170,7 @@ def test_cli_utility(self): zappa_cli.print_logs(logs, colorize=False, force_colorize=False) zappa_cli.print_logs(logs, colorize=False, force_colorize=True) zappa_cli.print_logs(logs, colorize=True, force_colorize=False) - zappa_cli.print_logs( - logs, colorize=True, non_http=False, http=False, force_colorize=True - ) + zappa_cli.print_logs(logs, colorize=True, non_http=False, http=False, force_colorize=True) zappa_cli.check_for_update() def test_cli_format_invoke_command(self): @@ -1271,6 +1205,54 @@ def test_cli_colorize_invoke_command_bad_string(self): colorized_string = zappa_cli.colorize_invoke_command(plain_string) self.assertEqual(final_string, colorized_string) + @mock.patch("zappa.cli.ZappaCLI.colorize_invoke_command") + @mock.patch("zappa.cli.ZappaCLI.format_invoke_command") + def test_cli_format_lambda_response(self, mock_format, mock_colorize): + format_msg = "formatted string" + colorize_msg = "colorized string" + mock_format.return_value = format_msg + mock_colorize.return_value = colorize_msg + zappa_cli = ZappaCLI() + + response_without_logresult = {"StatusCode": 200, "FunctionError": "some_err"} + self.assertEqual( + zappa_cli.format_lambda_response(response_without_logresult), + response_without_logresult, + ) + + bad_utf8 = b"\xfc\xfc\xfc" + bad_utf8_logresult = { + "StatusCode": 200, + "LogResult": base64.b64encode(bad_utf8), + } + self.assertEqual(zappa_cli.format_lambda_response(bad_utf8_logresult), bad_utf8) + + log_msg = "Function output logs go here" + regular_logresult = { + "StatusCode": 200, + "LogResult": base64.b64encode(log_msg.encode()), + } + with mock.patch.object(sys.stdout, "isatty") as mock_isatty: + mock_isatty.return_value = True + formatted = zappa_cli.format_lambda_response(regular_logresult, True) + mock_format.assert_called_once_with(log_msg) + mock_colorize.assert_called_once_with(format_msg) + self.assertEqual(formatted, colorize_msg) + mock_format.reset_mock() + mock_colorize.reset_mock() + + with mock.patch.object(sys.stdout, "isatty") as mock_isatty: + mock_isatty.return_value = False + formatted = zappa_cli.format_lambda_response(regular_logresult, True) + mock_format.assert_not_called() + mock_colorize.assert_not_called() + self.assertEqual(formatted, log_msg) + + formatted = zappa_cli.format_lambda_response(regular_logresult, False) + mock_format.assert_not_called() + mock_colorize.assert_not_called() + self.assertEqual(formatted, log_msg) + def test_cli_save_python_settings_file(self): zappa_cli = ZappaCLI() zappa_cli.api_stage = "ttt888" @@ -1431,24 +1413,18 @@ def test_cli_save_python_settings_file(self): def test_bad_json_catch(self): zappa_cli = ZappaCLI() - self.assertRaises( - ValueError, zappa_cli.load_settings_file, "tests/test_bad_settings.json" - ) + self.assertRaises(ValueError, zappa_cli.load_settings_file, "tests/test_bad_settings.json") def test_bad_stage_name_catch(self): zappa_cli = 
ZappaCLI() - self.assertRaises( - ValueError, - zappa_cli.load_settings, - "tests/test_bad_stage_name_settings.json", - ) + zappa_cli.api_stage = "ttt-888" + zappa_cli.load_settings("tests/test_bad_stage_name_settings.json") + self.assertRaises(ValueError, zappa_cli.dispatch_command, "deploy", "ttt-888") def test_bad_environment_vars_catch(self): zappa_cli = ZappaCLI() zappa_cli.api_stage = "ttt888" - self.assertRaises( - ValueError, zappa_cli.load_settings, "tests/test_bad_environment_vars.json" - ) + self.assertRaises(ValueError, zappa_cli.load_settings, "tests/test_bad_environment_vars.json") # @mock.patch('botocore.session.Session.full_config', new_callable=mock.PropertyMock) # def test_cli_init(self, mock_config): @@ -1705,9 +1681,7 @@ def get_domain_name(domain, *_args, **_kwargs): return zappa_mock.domain_names.get(domain) zappa_mock.get_domain_name.side_effect = get_domain_name - zappa_mock.get_lambda_function_versions.side_effect = ( - get_lambda_function_versions - ) + zappa_mock.get_lambda_function_versions.side_effect = get_lambda_function_versions zappa_cli.zappa = zappa_mock self.assertRaises(ClickException, zappa_cli.certify) @@ -1853,9 +1827,7 @@ def test_get_domain_respects_route53_setting(self, client, template): self.assertIsNotNone(record) zappa_core.apigateway_client.get_domain_name.assert_called_once() zappa_core.route53.list_hosted_zones.assert_called_once() - zappa_core.route53.list_resource_record_sets.assert_called_once_with( - HostedZoneId="somezone" - ) + zappa_core.route53.list_resource_record_sets.assert_called_once_with(HostedZoneId="somezone") @mock.patch("botocore.client") def test_get_all_zones_normal_case(self, client): @@ -2026,9 +1998,7 @@ def test_event_name(self): "this.is.my.dang.function.wassup.yeah.its.long", ) self.assertTrue(len(truncated) <= 64) - self.assertTrue( - truncated.endswith("this.is.my.dang.function.wassup.yeah.its.long") - ) + self.assertTrue(truncated.endswith("this.is.my.dang.function.wassup.yeah.its.long")) truncated = zappa.get_event_name( "basldfkjalsdkfjalsdkfjaslkdfjalsdkfjadlsfkjasdlfkjasdlfkjasdflkjasdf-asdfasdfasdfasdfasdf", "thisidoasdfaljksdfalskdjfalsdkfjasldkfjalsdkfjalsdkfjalsdfkjalasdfasdfasdfasdklfjasldkfjalsdkjfaslkdfjasldkfjasdflkjdasfskdj", @@ -2038,22 +2008,80 @@ def test_event_name(self): self.assertTrue(len(truncated) <= 64) self.assertEqual(truncated, "a-b") - def test_hashed_rule_name(self): + def test_get_scheduled_event_name(self): zappa = Zappa() - truncated = zappa.get_event_name( - "basldfkjalsdkfjalsdkfjaslkdfjalsdkfjadlsfkjasdlfkjasdlfkjasdflkjasdf-asdfasdfasdfasdfasdf", - "this.is.my.dang.function.wassup.yeah.its.long", + event = {} + function = "foo" + lambda_name = "bar" + self.assertEqual( + zappa.get_scheduled_event_name(event, function, lambda_name), + f"{lambda_name}-{function}", ) - self.assertTrue(len(truncated) == 64) - rule_name = zappa.get_hashed_rule_name( - event=dict(name="some-event-name"), - function="this.is.my.dang.function.wassup.yeah.its.long", - lambda_name="basldfkjalsdkfjalsdkfjaslkdfjalsdkfjadlsfkjasdlfkjasdlfkjasdflkjasdf-asdfasdfasdfasdfasdf", + def test_get_scheduled_event_name__has_name(self): + zappa = Zappa() + event = {"name": "my_event"} + function = "foo" + lambda_name = "bar" + self.assertEqual( + zappa.get_scheduled_event_name(event, function, lambda_name), + f"{lambda_name}-{event['name']}-{function}", ) - self.assertTrue(len(rule_name) <= 64) - self.assertTrue( - rule_name.endswith("-this.is.my.dang.function.wassup.yeah.its.long") + + def 
test_get_scheduled_event_name__has_index(self): + zappa = Zappa() + event = {} + function = "foo" + lambda_name = "bar" + index = 1 + self.assertEqual( + zappa.get_scheduled_event_name(event, function, lambda_name, index), + f"{lambda_name}-{index}-{function}", + ) + + def test_get_scheduled_event_name__has_name__has_index(self): + zappa = Zappa() + event = {"name": "my_event"} + function = "foo" + lambda_name = "bar" + index = 1 + self.assertEqual( + zappa.get_scheduled_event_name(event, function, lambda_name, index), + f"{lambda_name}-{index}-{event['name']}-{function}", + ) + + def test_get_scheduled_event_name__truncated(self): + zappa = Zappa() + event = {} + function = "foo" + lambda_name = "bar" * 100 + hashed_lambda_name = hashlib.sha1(lambda_name.encode()).hexdigest() + self.assertEqual( + zappa.get_scheduled_event_name(event, function, lambda_name), + f"{hashed_lambda_name}-{function}", + ) + + def test_get_scheduled_event_name__truncated__has_name(self): + zappa = Zappa() + event = {"name": "my_event"} + function = "foo" + lambda_name = "bar" * 100 + hashed_lambda_name = hashlib.sha1(lambda_name.encode()).hexdigest() + self.assertEqual( + zappa.get_scheduled_event_name(event, function, lambda_name), + f"{hashed_lambda_name}-{event['name']}-{function}", + ) + + def test_get_scheduled_event_name__truncated__has_name__has_index(self): + zappa = Zappa() + event = {"name": "my_event"} + function = "foo" + lambda_name = "bar" * 100 + index = 1 + hashed_lambda_name = hashlib.sha1(lambda_name.encode()).hexdigest() + self.assertEqual( + zappa.get_scheduled_event_name(event, function, lambda_name, index), + f"{hashed_lambda_name}-{index}-{event['name']}-{function}", ) def test_detect_dj(self): @@ -2068,9 +2096,7 @@ def test_shameless(self): shamelessly_promote() def test_s3_url_parser(self): - remote_bucket, remote_file = parse_s3_url( - "s3://my-project-config-files/filename.json" - ) + remote_bucket, remote_file = parse_s3_url("s3://my-project-config-files/filename.json") self.assertEqual(remote_bucket, "my-project-config-files") self.assertEqual(remote_file, "filename.json") @@ -2078,9 +2104,7 @@ def test_s3_url_parser(self): self.assertEqual(remote_bucket, "your-bucket") self.assertEqual(remote_file, "account.key") - remote_bucket, remote_file = parse_s3_url( - "s3://my-config-bucket/super-secret-config.json" - ) + remote_bucket, remote_file = parse_s3_url("s3://my-config-bucket/super-secret-config.json") self.assertEqual(remote_bucket, "my-config-bucket") self.assertEqual(remote_file, "super-secret-config.json") @@ -2088,9 +2112,7 @@ def test_s3_url_parser(self): self.assertEqual(remote_bucket, "your-secure-bucket") self.assertEqual(remote_file, "account.key") - remote_bucket, remote_file = parse_s3_url( - "s3://your-bucket/subfolder/account.key" - ) + remote_bucket, remote_file = parse_s3_url("s3://your-bucket/subfolder/account.key") self.assertEqual(remote_bucket, "your-bucket") self.assertEqual(remote_file, "subfolder/account.key") @@ -2114,9 +2136,7 @@ def test_remote_env_package(self): zappa_cli = ZappaCLI() zappa_cli.api_stage = "remote_env" zappa_cli.load_settings("test_settings.json") - self.assertEqual( - "s3://lmbda-env/prod/env.json", zappa_cli.stage_config["remote_env"] - ) + self.assertEqual("s3://lmbda-env/prod/env.json", zappa_cli.stage_config["remote_env"]) zappa_cli.create_package() with zipfile.ZipFile(zappa_cli.zip_path, "r") as lambda_zip: content = lambda_zip.read("zappa_settings.py") @@ -2204,7 +2224,21 @@ def test_slim_handler(self): zappa_cli = ZappaCLI() 
zappa_cli.api_stage = "slim_handler" zappa_cli.load_settings("test_settings.json") - zappa_cli.create_package() + + # create_package builds the package from the latest zappa pypi release + # If the *current* minor release is not available on pypi create_package() will fail + # assumes that the latest pypi release has a tag matching "v?[0-9]+.[0-9]+.[0-9]+" defined in git. + command = "git tag" + command_output = check_output(command, shell=True).decode("utf8") + + # get valid versions from tags + version_match_string = "v?[0-9]+.[0-9]+.[0-9]+" + tags = [ + tag.strip() for tag in command_output.split("\n") if tag.strip() and re.match(version_match_string, tag.strip()) + ] + + latest_release_tag = sorted(tags, key=version.parse)[-1] + zappa_cli.create_package(use_zappa_release=latest_release_tag) self.assertTrue(os.path.isfile(zappa_cli.handler_path)) self.assertTrue(os.path.isfile(zappa_cli.zip_path)) @@ -2245,9 +2279,7 @@ def test_settings_py_generation(self): # validate environment variables self.assertIn("ENVIRONMENT_VARIABLES", settings) - self.assertEqual( - settings["ENVIRONMENT_VARIABLES"]["TEST_ENV_VAR"], "test_value" - ) + self.assertEqual(settings["ENVIRONMENT_VARIABLES"]["TEST_ENV_VAR"], "test_value") # validate Context header mappings self.assertIn("CONTEXT_HEADER_MAPPINGS", settings) @@ -2265,9 +2297,7 @@ def test_only_ascii_env_var_allowed(self): zappa_cli.load_settings("tests/test_non_ascii_environment_var_key.json") with self.assertRaises(ValueError) as context: zappa_cli.create_package() - self.assertEqual( - "Environment variable keys must be ascii.", str(context.exception) - ) + self.assertEqual("Environment variable keys must be ascii.", str(context.exception)) def test_titlecase_keys(self): raw = { @@ -2296,11 +2326,7 @@ def test_titlecase_keys(self): def test_is_valid_bucket_name(self): # Bucket names must be at least 3 and no more than 63 characters long. self.assertFalse(is_valid_bucket_name("ab")) - self.assertFalse( - is_valid_bucket_name( - "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefhijlmn" - ) - ) + self.assertFalse(is_valid_bucket_name("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefhijlmn")) # Bucket names must not contain uppercase characters or underscores. 
self.assertFalse(is_valid_bucket_name("aaaBaaa")) self.assertFalse(is_valid_bucket_name("aaa_aaa")) @@ -2365,9 +2391,7 @@ def test_zappa_core_deploy_lambda_alb(self): load_credentials=False, ) zappa_core.elbv2_client = botocore.session.get_session().create_client("elbv2") - zappa_core.lambda_client = botocore.session.get_session().create_client( - "lambda" - ) + zappa_core.lambda_client = botocore.session.get_session().create_client("lambda") elbv2_stubber = botocore.stub.Stubber(zappa_core.elbv2_client) lambda_stubber = botocore.stub.Stubber(zappa_core.lambda_client) @@ -2400,25 +2424,15 @@ def test_zappa_core_deploy_lambda_alb(self): expected_params={ "LoadBalancerArns": [loadbalancer_arn], }, - service_response={ - "LoadBalancers": [ - {"LoadBalancerArn": loadbalancer_arn, "State": {"Code": "active"}} - ] - }, + service_response={"LoadBalancers": [{"LoadBalancerArn": loadbalancer_arn, "State": {"Code": "active"}}]}, ) elbv2_stubber.add_response( "modify_load_balancer_attributes", expected_params={ "LoadBalancerArn": loadbalancer_arn, - "Attributes": [ - {"Key": "idle_timeout.timeout_seconds", "Value": kwargs["timeout"]} - ], - }, - service_response={ - "Attributes": [ - {"Key": "idle_timeout.timeout_seconds", "Value": kwargs["timeout"]} - ] + "Attributes": [{"Key": "idle_timeout.timeout_seconds", "Value": kwargs["timeout"]}], }, + service_response={"Attributes": [{"Key": "idle_timeout.timeout_seconds", "Value": kwargs["timeout"]}]}, ) elbv2_stubber.add_response( @@ -2439,14 +2453,10 @@ def test_zappa_core_deploy_lambda_alb(self): "modify_target_group_attributes", expected_params={ "TargetGroupArn": targetgroup_arn, - "Attributes": [ - {"Key": "lambda.multi_value_headers.enabled", "Value": "true"} - ], + "Attributes": [{"Key": "lambda.multi_value_headers.enabled", "Value": "true"}], }, service_response={ - "Attributes": [ - {"Key": "lambda.multi_value_headers.enabled", "Value": "true"} - ], + "Attributes": [{"Key": "lambda.multi_value_headers.enabled", "Value": "true"}], }, ) @@ -2465,9 +2475,7 @@ def test_zappa_core_deploy_lambda_alb(self): "register_targets", expected_params={ "TargetGroupArn": targetgroup_arn, - "Targets": [ - {"Id": "{}:{}".format(kwargs["lambda_arn"], ALB_LAMBDA_ALIAS)} - ], + "Targets": [{"Id": "{}:{}".format(kwargs["lambda_arn"], ALB_LAMBDA_ALIAS)}], }, service_response={}, ) @@ -2507,9 +2515,7 @@ def test_zappa_core_undeploy_lambda_alb(self): load_credentials=False, ) zappa_core.elbv2_client = botocore.session.get_session().create_client("elbv2") - zappa_core.lambda_client = botocore.session.get_session().create_client( - "lambda" - ) + zappa_core.lambda_client = botocore.session.get_session().create_client("lambda") elbv2_stubber = botocore.stub.Stubber(zappa_core.elbv2_client) lambda_stubber = botocore.stub.Stubber(zappa_core.lambda_client) diff --git a/tests/tests_async.py b/tests/tests_async.py index 9d0c113d4..7b4f58d19 100644 --- a/tests/tests_async.py +++ b/tests/tests_async.py @@ -1,16 +1,22 @@ # -*- coding: utf8 -*- -import boto3 -import mock import os import unittest +import boto3 +import mock + try: from mock import patch except ImportError: from unittest.mock import patch -from zappa.asynchronous import AsyncException, LambdaAsyncResponse, SnsAsyncResponse -from zappa.asynchronous import import_and_get_task, get_func_task_path +from zappa.asynchronous import ( + AsyncException, + LambdaAsyncResponse, + SnsAsyncResponse, + get_func_task_path, + import_and_get_task, +) class TestZappa(unittest.TestCase): @@ -67,9 +73,7 @@ def 
test_async_call_with_defaults(self): async_me = import_and_get_task("tests.test_app.async_me") lambda_async_mock = mock.Mock() lambda_async_mock.return_value.send.return_value = "Running async!" - with mock.patch.dict( - "zappa.asynchronous.ASYNC_CLASSES", {"lambda": lambda_async_mock} - ): + with mock.patch.dict("zappa.asynchronous.ASYNC_CLASSES", {"lambda": lambda_async_mock}): # First check that it still runs synchronously by default self.assertEqual(async_me("123"), "run async when on lambda 123") @@ -89,6 +93,4 @@ def test_async_call_with_defaults(self): capture_response=False, lambda_function_name="MyLambda", ) - lambda_async_mock.return_value.send.assert_called_with( - get_func_task_path(async_me), ("qux",), {} - ) + lambda_async_mock.return_value.send.assert_called_with(get_func_task_path(async_me), ("qux",), {}) diff --git a/tests/tests_async_old.py b/tests/tests_async_old.py index 7539ea9e9..37bd1a7dc 100644 --- a/tests/tests_async_old.py +++ b/tests/tests_async_old.py @@ -1,9 +1,10 @@ # -*- coding: utf8 -*- -import boto3 -import mock import os import unittest +import boto3 +import mock + try: from mock import patch except ImportError: @@ -83,9 +84,7 @@ def test_async_call_with_defaults(self): async_me = import_and_get_task("tests.test_app.async_me") lambda_async_mock = mock.Mock() lambda_async_mock.return_value.send.return_value = "Running async!" - with mock.patch.dict( - "zappa.async.ASYNC_CLASSES", {"lambda": lambda_async_mock} - ): + with mock.patch.dict("zappa.async.ASYNC_CLASSES", {"lambda": lambda_async_mock}): # First check that it still runs synchronously by default self.assertEqual(async_me("123"), "run async when on lambda 123") @@ -105,6 +104,4 @@ def test_async_call_with_defaults(self): capture_response=False, lambda_function_name="MyLambda", ) - lambda_async_mock.return_value.send.assert_called_with( - get_func_task_path(async_me), ("qux",), {} - ) + lambda_async_mock.return_value.send.assert_called_with(get_func_task_path(async_me), ("qux",), {}) diff --git a/tests/tests_docs.py b/tests/tests_docs.py index af7152d6a..7c45156ad 100644 --- a/tests/tests_docs.py +++ b/tests/tests_docs.py @@ -1,8 +1,7 @@ -import unittest import re -from os import path, environ +import unittest from copy import copy - +from os import environ, path DIR = path.realpath(path.dirname(__file__)) @@ -65,9 +64,7 @@ def test_readmetoc(self): # skip empty header continue - header_text_no_spaces = header_text_strip.replace( - " ", "-" - ).lower() + header_text_no_spaces = header_text_strip.replace(" ", "-").lower() toc_line = " " * (len(header) - 2) + "- [%s](#%s)" % ( header_text, header_text_no_spaces.lower(), @@ -89,14 +86,10 @@ def test_readmetoc(self): msg = "README.test.md written so you can manually compare." else: - msg = ( - "You can set environ[ZAPPA_TEST_SAVE_README_NEW]=1 to generate\n" - " README.test.md to manually compare." - ) + msg = "You can set environ[ZAPPA_TEST_SAVE_README_NEW]=1 to generate\n" " README.test.md to manually compare." 
self.assertEquals( "".join(old_readme), new_readme, - "README doesn't match after regenerating TOC\n\n" - "You need to run doctoc after a heading change.\n{}".format(msg), + "README doesn't match after regenerating TOC\n\n" "You need to run doctoc after a heading change.\n{}".format(msg), ) diff --git a/tests/tests_middleware.py b/tests/tests_middleware.py index 81d82128d..2afaf1636 100644 --- a/tests/tests_middleware.py +++ b/tests/tests_middleware.py @@ -2,8 +2,8 @@ import sys import unittest -from zappa.wsgi import create_wsgi_request from zappa.middleware import ZappaWSGIMiddleware, all_casings +from zappa.wsgi import create_wsgi_request class TestWSGIMockMiddleWare(unittest.TestCase): @@ -61,9 +61,7 @@ def simple_app(environ, start_response): def simple_app(environ, start_response): # String of weird characters status = "301 Moved Permanently" - response_headers = [ - ("Location", f"http://zappa.com/elsewhere{ugly_string}") - ] + response_headers = [("Location", f"http://zappa.com/elsewhere{ugly_string}")] start_response(status, response_headers) return [ugly_string] @@ -88,9 +86,7 @@ def test_wsgi_authorizer_handling(self): "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) self.assertEqual(environ["REMOTE_USER"], "user1") # With empty authorizer, should not include REMOTE_USER @@ -106,9 +102,7 @@ def test_wsgi_authorizer_handling(self): "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) user = environ.get("REMOTE_USER", "no_user") self.assertEqual(user, "no_user") @@ -125,9 +119,7 @@ def test_wsgi_authorizer_handling(self): "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) user = environ.get("REMOTE_USER", "no_user") self.assertEqual(user, "no_user") @@ -144,9 +136,7 @@ def test_wsgi_authorizer_handling(self): "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) user = environ.get("REMOTE_USER", "no_user") self.assertEqual(user, "no_user") @@ -246,9 +236,7 @@ def test_should_allow_empty_query_params(self): }, "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) self.assertEqual(environ["QUERY_STRING"], "") def test_should_handle_multi_value_query_string_params(self): @@ -278,7 +266,5 @@ def test_should_handle_multi_value_query_string_params(self): }, "query": {}, } - environ = create_wsgi_request( - event, script_name="http://zappa.com/", trailing_slash=False - ) + environ = create_wsgi_request(event, script_name="http://zappa.com/", trailing_slash=False) self.assertEqual(environ["QUERY_STRING"], "foo=1&foo=2") diff --git a/tests/tests_placebo.py b/tests/tests_placebo.py index 91783c055..2b8f43cf8 100644 --- a/tests/tests_placebo.py +++ b/tests/tests_placebo.py @@ -1,16 +1,17 @@ # -*- coding: utf8 -*- -import mock import os import random import string import unittest -from .utils import placebo_session +import 
mock from zappa.cli import ZappaCLI +from zappa.core import Zappa from zappa.handler import LambdaHandler from zappa.utilities import add_event_source, remove_event_source -from zappa.core import Zappa + +from .utils import placebo_session def random_string(length): @@ -175,13 +176,6 @@ def test_rollback_lambda_function_version_docker(self, session): with self.assertRaises(NotImplementedError): z.rollback_lambda_function_version(function_name) - @placebo_session - def test_is_lambda_function_ready(self, session): - z = Zappa(session) - z.credentials_arn = "arn:aws:iam::724336686645:role/ZappaLambdaExecution" - function_name = "django-helloworld-unicode" - z.is_lambda_function_ready(function_name) - @placebo_session def test_invoke_lambda_function(self, session): z = Zappa(session) @@ -246,9 +240,7 @@ def test_handler(self, session): "version": "0", "time": "2016-05-10T21:05:39Z", "id": "0d6a6db0-d5e7-4755-93a0-750a8bf49d55", - "resources": [ - "arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me" - ], + "resources": ["arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me"], } lh.handler(event, None) @@ -262,9 +254,7 @@ def test_handler(self, session): "version": "0", "time": "2016-05-10T21:05:39Z", "id": "0d6a6db0-d5e7-4755-93a0-750a8bf49d55", - "resources": [ - "arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me" - ], + "resources": ["arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me"], } lh.handler(event, None) @@ -296,9 +286,7 @@ def test_handler(self, session): "version": "0", "time": "2016-05-10T21:05:39Z", "id": "0d6a6db0-d5e7-4755-93a0-750a8bf49d55", - "resources": [ - "arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me" - ], + "resources": ["arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me"], } lh.handler(event, None) @@ -307,16 +295,12 @@ def test_handler(self, session): "account": "72333333333", "region": "us-east-1", "detail": {}, - "Records": [ - {"s3": {"configurationId": "test_project:test_settings.aws_s3_event"}} - ], + "Records": [{"s3": {"configurationId": "test_project:test_settings.aws_s3_event"}}], "source": "aws.events", "version": "0", "time": "2016-05-10T21:05:39Z", "id": "0d6a6db0-d5e7-4755-93a0-750a8bf49d55", - "resources": [ - "arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me" - ], + "resources": ["arn:aws:events:us-east-1:72333333333:rule/tests.test_app.schedule_me"], } self.assertEqual("AWS S3 EVENT", lh.handler(event, None)) @@ -528,9 +512,7 @@ def test_add_event_source(self, session): event_source = {"arn": "blah:blah:blah:blah", "events": ["s3:ObjectCreated:*"]} # Sanity. This should fail. 
try: - es = add_event_source( - event_source, "blah:blah:blah:blah", "test_settings.callback", session - ) + es = add_event_source(event_source, "blah:blah:blah:blah", "test_settings.callback", session) self.fail("Success should have failed.") except ValueError: pass @@ -578,9 +560,7 @@ def test_cli_cognito_triggers(self, session): zappa_cli.api_stage = "ttt888" zappa_cli.api_key_required = True zappa_cli.load_settings("test_settings.json", session) - zappa_cli.lambda_arn = ( - "arn:aws:lambda:us-east-1:12345:function:Zappa-Trigger-Test" - ) + zappa_cli.lambda_arn = "arn:aws:lambda:us-east-1:12345:function:Zappa-Trigger-Test" zappa_cli.update_cognito_triggers() @placebo_session diff --git a/tests/utils.py b/tests/utils.py index 9e719ca9b..91e588cc4 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,9 +1,10 @@ -import placebo -import boto3 -import os import functools +import os from contextlib import contextmanager -from mock import patch, MagicMock + +import boto3 +import placebo +from mock import MagicMock, patch try: file @@ -28,9 +29,7 @@ def placebo_session(function): @functools.wraps(function) def wrapper(*args, **kwargs): - session_kwargs = { - "region_name": os.environ.get("AWS_DEFAULT_REGION", "us-east-1") - } + session_kwargs = {"region_name": os.environ.get("AWS_DEFAULT_REGION", "us-east-1")} profile_name = os.environ.get("PLACEBO_PROFILE", None) if profile_name: session_kwargs["profile_name"] = profile_name diff --git a/zappa/__init__.py b/zappa/__init__.py index 8c1d448fa..0513a377f 100644 --- a/zappa/__init__.py +++ b/zappa/__init__.py @@ -1,16 +1,12 @@ import sys -SUPPORTED_VERSIONS = [(3, 6), (3, 7), (3, 8)] +SUPPORTED_VERSIONS = [(3, 7), (3, 8), (3, 9)] if sys.version_info[:2] not in SUPPORTED_VERSIONS: - formatted_supported_versions = [ - "{}.{}".format(*version) for version in SUPPORTED_VERSIONS - ] + formatted_supported_versions = ["{}.{}".format(*version) for version in SUPPORTED_VERSIONS] err_msg = "This version of Python ({}.{}) is not supported!\n".format( *sys.version_info - ) + "Zappa (and AWS Lambda) support the following versions of Python: {}".format( - formatted_supported_versions - ) + ) + "Zappa (and AWS Lambda) support the following versions of Python: {}".format(formatted_supported_versions) raise RuntimeError(err_msg) -__version__ = "0.53.0" +__version__ = "0.55.0" diff --git a/zappa/async.py b/zappa/async.py index c573b3283..ff48d6a2d 100644 --- a/zappa/async.py +++ b/zappa/async.py @@ -1,8 +1,8 @@ -# -*- coding: utf-8 -*- import warnings +from .asynchronous import * # noqa: F401 + warnings.warn( 'Module "zappa.async" is deprecated; please use "zappa.asynchronous" instead.', category=DeprecationWarning, ) -from .asynchronous import * diff --git a/zappa/asynchronous.py b/zappa/asynchronous.py index 7ace97332..7c231a750 100644 --- a/zappa/asynchronous.py +++ b/zappa/asynchronous.py @@ -110,7 +110,7 @@ def my_async_func(*args, **kwargs): SNS_CLIENT = aws_session.client("sns") STS_CLIENT = aws_session.client("sts") DYNAMODB_CLIENT = aws_session.client("dynamodb") -except botocore.exceptions.NoRegionError as e: # pragma: no cover +except botocore.exceptions.NoRegionError: # pragma: no cover # This can happen while testing on Travis, but it's taken care of # during class initialization. pass @@ -136,13 +136,7 @@ class LambdaAsyncResponse: Can be used directly or subclassed if the method to send the message is changed. 
""" - def __init__( - self, - lambda_function_name=None, - aws_region=None, - capture_response=False, - **kwargs - ): + def __init__(self, lambda_function_name=None, aws_region=None, capture_response=False, **kwargs): """ """ if kwargs.get("boto_session"): self.client = kwargs.get("boto_session").client("lambda") @@ -187,7 +181,7 @@ def _send(self, message): Given a message, directly invoke the lamdba function for this task. """ message["command"] = "zappa.asynchronous.route_lambda_task" - payload = json.dumps(message).encode("utf-8") + payload = json.dumps(message) if len(payload) > LAMBDA_ASYNC_PAYLOAD_LIMIT: # pragma: no cover raise AsyncException("Payload too large for async Lambda call") self.response = self.client.invoke( @@ -204,13 +198,7 @@ class SnsAsyncResponse(LambdaAsyncResponse): Serialise the func path and arguments """ - def __init__( - self, - lambda_function_name=None, - aws_region=None, - capture_response=False, - **kwargs - ): + def __init__(self, lambda_function_name=None, aws_region=None, capture_response=False, **kwargs): self.lambda_function_name = lambda_function_name self.aws_region = aws_region @@ -259,7 +247,7 @@ def _send(self, message): Given a message, publish to this topic. """ message["command"] = "zappa.asynchronous.route_sns_task" - payload = json.dumps(message).encode("utf-8") + payload = json.dumps(message) if len(payload) > LAMBDA_ASYNC_PAYLOAD_LIMIT: # pragma: no cover raise AsyncException("Payload too large for SNS") self.response = self.client.publish(TargetArn=self.arn, Message=payload) @@ -360,17 +348,12 @@ def run( and other arguments are similar to @task """ - lambda_function_name = remote_aws_lambda_function_name or os.environ.get( - "AWS_LAMBDA_FUNCTION_NAME" - ) + lambda_function_name = remote_aws_lambda_function_name or os.environ.get("AWS_LAMBDA_FUNCTION_NAME") aws_region = remote_aws_region or os.environ.get("AWS_REGION") task_path = get_func_task_path(func) return ASYNC_CLASSES[service]( - lambda_function_name=lambda_function_name, - aws_region=aws_region, - capture_response=capture_response, - **task_kwargs + lambda_function_name=lambda_function_name, aws_region=aws_region, capture_response=capture_response, **task_kwargs ).send(task_path, args, kwargs) @@ -436,9 +419,7 @@ def _run_async(*args, **kwargs): When outside of Lambda, the func passed to @task is run and we return the actual value. """ - lambda_function_name = lambda_function_name_arg or os.environ.get( - "AWS_LAMBDA_FUNCTION_NAME" - ) + lambda_function_name = lambda_function_name_arg or os.environ.get("AWS_LAMBDA_FUNCTION_NAME") aws_region = aws_region_arg or os.environ.get("AWS_REGION") if (service in ASYNC_CLASSES) and (lambda_function_name): @@ -489,9 +470,7 @@ def get_func_task_path(func): Format the modular task path for a function via inspection. 
""" module_path = inspect.getmodule(func).__name__ - task_path = "{module_path}.{func_name}".format( - module_path=module_path, func_name=func.__name__ - ) + task_path = "{module_path}.{func_name}".format(module_path=module_path, func_name=func.__name__) return task_path @@ -499,9 +478,7 @@ def get_async_response(response_id): """ Get the response from the async table """ - response = DYNAMODB_CLIENT.get_item( - TableName=ASYNC_RESPONSE_TABLE, Key={"id": {"S": str(response_id)}} - ) + response = DYNAMODB_CLIENT.get_item(TableName=ASYNC_RESPONSE_TABLE, Key={"id": {"S": str(response_id)}}) if "Item" not in response: return None diff --git a/zappa/cli.py b/zappa/cli.py index 44f462753..305e74e30 100755 --- a/zappa/cli.py +++ b/zappa/cli.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - """ Zappa CLI @@ -12,7 +9,6 @@ import collections import importlib import inspect -import logging import os import pkgutil import random @@ -24,6 +20,7 @@ import zipfile from builtins import bytes, input from datetime import datetime, timedelta +from typing import Optional import argcomplete import botocore @@ -38,11 +35,9 @@ from click.exceptions import ClickException from click.globals import push_context from dateutil import parser -from past.builtins import basestring -from .core import API_GATEWAY_REGIONS, Zappa, logger +from .core import API_GATEWAY_REGIONS, Zappa from .utilities import ( - InvalidAwsLambdaName, check_new_version_available, detect_django_settings, detect_flask_apps, @@ -70,9 +65,7 @@ "touch", ] -BOTO3_CONFIG_DOCS_URL = ( - "https://boto3.readthedocs.io/en/latest/guide/quickstart.html#configuration" -) +BOTO3_CONFIG_DOCS_URL = "https://boto3.readthedocs.io/en/latest/guide/quickstart.html#configuration" ## # Main Input Processing @@ -130,9 +123,7 @@ class ZappaCLI: stage_name_env_pattern = re.compile("^[a-zA-Z0-9_]+$") def __init__(self): - self._stage_config_overrides = ( - {} - ) # change using self.override_stage_config_setting(key, val) + self._stage_config_overrides = {} # change using self.override_stage_config_setting(key, val) @property def stage_config(self): @@ -154,16 +145,12 @@ def get_stage_setting(stage, extended_stages=None): try: stage_settings = dict(self.zappa_settings[stage].copy()) except KeyError: - raise ClickException( - "Cannot extend settings for undefined stage '" + stage + "'." - ) + raise ClickException("Cannot extend settings for undefined stage '" + stage + "'.") extends_stage = self.zappa_settings[stage].get("extends", None) if not extends_stage: return stage_settings - extended_settings = get_stage_setting( - stage=extends_stage, extended_stages=extended_stages - ) + extended_settings = get_stage_setting(stage=extends_stage, extended_stages=extended_stages) extended_settings.update(stage_settings) return extended_settings @@ -209,9 +196,7 @@ def handle(self, argv=None): version=pkg_resources.get_distribution("zappa").version, help="Print the zappa version", ) - parser.add_argument( - "--color", default="auto", choices=["auto", "never", "always"] - ) + parser.add_argument("--color", default="auto", choices=["auto", "never", "always"]) env_parser = argparse.ArgumentParser(add_help=False) me_group = env_parser.add_mutually_exclusive_group() @@ -220,15 +205,9 @@ def handle(self, argv=None): me_group.add_argument("stage_env", nargs="?") group = env_parser.add_argument_group() - group.add_argument( - "-a", "--app_function", help="The WSGI application function." 
- ) - group.add_argument( - "-s", "--settings_file", help="The path to a Zappa settings file." - ) - group.add_argument( - "-q", "--quiet", action="store_true", help="Silence all output." - ) + group.add_argument("-a", "--app_function", help="The WSGI application function.") + group.add_argument("-s", "--settings_file", help="The path to a Zappa settings file.") + group.add_argument("-q", "--quiet", action="store_true", help="Silence all output.") # https://github.com/Miserlou/Zappa/issues/407 # Moved when 'template' command added. # Fuck Terraform. @@ -239,36 +218,25 @@ def handle(self, argv=None): help="Make the output of this command be machine readable.", ) # https://github.com/Miserlou/Zappa/issues/891 - group.add_argument( - "--disable_progress", action="store_true", help="Disable progress bars." - ) + group.add_argument("--disable_progress", action="store_true", help="Disable progress bars.") group.add_argument("--no_venv", action="store_true", help="Skip venv check.") ## # Certify ## subparsers = parser.add_subparsers(title="subcommands", dest="command") - cert_parser = subparsers.add_parser( - "certify", parents=[env_parser], help="Create and install SSL certificate" - ) + cert_parser = subparsers.add_parser("certify", parents=[env_parser], help="Create and install SSL certificate") cert_parser.add_argument( "--manual", action="store_true", - help=( - "Gets new Let's Encrypt certificates, but prints them to console." - "Does not update API Gateway domains." - ), - ) - cert_parser.add_argument( - "-y", "--yes", action="store_true", help="Auto confirm yes." + help=("Gets new Let's Encrypt certificates, but prints them to console." "Does not update API Gateway domains."), ) + cert_parser.add_argument("-y", "--yes", action="store_true", help="Auto confirm yes.") ## # Deploy ## - deploy_parser = subparsers.add_parser( - "deploy", parents=[env_parser], help="Deploy application." - ) + deploy_parser = subparsers.add_parser("deploy", parents=[env_parser], help="Deploy application.") deploy_parser.add_argument( "-z", "--zip", @@ -283,7 +251,7 @@ def handle(self, argv=None): ## # Init ## - init_parser = subparsers.add_parser("init", help="Initialize Zappa app.") + subparsers.add_parser("init", help="Initialize Zappa app.") ## # Package @@ -293,9 +261,7 @@ def handle(self, argv=None): parents=[env_parser], help="Build the application zip package locally.", ) - package_parser.add_argument( - "-o", "--output", help="Name of file to output the package to." - ) + package_parser.add_argument("-o", "--output", help="Name of file to output the package to.") ## # Template @@ -311,52 +277,32 @@ def handle(self, argv=None): required=True, help="ARN of the Lambda function to template to.", ) - template_parser.add_argument( - "-r", "--role-arn", required=True, help="ARN of the Role to template with." - ) - template_parser.add_argument( - "-o", "--output", help="Name of file to output the template to." - ) + template_parser.add_argument("-r", "--role-arn", required=True, help="ARN of the Role to template with.") + template_parser.add_argument("-o", "--output", help="Name of file to output the template to.") ## # Invocation ## - invoke_parser = subparsers.add_parser( - "invoke", parents=[env_parser], help="Invoke remote function." - ) + invoke_parser = subparsers.add_parser("invoke", parents=[env_parser], help="Invoke remote function.") invoke_parser.add_argument( "--raw", action="store_true", - help=( - "When invoking remotely, invoke this python as a string," - " not as a modular path." 
- ), - ) - invoke_parser.add_argument( - "--no-color", action="store_true", help=("Don't color the output") + help=("When invoking remotely, invoke this python as a string," " not as a modular path."), ) + invoke_parser.add_argument("--no-color", action="store_true", help=("Don't color the output")) invoke_parser.add_argument("command_rest") ## # Manage ## - manage_parser = subparsers.add_parser( - "manage", help="Invoke remote Django manage.py commands." - ) - rest_help = ( - "Command in the form of . is not " - "required if --all is specified" - ) + manage_parser = subparsers.add_parser("manage", help="Invoke remote Django manage.py commands.") + rest_help = "Command in the form of . is not " "required if --all is specified" manage_parser.add_argument("--all", action="store_true", help=all_help) manage_parser.add_argument("command_rest", nargs="+", help=rest_help) - manage_parser.add_argument( - "--no-color", action="store_true", help=("Don't color the output") - ) + manage_parser.add_argument("--no-color", action="store_true", help=("Don't color the output")) # This is explicitly added here because this is the only subcommand that doesn't inherit from env_parser # https://github.com/Miserlou/Zappa/issues/1002 - manage_parser.add_argument( - "-s", "--settings_file", help="The path to a Zappa settings file." - ) + manage_parser.add_argument("-s", "--settings_file", help="The path to a Zappa settings file.") ## # Rollback @@ -403,12 +349,8 @@ def positive_int(s): ## # Log Tailing ## - tail_parser = subparsers.add_parser( - "tail", parents=[env_parser], help="Tail deployment logs." - ) - tail_parser.add_argument( - "--no-color", action="store_true", help="Don't color log tail output." - ) + tail_parser = subparsers.add_parser("tail", parents=[env_parser], help="Tail deployment logs.") + tail_parser.add_argument("--no-color", action="store_true", help="Don't color log tail output.") tail_parser.add_argument( "--http", action="store_true", @@ -425,9 +367,7 @@ def positive_int(s): default="100000s", help="Only show lines since a certain timeframe.", ) - tail_parser.add_argument( - "--filter", type=str, default="", help="Apply a filter pattern to the logs." - ) + tail_parser.add_argument("--filter", type=str, default="", help="Apply a filter pattern to the logs.") tail_parser.add_argument( "--force-color", action="store_true", @@ -442,34 +382,23 @@ def positive_int(s): ## # Undeploy ## - undeploy_parser = subparsers.add_parser( - "undeploy", parents=[env_parser], help="Undeploy application." - ) + undeploy_parser = subparsers.add_parser("undeploy", parents=[env_parser], help="Undeploy application.") undeploy_parser.add_argument( "--remove-logs", action="store_true", - help=( - "Removes log groups of api gateway and lambda task" - " during the undeployment." - ), - ) - undeploy_parser.add_argument( - "-y", "--yes", action="store_true", help="Auto confirm yes." + help=("Removes log groups of api gateway and lambda task" " during the undeployment."), ) + undeploy_parser.add_argument("-y", "--yes", action="store_true", help="Auto confirm yes.") ## # Unschedule ## - subparsers.add_parser( - "unschedule", parents=[env_parser], help="Unschedule functions." - ) + subparsers.add_parser("unschedule", parents=[env_parser], help="Unschedule functions.") ## # Updating ## - update_parser = subparsers.add_parser( - "update", parents=[env_parser], help="Update deployed application." 
- ) + update_parser = subparsers.add_parser("update", parents=[env_parser], help="Update deployed application.") update_parser.add_argument( "-z", "--zip", @@ -596,7 +525,7 @@ def dispatch_command(self, command, stage): Given a command to execute and stage, execute that command. """ - + self.check_stage_name(stage) self.api_stage = stage if command not in ["status", "manage"]: @@ -664,16 +593,12 @@ def dispatch_command(self, command, stage): if not self.django_settings: print("This command is for Django projects only!") - print( - "If this is a Django project, please define django_settings in your zappa_settings." - ) + print("If this is a Django project, please define django_settings in your zappa_settings.") return command_tail = self.vargs.get("command_rest") if len(command_tail) > 1: - command = " ".join( - command_tail - ) # ex: zappa manage dev "shell --version" + command = " ".join(command_tail) # ex: zappa manage dev "shell --version" else: command = command_tail[0] # ex: zappa manage dev showmigrations admin @@ -694,9 +619,7 @@ def dispatch_command(self, command, stage): keep_open=not self.vargs["disable_keep_open"], ) elif command == "undeploy": # pragma: no cover - self.undeploy( - no_confirm=self.vargs["yes"], remove_logs=self.vargs["remove_logs"] - ) + self.undeploy(no_confirm=self.vargs["yes"], remove_logs=self.vargs["remove_logs"]) elif command == "schedule": # pragma: no cover self.schedule() elif command == "unschedule": # pragma: no cover @@ -716,11 +639,7 @@ def dispatch_command(self, command, stage): def save_python_settings_file(self, output_path=None): settings_path = output_path or "zappa_settings.py" - print( - "Generating Zappa settings Python file and saving to {}".format( - settings_path - ) - ) + print("Generating Zappa settings Python file and saving to {}".format(settings_path)) if not settings_path.endswith("zappa_settings.py"): raise ValueError("Settings file must be named zappa_settings.py") zappa_settings_s = self.get_zappa_settings_string() @@ -778,22 +697,14 @@ def template(self, lambda_arn, role_arn, output=None, json=False): ) if not output: - template_file = ( - self.lambda_name + "-template-" + str(int(time.time())) + ".json" - ) + template_file = self.lambda_name + "-template-" + str(int(time.time())) + ".json" else: template_file = output with open(template_file, "wb") as out: - out.write( - bytes(template.to_json(indent=None, separators=(",", ":")), "utf-8") - ) + out.write(bytes(template.to_json(indent=None, separators=(",", ":")), "utf-8")) if not json: - click.echo( - click.style("Template created", fg="green", bold=True) - + ": " - + click.style(template_file, bold=True) - ) + click.echo(click.style("Template created", fg="green", bold=True) + ": " + click.style(template_file, bold=True)) else: with open(template_file, "r") as out: print(out.read()) @@ -873,18 +784,14 @@ def deploy(self, source_zip=None, docker_image_uri=None): raise ClickException("Unable to upload handler to S3. Quitting.") # Copy the project zip to the current project zip - current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format( - self.api_stage, self.project_name - ) + current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format(self.api_stage, self.project_name) success = self.zappa.copy_on_s3( src_file_name=self.zip_path, dst_file_name=current_project_name, bucket_name=self.s3_bucket_name, ) if not success: # pragma: no cover - raise ClickException( - "Unable to copy the zip to be the current project. Quitting." 
- ) + raise ClickException("Unable to copy the zip to be the current project. Quitting.") handler_file = self.handler_path else: @@ -892,9 +799,7 @@ def deploy(self, source_zip=None, docker_image_uri=None): # Fixes https://github.com/Miserlou/Zappa/issues/613 try: - self.lambda_arn = self.zappa.get_lambda_function( - function_name=self.lambda_name - ) + self.lambda_arn = self.zappa.get_lambda_function(function_name=self.lambda_name) except botocore.client.ClientError: # Register the Lambda function with that zip as the source # You'll also need to define the path to your lambda_handler code. @@ -933,9 +838,7 @@ def deploy(self, source_zip=None, docker_image_uri=None): self.schedule() endpoint_url = "" - deployment_string = ( - click.style("Deployment complete", fg="green", bold=True) + "!" - ) + deployment_string = click.style("Deployment complete", fg="green", bold=True) + "!" if self.use_alb: kwargs = dict( @@ -949,7 +852,7 @@ def deploy(self, source_zip=None, docker_image_uri=None): if self.use_apigateway: # Create and configure the API Gateway - template = self.zappa.create_stack_template( + self.zappa.create_stack_template( lambda_arn=self.lambda_arn, lambda_name=self.lambda_name, api_key_required=self.api_key_required, @@ -977,9 +880,7 @@ def deploy(self, source_zip=None, docker_image_uri=None): if self.stage_config.get("payload_compression", True): self.zappa.add_api_compression( api_id=api_id, - min_compression_size=self.stage_config.get( - "payload_minimum_compression_size", 0 - ), + min_compression_size=self.stage_config.get("payload_minimum_compression_size", 0), ) # Deploy the API! @@ -991,14 +892,10 @@ def deploy(self, source_zip=None, docker_image_uri=None): if self.api_key is None: self.zappa.create_api_key(api_id=api_id, stage_name=self.api_stage) else: - self.zappa.add_api_stage_to_api_key( - api_key=self.api_key, api_id=api_id, stage_name=self.api_stage - ) + self.zappa.add_api_stage_to_api_key(api_key=self.api_key, api_id=api_id, stage_name=self.api_stage) if self.stage_config.get("touch", True): - self.zappa.wait_until_lambda_function_is_ready( - function_name=self.lambda_name - ) + self.zappa.wait_until_lambda_function_is_updated(function_name=self.lambda_name) self.touch_endpoint(endpoint_url) # Finally, delete the local copy our zip package @@ -1030,16 +927,14 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): # Temporary version check try: updated_time = 1472581018 - function_response = self.zappa.lambda_client.get_function( - FunctionName=self.lambda_name - ) + function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) conf = function_response["Configuration"] last_updated = parser.parse(conf["LastModified"]) last_updated_unix = time.mktime(last_updated.timetuple()) except botocore.exceptions.BotoCoreError as e: click.echo(click.style(type(e).__name__, fg="red") + ": " + e.args[0]) sys.exit(-1) - except Exception as e: + except Exception: click.echo( click.style("Warning!", fg="red") + " Couldn't get function " @@ -1063,12 +958,7 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): try: self.zappa.create_iam_roles() except botocore.client.ClientError: - click.echo( - click.style("Failed", fg="red") - + " to " - + click.style("manage IAM roles", bold=True) - + "!" 
- ) + click.echo(click.style("Failed", fg="red") + " to " + click.style("manage IAM roles", bold=True) + "!") click.echo( "You may " + click.style("lack the necessary AWS permissions", bold=True) @@ -1107,23 +997,17 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): disable_progress=self.disable_progress, ) if not success: # pragma: no cover - raise ClickException( - "Unable to upload handler to S3. Quitting." - ) + raise ClickException("Unable to upload handler to S3. Quitting.") # Copy the project zip to the current project zip - current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format( - self.api_stage, self.project_name - ) + current_project_name = "{0!s}_{1!s}_current_project.tar.gz".format(self.api_stage, self.project_name) success = self.zappa.copy_on_s3( src_file_name=self.zip_path, dst_file_name=current_project_name, bucket_name=self.s3_bucket_name, ) if not success: # pragma: no cover - raise ClickException( - "Unable to copy the zip to be the current project. Quitting." - ) + raise ClickException("Unable to copy the zip to be the current project. Quitting.") handler_file = self.handler_path else: @@ -1140,9 +1024,6 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): if docker_image_uri: kwargs["docker_image_uri"] = docker_image_uri self.lambda_arn = self.zappa.update_lambda_function(**kwargs) - self.zappa.wait_until_lambda_function_is_ready( - function_name=self.lambda_name - ) elif source_zip and source_zip.startswith("s3://"): bucket, key_name = parse_s3_url(source_zip) kwargs.update(dict(bucket=bucket, s3_key=key_name)) @@ -1174,6 +1055,7 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): aws_environment_variables=self.aws_environment_variables, aws_kms_key_arn=self.aws_kms_key_arn, layers=self.layers, + wait=False, ) # Finally, delete the local copy our zip package @@ -1212,9 +1094,7 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): if self.stage_config.get("payload_compression", True): self.zappa.add_api_compression( api_id=api_id, - min_compression_size=self.stage_config.get( - "payload_minimum_compression_size", 0 - ), + min_compression_size=self.stage_config.get("payload_minimum_compression_size", 0), ) else: self.zappa.remove_api_compression(api_id=api_id) @@ -1243,17 +1123,9 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): if self.base_path: endpoint_url += "/" + self.base_path - deployed_string = ( - "Your updated Zappa deployment is " - + click.style("live", fg="green", bold=True) - + "!" - ) + deployed_string = "Your updated Zappa deployment is " + click.style("live", fg="green", bold=True) + "!" 
if self.use_apigateway: - deployed_string = ( - deployed_string - + ": " - + click.style("{}".format(endpoint_url), bold=True) - ) + deployed_string = deployed_string + ": " + click.style("{}".format(endpoint_url), bold=True) api_url = None if endpoint_url and "amazonaws.com" not in endpoint_url: @@ -1263,9 +1135,7 @@ def update(self, source_zip=None, no_upload=False, docker_image_uri=None): deployed_string = deployed_string + " (" + api_url + ")" if self.stage_config.get("touch", True): - self.zappa.wait_until_lambda_function_is_ready( - function_name=self.lambda_name - ) + self.zappa.wait_until_lambda_function_is_updated(function_name=self.lambda_name) if api_url: self.touch_endpoint(api_url) elif endpoint_url: @@ -1280,9 +1150,7 @@ def rollback(self, revision): print("Rolling back..") - self.zappa.rollback_lambda_function_version( - self.lambda_name, versions_back=revision - ) + self.zappa.rollback_lambda_function_version(self.lambda_name, versions_back=revision) print("Done!") def tail( @@ -1353,9 +1221,7 @@ def undeploy(self, no_confirm=False, remove_logs=False): api_id = self.zappa.get_api_id(self.lambda_name) self.zappa.remove_api_key(api_id, self.api_stage) - gateway_id = self.zappa.undeploy_api_gateway( - self.lambda_name, domain_name=domain_name, base_path=base_path - ) + self.zappa.undeploy_api_gateway(self.lambda_name, domain_name=domain_name, base_path=base_path) self.unschedule() # removes event triggers, including warm up event. @@ -1375,9 +1241,7 @@ def update_cognito_triggers(self): lambda_configs = set() for trigger in triggers: lambda_configs.add(trigger["source"].split("_")[0]) - self.zappa.update_cognito( - self.lambda_name, user_pool, lambda_configs, self.lambda_arn - ) + self.zappa.update_cognito(self.lambda_name, user_pool, lambda_configs, self.lambda_arn) def schedule(self): """ @@ -1398,9 +1262,7 @@ def schedule(self): if not events: events = [] - keep_warm_rate = self.stage_config.get( - "keep_warm_expression", "rate(4 minutes)" - ) + keep_warm_rate = self.stage_config.get("keep_warm_expression", "rate(4 minutes)") events.append( { "name": "zappa-keep-warm", @@ -1412,10 +1274,8 @@ def schedule(self): if events: try: - function_response = self.zappa.lambda_client.get_function( - FunctionName=self.lambda_name - ) - except botocore.exceptions.ClientError as e: # pragma: no cover + function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) + except botocore.exceptions.ClientError: # pragma: no cover click.echo( click.style("Function does not exist", fg="yellow") + ", please " @@ -1433,15 +1293,9 @@ def schedule(self): ) # Add async tasks SNS - if self.stage_config.get( - "async_source", None - ) == "sns" and self.stage_config.get("async_resources", True): - self.lambda_arn = self.zappa.get_lambda_function( - function_name=self.lambda_name - ) - topic_arn = self.zappa.create_async_sns_topic( - lambda_name=self.lambda_name, lambda_arn=self.lambda_arn - ) + if self.stage_config.get("async_source", None) == "sns" and self.stage_config.get("async_resources", True): + self.lambda_arn = self.zappa.get_lambda_function(function_name=self.lambda_name) + topic_arn = self.zappa.create_async_sns_topic(lambda_name=self.lambda_name, lambda_arn=self.lambda_arn) click.echo("SNS Topic created: %s" % topic_arn) # Add async tasks DynamoDB @@ -1449,25 +1303,19 @@ def schedule(self): read_capacity = self.stage_config.get("async_response_table_read_capacity", 1) write_capacity = self.stage_config.get("async_response_table_write_capacity", 1) if table_name and 
self.stage_config.get("async_resources", True): - created, response_table = self.zappa.create_async_dynamodb_table( - table_name, read_capacity, write_capacity - ) + created, response_table = self.zappa.create_async_dynamodb_table(table_name, read_capacity, write_capacity) if created: click.echo("DynamoDB table created: %s" % table_name) else: click.echo("DynamoDB table exists: %s" % table_name) - provisioned_throughput = response_table["Table"][ - "ProvisionedThroughput" - ] + provisioned_throughput = response_table["Table"]["ProvisionedThroughput"] if ( provisioned_throughput["ReadCapacityUnits"] != read_capacity or provisioned_throughput["WriteCapacityUnits"] != write_capacity ): click.echo( click.style( - "\nWarning! Existing DynamoDB table ({}) does not match configured capacity.\n".format( - table_name - ), + "\nWarning! Existing DynamoDB table ({}) does not match configured capacity.\n".format(table_name), fg="red", ) ) @@ -1487,16 +1335,12 @@ def unschedule(self): function_arn = None try: - function_response = self.zappa.lambda_client.get_function( - FunctionName=self.lambda_name - ) + function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) function_arn = function_response["Configuration"]["FunctionArn"] - except botocore.exceptions.ClientError as e: # pragma: no cover + except botocore.exceptions.ClientError: # pragma: no cover raise ClickException( "Function does not exist, you should deploy first. Ex: zappa deploy {}. " - "Proceeding to unschedule CloudWatch based events.".format( - self.api_stage - ) + "Proceeding to unschedule CloudWatch based events.".format(self.api_stage) ) print("Unscheduling..") @@ -1507,9 +1351,7 @@ def unschedule(self): ) # Remove async task SNS - if self.stage_config.get( - "async_source", None - ) == "sns" and self.stage_config.get("async_resources", True): + if self.stage_config.get("async_source", None) == "sns" and self.stage_config.get("async_resources", True): removed_arns = self.zappa.remove_async_sns_topic(self.lambda_name) click.echo("SNS Topic removed: %s" % ", ".join(removed_arns)) @@ -1537,25 +1379,28 @@ def invoke(self, function_name, raw_python=False, command=None, no_color=False): invocation_type="RequestResponse", ) - if "LogResult" in response: - if no_color: - print(base64.b64decode(response["LogResult"])) - else: - decoded = base64.b64decode(response["LogResult"]).decode() - formatted = self.format_invoke_command(decoded) - colorized = self.colorize_invoke_command(formatted) - print(colorized) - else: - print(response) + print(self.format_lambda_response(response, not no_color)) # For a successful request FunctionError is not in response. 
# https://github.com/Miserlou/Zappa/pull/1254/ if "FunctionError" in response: - raise ClickException( - "{} error occurred while invoking command.".format( - response["FunctionError"] - ) - ) + raise ClickException("{} error occurred while invoking command.".format(response["FunctionError"])) + + def format_lambda_response(self, response, colorize=True): + if "LogResult" in response: + logresult_bytes = base64.b64decode(response["LogResult"]) + try: + decoded = logresult_bytes.decode() + except UnicodeDecodeError: + return logresult_bytes + else: + if colorize and sys.stdout.isatty(): + formatted = self.format_invoke_command(decoded) + return self.colorize_invoke_command(formatted) + else: + return decoded + else: + return response def format_invoke_command(self, string): """ @@ -1597,9 +1442,7 @@ def colorize_invoke_command(self, string): if token == "[DEBUG]": format_string = "{}" pattern = re.escape(token) - repl = click.style( - format_string.format(token), bold=True, fg="cyan" - ) + repl = click.style(format_string.format(token), bold=True, fg="cyan") final_string = re.sub(pattern.format(token), repl, final_string) except Exception: # pragma: no cover pass @@ -1616,9 +1459,7 @@ def colorize_invoke_command(self, string): "Max Memory Used:", ]: if token in final_string: - final_string = final_string.replace( - token, click.style(token, bold=True, fg="green") - ) + final_string = final_string.replace(token, click.style(token, bold=True, fg="green")) except Exception: # pragma: no cover pass @@ -1626,9 +1467,7 @@ def colorize_invoke_command(self, string): for token in final_string.replace("\t", " ").split(" "): try: if token.count("-") == 4 and token.replace("-", "").isalnum(): - final_string = final_string.replace( - token, click.style(token, fg="magenta") - ) + final_string = final_string.replace(token, click.style(token, fg="magenta")) except Exception: # pragma: no cover pass @@ -1645,9 +1484,7 @@ def tabular_print(title, value): """ Convenience function for priting formatted table items. """ - click.echo( - "%-*s%s" % (32, click.style("\t" + title, fg="green") + ":", str(value)) - ) + click.echo("%-*s%s" % (32, click.style("\t" + title, fg="green") + ":", str(value))) return # Lambda Env Details @@ -1656,17 +1493,14 @@ def tabular_print(title, value): if not lambda_versions: raise ClickException( click.style( - "No Lambda %s detected in %s - have you deployed yet?" - % (self.lambda_name, self.zappa.aws_region), + "No Lambda %s detected in %s - have you deployed yet?" 
% (self.lambda_name, self.zappa.aws_region), fg="red", ) ) status_dict = collections.OrderedDict() status_dict["Lambda Versions"] = len(lambda_versions) - function_response = self.zappa.lambda_client.get_function( - FunctionName=self.lambda_name - ) + function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name) conf = function_response["Configuration"] self.lambda_arn = conf["FunctionArn"] status_dict["Lambda Name"] = self.lambda_name @@ -1682,9 +1516,7 @@ def tabular_print(title, value): status_dict["Lambda Handler"] = conf.get("Handler", "") status_dict["Lambda Runtime"] = conf.get("Runtime", "") if "VpcConfig" in conf.keys(): - status_dict["Lambda VPC ID"] = conf.get("VpcConfig", {}).get( - "VpcId", "Not assigned" - ) + status_dict["Lambda VPC ID"] = conf.get("VpcConfig", {}).get("VpcId", "Not assigned") else: status_dict["Lambda VPC ID"] = None @@ -1697,11 +1529,9 @@ def tabular_print(title, value): EndTime=datetime.utcnow(), Period=1440, Statistics=["Sum"], - Dimensions=[ - {"Name": "FunctionName", "Value": "{}".format(self.lambda_name)} - ], + Dimensions=[{"Name": "FunctionName", "Value": "{}".format(self.lambda_name)}], )["Datapoints"][0]["Sum"] - except Exception as e: + except Exception: function_invocations = 0 try: function_errors = self.zappa.cloudwatch.get_metric_statistics( @@ -1711,16 +1541,14 @@ def tabular_print(title, value): EndTime=datetime.utcnow(), Period=1440, Statistics=["Sum"], - Dimensions=[ - {"Name": "FunctionName", "Value": "{}".format(self.lambda_name)} - ], + Dimensions=[{"Name": "FunctionName", "Value": "{}".format(self.lambda_name)}], )["Datapoints"][0]["Sum"] - except Exception as e: + except Exception: function_errors = 0 try: error_rate = "{0:.2f}%".format(function_errors / function_invocations * 100) - except: + except Exception: error_rate = "Error calculating" status_dict["Invocations (24h)"] = int(function_invocations) status_dict["Errors (24h)"] = int(function_errors) @@ -1789,9 +1617,11 @@ def check_stage_name(self, stage_name): calling the CreateDeployment operation: Stage name only allows a-zA-Z0-9_" if the pattern does not match) """ + if not self.use_apigateway: + return True if self.stage_name_env_pattern.match(stage_name): return True - raise ValueError("AWS requires stage name to match a-zA-Z0-9_") + raise ValueError("API stage names must match a-zA-Z0-9_ ; '{0!s}' does not.".format(stage_name)) def check_environment(self, environment): """ @@ -1801,14 +1631,10 @@ def check_environment(self, environment): non_strings = [] for (k, v) in environment.items(): - if not isinstance(v, basestring): + if not isinstance(v, str): non_strings.append(k) if non_strings: - raise ValueError( - "The following environment variables are not strings: {}".format( - ", ".join(non_strings) - ) - ) + raise ValueError("The following environment variables are not strings: {}".format(", ".join(non_strings))) else: return True @@ -1825,9 +1651,7 @@ def init(self, settings_file="zappa_settings.json"): # Ensure that we don't already have a zappa_settings file. if os.path.isfile(settings_file): raise ClickException( - "This project already has a " - + click.style("{0!s} file".format(settings_file), fg="red", bold=True) - + "!" + "This project already has a " + click.style("{0!s} file".format(settings_file), fg="red", bold=True) + "!" ) # Explain system. 
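Stepping back to the `status` changes a little earlier in this hunk: the CLI sums a day of Lambda invocation and error metrics with one CloudWatch call per metric, and treats any lookup failure as zero. The sketch below reproduces that query shape outside of Zappa; the function name is a hypothetical placeholder and normal AWS credentials are assumed.

```python
from datetime import datetime, timedelta

import boto3

# Illustrative only: the same query shape `zappa status` uses for "Invocations (24h)".
cloudwatch = boto3.client("cloudwatch")
datapoints = cloudwatch.get_metric_statistics(
    Namespace="AWS/Lambda",
    MetricName="Invocations",  # the status code repeats this with MetricName="Errors"
    StartTime=datetime.utcnow() - timedelta(days=1),
    EndTime=datetime.utcnow(),
    Period=1440,               # seconds, mirroring the value passed in the hunk above
    Statistics=["Sum"],
    Dimensions=[{"Name": "FunctionName", "Value": "my-function"}],  # hypothetical name
)["Datapoints"]
invocations = sum(point["Sum"] for point in datapoints)
```

A function that has never run returns no datapoints at all, which is why the CLI wraps each lookup in a try/except and falls back to 0 before computing the error rate.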
@@ -1845,18 +1669,13 @@ def init(self, settings_file="zappa_settings.json"): ) click.echo( - click.style("Welcome to ", bold=True) - + click.style("Zappa", fg="green", bold=True) - + click.style("!\n", bold=True) + click.style("Welcome to ", bold=True) + click.style("Zappa", fg="green", bold=True) + click.style("!\n", bold=True) ) click.echo( - click.style("Zappa", bold=True) - + " is a system for running server-less Python web applications" + click.style("Zappa", bold=True) + " is a system for running server-less Python web applications" " on AWS Lambda and AWS API Gateway." ) - click.echo( - "This `init` command will help you create and configure your new Zappa deployment." - ) + click.echo("This `init` command will help you create and configure your new Zappa deployment.") click.echo("Let's get started!\n") # Create Env @@ -1870,10 +1689,7 @@ def init(self, settings_file="zappa_settings.json"): + click.style("production", bold=True) + "'." ) - env = ( - input("What do you want to call this environment (default 'dev'): ") - or "dev" - ) + env = input("What do you want to call this environment (default 'dev'): ") or "dev" try: self.check_stage_name(env) break @@ -1881,7 +1697,8 @@ def init(self, settings_file="zappa_settings.json"): click.echo(click.style("Stage names must match a-zA-Z0-9_", fg="red")) # Detect AWS profiles and regions - # If anyone knows a more straightforward way to easily detect and parse AWS profiles I'm happy to change this, feels like a hack + # If anyone knows a more straightforward way to easily detect and + # parse AWS profiles I'm happy to change this, feels like a hack session = botocore.session.Session() config = session.full_config profiles = config.get("profiles", {}) @@ -1895,16 +1712,15 @@ def init(self, settings_file="zappa_settings.json"): if not profile_names: profile_name, profile = None, None click.echo( - "We couldn't find an AWS profile to use. Before using Zappa, you'll need to set one up. See here for more info: {}".format( + "We couldn't find an AWS profile to use. " + "Before using Zappa, you'll need to set one up. See here for more info: {}".format( click.style(BOTO3_CONFIG_DOCS_URL, fg="blue", underline=True) ) ) elif len(profile_names) == 1: profile_name = profile_names[0] profile = profiles[profile_name] - click.echo( - "Okay, using profile {}!".format(click.style(profile_name, bold=True)) - ) + click.echo("Okay, using profile {}!".format(click.style(profile_name, bold=True))) else: if "default" in profile_names: default_profile = [p for p in profile_names if p == "default"][0] @@ -1933,22 +1749,12 @@ def init(self, settings_file="zappa_settings.json"): # Create Bucket click.echo( - "\nYour Zappa deployments will need to be uploaded to a " - + click.style("private S3 bucket", bold=True) - + "." + "\nYour Zappa deployments will need to be uploaded to a " + click.style("private S3 bucket", bold=True) + "." ) click.echo("If you don't have a bucket yet, we'll create one for you too.") - default_bucket = "zappa-" + "".join( - random.choice(string.ascii_lowercase + string.digits) for _ in range(9) - ) + default_bucket = "zappa-" + "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9)) while True: - bucket = ( - input( - "What do you want to call your bucket? (default '%s'): " - % default_bucket - ) - or default_bucket - ) + bucket = input("What do you want to call your bucket? 
(default '%s'): " % default_bucket) or default_bucket if is_valid_bucket_name(bucket): break @@ -1974,32 +1780,24 @@ def init(self, settings_file="zappa_settings.json"): # Detect Django/Flask try: # pragma: no cover - import django + import django # noqa: F401 has_django = True - except ImportError as e: + except ImportError: has_django = False try: # pragma: no cover - import flask + import flask # noqa: F401 has_flask = True - except ImportError as e: + except ImportError: has_flask = False print("") # App-specific if has_django: # pragma: no cover - click.echo( - "It looks like this is a " - + click.style("Django", bold=True) - + " application!" - ) - click.echo( - "What is the " - + click.style("module path", bold=True) - + " to your projects's Django settings?" - ) + click.echo("It looks like this is a " + click.style("Django", bold=True) + " application!") + click.echo("What is the " + click.style("module path", bold=True) + " to your projects's Django settings?") django_settings = None matches = detect_django_settings() @@ -2012,34 +1810,18 @@ def init(self, settings_file="zappa_settings.json"): bold=True, ) ) - django_settings = ( - input( - "Where are your project's settings? (default '%s'): " - % matches[0] - ) - or matches[0] - ) + django_settings = input("Where are your project's settings? (default '%s'): " % matches[0]) or matches[0] else: - click.echo( - "(This will likely be something like 'your_project.settings')" - ) + click.echo("(This will likely be something like 'your_project.settings')") django_settings = input("Where are your project's settings?: ") django_settings = django_settings.replace("'", "") django_settings = django_settings.replace('"', "") else: matches = None if has_flask: - click.echo( - "It looks like this is a " - + click.style("Flask", bold=True) - + " application." - ) + click.echo("It looks like this is a " + click.style("Flask", bold=True) + " application.") matches = detect_flask_apps() - click.echo( - "What's the " - + click.style("modular path", bold=True) - + " to your app's function?" - ) + click.echo("What's the " + click.style("modular path", bold=True) + " to your app's function?") click.echo("This will likely be something like 'your_module.app'.") app_function = None while app_function in [None, ""]: @@ -2051,13 +1833,7 @@ def init(self, settings_file="zappa_settings.json"): bold=True, ) ) - app_function = ( - input( - "Where is your app's function? (default '%s'): " - % matches[0] - ) - or matches[0] - ) + app_function = input("Where is your app's function? (default '%s'): " % matches[0]) or matches[0] else: app_function = input("Where is your app's function?: ") app_function = app_function.replace("'", "") @@ -2074,9 +1850,7 @@ def init(self, settings_file="zappa_settings.json"): + click.style("all available regions", bold=True) + " in order to provide fast global service." ) - click.echo( - "If you are using Zappa for the first time, you probably don't want to do this!" 
- ) + click.echo("If you are using Zappa for the first time, you probably don't want to do this!") global_deployment = False while True: global_type = input( @@ -2127,27 +1901,12 @@ def init(self, settings_file="zappa_settings.json"): zappa_settings_json = json.dumps(zappa_settings, sort_keys=True, indent=4) - click.echo( - "\nOkay, here's your " - + click.style("zappa_settings.json", bold=True) - + ":\n" - ) + click.echo("\nOkay, here's your " + click.style("zappa_settings.json", bold=True) + ":\n") click.echo(click.style(zappa_settings_json, fg="yellow", bold=False)) - confirm = ( - input( - "\nDoes this look " - + click.style("okay", bold=True, fg="green") - + "? (default 'y') [y/n]: " - ) - or "yes" - ) + confirm = input("\nDoes this look " + click.style("okay", bold=True, fg="green") + "? (default 'y') [y/n]: ") or "yes" if confirm[0] not in ["y", "Y", "yes", "YES"]: - click.echo( - "" - + click.style("Sorry", bold=True, fg="red") - + " to hear that! Please init again." - ) + click.echo("" + click.style("Sorry", bold=True, fg="red") + " to hear that! Please init again.") return # Write @@ -2164,11 +1923,7 @@ def init(self, settings_file="zappa_settings.json"): ) click.echo(click.style("\t$ zappa deploy --all", bold=True)) - click.echo( - "\nAfter that, you can " - + click.style("update", bold=True) - + " your application code with:\n" - ) + click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n") click.echo(click.style("\t$ zappa update --all", bold=True)) else: click.echo( @@ -2180,11 +1935,7 @@ def init(self, settings_file="zappa_settings.json"): ) click.echo(click.style("\t$ zappa deploy %s" % env, bold=True)) - click.echo( - "\nAfter that, you can " - + click.style("update", bold=True) - + " your application code with:\n" - ) + click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n") click.echo(click.style("\t$ zappa update %s" % env, bold=True)) click.echo( @@ -2211,9 +1962,7 @@ def certify(self, no_confirm=True, manual=False): if not self.domain: raise ClickException( - "Can't certify a domain without " - + click.style("domain", fg="red", bold=True) - + " configured!" + "Can't certify a domain without " + click.style("domain", fg="red", bold=True) + " configured!" ) if not no_confirm: # pragma: no cover @@ -2262,15 +2011,11 @@ def certify(self, no_confirm=True, manual=False): if account_key_location.startswith("s3://"): bucket, key_name = parse_s3_url(account_key_location) - self.zappa.s3_client.download_file( - bucket, key_name, os.path.join(gettempdir(), "account.key") - ) + self.zappa.s3_client.download_file(bucket, key_name, os.path.join(gettempdir(), "account.key")) else: from shutil import copyfile - copyfile( - account_key_location, os.path.join(gettempdir(), "account.key") - ) + copyfile(account_key_location, os.path.join(gettempdir(), "account.key")) # Prepare for Custom SSL elif not account_key_location and not cert_arn: @@ -2295,11 +2040,7 @@ def certify(self, no_confirm=True, manual=False): with open(cert_chain_location) as f: certificate_chain = f.read() - click.echo( - "Certifying domain " - + click.style(self.domain, fg="green", bold=True) - + ".." - ) + click.echo("Certifying domain " + click.style(self.domain, fg="green", bold=True) + "..") # Get cert and update domain. 
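One detail of the certify flow above worth calling out: when the configured account key location is an S3 URL, the key is downloaded into the system temp directory before the Let's Encrypt logic runs; local paths are simply copied there with `shutil.copyfile`. A minimal standalone sketch of the S3 branch, assuming a hypothetical `s3://my-zappa-bucket/account.key` object and default credentials (`parse_s3_url` is Zappa's own splitter; the naive split below stands in for it):

```python
import os
from tempfile import gettempdir

import boto3

account_key_location = "s3://my-zappa-bucket/account.key"  # hypothetical location
bucket, key = account_key_location[len("s3://"):].split("/", 1)

# Drop the Let's Encrypt account key where the certify code expects to find it.
boto3.client("s3").download_file(bucket, key, os.path.join(gettempdir(), "account.key"))
```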
@@ -2307,9 +2048,7 @@ def certify(self, no_confirm=True, manual=False): if not cert_location and not cert_arn: from .letsencrypt import get_cert_and_update_domain - cert_success = get_cert_and_update_domain( - self.zappa, self.lambda_name, self.api_stage, self.domain, manual - ) + cert_success = get_cert_and_update_domain(self.zappa, self.lambda_name, self.api_stage, self.domain, manual) # Custom SSL / ACM else: @@ -2329,7 +2068,8 @@ def certify(self, no_confirm=True, manual=False): if route53: self.zappa.update_route53_records(self.domain, dns_name) print( - "Created a new domain name with supplied certificate. Please note that it can take up to 40 minutes for this domain to be " + "Created a new domain name with supplied certificate. " + "Please note that it can take up to 40 minutes for this domain to be " "created and propagated through AWS, but it requires no further work on your part." ) else: @@ -2349,14 +2089,9 @@ def certify(self, no_confirm=True, manual=False): cert_success = True if cert_success: - click.echo( - "Certificate " + click.style("updated", fg="green", bold=True) + "!" - ) + click.echo("Certificate " + click.style("updated", fg="green", bold=True) + "!") else: - click.echo( - click.style("Failed", fg="red", bold=True) - + " to generate or install certificate! :(" - ) + click.echo(click.style("Failed", fg="red", bold=True) + " to generate or install certificate! :(") click.echo("\n==============\n") shamelessly_promote() @@ -2404,9 +2139,7 @@ def callback(self, position): working_dir = os.getcwd() working_dir_importer = pkgutil.get_importer(working_dir) - module_ = working_dir_importer.find_module(mod_name).load_module( - mod_name - ) + module_ = working_dir_importer.find_module(mod_name).load_module(mod_name) except (ImportError, AttributeError): @@ -2420,21 +2153,15 @@ def callback(self, position): "import {position} callback ".format(position=position), bold=True, ) - + 'module: "{mod_path}"'.format( - mod_path=click.style(mod_path, bold=True) - ) + + 'module: "{mod_path}"'.format(mod_path=click.style(mod_path, bold=True)) ) if not hasattr(module_, cb_func_name): # pragma: no cover raise ClickException( click.style("Failed ", fg="red") + "to " - + click.style( - "find {position} callback ".format(position=position), bold=True - ) - + 'function: "{cb_func_name}" '.format( - cb_func_name=click.style(cb_func_name, bold=True) - ) + + click.style("find {position} callback ".format(position=position), bold=True) + + 'function: "{cb_func_name}" '.format(cb_func_name=click.style(cb_func_name, bold=True)) + 'in module "{mod_path}"'.format(mod_path=mod_path) ) @@ -2455,10 +2182,7 @@ def check_for_update(self): + click.style("Zappa", bold=True) + " is available!" 
) - click.echo( - "Upgrade with: " - + click.style("pip install zappa --upgrade", bold=True) - ) + click.echo("Upgrade with: " + click.style("pip install zappa --upgrade", bold=True)) click.echo( "Visit the project page on GitHub to see the latest changes: " + click.style("https://github.com/Zappa/Zappa", bold=True) @@ -2483,24 +2207,9 @@ def load_settings(self, settings_file=None, session=None): # Load up file self.load_settings_file(settings_file) - # Make sure that the stages are valid names: - for stage_name in self.zappa_settings.keys(): - try: - self.check_stage_name(stage_name) - except ValueError: - raise ValueError( - "API stage names must match a-zA-Z0-9_ ; '{0!s}' does not.".format( - stage_name - ) - ) - # Make sure that this stage is our settings if self.api_stage not in self.zappa_settings.keys(): - raise ClickException( - "Please define stage '{0!s}' in your Zappa settings.".format( - self.api_stage - ) - ) + raise ClickException("Please define stage '{0!s}' in your Zappa settings.".format(self.api_stage)) # We need a working title for this project. Use one if supplied, else cwd dirname. if "project_name" in self.stage_config: # pragma: no cover @@ -2519,10 +2228,7 @@ def load_settings(self, settings_file=None, session=None): # Load stage-specific settings self.s3_bucket_name = self.stage_config.get( "s3_bucket", - "zappa-" - + "".join( - random.choice(string.ascii_lowercase + string.digits) for _ in range(9) - ), + "zappa-" + "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9)), ) self.vpc_config = self.stage_config.get("vpc_config", {}) self.memory_size = self.stage_config.get("memory_size", 512) @@ -2537,27 +2243,18 @@ def load_settings(self, settings_file=None, session=None): self.base_path = self.stage_config.get("base_path", None) self.timeout_seconds = self.stage_config.get("timeout_seconds", 30) dead_letter_arn = self.stage_config.get("dead_letter_arn", "") - self.dead_letter_config = ( - {"TargetArn": dead_letter_arn} if dead_letter_arn else {} - ) + self.dead_letter_config = {"TargetArn": dead_letter_arn} if dead_letter_arn else {} self.cognito = self.stage_config.get("cognito", None) - self.num_retained_versions = self.stage_config.get( - "num_retained_versions", None - ) + self.num_retained_versions = self.stage_config.get("num_retained_versions", None) # Check for valid values of num_retained_versions - if ( - self.num_retained_versions is not None - and type(self.num_retained_versions) is not int - ): + if self.num_retained_versions is not None and type(self.num_retained_versions) is not int: raise ClickException( "Please supply either an integer or null for num_retained_versions in the zappa_settings.json. Found %s" % type(self.num_retained_versions) ) elif type(self.num_retained_versions) is int and self.num_retained_versions < 1: - raise ClickException( - "The value for num_retained_versions in the zappa_settings.json should be greater than 0." - ) + raise ClickException("The value for num_retained_versions in the zappa_settings.json should be greater than 0.") # Provide legacy support for `use_apigateway`, now `apigateway_enabled`. 
# https://github.com/Miserlou/Zappa/issues/490 @@ -2565,13 +2262,9 @@ def load_settings(self, settings_file=None, session=None): self.use_apigateway = self.stage_config.get("use_apigateway", True) if self.use_apigateway: self.use_apigateway = self.stage_config.get("apigateway_enabled", True) - self.apigateway_description = self.stage_config.get( - "apigateway_description", None - ) + self.apigateway_description = self.stage_config.get("apigateway_description", None) - self.lambda_handler = self.stage_config.get( - "lambda_handler", "handler.lambda_handler" - ) + self.lambda_handler = self.stage_config.get("lambda_handler", "handler.lambda_handler") # DEPRECATED. https://github.com/Miserlou/Zappa/issues/456 self.remote_env_bucket = self.stage_config.get("remote_env_bucket", None) self.remote_env_file = self.stage_config.get("remote_env_file", None) @@ -2582,28 +2275,18 @@ def load_settings(self, settings_file=None, session=None): self.binary_support = self.stage_config.get("binary_support", True) self.api_key_required = self.stage_config.get("api_key_required", False) self.api_key = self.stage_config.get("api_key") - self.endpoint_configuration = self.stage_config.get( - "endpoint_configuration", None - ) + self.endpoint_configuration = self.stage_config.get("endpoint_configuration", None) self.iam_authorization = self.stage_config.get("iam_authorization", False) self.cors = self.stage_config.get("cors", False) - self.lambda_description = self.stage_config.get( - "lambda_description", "Zappa Deployment" - ) + self.lambda_description = self.stage_config.get("lambda_description", "Zappa Deployment") self.lambda_concurrency = self.stage_config.get("lambda_concurrency", None) self.environment_variables = self.stage_config.get("environment_variables", {}) - self.aws_environment_variables = self.stage_config.get( - "aws_environment_variables", {} - ) + self.aws_environment_variables = self.stage_config.get("aws_environment_variables", {}) self.check_environment(self.environment_variables) self.authorizer = self.stage_config.get("authorizer", {}) - self.runtime = self.stage_config.get( - "runtime", get_runtime_from_python_version() - ) + self.runtime = self.stage_config.get("runtime", get_runtime_from_python_version()) self.aws_kms_key_arn = self.stage_config.get("aws_kms_key_arn", "") - self.context_header_mappings = self.stage_config.get( - "context_header_mappings", {} - ) + self.context_header_mappings = self.stage_config.get("context_header_mappings", {}) self.xray_tracing = self.stage_config.get("xray_tracing", False) self.desired_role_arn = self.stage_config.get("role_arn") self.layers = self.stage_config.get("layers", None) @@ -2666,9 +2349,7 @@ def get_json_or_yaml_settings(self, settings_name="zappa_settings"): and not os.path.isfile(zs_yaml) and not os.path.isfile(zs_toml) ): - raise ClickException( - "Please configure a zappa_settings file or call `zappa init`." - ) + raise ClickException("Please configure a zappa_settings file or call `zappa init`.") # Prefer JSON if os.path.isfile(zs_json): @@ -2690,9 +2371,7 @@ def load_settings_file(self, settings_file=None): if not settings_file: settings_file = self.get_json_or_yaml_settings() if not os.path.isfile(settings_file): - raise ClickException( - "Please configure your zappa_settings file or call `zappa init`." 
- ) + raise ClickException("Please configure your zappa_settings file or call `zappa init`.") path, ext = os.path.splitext(settings_file) if ext == ".yml" or ext == ".yaml": @@ -2700,27 +2379,21 @@ def load_settings_file(self, settings_file=None): try: self.zappa_settings = yaml.safe_load(yaml_file) except ValueError: # pragma: no cover - raise ValueError( - "Unable to load the Zappa settings YAML. It may be malformed." - ) + raise ValueError("Unable to load the Zappa settings YAML. It may be malformed.") elif ext == ".toml": with open(settings_file) as toml_file: try: self.zappa_settings = toml.load(toml_file) except ValueError: # pragma: no cover - raise ValueError( - "Unable to load the Zappa settings TOML. It may be malformed." - ) + raise ValueError("Unable to load the Zappa settings TOML. It may be malformed.") else: with open(settings_file) as json_file: try: self.zappa_settings = json.load(json_file) except ValueError: # pragma: no cover - raise ValueError( - "Unable to load the Zappa settings JSON. It may be malformed." - ) + raise ValueError("Unable to load the Zappa settings JSON. It may be malformed.") - def create_package(self, output=None): + def create_package(self, output=None, use_zappa_release: Optional[str] = None): """ Ensure that the package can be properly configured, and then create it. @@ -2729,12 +2402,8 @@ def create_package(self, output=None): # Create the Lambda zip package (includes project and virtualenvironment) # Also define the path the handler file so it can be copied to the zip # root for Lambda. - current_file = os.path.dirname( - os.path.abspath(inspect.getfile(inspect.currentframe())) - ) - handler_file = ( - os.sep.join(current_file.split(os.sep)[0:]) + os.sep + "handler.py" - ) + current_file = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + handler_file = os.sep.join(current_file.split(os.sep)[0:]) + os.sep + "handler.py" # Create the zip file(s) if self.stage_config.get("slim_handler", False): @@ -2742,9 +2411,7 @@ def create_package(self, output=None): # https://github.com/Miserlou/Zappa/issues/510 self.zip_path = self.zappa.create_lambda_zip( prefix=self.lambda_name, - use_precompiled_packages=self.stage_config.get( - "use_precompiled_packages", True - ), + use_precompiled_packages=self.stage_config.get("use_precompiled_packages", True), exclude=self.stage_config.get("exclude", []), exclude_glob=self.stage_config.get("exclude_glob", []), disable_progress=self.disable_progress, @@ -2757,7 +2424,7 @@ def create_package(self, output=None): exclude.append(cur_venv.split("/")[-1]) self.handler_path = self.zappa.create_lambda_zip( prefix="handler_{0!s}".format(self.lambda_name), - venv=self.zappa.create_handler_venv(), + venv=self.zappa.create_handler_venv(use_zappa_release=use_zappa_release), handler_file=handler_file, slim_handler=True, exclude=exclude, @@ -2766,18 +2433,13 @@ def create_package(self, output=None): disable_progress=self.disable_progress, ) else: - # This could be python3.6 optimized. 
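The `load_settings_file` hunk just above keeps the same extension-based dispatch while only reflowing the error messages. Pulled out of the class, the loading logic amounts to roughly the following sketch (assuming the `yaml` and `toml` parsers Zappa already imports are available):

```python
import json
import os

import toml
import yaml


def load_settings_dict(settings_file):
    """Parse a zappa_settings .json/.yml/.yaml/.toml file into a plain dict."""
    _, ext = os.path.splitext(settings_file)
    with open(settings_file) as fh:
        if ext in (".yml", ".yaml"):
            return yaml.safe_load(fh)
        if ext == ".toml":
            return toml.load(fh)
        return json.load(fh)  # JSON remains the default and preferred format
```

As in the diff, a malformed file is reported by catching `ValueError` around each parse and re-raising with a friendlier "may be malformed" message.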
- exclude = self.stage_config.get( - "exclude", ["boto3", "dateutil", "botocore", "s3transfer", "concurrent"] - ) + exclude = self.stage_config.get("exclude", ["boto3", "dateutil", "botocore", "s3transfer", "concurrent"]) # Create a single zip that has the handler and application self.zip_path = self.zappa.create_lambda_zip( prefix=self.lambda_name, handler_file=handler_file, - use_precompiled_packages=self.stage_config.get( - "use_precompiled_packages", True - ), + use_precompiled_packages=self.stage_config.get("use_precompiled_packages", True), exclude=exclude, exclude_glob=self.stage_config.get("exclude_glob", []), output=output, @@ -2831,12 +2493,7 @@ def get_zappa_settings_string(self): + "`." ) app_module, app_function = self.app_function.rsplit(".", 1) - settings_s = ( - settings_s - + "APP_MODULE='{0!s}'\nAPP_FUNCTION='{1!s}'\n".format( - app_module, app_function - ) - ) + settings_s = settings_s + "APP_MODULE='{0!s}'\nAPP_FUNCTION='{1!s}'\n".format(app_module, app_function) if self.exception_handler: settings_s += "EXCEPTION_HANDLER='{0!s}'\n".format(self.exception_handler) @@ -2876,9 +2533,7 @@ def get_zappa_settings_string(self): settings_s = settings_s + "REMOTE_ENV='{0!s}'\n".format(self.remote_env) # DEPRECATED. use remove_env instead elif self.remote_env_bucket and self.remote_env_file: - settings_s = settings_s + "REMOTE_ENV='s3://{0!s}/{1!s}'\n".format( - self.remote_env_bucket, self.remote_env_file - ) + settings_s = settings_s + "REMOTE_ENV='s3://{0!s}/{1!s}'\n".format(self.remote_env_bucket, self.remote_env_file) # Local envs env_dict = {} @@ -2890,9 +2545,7 @@ def get_zappa_settings_string(self): # https://github.com/Miserlou/Zappa/issues/604 # https://github.com/Miserlou/Zappa/issues/998 try: - env_dict = dict( - (k.encode("ascii").decode("ascii"), v) for (k, v) in env_dict.items() - ) + env_dict = dict((k.encode("ascii").decode("ascii"), v) for (k, v) in env_dict.items()) except Exception: raise ValueError("Environment variable keys must be ascii.") @@ -2903,25 +2556,19 @@ def get_zappa_settings_string(self): settings_s = settings_s + "PROJECT_NAME='{0!s}'\n".format((self.project_name)) if self.settings_file: - settings_s = settings_s + "SETTINGS_FILE='{0!s}'\n".format( - (self.settings_file) - ) + settings_s = settings_s + "SETTINGS_FILE='{0!s}'\n".format((self.settings_file)) else: settings_s = settings_s + "SETTINGS_FILE=None\n" if self.django_settings: - settings_s = settings_s + "DJANGO_SETTINGS='{0!s}'\n".format( - (self.django_settings) - ) + settings_s = settings_s + "DJANGO_SETTINGS='{0!s}'\n".format((self.django_settings)) else: settings_s = settings_s + "DJANGO_SETTINGS=None\n" # If slim handler, path to project zip if self.stage_config.get("slim_handler", False): - settings_s += ( - "ARCHIVE_PATH='s3://{0!s}/{1!s}_{2!s}_current_project.tar.gz'\n".format( - self.s3_bucket_name, self.api_stage, self.project_name - ) + settings_s += "ARCHIVE_PATH='s3://{0!s}/{1!s}_{2!s}_current_project.tar.gz'\n".format( + self.s3_bucket_name, self.api_stage, self.project_name ) # since includes are for slim handler add the setting here by joining arbitrary list from zappa_settings file @@ -2952,13 +2599,9 @@ def get_zappa_settings_string(self): invocation_source = event_source.get("invocation_source") function = bot_event.get("function") if intent and invocation_source and function: - bot_events_mapping[ - str(intent) + ":" + str(invocation_source) - ] = function + bot_events_mapping[str(intent) + ":" + str(invocation_source)] = function - settings_s = settings_s + 
"AWS_BOT_EVENT_MAPPING={0!s}\n".format( - bot_events_mapping - ) + settings_s = settings_s + "AWS_BOT_EVENT_MAPPING={0!s}\n".format(bot_events_mapping) # Map cognito triggers cognito_trigger_mapping = {} @@ -2969,9 +2612,7 @@ def get_zappa_settings_string(self): function = trigger.get("function") if source and function: cognito_trigger_mapping[source] = function - settings_s = settings_s + "COGNITO_TRIGGER_MAPPING={0!s}\n".format( - cognito_trigger_mapping - ) + settings_s = settings_s + "COGNITO_TRIGGER_MAPPING={0!s}\n".format(cognito_trigger_mapping) # Authorizer config authorizer_function = self.authorizer.get("function", None) @@ -2994,7 +2635,7 @@ def remove_local_zip(self): os.remove(self.zip_path) if self.handler_path and os.path.isfile(self.handler_path): os.remove(self.handler_path) - except Exception as e: # pragma: no cover + except Exception: # pragma: no cover sys.exit(-1) def remove_uploaded_zip(self): @@ -3021,9 +2662,7 @@ def on_exit(self): self.remove_local_zip() - def print_logs( - self, logs, colorize=True, http=False, non_http=False, force_colorize=None - ): + def print_logs(self, logs, colorize=True, http=False, non_http=False, force_colorize=None): """ Parse, filter and print logs to the console. """ @@ -3111,54 +2750,40 @@ def colorize_log_entry(self, string): if token in ["CRITICAL", "ERROR", "WARNING", "DEBUG", "INFO", "NOTSET"]: final_string = final_string.replace( "[" + token + "]", - click.style("[", fg="cyan") - + click.style(token, fg="cyan", bold=True) - + click.style("]", fg="cyan"), + click.style("[", fg="cyan") + click.style(token, fg="cyan", bold=True) + click.style("]", fg="cyan"), ) else: final_string = final_string.replace( "[" + token + "]", - click.style("[", fg="cyan") - + click.style(token, bold=True) - + click.style("]", fg="cyan"), + click.style("[", fg="cyan") + click.style(token, bold=True) + click.style("]", fg="cyan"), ) # Then do quoted strings quotes = re.findall(r'"[^"]*"', string) for token in quotes: - final_string = final_string.replace( - token, click.style(token, fg="yellow") - ) + final_string = final_string.replace(token, click.style(token, fg="yellow")) # And UUIDs for token in final_string.replace("\t", " ").split(" "): try: if token.count("-") == 4 and token.replace("-", "").isalnum(): - final_string = final_string.replace( - token, click.style(token, fg="magenta") - ) + final_string = final_string.replace(token, click.style(token, fg="magenta")) except Exception: # pragma: no cover pass # And IP addresses try: if token.count(".") == 3 and token.replace(".", "").isnumeric(): - final_string = final_string.replace( - token, click.style(token, fg="red") - ) + final_string = final_string.replace(token, click.style(token, fg="red")) except Exception: # pragma: no cover pass # And status codes try: if token in ["200"]: - final_string = final_string.replace( - token, click.style(token, fg="green") - ) + final_string = final_string.replace(token, click.style(token, fg="green")) if token in ["400", "401", "403", "404", "405", "500"]: - final_string = final_string.replace( - token, click.style(token, fg="red") - ) + final_string = final_string.replace(token, click.style(token, fg="red")) except Exception: # pragma: no cover pass @@ -3175,10 +2800,7 @@ def colorize_log_entry(self, string): # And dates for token in final_string.split("\t"): try: - is_date = parser.parse(token) - final_string = final_string.replace( - token, click.style(token, fg="green") - ) + final_string = final_string.replace(token, click.style(token, fg="green")) except 
Exception: # pragma: no cover pass @@ -3186,7 +2808,7 @@ def colorize_log_entry(self, string): if final_string[0] != " ": final_string = " " + final_string return final_string - except Exception as e: # pragma: no cover + except Exception: # pragma: no cover return string def execute_prebuild_script(self): @@ -3197,9 +2819,7 @@ def execute_prebuild_script(self): (pb_mod_path, pb_func) = self.prebuild_script.rsplit(".", 1) try: # Prefer prebuild script in working directory - if ( - pb_mod_path.count(".") >= 1 - ): # Prebuild script func is nested in a folder + if pb_mod_path.count(".") >= 1: # Prebuild script func is nested in a folder (mod_folder_path, mod_name) = pb_mod_path.rsplit(".", 1) mod_folder_path_fragments = mod_folder_path.split(".") working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments) @@ -3219,9 +2839,7 @@ def execute_prebuild_script(self): click.style("Failed ", fg="red") + "to " + click.style("import prebuild script ", bold=True) - + 'module: "{pb_mod_path}"'.format( - pb_mod_path=click.style(pb_mod_path, bold=True) - ) + + 'module: "{pb_mod_path}"'.format(pb_mod_path=click.style(pb_mod_path, bold=True)) ) if not hasattr(module_, pb_func): # pragma: no cover @@ -3229,9 +2847,7 @@ def execute_prebuild_script(self): click.style("Failed ", fg="red") + "to " + click.style("find prebuild script ", bold=True) - + 'function: "{pb_func}" '.format( - pb_func=click.style(pb_func, bold=True) - ) + + 'function: "{pb_func}" '.format(pb_func=click.style(pb_func, bold=True)) + 'in module "{pb_mod_path}"'.format(pb_mod_path=pb_mod_path) ) @@ -3275,13 +2891,9 @@ def deploy_api_gateway(self, api_id): cache_cluster_size=cache_cluster_size, cloudwatch_log_level=self.stage_config.get("cloudwatch_log_level", "OFF"), cloudwatch_data_trace=self.stage_config.get("cloudwatch_data_trace", False), - cloudwatch_metrics_enabled=self.stage_config.get( - "cloudwatch_metrics_enabled", False - ), + cloudwatch_metrics_enabled=self.stage_config.get("cloudwatch_metrics_enabled", False), cache_cluster_ttl=self.stage_config.get("cache_cluster_ttl", 300), - cache_cluster_encrypted=self.stage_config.get( - "cache_cluster_encrypted", False - ), + cache_cluster_encrypted=self.stage_config.get("cache_cluster_encrypted", False), ) return endpoint_url @@ -3328,8 +2940,7 @@ def touch_endpoint(self, endpoint_url): # See: https://github.com/Miserlou/Zappa/pull/1719#issuecomment-471341565 if "PRIVATE" in self.stage_config.get("endpoint_configuration", []): print( - click.style("Warning!", fg="yellow", bold=True) - + " Since you're deploying a private API Gateway endpoint," + click.style("Warning!", fg="yellow", bold=True) + " Since you're deploying a private API Gateway endpoint," " Zappa cannot determine if your function is returning " " a correct status code. You should check your API's response" " manually before considering this deployment complete." @@ -3427,12 +3038,10 @@ def handle(): # pragma: no cover except KeyboardInterrupt: # pragma: no cover cli.on_exit() sys.exit(130) - except Exception as e: + except Exception: cli.on_exit() - click.echo( - "Oh no! An " + click.style("error occurred", fg="red", bold=True) + "! :(" - ) + click.echo("Oh no! An " + click.style("error occurred", fg="red", bold=True) + "! 
:(") click.echo("\n==============\n") import traceback diff --git a/zappa/core.py b/zappa/core.py index 78fc5ff5b..d19b6dfe1 100644 --- a/zappa/core.py +++ b/zappa/core.py @@ -24,6 +24,7 @@ from builtins import bytes, int from distutils.dir_util import copy_tree from io import open +from typing import Optional import boto3 import botocore @@ -280,7 +281,7 @@ def __init__( load_credentials=True, desired_role_name=None, desired_role_arn=None, - runtime="python3.6", # Detected at runtime in CLI + runtime="python3.7", # Detected at runtime in CLI tags=(), endpoint_urls={}, xray_tracing=False, @@ -305,22 +306,22 @@ def __init__( self.runtime = runtime - if self.runtime == "python3.6": - self.manylinux_suffix_start = "cp36m" - elif self.runtime == "python3.7": + if self.runtime == "python3.7": self.manylinux_suffix_start = "cp37m" - else: + elif self.runtime == "python3.8": # The 'm' has been dropped in python 3.8+ since builds with and without pymalloc are ABI compatible # See https://github.com/pypa/manylinux for a more detailed explanation self.manylinux_suffix_start = "cp38" + else: + self.manylinux_suffix_start = "cp39" - # AWS Lambda supports manylinux1/2010 and manylinux2014 - manylinux_suffixes = ("2014", "2010", "1") + # AWS Lambda supports manylinux1/2010, manylinux2014, and manylinux_2_24 + manylinux_suffixes = ("_2_24", "2014", "2010", "1") self.manylinux_wheel_file_match = re.compile( - f'^.*{self.manylinux_suffix_start}-manylinux({"|".join(manylinux_suffixes)})_x86_64.whl$' + rf'^.*{self.manylinux_suffix_start}-(manylinux_\d+_\d+_x86_64[.])?manylinux({"|".join(manylinux_suffixes)})_x86_64[.]whl$' # noqa: E501 ) self.manylinux_wheel_abi3_file_match = re.compile( - f'^.*cp3.-abi3-manylinux({"|".join(manylinux_suffixes)})_x86_64.whl$' + rf'^.*cp3.-abi3-manylinux({"|".join(manylinux_suffixes)})_x86_64.whl$' ) self.endpoint_urls = endpoint_urls @@ -329,13 +330,14 @@ def __init__( # Some common invocations, such as DB migrations, # can take longer than the default. - # Note that this is set to 300s, but if connected to - # APIGW, Lambda will max out at 30s. + # Config used for direct invocations of Lambda functions from the Zappa CLI. + # Note that the maximum configurable Lambda function execution time (15 minutes) + # is longer than the maximum timeout configurable in API Gateway (30 seconds). 
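To make the timeout comment above concrete (the `read_timeout: 900` value it refers to appears just below): a boto client built with this config can wait out the longest possible Lambda execution when the CLI invokes a function directly, instead of being capped at API-Gateway-scale timeouts. A minimal sketch with a hypothetical region:

```python
import boto3
import botocore.client

# 900s read timeout = the 15-minute Lambda maximum; the connect timeout stays short.
long_config = botocore.client.Config(region_name="us-east-1", connect_timeout=5, read_timeout=900)
lambda_client = boto3.client("lambda", config=long_config)
```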
# Related: https://github.com/Miserlou/Zappa/issues/205 long_config_dict = { "region_name": aws_region, "connect_timeout": 5, - "read_timeout": 300, + "read_timeout": 900, } long_config = botocore.client.Config(**long_config_dict) @@ -369,21 +371,17 @@ def __init__( def configure_boto_session_method_kwargs(self, service, kw): """Allow for custom endpoint urls for non-AWS (testing and bootleg cloud) deployments""" - if service in self.endpoint_urls and not "endpoint_url" in kw: + if service in self.endpoint_urls and "endpoint_url" not in kw: kw["endpoint_url"] = self.endpoint_urls[service] return kw def boto_client(self, service, *args, **kwargs): """A wrapper to apply configuration options to boto clients""" - return self.boto_session.client( - service, *args, **self.configure_boto_session_method_kwargs(service, kwargs) - ) + return self.boto_session.client(service, *args, **self.configure_boto_session_method_kwargs(service, kwargs)) def boto_resource(self, service, *args, **kwargs): """A wrapper to apply configuration options to boto resources""" - return self.boto_session.resource( - service, *args, **self.configure_boto_session_method_kwargs(service, kwargs) - ) + return self.boto_session.resource(service, *args, **self.configure_boto_session_method_kwargs(service, kwargs)) def cache_param(self, value): """Returns a troposphere Ref to a value cached as a parameter.""" @@ -391,9 +389,7 @@ def cache_param(self, value): if value not in self.cf_parameters: keyname = chr(ord("A") + len(self.cf_parameters)) param = self.cf_template.add_parameter( - troposphere.Parameter( - keyname, Type="String", Default=value, tags=self.tags - ) + troposphere.Parameter(keyname, Type="String", Default=value, tags=self.tags) ) self.cf_parameters[value] = param @@ -409,12 +405,7 @@ def copy_editable_packages(self, egg_links, temp_package_path): for egg_link in egg_links: with open(egg_link, "rb") as df: egg_path = df.read().decode("utf-8").splitlines()[0].strip() - pkgs = set( - [ - x.split(".")[0] - for x in find_packages(egg_path, exclude=["test", "tests"]) - ] - ) + pkgs = set([x.split(".")[0] for x in find_packages(egg_path, exclude=["test", "tests"])]) for pkg in pkgs: copytree( os.path.join(egg_path, pkg), @@ -444,12 +435,10 @@ def get_deps_list(self, pkg_name, installed_distros=None): if package.project_name.lower() == pkg_name.lower(): deps = [(package.project_name, package.version)] for req in package.requires(): - deps += self.get_deps_list( - pkg_name=req.project_name, installed_distros=installed_distros - ) + deps += self.get_deps_list(pkg_name=req.project_name, installed_distros=installed_distros) return list(set(deps)) # de-dupe before returning - def create_handler_venv(self): + def create_handler_venv(self, use_zappa_release: Optional[str] = None): """ Takes the installed zappa and brings it into a fresh virtualenv-like folder. All dependencies are then downloaded. 
""" @@ -462,34 +451,34 @@ def create_handler_venv(self): ve_path = os.path.join(os.getcwd(), "handler_venv") if os.sys.platform == "win32": - current_site_packages_dir = os.path.join( - current_venv, "Lib", "site-packages" - ) + current_site_packages_dir = os.path.join(current_venv, "Lib", "site-packages") venv_site_packages_dir = os.path.join(ve_path, "Lib", "site-packages") else: - current_site_packages_dir = os.path.join( - current_venv, "lib", get_venv_from_python_version(), "site-packages" - ) - venv_site_packages_dir = os.path.join( - ve_path, "lib", get_venv_from_python_version(), "site-packages" - ) + current_site_packages_dir = os.path.join(current_venv, "lib", get_venv_from_python_version(), "site-packages") + venv_site_packages_dir = os.path.join(ve_path, "lib", get_venv_from_python_version(), "site-packages") if not os.path.isdir(venv_site_packages_dir): os.makedirs(venv_site_packages_dir) # Copy zappa* to the new virtualenv - zappa_things = [ - z for z in os.listdir(current_site_packages_dir) if z.lower()[:5] == "zappa" - ] + zappa_things = [z for z in os.listdir(current_site_packages_dir) if z.lower()[:5] == "zappa"] for z in zappa_things: copytree( os.path.join(current_site_packages_dir, z), os.path.join(venv_site_packages_dir, z), ) - # Use pip to download zappa's dependencies. Copying from current venv causes issues with things like PyYAML that installs as yaml + # Use pip to download zappa's dependencies. + # Copying from current venv causes issues with things like PyYAML that installs as yaml zappa_deps = self.get_deps_list("zappa") - pkg_list = ["{0!s}=={1!s}".format(dep, version) for dep, version in zappa_deps] + pkg_list = [] + for dep, version in zappa_deps: + # allow specified zappa version for slim_handler_test + if dep == "zappa" and use_zappa_release: + pkg_version_str = f"{dep}=={use_zappa_release}" + else: + pkg_version_str = f"{dep}=={version}" + pkg_list.append(pkg_version_str) # Need to manually add setuptools pkg_list.append("setuptools") @@ -526,17 +515,12 @@ def get_current_venv(): try: subprocess.check_output(["pyenv", "help"], stderr=subprocess.STDOUT) except OSError: - print( - "This directory seems to have pyenv's local venv, " - "but pyenv executable was not found." - ) + print("This directory seems to have pyenv's local venv, " "but pyenv executable was not found.") with open(".python-version", "r") as f: # minor fix in how .python-version is read # Related: https://github.com/Miserlou/Zappa/issues/921 env_name = f.readline().strip() - bin_path = subprocess.check_output(["pyenv", "which", "python"]).decode( - "utf-8" - ) + bin_path = subprocess.check_output(["pyenv", "which", "python"]).decode("utf-8") venv = bin_path[: bin_path.rfind(env_name)] + env_name else: # pragma: no cover return None @@ -563,13 +547,11 @@ def create_lambda_zip( """ # Validate archive_format if archive_format not in ["zip", "tarball"]: - raise KeyError( - "The archive format to create a lambda package must be zip or tarball" - ) + raise KeyError("The archive format to create a lambda package must be zip or tarball") # Pip is a weird package. # Calling this function in some environments without this can cause.. funkiness. - import pip + import pip # noqa: 547 if not venv: venv = self.get_current_venv() @@ -597,7 +579,7 @@ def create_lambda_zip( # Make sure that 'concurrent' is always forbidden. 
# https://github.com/Miserlou/Zappa/issues/827 - if not "concurrent" in exclude: + if "concurrent" not in exclude: exclude.append("concurrent") def splitpath(path): @@ -690,9 +672,7 @@ def splitpath(path): # json.dump(build_info, f) # return True - package_id_file = open( - os.path.join(temp_project_path, "package_info.json"), "w" - ) + package_id_file = open(os.path.join(temp_project_path, "package_info.json"), "w") dumped = json.dumps(package_info, indent=4) try: package_id_file.write(dumped) @@ -706,9 +686,7 @@ def splitpath(path): if os.sys.platform == "win32": site_packages = os.path.join(venv, "Lib", "site-packages") else: - site_packages = os.path.join( - venv, "lib", get_venv_from_python_version(), "site-packages" - ) + site_packages = os.path.join(venv, "lib", get_venv_from_python_version(), "site-packages") egg_links.extend(glob.glob(os.path.join(site_packages, "*.egg-link"))) if minify: @@ -724,9 +702,7 @@ def splitpath(path): copytree(site_packages, temp_package_path, metadata=False, symlinks=False) # We may have 64-bin specific packages too. - site_packages_64 = os.path.join( - venv, "lib64", get_venv_from_python_version(), "site-packages" - ) + site_packages_64 = os.path.join(venv, "lib64", get_venv_from_python_version(), "site-packages") if os.path.exists(site_packages_64): egg_links.extend(glob.glob(os.path.join(site_packages_64, "*.egg-link"))) if minify: @@ -739,9 +715,7 @@ def splitpath(path): ignore=shutil.ignore_patterns(*excludes), ) else: - copytree( - site_packages_64, temp_package_path, metadata=False, symlinks=False - ) + copytree(site_packages_64, temp_package_path, metadata=False, symlinks=False) if egg_links: self.copy_editable_packages(egg_links, temp_package_path) @@ -751,9 +725,7 @@ def splitpath(path): # Then the pre-compiled packages.. if use_precompiled_packages: print("Downloading and installing dependencies..") - installed_packages = self.get_installed_packages( - site_packages, site_packages_64 - ) + installed_packages = self.get_installed_packages(site_packages, site_packages_64) try: for ( @@ -835,21 +807,13 @@ def splitpath(path): if archive_format == "zip": # Actually put the file into the proper place in the zip # Related: https://github.com/Miserlou/Zappa/pull/716 - zipi = zipfile.ZipInfo( - os.path.join( - root.replace(temp_project_path, "").lstrip(os.sep), filename - ) - ) + zipi = zipfile.ZipInfo(os.path.join(root.replace(temp_project_path, "").lstrip(os.sep), filename)) zipi.create_system = 3 zipi.external_attr = 0o755 << int(16) # Is this P2/P3 functional? 
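The zip-writing lines around this point rely on a detail that is easy to miss when skimming the reflow: each entry is added through an explicit `ZipInfo` so that Unix permission bits survive into the Lambda package. A self-contained sketch of that pattern, with hypothetical file names:

```python
import zipfile

# Write one file into an archive while forcing rwxr-xr-x permissions on extraction.
with zipfile.ZipFile("package.zip", "w", zipfile.ZIP_DEFLATED) as archive:
    info = zipfile.ZipInfo("handler.py")  # archive-relative path
    info.create_system = 3                # 3 = Unix, so consumers honour the mode bits
    info.external_attr = 0o755 << 16      # permission bits live in the high 16 bits
    with open("handler.py", "rb") as source:
        archive.writestr(info, source.read(), zipfile.ZIP_DEFLATED)
```

See the Zappa pull request referenced in the comment near the `ZipInfo` call for the original motivation.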
with open(os.path.join(root, filename), "rb") as f: archivef.writestr(zipi, f.read(), compression_method) elif archive_format == "tarball": - tarinfo = tarfile.TarInfo( - os.path.join( - root.replace(temp_project_path, "").lstrip(os.sep), filename - ) - ) + tarinfo = tarfile.TarInfo(os.path.join(root.replace(temp_project_path, "").lstrip(os.sep), filename)) tarinfo.mode = 0o755 stat = os.stat(os.path.join(root, filename)) @@ -865,19 +829,14 @@ def splitpath(path): # if the directory does not contain any .py file at any level, we can skip the rest dirs[:] = [d for d in dirs if d != root] else: - if ( - "__init__.py" not in files - and not conflicts_with_a_neighbouring_module(root) - ): + if "__init__.py" not in files and not conflicts_with_a_neighbouring_module(root): tmp_init = os.path.join(temp_project_path, "__init__.py") open(tmp_init, "a").close() os.chmod(tmp_init, 0o755) arcname = os.path.join( root.replace(temp_project_path, ""), - os.path.join( - root.replace(temp_project_path, ""), "__init__.py" - ), + os.path.join(root.replace(temp_project_path, ""), "__init__.py"), ) if archive_format == "zip": archivef.write(tmp_init, arcname) @@ -915,8 +874,7 @@ def get_installed_packages(site_packages, site_packages_64): package.project_name.lower(): package.version for package in pkg_resources.WorkingSet() if package.project_name.lower() in package_to_keep - or package.location.lower() - in [site_packages.lower(), site_packages_64.lower()] + or package.location.lower() in [site_packages.lower(), site_packages_64.lower()] } return installed_packages @@ -927,9 +885,7 @@ def download_url_with_progress(url, stream, disable_progress): Downloads a given url in chunks and writes to the provided stream (can be any io stream). Displays the progress bar for the download. """ - resp = requests.get( - url, timeout=float(os.environ.get("PIP_TIMEOUT", 2)), stream=True - ) + resp = requests.get(url, timeout=float(os.environ.get("PIP_TIMEOUT", 2)), stream=True) resp.raw.decode_content = True progress = tqdm( @@ -945,9 +901,7 @@ def download_url_with_progress(url, stream, disable_progress): progress.close() - def get_cached_manylinux_wheel( - self, package_name, package_version, disable_progress=False - ): + def get_cached_manylinux_wheel(self, package_name, package_version, disable_progress=False): """ Gets the locally stored version of a manylinux wheel. If one does not exist, the function downloads it. """ @@ -957,7 +911,7 @@ def get_cached_manylinux_wheel( os.makedirs(cached_wheels_dir) else: # Check if we already have a cached copy - wheel_name = re.sub("[^\w\d.]+", "_", package_name, re.UNICODE) + wheel_name = re.sub(r"[^\w\d.]+", "_", package_name, re.UNICODE) wheel_file = f"{wheel_name}-{package_version}-*_x86_64.whl" wheel_path = os.path.join(cached_wheels_dir, wheel_file) @@ -965,15 +919,11 @@ def get_cached_manylinux_wheel( if re.match(self.manylinux_wheel_file_match, pathname) or re.match( self.manylinux_wheel_abi3_file_match, pathname ): - print( - f" - {package_name}=={package_version}: Using locally cached manylinux wheel" - ) + print(f" - {package_name}=={package_version}: Using locally cached manylinux wheel") return pathname # The file is not cached, download it. 
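The cached-wheel check above filters on `manylinux_wheel_file_match`, whose definition was broadened earlier in this diff. To make that change concrete: with a Python 3.8 runtime (`cp38` prefix) the rewritten pattern accepts the old `manylinux2010`/`manylinux2014` tags, the PEP 600 `manylinux_2_24` tag, and dual-tagged filenames, where the previous pattern only matched the first. The file names below are illustrative, not necessarily published releases:

```python
import re

suffix_start = "cp38"                      # what Zappa sets for a python3.8 runtime
suffixes = ("_2_24", "2014", "2010", "1")  # as in the updated __init__
pattern = re.compile(
    rf'^.*{suffix_start}-(manylinux_\d+_\d+_x86_64[.])?manylinux({"|".join(suffixes)})_x86_64[.]whl$'
)

candidates = [
    "numpy-1.19.5-cp38-cp38-manylinux2010_x86_64.whl",                       # legacy tag
    "numpy-1.21.4-cp38-cp38-manylinux_2_24_x86_64.whl",                      # PEP 600 tag
    "numpy-1.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", # dual tag
    "numpy-1.22.0-cp38-cp38-macosx_10_9_x86_64.whl",                         # rejected
]
for name in candidates:
    print(name, bool(pattern.match(name)))
```

abi3 wheels go through the separate `manylinux_wheel_abi3_file_match` pattern defined in the same hunk.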
- wheel_url, filename = self.get_manylinux_wheel_url( - package_name, package_version - ) + wheel_url, filename = self.get_manylinux_wheel_url(package_name, package_version) if not wheel_url: return None @@ -1014,17 +964,16 @@ def get_manylinux_wheel_url(self, package_name, package_version): else: url = "https://pypi.python.org/pypi/{}/json".format(package_name) try: - res = requests.get( - url, timeout=float(os.environ.get("PIP_TIMEOUT", 1.5)) - ) + res = requests.get(url, timeout=float(os.environ.get("PIP_TIMEOUT", 1.5))) data = res.json() - except Exception as e: # pragma: no cover + except Exception: # pragma: no cover return None, None with open(json_file_path, "wb") as metafile: jsondata = json.dumps(data) metafile.write(bytes(jsondata, "utf-8")) - if package_version not in data["releases"]: + if package_version not in data.get("releases", []): + logger.warning(f"package_version({package_version}) not found in {package_name} metafile={json_file_path}") return None, None for f in data["releases"][package_version]: @@ -1041,7 +990,8 @@ def get_manylinux_wheel_url(self, package_name, package_version): def upload_to_s3(self, source_path, bucket_name, disable_progress=False): r""" Given a file, upload it to S3. - Credentials should be stored in environment variables or ~/.aws/credentials (%USERPROFILE%\.aws\credentials on Windows). + Credentials should be stored in environment variables or + ~/.aws/credentials (%USERPROFILE%\.aws\credentials on Windows). Returns True on success, false on failure. """ try: @@ -1061,12 +1011,7 @@ def upload_to_s3(self, source_path, bucket_name, disable_progress=False): ) if self.tags: - tags = { - "TagSet": [ - {"Key": key, "Value": self.tags[key]} - for key in self.tags.keys() - ] - } + tags = {"TagSet": [{"Key": key, "Value": self.tags[key]} for key in self.tags.keys()]} self.s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging=tags) if not os.path.isfile(source_path) or os.stat(source_path).st_size == 0: @@ -1089,10 +1034,8 @@ def upload_to_s3(self, source_path, bucket_name, disable_progress=False): # which cannot use the progress bar. # Related: https://github.com/boto/boto3/issues/611 try: - self.s3_client.upload_file( - source_path, bucket_name, dest_path, Callback=progress.update - ) - except Exception as e: # pragma: no cover + self.s3_client.upload_file(source_path, bucket_name, dest_path, Callback=progress.update) + except Exception: # pragma: no cover self.s3_client.upload_file(source_path, bucket_name, dest_path) progress.close() @@ -1118,9 +1061,7 @@ def copy_on_s3(self, src_file_name, dst_file_name, bucket_name): copy_src = {"Bucket": bucket_name, "Key": src_file_name} try: - self.s3_client.copy( - CopySource=copy_src, Bucket=bucket_name, Key=dst_file_name - ) + self.s3_client.copy(CopySource=copy_src, Bucket=bucket_name, Key=dst_file_name) return True except botocore.exceptions.ClientError: # pragma: no cover return False @@ -1165,7 +1106,7 @@ def create_lambda_function( publish=True, vpc_config=None, dead_letter_config=None, - runtime="python3.6", + runtime="python3.7", aws_environment_variables=None, aws_kms_key_arn=None, xray_tracing=False, @@ -1176,7 +1117,8 @@ def create_lambda_function( docker_image_uri=None, ): """ - Given a bucket and key (or a local path) of a valid Lambda-zip, a function name and a handler, register that Lambda function. + Given a bucket and key (or a local path) of a valid Lambda-zip, + a function name and a handler, register that Lambda function. 
""" if not vpc_config: vpc_config = {} @@ -1247,6 +1189,9 @@ def create_lambda_function( ReservedConcurrentExecutions=concurrency, ) + # Wait for lambda to become active, otherwise many operations will fail + self.wait_until_lambda_function_is_active(function_name) + return resource_arn def update_lambda_function( @@ -1261,7 +1206,8 @@ def update_lambda_function( docker_image_uri=None, ): """ - Given a bucket and key (or a local path) of a valid Lambda-zip, a function name and a handler, update that Lambda function's code. + Given a bucket and key (or a local path) of a valid Lambda-zip, + a function name and a handler, update that Lambda function's code. Optionally, delete previous versions if they exceed the optional limit. """ print("Updating Lambda function code..") @@ -1315,9 +1261,7 @@ def update_lambda_function( # Find the existing revision IDs for the given function # Related: https://github.com/Miserlou/Zappa/issues/1402 versions_in_lambda = [] - versions = self.lambda_client.list_versions_by_function( - FunctionName=function_name - ) + versions = self.lambda_client.list_versions_by_function(FunctionName=function_name) for version in versions["Versions"]: versions_in_lambda.append(version["Version"]) while "NextMarker" in versions: @@ -1329,9 +1273,9 @@ def update_lambda_function( versions_in_lambda.remove("$LATEST") # Delete older revisions if their number exceeds the specified limit for version in versions_in_lambda[::-1][num_revisions:]: - self.lambda_client.delete_function( - FunctionName=function_name, Qualifier=version - ) + self.lambda_client.delete_function(FunctionName=function_name, Qualifier=version) + + self.wait_until_lambda_function_is_updated(function_name) return resource_arn @@ -1345,10 +1289,11 @@ def update_lambda_configuration( memory_size=512, publish=True, vpc_config=None, - runtime="python3.6", + runtime="python3.7", aws_environment_variables=None, aws_kms_key_arn=None, layers=None, + wait=True, ): """ Given an existing function ARN, update the configuration variables. @@ -1366,15 +1311,15 @@ def update_lambda_configuration( if not layers: layers = [] + if wait: + # Wait until function is ready, otherwise expected keys will be missing from 'lambda_aws_config'. + self.wait_until_lambda_function_is_updated(function_name) + # Check if there are any remote aws lambda env vars so they don't get trashed. # https://github.com/Miserlou/Zappa/issues/987, Related: https://github.com/Miserlou/Zappa/issues/765 - lambda_aws_config = self.lambda_client.get_function_configuration( - FunctionName=function_name - ) + lambda_aws_config = self.lambda_client.get_function_configuration(FunctionName=function_name) if "Environment" in lambda_aws_config: - lambda_aws_environment_variables = lambda_aws_config["Environment"].get( - "Variables", {} - ) + lambda_aws_environment_variables = lambda_aws_config["Environment"].get("Variables", {}) # Append keys that are remote but not in settings file for key, value in lambda_aws_environment_variables.items(): if key not in aws_environment_variables: @@ -1430,51 +1375,32 @@ def invoke_lambda_function( Payload=payload, ) - def rollback_lambda_function_version( - self, function_name, versions_back=1, publish=True - ): + def rollback_lambda_function_version(self, function_name, versions_back=1, publish=True): """ Rollback the lambda function code 'versions_back' number of revisions. Returns the Function ARN. 
""" - response = self.lambda_client.list_versions_by_function( - FunctionName=function_name - ) + response = self.lambda_client.list_versions_by_function(FunctionName=function_name) # https://github.com/Miserlou/Zappa/pull/2192 - if ( - len(response.get("Versions", [])) > 1 - and response["Versions"][-1]["PackageType"] == "Image" - ): - raise NotImplementedError( - "Zappa's rollback functionality is not available for Docker based deployments" - ) + if len(response.get("Versions", [])) > 1 and response["Versions"][-1]["PackageType"] == "Image": + raise NotImplementedError("Zappa's rollback functionality is not available for Docker based deployments") # Take into account $LATEST if len(response["Versions"]) < versions_back + 1: print("We do not have {} revisions. Aborting".format(str(versions_back))) return False - revisions = [ - int(revision["Version"]) - for revision in response["Versions"] - if revision["Version"] != "$LATEST" - ] + revisions = [int(revision["Version"]) for revision in response["Versions"] if revision["Version"] != "$LATEST"] revisions.sort(reverse=True) response = self.lambda_client.get_function( - FunctionName="function:{}:{}".format( - function_name, revisions[versions_back] - ) + FunctionName="function:{}:{}".format(function_name, revisions[versions_back]) ) response = requests.get(response["Code"]["Location"]) if response.status_code != 200: - print( - "Failed to get version {} of {} code".format( - versions_back, function_name - ) - ) + print("Failed to get version {} of {} code".format(versions_back, function_name)) return False response = self.lambda_client.update_function_code( @@ -1483,34 +1409,23 @@ def rollback_lambda_function_version( return response["FunctionArn"] - def is_lambda_function_ready(self, function_name): + def wait_until_lambda_function_is_active(self, function_name): """ - Checks if a lambda function is active and no updates are in progress. + Wait until lambda State=Active """ - response = self.lambda_client.get_function(FunctionName=function_name) - return ( - response["Configuration"]["State"] == "Active" - and response["Configuration"]["LastUpdateStatus"] != "InProgress" - ) + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/lambda.html#waiters + waiter = self.lambda_client.get_waiter("function_active") + print(f"Waiting for lambda function [{function_name}] to become active...") + waiter.wait(FunctionName=function_name) - def wait_until_lambda_function_is_ready(self, function_name): + def wait_until_lambda_function_is_updated(self, function_name): """ - Continuously check if a lambda function is active. - For functions deployed with a docker image instead of a - ZIP package, the function can take a few seconds longer - to be created or update, so we must wait before running any status - checks against the function. 
+ Wait until lambda LastUpdateStatus=Successful """ - show_waiting_message = True - while True: - if self.is_lambda_function_ready(function_name): - break - - if show_waiting_message: - print("Waiting until lambda function is ready.") - show_waiting_message = False - - time.sleep(1) + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/lambda.html#waiters + waiter = self.lambda_client.get_waiter("function_updated") + print(f"Waiting for lambda function [{function_name}] to be updated...") + waiter.wait(FunctionName=function_name) def get_lambda_function(self, function_name): """ @@ -1525,9 +1440,7 @@ def get_lambda_function_versions(self, function_name): Simply returns the versions available for a Lambda function, given a function name. """ try: - response = self.lambda_client.list_versions_by_function( - FunctionName=function_name - ) + response = self.lambda_client.list_versions_by_function(FunctionName=function_name) return response.get("Versions", []) except Exception: return [] @@ -1552,19 +1465,13 @@ def deploy_lambda_alb(self, lambda_arn, lambda_name, alb_vpc_config, timeout): The `zappa deploy` functionality for ALB infrastructure. """ if not alb_vpc_config: - raise EnvironmentError( - "When creating an ALB, alb_vpc_config must be filled out in zappa_settings." - ) + raise EnvironmentError("When creating an ALB, alb_vpc_config must be filled out in zappa_settings.") if "SubnetIds" not in alb_vpc_config: - raise EnvironmentError( - "When creating an ALB, you must supply two subnets in different availability zones." - ) + raise EnvironmentError("When creating an ALB, you must supply two subnets in different availability zones.") if "SecurityGroupIds" not in alb_vpc_config: alb_vpc_config["SecurityGroupIds"] = [] if not alb_vpc_config.get("CertificateArn"): - raise EnvironmentError( - "When creating an ALB, you must supply a CertificateArn for the HTTPS listener." - ) + raise EnvironmentError("When creating an ALB, you must supply a CertificateArn for the HTTPS listener.") # Related: https://github.com/Miserlou/Zappa/issues/1856 if "Scheme" not in alb_vpc_config: @@ -1599,13 +1506,9 @@ def deploy_lambda_alb(self, lambda_arn, lambda_name, alb_vpc_config, timeout): ) load_balancer_arn = response["LoadBalancers"][0]["LoadBalancerArn"] load_balancer_dns = response["LoadBalancers"][0]["DNSName"] - load_balancer_vpc = response["LoadBalancers"][0]["VpcId"] + # load_balancer_vpc = response["LoadBalancers"][0]["VpcId"] waiter = self.elbv2_client.get_waiter("load_balancer_available") - print( - "Waiting for load balancer [{}] to become active..".format( - load_balancer_arn - ) - ) + print("Waiting for load balancer [{}] to become active..".format(load_balancer_arn)) waiter.wait(LoadBalancerArns=[load_balancer_arn], WaiterConfig={"Delay": 3}) # Match the lambda timeout on the load balancer. @@ -1625,9 +1528,8 @@ def deploy_lambda_alb(self, lambda_arn, lambda_name, alb_vpc_config, timeout): response = self.elbv2_client.create_target_group(**kwargs) if not (response["TargetGroups"]) or len(response["TargetGroups"]) != 1: raise EnvironmentError( - "Failure to create application load balancer target group. Response was in unexpected format. Response was: {}".format( - repr(response) - ) + "Failure to create application load balancer target group. " + "Response was in unexpected format. 
Response was: {}".format(repr(response)) ) target_group_arn = response["TargetGroups"][0]["TargetGroupArn"] @@ -1688,9 +1590,7 @@ def undeploy_lambda_alb(self, lambda_name): # Locate and delete alb/lambda permissions try: # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/lambda.html#Lambda.Client.remove_permission - self.lambda_client.remove_permission( - FunctionName=lambda_name, StatementId=lambda_name - ) + self.lambda_client.remove_permission(FunctionName=lambda_name, StatementId=lambda_name) except botocore.exceptions.ClientError as e: # pragma: no cover if "ResourceNotFoundException" in e.response["Error"]["Code"]: pass @@ -1699,19 +1599,15 @@ def undeploy_lambda_alb(self, lambda_name): # Locate and delete load balancer try: - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_load_balancers + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_load_balancers # noqa: E501 response = self.elbv2_client.describe_load_balancers(Names=[lambda_name]) if not (response["LoadBalancers"]) or len(response["LoadBalancers"]) > 1: raise EnvironmentError( - "Failure to locate/delete ALB named [{}]. Response was: {}".format( - lambda_name, repr(response) - ) + "Failure to locate/delete ALB named [{}]. Response was: {}".format(lambda_name, repr(response)) ) load_balancer_arn = response["LoadBalancers"][0]["LoadBalancerArn"] - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_listeners - response = self.elbv2_client.describe_listeners( - LoadBalancerArn=load_balancer_arn - ) + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_listeners # noqa: E501 + response = self.elbv2_client.describe_listeners(LoadBalancerArn=load_balancer_arn) if not (response["Listeners"]): print("No listeners found.") elif len(response["Listeners"]) > 1: @@ -1722,14 +1618,13 @@ def undeploy_lambda_alb(self, lambda_name): ) else: listener_arn = response["Listeners"][0]["ListenerArn"] - # Remove the listener. This explicit deletion of the listener seems necessary to avoid ResourceInUseExceptions when deleting target groups. - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.delete_listener + # Remove the listener. + # This explicit deletion of the listener seems necessary to avoid ResourceInUseExceptions when deleting target groups. 
# noqa: E501# noqa: E501 + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.delete_listener # noqa: E501 response = self.elbv2_client.delete_listener(ListenerArn=listener_arn) # Remove the load balancer and wait for completion - # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.delete_load_balancer - response = self.elbv2_client.delete_load_balancer( - LoadBalancerArn=load_balancer_arn - ) + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.delete_load_balancer # noqa: E501 + response = self.elbv2_client.delete_load_balancer(LoadBalancerArn=load_balancer_arn) waiter = self.elbv2_client.get_waiter("load_balancers_deleted") print("Waiting for load balancer [{}] to be deleted..".format(lambda_name)) waiter.wait(LoadBalancerArns=[load_balancer_arn], WaiterConfig={"Delay": 3}) @@ -1757,9 +1652,7 @@ def undeploy_lambda_alb(self, lambda_name): ) target_group_arn = response["TargetGroups"][0]["TargetGroupArn"] # Deregister targets and wait for completion - self.elbv2_client.deregister_targets( - TargetGroupArn=target_group_arn, Targets=[{"Id": lambda_arn}] - ) + self.elbv2_client.deregister_targets(TargetGroupArn=target_group_arn, Targets=[{"Id": lambda_arn}]) waiter = self.elbv2_client.get_waiter("target_deregistered") print("Waiting for target [{}] to be deregistered...".format(lambda_name)) waiter.wait( @@ -1802,9 +1695,7 @@ def create_api_gateway_routes( if not description: description = "Created automatically by Zappa." restapi.Description = description - endpoint_configuration = ( - [] if endpoint_configuration is None else endpoint_configuration - ) + endpoint_configuration = [] if endpoint_configuration is None else endpoint_configuration if self.boto_session.region_name == "us-gov-west-1": endpoint_configuration.append("REGIONAL") if endpoint_configuration: @@ -1816,9 +1707,7 @@ def create_api_gateway_routes( self.cf_template.add_resource(restapi) root_id = troposphere.GetAtt(restapi, "RootResourceId") - invocation_prefix = ( - "aws" if self.boto_session.region_name != "us-gov-west-1" else "aws-us-gov" - ) + invocation_prefix = "aws" if self.boto_session.region_name != "us-gov-west-1" else "aws-us-gov" invocations_uri = ( "arn:" + invocation_prefix @@ -1835,15 +1724,13 @@ def create_api_gateway_routes( authorizer_resource = None if authorizer: authorizer_lambda_arn = authorizer.get("arn", lambda_arn) - lambda_uri = "arn:{invocation_prefix}:apigateway:{region_name}:lambda:path/2015-03-31/functions/{lambda_arn}/invocations".format( - invocation_prefix=invocation_prefix, - region_name=self.boto_session.region_name, - lambda_arn=authorizer_lambda_arn, - ) - authorizer_resource = self.create_authorizer( - restapi, lambda_uri, authorizer + lambda_uri = ( + f"arn:{invocation_prefix}:apigateway:{self.boto_session.region_name}:" + f"lambda:path/2015-03-31/functions/{authorizer_lambda_arn}/invocations" ) + authorizer_resource = self.create_authorizer(restapi, lambda_uri, authorizer) + self.create_and_setup_methods( restapi, root_id, @@ -1855,9 +1742,7 @@ def create_api_gateway_routes( ) if cors_options: - self.create_and_setup_cors( - restapi, root_id, invocations_uri, 0, cors_options - ) + self.create_and_setup_cors(restapi, root_id, invocations_uri, 0, cors_options) resource = troposphere.apigateway.Resource("ResourceAnyPathSlashed") self.cf_api_resources.append(resource.title) @@ -1877,9 
+1762,7 @@ def create_api_gateway_routes( ) # pragma: no cover if cors_options: - self.create_and_setup_cors( - restapi, resource, invocations_uri, 1, cors_options - ) # pragma: no cover + self.create_and_setup_cors(restapi, resource, invocations_uri, 1, cors_options) # pragma: no cover return restapi def create_authorizer(self, restapi, uri, authorizer): @@ -1894,20 +1777,14 @@ def create_authorizer(self, restapi, uri, authorizer): authorizer_resource.Name = authorizer.get("name", "ZappaAuthorizer") authorizer_resource.Type = authorizer_type authorizer_resource.AuthorizerUri = uri - authorizer_resource.IdentitySource = ( - "method.request.header.%s" % authorizer.get("token_header", "Authorization") - ) + authorizer_resource.IdentitySource = "method.request.header.%s" % authorizer.get("token_header", "Authorization") if identity_validation_expression: - authorizer_resource.IdentityValidationExpression = ( - identity_validation_expression - ) + authorizer_resource.IdentityValidationExpression = identity_validation_expression if authorizer_type == "TOKEN": if not self.credentials_arn: self.get_credentials_arn() - authorizer_resource.AuthorizerResultTtlInSeconds = authorizer.get( - "result_ttl", 300 - ) + authorizer_resource.AuthorizerResultTtlInSeconds = authorizer.get("result_ttl", 300) authorizer_resource.AuthorizerCredentials = self.credentials_arn if authorizer_type == "COGNITO_USER_POOLS": authorizer_resource.ProviderARNs = authorizer.get("provider_arns") @@ -2001,9 +1878,7 @@ def create_and_setup_cors(self, restapi, resource, uri, depth, config): ), "Access-Control-Allow-Origin": "'%s'" % config.get("allowed_origin", "*"), } - method_response.ResponseParameters = { - "method.response.header.%s" % key: True for key in response_headers - } + method_response.ResponseParameters = {"method.response.header.%s" % key: True for key in response_headers} method_response.StatusCode = "200" method.MethodResponses = [method_response] self.cf_template.add_resource(method) @@ -2015,8 +1890,7 @@ def create_and_setup_cors(self, restapi, resource, uri, depth, config): integration.RequestTemplates = {"application/json": '{"statusCode": 200}'} integration_response = troposphere.apigateway.IntegrationResponse() integration_response.ResponseParameters = { - "method.response.header.%s" % key: value - for key, value in response_headers.items() + "method.response.header.%s" % key: value for key, value in response_headers.items() } integration_response.ResponseTemplates = {"application/json": ""} integration_response.StatusCode = "200" @@ -2071,19 +1945,14 @@ def deploy_api_gateway( ], ) - return "https://{}.execute-api.{}.amazonaws.com/{}".format( - api_id, self.boto_session.region_name, stage_name - ) + return "https://{}.execute-api.{}.amazonaws.com/{}".format(api_id, self.boto_session.region_name, stage_name) def add_binary_support(self, api_id, cors=False): """ Add binary support """ response = self.apigateway_client.get_rest_api(restApiId=api_id) - if ( - "binaryMediaTypes" not in response - or "*/*" not in response["binaryMediaTypes"] - ): + if "binaryMediaTypes" not in response or "*/*" not in response["binaryMediaTypes"]: self.apigateway_client.update_rest_api( restApiId=api_id, patchOperations=[{"op": "add", "path": "/binaryMediaTypes/*~1*"}], @@ -2093,11 +1962,7 @@ def add_binary_support(self, api_id, cors=False): # fix for issue 699 and 1035, cors+binary support don't work together # go through each resource and update the contentHandling type response = 
self.apigateway_client.get_resources(restApiId=api_id) - resource_ids = [ - item["id"] - for item in response["items"] - if "OPTIONS" in item.get("resourceMethods", {}) - ] + resource_ids = [item["id"] for item in response["items"] if "OPTIONS" in item.get("resourceMethods", {})] for resource_id in resource_ids: self.apigateway_client.update_integration( @@ -2126,20 +1991,14 @@ def remove_binary_support(self, api_id, cors=False): if cors: # go through each resource and change the contentHandling type response = self.apigateway_client.get_resources(restApiId=api_id) - resource_ids = [ - item["id"] - for item in response["items"] - if "OPTIONS" in item.get("resourceMethods", {}) - ] + resource_ids = [item["id"] for item in response["items"] if "OPTIONS" in item.get("resourceMethods", {})] for resource_id in resource_ids: self.apigateway_client.update_integration( restApiId=api_id, resourceId=resource_id, httpMethod="OPTIONS", - patchOperations=[ - {"op": "replace", "path": "/contentHandling", "value": ""} - ], + patchOperations=[{"op": "replace", "path": "/contentHandling", "value": ""}], ) def add_api_compression(self, api_id, min_compression_size): @@ -2202,9 +2061,7 @@ def remove_api_key(self, api_id, stage_name): """ Remove a generated API key for api_id and stage_name """ - response = self.apigateway_client.get_api_keys( - limit=1, nameQuery="{}_{}".format(stage_name, api_id) - ) + response = self.apigateway_client.get_api_keys(limit=1, nameQuery="{}_{}".format(stage_name, api_id)) for api_key in response.get("items"): self.apigateway_client.delete_api_key(apiKey="{}".format(api_key["id"])) @@ -2249,8 +2106,6 @@ def undeploy_api_gateway(self, lambda_name, domain_name=None, base_path=None): """ print("Deleting API Gateway..") - api_id = self.get_api_id(lambda_name) - if domain_name: # XXX - Remove Route53 smartly here? @@ -2261,7 +2116,7 @@ def undeploy_api_gateway(self, lambda_name, domain_name=None, base_path=None): domainName=domain_name, basePath="(none)" if base_path is None else base_path, ) - except Exception as e: + except Exception: # We may not have actually set up the domain. 
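                # API Gateway represents the empty/root base path as the literal string
                # "(none)", which is why the delete_base_path_mapping call above
                # substitutes it whenever no explicit base_path was configured.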
pass @@ -2329,22 +2184,13 @@ def update_cognito(self, lambda_name, user_pool, lambda_configs, lambda_arn): description_kwargs[key] = value if "LambdaConfig" not in description_kwargs: description_kwargs["LambdaConfig"] = LambdaConfig - if ( - "TemporaryPasswordValidityDays" - in description_kwargs["Policies"]["PasswordPolicy"] - ): - description_kwargs["AdminCreateUserConfig"].pop( - "UnusedAccountValidityDays", None - ) + if "TemporaryPasswordValidityDays" in description_kwargs["Policies"]["PasswordPolicy"]: + description_kwargs["AdminCreateUserConfig"].pop("UnusedAccountValidityDays", None) if "UnusedAccountValidityDays" in description_kwargs["AdminCreateUserConfig"]: - description_kwargs["Policies"]["PasswordPolicy"][ - "TemporaryPasswordValidityDays" - ] = description_kwargs["AdminCreateUserConfig"].pop( - "UnusedAccountValidityDays", None - ) - result = self.cognito_client.update_user_pool( - UserPoolId=user_pool, **description_kwargs - ) + description_kwargs["Policies"]["PasswordPolicy"]["TemporaryPasswordValidityDays"] = description_kwargs[ + "AdminCreateUserConfig" + ].pop("UnusedAccountValidityDays", None) + result = self.cognito_client.update_user_pool(UserPoolId=user_pool, **description_kwargs) if result["ResponseMetadata"]["HTTPStatusCode"] != 200: print("Cognito: Failed to update user pool", result) @@ -2367,7 +2213,7 @@ def delete_stack(self, name, wait=False): """ try: stack = self.cf_client.describe_stacks(StackName=name)["Stacks"][0] - except: # pragma: no cover + except Exception: # pragma: no cover print("No Zappa stack named {0}".format(name)) return False @@ -2414,11 +2260,11 @@ def create_stack_template( # build a fresh template self.cf_template = troposphere.Template() - self.cf_template.add_description("Automatically generated with Zappa") + self.cf_template.set_description("Automatically generated with Zappa") self.cf_api_resources = [] self.cf_parameters = {} - restapi = self.create_api_gateway_routes( + self.create_api_gateway_routes( lambda_arn, api_name=lambda_name, api_key_required=api_key_required, @@ -2454,17 +2300,11 @@ def update_stack( self.upload_to_s3(template, working_bucket, disable_progress=disable_progress) if self.boto_session.region_name == "us-gov-west-1": - url = "https://s3-us-gov-west-1.amazonaws.com/{0}/{1}".format( - working_bucket, template - ) + url = "https://s3-us-gov-west-1.amazonaws.com/{0}/{1}".format(working_bucket, template) else: url = "https://s3.amazonaws.com/{0}/{1}".format(working_bucket, template) - tags = [ - {"Key": key, "Value": self.tags[key]} - for key in self.tags.keys() - if key != "ZappaProject" - ] + tags = [{"Key": key, "Value": self.tags[key]} for key in self.tags.keys() if key != "ZappaProject"] tags.append({"Key": "ZappaProject", "Value": name}) update = True @@ -2478,12 +2318,8 @@ def update_stack( return if not update: - self.cf_client.create_stack( - StackName=name, Capabilities=capabilities, TemplateURL=url, Tags=tags - ) - print( - "Waiting for stack {0} to create (this can take a bit)..".format(name) - ) + self.cf_client.create_stack(StackName=name, Capabilities=capabilities, TemplateURL=url, Tags=tags) + print("Waiting for stack {0} to create (this can take a bit)..".format(name)) else: try: self.cf_client.update_stack( @@ -2533,11 +2369,7 @@ def update_stack( count = 0 for result in sr.paginate(StackName=name): - done = ( - 1 - for x in result["StackResourceSummaries"] - if "COMPLETE" in x["ResourceStatus"] - ) + done = (1 for x in result["StackResourceSummaries"] if "COMPLETE" in x["ResourceStatus"]) 
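            # Each page of list_stack_resources contributes the number of resources whose
            # status string contains "COMPLETE" (CREATE_COMPLETE, UPDATE_COMPLETE, ...);
            # the sum on the next line tallies them across the paginated results.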
count += sum(done) if count: # We can end up in a situation where we have more resources being created @@ -2571,9 +2403,7 @@ def get_api_url(self, lambda_name, stage_name): """ api_id = self.get_api_id(lambda_name) if api_id: - return "https://{}.execute-api.{}.amazonaws.com/{}".format( - api_id, self.boto_session.region_name, stage_name - ) + return "https://{}.execute-api.{}.amazonaws.com/{}".format(api_id, self.boto_session.region_name, stage_name) else: return None @@ -2582,11 +2412,9 @@ def get_api_id(self, lambda_name): Given a lambda_name, return the API id. """ try: - response = self.cf_client.describe_stack_resource( - StackName=lambda_name, LogicalResourceId="Api" - ) + response = self.cf_client.describe_stack_resource(StackName=lambda_name, LogicalResourceId="Api") return response["StackResourceDetail"].get("PhysicalResourceId", None) - except: # pragma: no cover + except Exception: # pragma: no cover try: # Try the old method (project was probably made on an older, non CF version) response = self.apigateway_client.get_rest_apis(limit=500) @@ -2597,7 +2425,7 @@ def get_api_id(self, lambda_name): logger.exception("Could not get API ID.") return None - except: # pragma: no cover + except Exception: # pragma: no cover # We don't even have an API deployed. That's okay! return None @@ -2653,10 +2481,7 @@ def update_route53_records(self, domain_name, dns_name): """ zone_id = self.get_hosted_zone_id_for_domain(domain_name) - is_apex = ( - self.route53.get_hosted_zone(Id=zone_id)["HostedZone"]["Name"][:-1] - == domain_name - ) + is_apex = self.route53.get_hosted_zone(Id=zone_id)["HostedZone"]["Name"][:-1] == domain_name if is_apex: record_set = { "Name": domain_name, @@ -2685,9 +2510,7 @@ def update_route53_records(self, domain_name, dns_name): # but the alias target name does not lie within the target zone response = self.route53.change_resource_record_sets( HostedZoneId=zone_id, - ChangeBatch={ - "Changes": [{"Action": "UPSERT", "ResourceRecordSet": record_set}] - }, + ChangeBatch={"Changes": [{"Action": "UPSERT", "ResourceRecordSet": record_set}]}, ) return response @@ -2722,16 +2545,7 @@ def update_domain_name( print("Updating domain name!") certificate_name = certificate_name + str(time.time()) - - api_gateway_domain = self.apigateway_client.get_domain_name( - domainName=domain_name - ) - if ( - not certificate_arn - and certificate_body - and certificate_private_key - and certificate_chain - ): + if not certificate_arn and certificate_body and certificate_private_key and certificate_chain: acm_certificate = self.acm_client.import_certificate( Certificate=certificate_body, PrivateKey=certificate_private_key, @@ -2753,9 +2567,7 @@ def update_domain_name( ], ) - def update_domain_base_path_mapping( - self, domain_name, lambda_name, stage, base_path - ): + def update_domain_base_path_mapping(self, domain_name, lambda_name, stage, base_path): """ Update domain base path mapping on API Gateway if it was changed """ @@ -2763,15 +2575,10 @@ def update_domain_base_path_mapping( if not api_id: print("Warning! 
Can't update base path mapping!") return - base_path_mappings = self.apigateway_client.get_base_path_mappings( - domainName=domain_name - ) + base_path_mappings = self.apigateway_client.get_base_path_mappings(domainName=domain_name) found = False for base_path_mapping in base_path_mappings.get("items", []): - if ( - base_path_mapping["restApiId"] == api_id - and base_path_mapping["stage"] == stage - ): + if base_path_mapping["restApiId"] == api_id and base_path_mapping["stage"] == stage: found = True if base_path_mapping["basePath"] != base_path: self.apigateway_client.update_base_path_mapping( @@ -2800,9 +2607,7 @@ def get_all_zones(self): new_zones = self.route53.list_hosted_zones(MaxItems="100") while new_zones["IsTruncated"]: zones["HostedZones"] += new_zones["HostedZones"] - new_zones = self.route53.list_hosted_zones( - Marker=new_zones["NextMarker"], MaxItems="100" - ) + new_zones = self.route53.list_hosted_zones(Marker=new_zones["NextMarker"], MaxItems="100") zones["HostedZones"] += new_zones["HostedZones"] return zones @@ -2824,17 +2629,12 @@ def get_domain_name(self, domain_name, route53=True): try: zones = self.get_all_zones() for zone in zones["HostedZones"]: - records = self.route53.list_resource_record_sets( - HostedZoneId=zone["Id"] - ) + records = self.route53.list_resource_record_sets(HostedZoneId=zone["Id"]) for record in records["ResourceRecordSets"]: - if ( - record["Type"] in ("CNAME", "A") - and record["Name"][:-1] == domain_name - ): + if record["Type"] in ("CNAME", "A") and record["Name"][:-1] == domain_name: return record - except Exception as e: + except Exception: return None ## @@ -2887,9 +2687,7 @@ def create_iam_roles(self): except botocore.client.ClientError: print("Creating " + self.role_name + " IAM Role..") - role = self.iam.create_role( - RoleName=self.role_name, AssumeRolePolicyDocument=self.assume_policy - ) + role = self.iam.create_role(RoleName=self.role_name, AssumeRolePolicyDocument=self.assume_policy) self.credentials_arn = role.arn updated = True @@ -2897,19 +2695,13 @@ def create_iam_roles(self): policy = self.iam.RolePolicy(self.role_name, "zappa-permissions") try: if policy.policy_document != attach_policy_obj: - print( - "Updating zappa-permissions policy on " - + self.role_name - + " IAM Role." - ) + print("Updating zappa-permissions policy on " + self.role_name + " IAM Role.") policy.put(PolicyDocument=self.attach_policy) updated = True except botocore.client.ClientError: - print( - "Creating zappa-permissions policy on " + self.role_name + " IAM Role." 
- ) + print("Creating zappa-permissions policy on " + self.role_name + " IAM Role.") policy.put(PolicyDocument=self.attach_policy) updated = True @@ -2917,9 +2709,7 @@ def create_iam_roles(self): role.assume_role_policy_document["Statement"][0]["Principal"]["Service"] ) != set(assume_policy_obj["Statement"][0]["Principal"]["Service"]): print("Updating assume role policy on " + self.role_name + " IAM Role.") - self.iam_client.update_assume_role_policy( - RoleName=self.role_name, PolicyDocument=self.assume_policy - ) + self.iam_client.update_assume_role_policy(RoleName=self.role_name, PolicyDocument=self.assume_policy) updated = True return self.credentials_arn, updated @@ -2933,19 +2723,11 @@ def _clear_policy(self, lambda_name): if policy_response["ResponseMetadata"]["HTTPStatusCode"] == 200: statement = json.loads(policy_response["Policy"])["Statement"] for s in statement: - delete_response = self.lambda_client.remove_permission( - FunctionName=lambda_name, StatementId=s["Sid"] - ) + delete_response = self.lambda_client.remove_permission(FunctionName=lambda_name, StatementId=s["Sid"]) if delete_response["ResponseMetadata"]["HTTPStatusCode"] != 204: - logger.error( - "Failed to delete an obsolete policy statement: {}".format( - policy_response - ) - ) + logger.error("Failed to delete an obsolete policy statement: {}".format(policy_response)) else: - logger.debug( - "Failed to load Lambda function policy: {}".format(policy_response) - ) + logger.debug("Failed to load Lambda function policy: {}".format(policy_response)) except ClientError as e: if e.args[0].find("ResourceNotFoundException") > -1: logger.debug("No policy found, must be first run.") @@ -2961,17 +2743,22 @@ def create_event_permission(self, lambda_name, principal, source_arn): Create permissions to link to an event. Related: http://docs.aws.amazon.com/lambda/latest/dg/with-s3-example-configure-event-source.html """ - logger.debug( - "Adding new permission to invoke Lambda function: {}".format(lambda_name) - ) + logger.debug("Adding new permission to invoke Lambda function: {}".format(lambda_name)) + + account_id: str = self.sts_client.get_caller_identity().get("Account") + permission_response = self.lambda_client.add_permission( FunctionName=lambda_name, - StatementId="".join( - random.choice(string.ascii_uppercase + string.digits) for _ in range(8) - ), + StatementId="".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8)), Action="lambda:InvokeFunction", Principal=principal, SourceArn=source_arn, + # The SourceAccount argument ensures that only the specified AWS account can invoke the lambda function. + # This prevents a security issue where if a lambda is triggered off of s3 bucket events and the bucket is + # deleted, another AWS account can create a bucket with the same name and potentially trigger the original + # lambda function, since bucket names are global. 
+ # https://github.com/zappa/Zappa/issues/1039 + SourceAccount=account_id, ) if permission_response["ResponseMetadata"]["HTTPStatusCode"] != 201: @@ -3009,9 +2796,7 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): function = event["function"] expression = event.get("expression", None) # single expression expressions = event.get("expressions", None) # multiple expression - kwargs = event.get( - "kwargs", {} - ) # optional dict of keyword arguments for the event + kwargs = event.get("kwargs", {}) # optional dict of keyword arguments for the event event_source = event.get("event_source", None) description = event.get("description", function) @@ -3023,23 +2808,16 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): self.get_credentials_arn() if expression: - expressions = [ - expression - ] # same code for single and multiple expression + expressions = [expression] # same code for single and multiple expression if expressions: for index, expression in enumerate(expressions): - name = self.get_scheduled_event_name( - event, function, lambda_name, index + rule_name = self.get_scheduled_event_name( + event, + function, + lambda_name, + index, ) - # if it's possible that we truncated name, generate a unique, shortened name - # https://github.com/Miserlou/Zappa/issues/970 - if len(name) >= 64: - rule_name = self.get_hashed_rule_name( - event, function, lambda_name - ) - else: - rule_name = name rule_response = self.events_client.put_rule( Name=rule_name, @@ -3050,14 +2828,10 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): ) if "RuleArn" in rule_response: - logger.debug( - "Rule created. ARN {}".format(rule_response["RuleArn"]) - ) + logger.debug("Rule created. ARN {}".format(rule_response["RuleArn"])) # Specific permissions are necessary for any trigger to work. 
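                        # Roughly, the call below materialises as a resource-policy statement
                        # on the function like the following (the ARNs and account id are
                        # placeholders, not values from this repo); the SourceAccount condition
                        # added above is what closes the recreated-S3-bucket loophole described
                        # in issue #1039:
                        #
                        #   {
                        #       "Sid": "A1B2C3D4",
                        #       "Effect": "Allow",
                        #       "Principal": {"Service": "events.amazonaws.com"},
                        #       "Action": "lambda:InvokeFunction",
                        #       "Resource": "arn:aws:lambda:us-east-1:123456789012:function:my-app-dev",
                        #       "Condition": {
                        #           "ArnLike": {"AWS:SourceArn": "arn:aws:events:us-east-1:123456789012:rule/my-app-dev-..."},
                        #           "StringEquals": {"AWS:SourceAccount": "123456789012"}
                        #       }
                        #   }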
- self.create_event_permission( - lambda_name, "events.amazonaws.com", rule_response["RuleArn"] - ) + self.create_event_permission(lambda_name, "events.amazonaws.com", rule_response["RuleArn"]) # Overwriting the input, supply the original values and add kwargs input_template = ( @@ -3080,10 +2854,7 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): Rule=rule_name, Targets=[ { - "Id": "Id" - + "".join( - random.choice(string.digits) for _ in range(12) - ), + "Id": "Id" + "".join(random.choice(string.digits) for _ in range(12)), "Arn": lambda_arn, "InputTransformer": { "InputPathsMap": { @@ -3104,17 +2875,9 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): ) if target_response["ResponseMetadata"]["HTTPStatusCode"] == 200: - print( - "Scheduled {} with expression {}!".format( - rule_name, expression - ) - ) + print("Scheduled {} with expression {}!".format(rule_name, expression)) else: - print( - "Problem scheduling {} with expression {}.".format( - rule_name, expression - ) - ) + print("Problem scheduling {} with expression {}.".format(rule_name, expression)) elif event_source: service = self.service_from_arn(event_source["arn"]) @@ -3129,39 +2892,28 @@ def schedule_events(self, lambda_arn, lambda_name, events, default=True): else: svc = service - rule_response = add_event_source( - event_source, lambda_arn, function, self.boto_session - ) + rule_response = add_event_source(event_source, lambda_arn, function, self.boto_session) if rule_response == "successful": print("Created {} event schedule for {}!".format(svc, function)) elif rule_response == "failed": - print( - "Problem creating {} event schedule for {}!".format( - svc, function - ) - ) + print("Problem creating {} event schedule for {}!".format(svc, function)) elif rule_response == "exists": - print( - "{} event schedule for {} already exists - Nothing to do here.".format( - svc, function - ) - ) + print("{} event schedule for {} already exists - Nothing to do here.".format(svc, function)) elif rule_response == "dryrun": - print( - "Dryrun for creating {} event schedule for {}!!".format( - svc, function - ) - ) + print("Dryrun for creating {} event schedule for {}!!".format(svc, function)) else: print( "Could not create event {} - Please define either an expression or an event source".format( - name + rule_name, ) ) - @staticmethod - def get_scheduled_event_name(event, function, lambda_name, index=0): + def get_scheduled_event_name(self, event, function, lambda_name, index=0): + """ + Returns an AWS-valid CloudWatch rule name using a digest of the event name, lambda name, and function. + This allows support for rule names that may be longer than the 64 char limit. + """ name = event.get("name", function) if name != function: # a custom event name has been provided, make sure function name is included as postfix, @@ -3173,28 +2925,25 @@ def get_scheduled_event_name(event, function, lambda_name, index=0): # Related: https://github.com/Miserlou/Zappa/pull/1051 name = "{}-{}".format(index, name) # prefix scheduled event names with lambda name. So we can look them up later via the prefix. 
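        # Worked example of the 64-character CloudWatch rule-name limit handled just
        # below (names are hypothetical): a long project name would be truncated by
        # get_event_name(), so its SHA-1 digest is used instead, keeping the rule name
        # unique while preserving the searchable "-<function>" suffix.
        #
        #   import hashlib
        #   lambda_name = "customer-portal-backend-production-eu-west-1"   # 44 chars
        #   name = "0-tasks.rebuild_search_index"                          # 28 chars
        #   len("{}-{}".format(lambda_name, name))                         # 73 -> over the limit
        #   hashlib.sha1(lambda_name.encode()).hexdigest()                 # 40-char stand-in prefix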
- return Zappa.get_event_name(lambda_name, name) + event_name = self.get_event_name(lambda_name, name) + # if it's possible that we truncated name, generate a unique, shortened name + # https://github.com/Miserlou/Zappa/issues/970 + if len(event_name) >= 64: + lambda_name = self.get_hashed_lambda_name(lambda_name) + event_name = self.get_event_name(lambda_name, name) + + return event_name @staticmethod def get_event_name(lambda_name, name): """ Returns an AWS-valid Lambda event name. """ - return "{prefix:.{width}}-{postfix}".format( - prefix=lambda_name, width=max(0, 63 - len(name)), postfix=name - )[:64] + return "{prefix:.{width}}-{postfix}".format(prefix=lambda_name, width=max(0, 63 - len(name)), postfix=name)[:64] @staticmethod - def get_hashed_rule_name(event, function, lambda_name): - """ - Returns an AWS-valid CloudWatch rule name using a digest of the event name, lambda name, and function. - This allows support for rule names that may be longer than the 64 char limit. - """ - event_name = event.get("name", function) - name_hash = hashlib.sha1( - "{}-{}".format(lambda_name, event_name).encode("UTF-8") - ).hexdigest() - return Zappa.get_event_name(name_hash, function) + def get_hashed_lambda_name(lambda_name): + return hashlib.sha1(lambda_name.encode()).hexdigest() def delete_rule(self, rule_name): """ @@ -3214,15 +2963,11 @@ def delete_rule(self, rule_name): if error_code == "AccessDeniedException": raise else: - logger.debug( - "No target found for this rule: {} {}".format(rule_name, e.args[0]) - ) + logger.debug("No target found for this rule: {} {}".format(rule_name, e.args[0])) return if "Targets" in targets and targets["Targets"]: - self.events_client.remove_targets( - Rule=rule_name, Ids=[x["Id"] for x in targets["Targets"]] - ) + self.events_client.remove_targets(Rule=rule_name, Ids=[x["Id"] for x in targets["Targets"]]) else: # pragma: no cover logger.debug("No target to delete") @@ -3237,9 +2982,7 @@ def get_event_rule_names_for_lambda(self, lambda_arn): rule_names = response["RuleNames"] # Iterate when the results are paginated while "NextToken" in response: - response = self.events_client.list_rule_names_by_target( - TargetArn=lambda_arn, NextToken=response["NextToken"] - ) + response = self.events_client.list_rule_names_by_target(TargetArn=lambda_arn, NextToken=response["NextToken"]) rule_names.extend(response["RuleNames"]) return rule_names @@ -3250,9 +2993,7 @@ def get_event_rules_for_lambda(self, lambda_arn): rule_names = self.get_event_rule_names_for_lambda(lambda_arn=lambda_arn) return [self.events_client.describe_rule(Name=r) for r in rule_names] - def unschedule_events( - self, events, lambda_arn=None, lambda_name=None, excluded_source_services=None - ): + def unschedule_events(self, events, lambda_arn=None, lambda_name=None, excluded_source_services=None): excluded_source_services = excluded_source_services or [] """ Given a list of events, unschedule these CloudWatch Events. @@ -3279,15 +3020,11 @@ def unschedule_events( # re-scheduled when a new Lambda function is deployed. Therefore, they should not be removed during zappa # update or zappa schedule. 
if service not in excluded_source_services: - remove_event_source( - event_source, lambda_arn, function, self.boto_session - ) + remove_event_source(event_source, lambda_arn, function, self.boto_session) print( "Removed event {}{}.".format( name, - " ({})".format(str(event_source["events"])) - if "events" in event_source - else "", + " ({})".format(str(event_source["events"])) if "events" in event_source else "", ) ) @@ -3303,13 +3040,9 @@ def create_async_sns_topic(self, lambda_name, lambda_arn): # Create SNS topic topic_arn = self.sns_client.create_topic(Name=topic_name)["TopicArn"] # Create subscription - self.sns_client.subscribe( - TopicArn=topic_arn, Protocol="lambda", Endpoint=lambda_arn - ) + self.sns_client.subscribe(TopicArn=topic_arn, Protocol="lambda", Endpoint=lambda_arn) # Add Lambda permission for SNS to invoke function - self.create_event_permission( - lambda_name=lambda_name, principal="sns.amazonaws.com", source_arn=topic_arn - ) + self.create_event_permission(lambda_name=lambda_name, principal="sns.amazonaws.com", source_arn=topic_arn) # Add rule for SNS topic as a event source add_event_source( event_source={"arn": topic_arn, "events": ["sns:Publish"]}, @@ -3387,9 +3120,7 @@ def fetch_logs(self, lambda_name, filter_pattern="", limit=10000, start_time=0): Fetch the CloudWatch logs for a given Lambda name. """ log_name = "/aws/lambda/" + lambda_name - streams = self.logs_client.describe_log_streams( - logGroupName=log_name, descending=True, orderBy="LastEventTime" - ) + streams = self.logs_client.describe_log_streams(logGroupName=log_name, descending=True, orderBy="LastEventTime") all_streams = streams["logStreams"] all_names = [stream["logStreamName"] for stream in all_streams] @@ -3442,14 +3173,8 @@ def remove_api_gateway_logs(self, project_name): Removed all logs that are assigned to a given rest api id. """ for rest_api in self.get_rest_apis(project_name): - for stage in self.apigateway_client.get_stages(restApiId=rest_api["id"])[ - "item" - ]: - self.remove_log_group( - "API-Gateway-Execution-Logs_{}/{}".format( - rest_api["id"], stage["stageName"] - ) - ) + for stage in self.apigateway_client.get_stages(restApiId=rest_api["id"])["item"]: + self.remove_log_group("API-Gateway-Execution-Logs_{}/{}".format(rest_api["id"], stage["stageName"])) ## # Route53 Domain Name Entries @@ -3467,21 +3192,11 @@ def get_best_match_zone(all_zones, domain): """Return zone id which name is closer matched with domain name.""" # Related: https://github.com/Miserlou/Zappa/issues/459 - public_zones = [ - zone - for zone in all_zones["HostedZones"] - if not zone["Config"]["PrivateZone"] - ] + public_zones = [zone for zone in all_zones["HostedZones"] if not zone["Config"]["PrivateZone"]] - zones = { - zone["Name"][:-1]: zone["Id"] - for zone in public_zones - if zone["Name"][:-1] in domain - } + zones = {zone["Name"][:-1]: zone["Id"] for zone in public_zones if zone["Name"][:-1] in domain} if zones: - keys = max( - zones.keys(), key=lambda a: len(a) - ) # get longest key -- best match. + keys = max(zones.keys(), key=lambda a: len(a)) # get longest key -- best match. 
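            # Illustration with hypothetical zones: for the domain "api.staging.example.com"
            # and public hosted zones "example.com" and "staging.example.com", both zone names
            # are substrings of the domain, so the longer one -- "staging.example.com" -- wins
            # and its zone id is returned below.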
return zones[keys] else: return None @@ -3493,9 +3208,7 @@ def set_dns_challenge_txt(self, zone_id, domain, txt_challenge): print("Setting DNS challenge..") resp = self.route53.change_resource_record_sets( HostedZoneId=zone_id, - ChangeBatch=self.get_dns_challenge_change_batch( - "UPSERT", domain, txt_challenge - ), + ChangeBatch=self.get_dns_challenge_change_batch("UPSERT", domain, txt_challenge), ) return resp @@ -3507,9 +3220,7 @@ def remove_dns_challenge_txt(self, zone_id, domain, txt_challenge): print("Deleting DNS challenge..") resp = self.route53.change_resource_record_sets( HostedZoneId=zone_id, - ChangeBatch=self.get_dns_challenge_change_batch( - "DELETE", domain, txt_challenge - ), + ChangeBatch=self.get_dns_challenge_change_batch("DELETE", domain, txt_challenge), ) return resp @@ -3562,12 +3273,8 @@ def load_credentials(self, boto_session=None, profile_name=None): # If provided, use the supplied profile name. if profile_name: - self.boto_session = boto3.Session( - profile_name=profile_name, region_name=self.aws_region - ) - elif os.environ.get("AWS_ACCESS_KEY_ID") and os.environ.get( - "AWS_SECRET_ACCESS_KEY" - ): + self.boto_session = boto3.Session(profile_name=profile_name, region_name=self.aws_region) + elif os.environ.get("AWS_ACCESS_KEY_ID") and os.environ.get("AWS_SECRET_ACCESS_KEY"): region_name = os.environ.get("AWS_DEFAULT_REGION") or self.aws_region session_kw = { "aws_access_key_id": os.environ.get("AWS_ACCESS_KEY_ID"), @@ -3577,9 +3284,7 @@ def load_credentials(self, boto_session=None, profile_name=None): # If we're executing in a role, AWS_SESSION_TOKEN will be present, too. if os.environ.get("AWS_SESSION_TOKEN"): - session_kw["aws_session_token"] = os.environ.get( - "AWS_SESSION_TOKEN" - ) + session_kw["aws_session_token"] = os.environ.get("AWS_SESSION_TOKEN") self.boto_session = boto3.Session(**session_kw) else: diff --git a/zappa/handler.py b/zappa/handler.py index 41336dc15..ed0cc9835 100644 --- a/zappa/handler.py +++ b/zappa/handler.py @@ -20,7 +20,7 @@ from zappa.middleware import ZappaWSGIMiddleware from zappa.utilities import merge_headers, parse_s3_url from zappa.wsgi import common_log, create_wsgi_request -except ImportError as e: # pragma: no cover +except ImportError: # pragma: no cover from .middleware import ZappaWSGIMiddleware from .utilities import merge_headers, parse_s3_url from .wsgi import common_log, create_wsgi_request @@ -101,30 +101,21 @@ def __init__(self, settings_name="zappa_settings", session=None): # https://github.com/Miserlou/Zappa/issues/776 is_slim_handler = getattr(self.settings, "SLIM_HANDLER", False) if is_slim_handler: - included_libraries = getattr( - self.settings, "INCLUDE", ["libmysqlclient.so.18"] - ) + included_libraries = getattr(self.settings, "INCLUDE", []) try: - from ctypes import cdll, util + from ctypes import cdll for library in included_libraries: try: cdll.LoadLibrary(os.path.join(os.getcwd(), library)) except OSError: - print( - "Failed to find library: {}...right filename?".format( - library - ) - ) + print("Failed to find library: {}...right filename?".format(library)) except ImportError: print("Failed to import cytpes library") # This is a non-WSGI application # https://github.com/Miserlou/Zappa/pull/748 - if ( - not hasattr(self.settings, "APP_MODULE") - and not self.settings.DJANGO_SETTINGS - ): + if not hasattr(self.settings, "APP_MODULE") and not self.settings.DJANGO_SETTINGS: self.app_module = None wsgi_app_function = None # This is probably a normal WSGI app (Or django with overloaded wsgi application) 
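# Settings-side illustration of the INCLUDE handling earlier in this __init__ (the key
# names are assumed to follow the usual zappa_settings layout, and the .so filename is
# only an example): libraries listed under "include" are packaged into the archive root,
# and the slim handler then loads each one from the working directory with
# ctypes.cdll.LoadLibrary(), as above. For instance:
#
#   {
#       "production": {
#           "slim_handler": true,
#           "include": ["libmysqlclient.so.21"]
#       }
#   }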
@@ -138,9 +129,7 @@ def __init__(self, settings_name="zappa_settings", session=None): # add the Lambda root path into the sys.path self.trailing_slash = True - os.environ[ - SETTINGS_ENVIRONMENT_VARIABLE - ] = self.settings.DJANGO_SETTINGS + os.environ[SETTINGS_ENVIRONMENT_VARIABLE] = self.settings.DJANGO_SETTINGS else: self.trailing_slash = False @@ -248,7 +237,7 @@ def import_module_and_get_function(whole_function): @classmethod def lambda_handler(cls, event, context): # pragma: no cover - handler = cls() + handler = global_handler or cls() exception_handler = handler.settings.EXCEPTION_HANDLER try: return handler.handler(event, context) @@ -285,9 +274,7 @@ def run_function(app_function, event, context): # getargspec does not support python 3 method with type hints # Related issue: https://github.com/Miserlou/Zappa/issues/1452 if hasattr(inspect, "getfullargspec"): # Python 3 - args, varargs, keywords, defaults, _, _, _ = inspect.getfullargspec( - app_function - ) + args, varargs, keywords, defaults, _, _, _ = inspect.getfullargspec(app_function) else: # Python 2 args, varargs, keywords, defaults = inspect.getargspec(app_function) num_args = len(args) @@ -299,8 +286,7 @@ def run_function(app_function, event, context): result = app_function(event, context) else: raise RuntimeError( - "Function signature is invalid. Expected a function that accepts at most " - "2 arguments or varargs." + "Function signature is invalid. Expected a function that accepts at most " "2 arguments or varargs." ) return result @@ -343,9 +329,7 @@ def get_function_from_bot_intent_trigger(self, event): if intent: intent = intent.get("name") if intent: - return self.settings.AWS_BOT_EVENT_MAPPING.get( - "{}:{}".format(intent, event.get("invocationSource")) - ) + return self.settings.AWS_BOT_EVENT_MAPPING.get("{}:{}".format(intent, event.get("invocationSource"))) def get_function_for_cognito_trigger(self, trigger): """ @@ -419,7 +403,7 @@ def handler(self, event, context): try: # Support both for tests from zappa.ext.django_zappa import get_django_wsgi - except ImportError as e: # pragma: no cover + except ImportError: # pragma: no cover from django_zappa_app import get_django_wsgi # Get the Django WSGI app from our extension @@ -466,9 +450,7 @@ def handler(self, event, context): policy = self.run_function(app_function, event, context) return policy else: - logger.error( - "Cannot find a function to process the authorization request." 
- ) + logger.error("Cannot find a function to process the authorization request.") raise Exception("Unauthorized") # This is an AWS Cognito Trigger Event @@ -481,11 +463,7 @@ def handler(self, event, context): result = self.run_function(app_function, event, context) logger.debug(result) else: - logger.error( - "Cannot find a function to handle cognito trigger {}".format( - triggerSource - ) - ) + logger.error("Cannot find a function to handle cognito trigger {}".format(triggerSource)) return result # This is a CloudWatch event @@ -512,9 +490,7 @@ def handler(self, event, context): script_name = "" is_elb_context = False headers = merge_headers(event) - if event.get("requestContext", None) and event["requestContext"].get( - "elb", None - ): + if event.get("requestContext", None) and event["requestContext"].get("elb", None): # Related: https://github.com/Miserlou/Zappa/issues/1715 # inputs/outputs for lambda loadbalancer # https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html @@ -578,9 +554,7 @@ def handler(self, event, context): # base64 encoding and status description if is_elb_context: zappa_returndict.setdefault("isBase64Encoded", False) - zappa_returndict.setdefault( - "statusDescription", response.status - ) + zappa_returndict.setdefault("statusDescription", response.status) if response.data: if ( @@ -588,9 +562,7 @@ def handler(self, event, context): and not response.mimetype.startswith("text/") and response.mimetype != "application/json" ): - zappa_returndict["body"] = base64.b64encode( - response.data - ).decode("utf-8") + zappa_returndict["body"] = base64.b64encode(response.data).decode("utf-8") zappa_returndict["isBase64Encoded"] = True else: zappa_returndict["body"] = response.get_data(as_text=True) @@ -603,9 +575,7 @@ def handler(self, event, context): if "multiValueHeaders" in event: zappa_returndict["multiValueHeaders"] = {} for key, value in response.headers: - zappa_returndict["multiValueHeaders"][ - key - ] = response.headers.getlist(key) + zappa_returndict["multiValueHeaders"][key] = response.headers.getlist(key) # Calculate the total response time, # and log it in the Common Log format. @@ -646,9 +616,7 @@ def handler(self, event, context): content["statusCode"] = 500 body = {"message": message} if settings.DEBUG: # only include traceback if debug is on. - body["traceback"] = traceback.format_exception( - *exc_info - ) # traceback as a list for readability. + body["traceback"] = traceback.format_exception(*exc_info) # traceback as a list for readability. content["body"] = json.dumps(str(body), sort_keys=True, indent=4) return content @@ -659,7 +627,10 @@ def lambda_handler(event, context): # pragma: no cover def keep_warm_callback(event, context): """Method is triggered by the CloudWatch event scheduled when keep_warm setting is set to true.""" - lambda_handler( - event={}, context=context - ) # overriding event with an empty one so that web app initialization will + lambda_handler(event={}, context=context) # overriding event with an empty one so that web app initialization will # be triggered. 
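# Usage sketch for the module-level handler added just below (assumes a deployed package
# where the generated zappa_settings module is importable; nothing here is part of the
# patch): setting the flag before import builds the LambdaHandler once, so the
# LambdaHandler.lambda_handler() classmethod above reuses it on warm invocations instead
# of re-reading settings for every event.
#
#   import os
#   os.environ["INSTANTIATE_LAMBDA_HANDLER_ON_IMPORT"] = "True"   # e.g. via aws_environment_variables
#
#   import zappa.handler
#   assert zappa.handler.global_handler is not None               # built at import time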
+ + +global_handler = None +if os.environ.get("INSTANTIATE_LAMBDA_HANDLER_ON_IMPORT"): + global_handler = LambdaHandler() diff --git a/zappa/letsencrypt.py b/zappa/letsencrypt.py index 9ecd63f02..671e45d14 100755 --- a/zappa/letsencrypt.py +++ b/zappa/letsencrypt.py @@ -21,7 +21,6 @@ import re import shutil import subprocess -import sys import tempfile import textwrap import time @@ -80,7 +79,10 @@ def get_cert_and_update_domain( stage=api_stage, ) print( - "Created a new domain name. Please note that it can take up to 40 minutes for this domain to be created and propagated through AWS, but it requires no further work on your part." + "Created a new domain name. " + "Please note that it can take up to 40 minutes " + "for this domain to be created and propagated through AWS, " + "but it requires no further work on your part." ) else: zappa_instance.update_domain_name( @@ -213,9 +215,7 @@ def get_boulder_header(key_bytes): "jwk": { "e": _b64(binascii.unhexlify(pub_exp.encode("utf-8"))), "kty": "RSA", - "n": _b64( - binascii.unhexlify(re.sub(r"(\s|:)", "", pub_hex).encode("utf-8")) - ), + "n": _b64(binascii.unhexlify(re.sub(r"(\s|:)", "", pub_hex).encode("utf-8"))), }, } @@ -270,15 +270,9 @@ def get_cert(zappa_instance, log=LOGGER, CA=DEFAULT_CA): }, ) if code != 201: - raise ValueError( - "Error requesting challenges: {0} {1}".format(code, result) - ) - - challenge = [ - ch - for ch in json.loads(result.decode("utf8"))["challenges"] - if ch["type"] == "dns-01" - ][0] + raise ValueError("Error requesting challenges: {0} {1}".format(code, result)) + + challenge = [ch for ch in json.loads(result.decode("utf8"))["challenges"] if ch["type"] == "dns-01"][0] token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge["token"]) keyauthorization = "{0}.{1}".format(token, thumbprint).encode("utf-8") @@ -330,20 +324,14 @@ def verify_challenge(uri): resp = urlopen(uri) challenge_status = json.loads(resp.read().decode("utf8")) except IOError as e: - raise ValueError( - "Error checking challenge: {0} {1}".format( - e.code, json.loads(e.read().decode("utf8")) - ) - ) + raise ValueError("Error checking challenge: {0} {1}".format(e.code, json.loads(e.read().decode("utf8")))) if challenge_status["status"] == "pending": time.sleep(2) elif challenge_status["status"] == "valid": LOGGER.info("Domain verified!") break else: - raise ValueError( - "Domain challenge did not pass: {0}".format(challenge_status) - ) + raise ValueError("Domain challenge did not pass: {0}".format(challenge_status)) def sign_certificate(): @@ -381,10 +369,8 @@ def encode_certificate(result): """ Encode cert bytes to PEM encoded cert file. 
""" - cert_body = ( - """-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n""".format( - "\n".join(textwrap.wrap(base64.b64encode(result).decode("utf8"), 64)) - ) + cert_body = """-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n""".format( + "\n".join(textwrap.wrap(base64.b64encode(result).decode("utf8"), 64)) ) signed_crt = open("{}/signed.crt".format(gettempdir()), "w") signed_crt.write(cert_body) @@ -424,9 +410,7 @@ def _send_signed_request(url, payload): "-sign", os.path.join(gettempdir(), "account.key"), ] - proc = subprocess.Popen( - cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) + proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = proc.communicate("{0}.{1}".format(protected64, payload64).encode("utf8")) if proc.returncode != 0: # pragma: no cover raise IOError("OpenSSL Error: {0}".format(err)) diff --git a/zappa/middleware.py b/zappa/middleware.py index 2f7bb2bfe..920db0c94 100644 --- a/zappa/middleware.py +++ b/zappa/middleware.py @@ -44,11 +44,7 @@ def encode_response(status, headers, exc_info=None): Related: https://github.com/Miserlou/Zappa/issues/1965 """ - new_headers = [ - header - for header in headers - if ((type(header[0]) != str) or (header[0].lower() != "set-cookie")) - ] + new_headers = [header for header in headers if ((type(header[0]) != str) or (header[0].lower() != "set-cookie"))] cookie_headers = [ (header[0].lower(), header[1]) for header in headers diff --git a/zappa/utilities.py b/zappa/utilities.py index 6149a575a..72ad9f0f7 100644 --- a/zappa/utilities.py +++ b/zappa/utilities.py @@ -13,7 +13,6 @@ import botocore import durationpy -from past.builtins import basestring LOG = logging.getLogger(__name__) @@ -44,7 +43,7 @@ def copy_file(src, dst, item): st = os.lstat(s) mode = stat.S_IMODE(st.st_mode) os.lchmod(d, mode) - except: + except Exception: pass # lchmod not available elif os.path.isdir(s): copytree(s, d, metadata, symlinks, ignore) @@ -105,16 +104,14 @@ def string_to_timestamp(timestring): # Uses an extended version of Go's duration string. try: delta = durationpy.from_str(timestring) - past = datetime.datetime.utcnow() - delta + past = datetime.datetime.now(datetime.timezone.utc) - delta ts = calendar.timegm(past.timetuple()) return ts - except Exception as e: + except Exception: pass if ts: return ts - # else: - # print("Unable to parse timestring.") return 0 @@ -137,9 +134,7 @@ def detect_django_settings(): continue full = os.path.join(root, filename) package_path = full.replace(os.getcwd(), "") - package_module = ( - package_path.replace(os.sep, ".").split(".", 1)[1].replace(".py", "") - ) + package_module = package_path.replace(os.sep, ".").split(".", 1)[1].replace(".py", "") matches.append(package_module) return matches @@ -175,11 +170,7 @@ def detect_flask_apps(): continue package_path = full.replace(os.getcwd(), "") - package_module = ( - package_path.replace(os.sep, ".") - .split(".", 1)[1] - .replace(".py", "") - ) + package_module = package_path.replace(os.sep, ".").split(".", 1)[1].replace(".py", "") app_module = package_module + "." 
+ app matches.append(app_module) @@ -196,12 +187,12 @@ def get_runtime_from_python_version(): if sys.version_info[0] < 3: raise ValueError("Python 2.x is no longer supported.") else: - if sys.version_info[1] <= 6: - return "python3.6" - elif sys.version_info[1] <= 7: + if sys.version_info[1] <= 7: return "python3.7" - else: + elif sys.version_info[1] <= 8: return "python3.8" + else: + return "python3.9" ## @@ -219,9 +210,7 @@ def get_topic_name(lambda_name): ## -def get_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False -): +def get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False): """ Given an event_source dictionary item, a session and a lambda_arn, @@ -333,9 +322,7 @@ def status(self, function): uuid = self._get_uuid(function) if uuid: try: - response = self._lambda.call( - "get_event_source_mapping", UUID=self._get_uuid(function) - ) + response = self._lambda.call("get_event_source_mapping", UUID=self._get_uuid(function)) LOG.debug(response) except botocore.exceptions.ClientError: LOG.debug("event source %s does not exist", self.arn) @@ -361,9 +348,7 @@ def add_filters(self, function): ) kappa.event_source.sns.LOG.debug(response) except Exception: - kappa.event_source.sns.LOG.exception( - "Unable to add filters for SNS topic %s", self.arn - ) + kappa.event_source.sns.LOG.exception("Unable to add filters for SNS topic %s", self.arn) def add(self, function): super().add(function) @@ -422,16 +407,12 @@ def autoreturn(self, function_name): return event_source_obj, ctx, funk -def add_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False -): +def add_event_source(event_source, lambda_arn, target_function, boto_session, dry=False): """ Given an event_source dictionary, create the object and add the event source. """ - event_source_obj, ctx, funk = get_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False - ) + event_source_obj, ctx, funk = get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False) # TODO: Detect changes in config and refine exists algorithm if not dry: if not event_source_obj.status(funk): @@ -443,16 +424,12 @@ def add_event_source( return "dryrun" -def remove_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False -): +def remove_event_source(event_source, lambda_arn, target_function, boto_session, dry=False): """ Given an event_source dictionary, create the object and remove the event source. """ - event_source_obj, ctx, funk = get_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False - ) + event_source_obj, ctx, funk = get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False) # This is slightly dirty, but necessary for using Kappa this way. funk.arn = lambda_arn @@ -463,16 +440,12 @@ def remove_event_source( return event_source_obj -def get_event_source_status( - event_source, lambda_arn, target_function, boto_session, dry=False -): +def get_event_source_status(event_source, lambda_arn, target_function, boto_session, dry=False): """ Given an event_source dictionary, create the object and get the event source status. 
""" - event_source_obj, ctx, funk = get_event_source( - event_source, lambda_arn, target_function, boto_session, dry=False - ) + event_source_obj, ctx, funk = get_event_source(event_source, lambda_arn, target_function, boto_session, dry=False) return event_source_obj.status(funk) @@ -518,7 +491,7 @@ def validate_name(name, maxlen=80): Return: the name Raise: InvalidAwsLambdaName, if the name is invalid. """ - if not isinstance(name, basestring): + if not isinstance(name, str): msg = "Name must be of type string" raise InvalidAwsLambdaName(msg) if len(name) > maxlen: @@ -538,20 +511,12 @@ def contains_python_files_or_subdirs(folder): Checks (recursively) if the directory contains .py or .pyc files """ for root, dirs, files in os.walk(folder): - if [ - filename - for filename in files - if filename.endswith(".py") or filename.endswith(".pyc") - ]: + if [filename for filename in files if filename.endswith(".py") or filename.endswith(".pyc")]: return True for d in dirs: for _, subdirs, subfiles in os.walk(d): - if [ - filename - for filename in subfiles - if filename.endswith(".py") or filename.endswith(".pyc") - ]: + if [filename for filename in subfiles if filename.endswith(".py") or filename.endswith(".pyc")]: return True return False @@ -578,7 +543,8 @@ def titlecase_keys(d): # https://github.com/Miserlou/Zappa/issues/1688 def is_valid_bucket_name(name): """ - Checks if an S3 bucket name is valid according to https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html#bucketnamingrules + Checks if an S3 bucket name is valid according to: + https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html#bucketnamingrules """ # Bucket names must be at least 3 and no more than 63 characters long. if len(name) < 3 or len(name) > 63: diff --git a/zappa/wsgi.py b/zappa/wsgi.py index 9aadfbe27..4220c053f 100644 --- a/zappa/wsgi.py +++ b/zappa/wsgi.py @@ -26,9 +26,7 @@ def create_wsgi_request( create and return a valid WSGI request environ. """ method = event_info["httpMethod"] - headers = ( - merge_headers(event_info) or {} - ) # Allow for the AGW console 'Test' button to work (Pull #735) + headers = merge_headers(event_info) or {} # Allow for the AGW console 'Test' button to work (Pull #735) """ API Gateway and ALB both started allowing for multi-value querystring