From 22d35a99d8704a734f2b0ab5e8dbc87ce85eba00 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Sat, 7 May 2022 08:57:25 +0200 Subject: [PATCH 01/72] Initial commit --- .github/workflows/run-e2e-tests.yml | 38 +++ Makefile | 8 +- poetry.lock | 235 +++++++++++++++++- pyproject.toml | 119 ++++----- tests/e2e/__init__.py | 0 tests/e2e/conftest.py | 124 +++++++++ tests/e2e/logger/__init__.py | 0 tests/e2e/logger/handlers/basic_handler.py | 17 ++ .../e2e/logger/handlers/no_context_handler.py | 13 + tests/e2e/logger/test_logger.py | 139 +++++++++++ tests/e2e/metrics/__init__.py | 0 tests/e2e/metrics/handlers/basic_handler.py | 16 ++ tests/e2e/metrics/test_metrics.py | 49 ++++ tests/e2e/tracer/__init__.py | 0 tests/e2e/tracer/handlers/basic_handler.py | 16 ++ tests/e2e/tracer/test_tracer.py | 53 ++++ tests/e2e/utils.py | 94 +++++++ 17 files changed, 860 insertions(+), 61 deletions(-) create mode 100644 .github/workflows/run-e2e-tests.yml create mode 100644 tests/e2e/__init__.py create mode 100644 tests/e2e/conftest.py create mode 100644 tests/e2e/logger/__init__.py create mode 100644 tests/e2e/logger/handlers/basic_handler.py create mode 100644 tests/e2e/logger/handlers/no_context_handler.py create mode 100644 tests/e2e/logger/test_logger.py create mode 100644 tests/e2e/metrics/__init__.py create mode 100644 tests/e2e/metrics/handlers/basic_handler.py create mode 100644 tests/e2e/metrics/test_metrics.py create mode 100644 tests/e2e/tracer/__init__.py create mode 100644 tests/e2e/tracer/handlers/basic_handler.py create mode 100644 tests/e2e/tracer/test_tracer.py create mode 100644 tests/e2e/utils.py diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml new file mode 100644 index 00000000000..592213c2f39 --- /dev/null +++ b/.github/workflows/run-e2e-tests.yml @@ -0,0 +1,38 @@ +name: run-e2e-tests +on: + workflow_dispatch: {} +env: + AWS_DEFAULT_REGION: us-east-1 + E2E_TESTS_PATH: tests/e2e/ +jobs: + run: + ######################### + # Force Github action to run only a single job at a time (based on the group name) + # This is to prevent "race-condition" in building e2e tests infrastructure + ######################### + concurrency: + group: e2e-tests + runs-on: ubuntu-latest + permissions: + id-token: write # needed to interact with GitHub's OIDC Token endpoint. + contents: read + steps: + - name: "Checkout" + uses: actions/checkout@v3 + ######################### + # Release new version + ######################### + - name: "Use Python 3" + uses: actions/setup-python@v3 + with: + python-version: "3.x" # Version range or exact version of a Python version to use, using SemVer's version range syntax + architecture: "x64" # optional x64 or x86. 
Defaults to x64 if not specified + - name: Install dependencies + run: make dev + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1.6.1 + with: + role-to-assume: ${{ secrets.E2E_DEPLOYMENT_ROLE_ARN }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + - name: Test + run: make e2e-test diff --git a/Makefile b/Makefile index 73667eb5f58..4e532e760cd 100644 --- a/Makefile +++ b/Makefile @@ -8,6 +8,7 @@ dev: pip install --upgrade pip pre-commit poetry poetry install --extras "pydantic" pre-commit install + npm install -g aws-cdk format: poetry run isort aws_lambda_powertools tests @@ -17,14 +18,17 @@ lint: format poetry run flake8 aws_lambda_powertools/* tests/* test: - poetry run pytest -m "not perf" --cov=aws_lambda_powertools --cov-report=xml + poetry run pytest -m "not (perf or e2e)" --cov=aws_lambda_powertools --cov-report=xml poetry run pytest --cache-clear tests/performance unit-test: poetry run pytest tests/unit +e2e-test: + poetry run pytest -n 3 --dist loadscope -rP --durations=0 --durations-min=1 tests/e2e + coverage-html: - poetry run pytest -m "not perf" --cov=aws_lambda_powertools --cov-report=html + poetry run pytest -m "not (perf or e2e)" --cov=aws_lambda_powertools --cov-report=html pre-commit: pre-commit run --show-diff-on-failure diff --git a/poetry.lock b/poetry.lock index f0ce338de99..dca8096af8f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -20,6 +20,33 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] +[[package]] +name = "aws-cdk-lib" +version = "2.20.0" +description = "Version 2 of the AWS Cloud Development Kit library" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.55.1,<2.0.0" +publication = ">=0.0.3" + +[[package]] +name = "aws-cdk.aws-lambda-python-alpha" +version = "2.20.0a0" +description = "The CDK Construct Library for AWS Lambda in Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +aws-cdk-lib = ">=2.20.0,<3.0.0" +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.55.1,<2.0.0" +publication = ">=0.0.3" + [[package]] name = "aws-xray-sdk" version = "2.9.0" @@ -107,6 +134,33 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.12.5)"] +[[package]] +name = "cattrs" +version = "1.0.0" +description = "Composable complex class support for attrs." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +attrs = ">=17.3" + +[package.extras] +dev = ["bumpversion", "wheel", "watchdog", "flake8", "tox", "coverage", "sphinx", "pytest", "hypothesis", "pendulum"] + +[[package]] +name = "cattrs" +version = "22.1.0" +description = "Composable complex class support for attrs and dataclasses." 
+category = "dev" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +attrs = ">=20" +exceptiongroup = {version = "*", markers = "python_version <= \"3.10\""} +typing_extensions = {version = "*", markers = "python_version >= \"3.7\" and python_version < \"3.8\""} + [[package]] name = "certifi" version = "2021.10.8" @@ -146,6 +200,18 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "constructs" +version = "10.0.110" +description = "A programming model for software-defined state" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +jsii = ">=1.56.0,<2.0.0" +publication = ">=0.0.3" + [[package]] name = "coverage" version = "6.2" @@ -168,6 +234,14 @@ category = "main" optional = false python-versions = ">=3.6, <3.7" +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" + [[package]] name = "dnspython" version = "2.1.0" @@ -203,6 +277,28 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "exceptiongroup" +version = "1.0.0rc2" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)", "coverage (>=6)"] + +[[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +testing = ["pre-commit"] + [[package]] name = "fastjsonschema" version = "2.15.3" @@ -409,6 +505,21 @@ zipp = ">=0.5" docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +[[package]] +name = "importlib-resources" +version = "5.4.0" +description = "Read resources from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] + [[package]] name = "iniconfig" version = "1.1.1" @@ -453,6 +564,24 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "jsii" +version = "1.56.0" +description = "Python client for jsii runtime" +category = "dev" +optional = false +python-versions = "~=3.6" + +[package.dependencies] +attrs = ">=21.2,<22.0" +cattrs = [ + {version = ">=1.0.0,<1.1.0", markers = "python_version < \"3.7\""}, + {version = ">=1.8,<22.2", markers = "python_version >= \"3.7\""}, +] +importlib-resources = {version = "*", markers = "python_version < \"3.7\""} +python-dateutil = "*" +typing-extensions = ">=3.7,<5.0" + [[package]] name = "mako" version = "1.1.6" @@ -687,6 +816,14 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "publication" +version = "0.0.3" +description = "Publication helps you maintain 
public-api-friendly modules by preventing unintentional access to private implementation details via introspection." +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "py" version = "1.11.0" @@ -808,6 +945,18 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] +[[package]] +name = "pytest-forked" +version = "1.4.0" +description = "run tests in isolated forked subprocesses" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +py = "*" +pytest = ">=3.10" + [[package]] name = "pytest-mock" version = "3.6.1" @@ -822,6 +971,24 @@ pytest = ">=5.0" [package.extras] dev = ["pre-commit", "tox", "pytest-asyncio"] +[[package]] +name = "pytest-xdist" +version = "2.5.0" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" +pytest-forked = "*" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -883,6 +1050,18 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +[[package]] +name = "retry" +version = "0.9.2" +description = "Easy to use retry decorator." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +decorator = ">=3.4.2" +py = ">=1.4.26,<2.0.0" + [[package]] name = "ruamel.yaml" version = "0.17.17" @@ -1056,7 +1235,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "57759324149561b2a8c1f2564a3826bb0ee744a9f8a2a8d099dcbd50cf452ef8" +content-hash = "39f4e8285abda1fed81b4e4585a7dcf3911e16cb24626675e74fbd35e95da7cd" [metadata.files] atomicwrites = [ @@ -1067,6 +1246,14 @@ attrs = [ {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, ] +aws-cdk-lib = [ + {file = "aws-cdk-lib-2.20.0.tar.gz", hash = "sha256:0014fa03ccaee987ead9eb6bad9329f5801bf34bb2322a509ec034b7c7d2e859"}, + {file = "aws_cdk_lib-2.20.0-py3-none-any.whl", hash = "sha256:ff32bd82f17f512bde6731e917675974934626ddac420a7cec0010b4a20320c6"}, +] +"aws-cdk.aws-lambda-python-alpha" = [ + {file = "aws-cdk.aws-lambda-python-alpha-2.20.0a0.tar.gz", hash = "sha256:ea45c76e5aa48b906f960ef24fa6a14ff2f59d000787735a644207717853ff0a"}, + {file = "aws_cdk.aws_lambda_python_alpha-2.20.0a0-py3-none-any.whl", hash = "sha256:6d453ef890ba3b7421897100cff7a009ddb32a3402a1d51f514026a441bf52e1"}, +] aws-xray-sdk = [ {file = "aws-xray-sdk-2.9.0.tar.gz", hash = "sha256:b0cd972db218d4d8f7b53ad806fc6184626b924c4997ae58fc9f2a8cd1281568"}, {file = "aws_xray_sdk-2.9.0-py2.py3-none-any.whl", hash = "sha256:98216b3ac8281b51b59a8703f8ec561c460807d9d0679838f5c0179d381d7e58"}, @@ -1087,6 +1274,12 @@ botocore = [ {file = "botocore-1.23.17-py3-none-any.whl", hash = "sha256:54240370476d8e67a97664d2c47df451f0e1d30e9d50ea0a88da4c2c27981159"}, {file = "botocore-1.23.17.tar.gz", hash = "sha256:a9753b5220b5cc1bb8078086dc8ee10aa7da482b279dd0347965e9145a557003"}, ] +cattrs = [ + {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, + {file = 
"cattrs-1.0.0.tar.gz", hash = "sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"}, + {file = "cattrs-22.1.0-py3-none-any.whl", hash = "sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"}, + {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, +] certifi = [ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, @@ -1103,6 +1296,10 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +constructs = [ + {file = "constructs-10.0.110-py3-none-any.whl", hash = "sha256:40cd4b0c4397019a69773299ca6a389c7874958aec1699b654df883333db10ec"}, + {file = "constructs-10.0.110.tar.gz", hash = "sha256:c65b217aad02545fbf9c99e93ea11ed2054b9488ee022bd933599ad7441c8d37"}, +] coverage = [ {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, @@ -1156,6 +1353,10 @@ dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, ] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] dnspython = [ {file = "dnspython-2.1.0-py3-none-any.whl", hash = "sha256:95d12f6ef0317118d2a1a6fc49aac65ffec7eb8087474158f42f26a639135216"}, {file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"}, @@ -1167,6 +1368,14 @@ email-validator = [ eradicate = [ {file = "eradicate-2.0.0.tar.gz", hash = "sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"}, ] +exceptiongroup = [ + {file = "exceptiongroup-1.0.0rc2-py3-none-any.whl", hash = "sha256:83e465152bd0bc2bc40d9b75686854260f86946bb947c652b5cafc31cdff70e7"}, + {file = "exceptiongroup-1.0.0rc2.tar.gz", hash = "sha256:4d254b05231bed1d43079bdcfe0f1d66c0ab4783e6777a329355f9b78de3ad83"}, +] +execnet = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] fastjsonschema = [ {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, @@ -1233,6 +1442,10 @@ importlib-metadata = [ {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, {file = "importlib_metadata-4.2.0.tar.gz", hash = 
"sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, ] +importlib-resources = [ + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, +] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -1249,6 +1462,10 @@ jmespath = [ {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, ] +jsii = [ + {file = "jsii-1.56.0-py3-none-any.whl", hash = "sha256:8e61eb860a9a76c66cde44ce3a1e6f66b4b5ab3683131ca49124785f75f3792c"}, + {file = "jsii-1.56.0.tar.gz", hash = "sha256:d393c72aa1864de301b95b65b161efc8838999b32099cd8d7cda6a03cea3cff9"}, +] mako = [ {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, @@ -1410,6 +1627,10 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] +publication = [ + {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"}, + {file = "publication-0.0.3.tar.gz", hash = "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4"}, +] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -1483,10 +1704,18 @@ pytest-cov = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, ] +pytest-forked = [ + {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, + {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, +] pytest-mock = [ {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, ] +pytest-xdist = [ + {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, + {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, +] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = 
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -1534,6 +1763,10 @@ requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, ] +retry = [ + {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, +] "ruamel.yaml" = [ {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, diff --git a/pyproject.toml b/pyproject.toml index f4e38896d7d..04bd9490b97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,105 +1,104 @@ [tool.poetry] -name = "aws_lambda_powertools" -version = "1.25.7" -description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." authors = ["Amazon Web Services"] -include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] -classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Natural Language :: English", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", ] -repository="https://github.com/awslabs/aws-lambda-powertools-python" -readme = "README.md" +description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." 
+include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] keywords = ["aws_lambda_powertools", "aws", "tracing", "logging", "lambda", "powertools", "feature_flags", "idempotency", "middleware"] license = "MIT-0" +name = "aws_lambda_powertools" +readme = "README.md" +repository = "https://github.com/awslabs/aws-lambda-powertools-python" +version = "1.25.7" [tool.poetry.dependencies] -python = "^3.6.2" aws-xray-sdk = "^2.8.0" -fastjsonschema = "^2.14.5" boto3 = "^1.18" +email-validator = {version = "*", optional = true} +fastjsonschema = "^2.14.5" jmespath = "^0.10.0" -pydantic = {version = "^1.8.2", optional = true } -email-validator = {version = "*", optional = true } +pydantic = {version = "^1.8.2", optional = true} +python = "^3.6.2" [tool.poetry.dev-dependencies] -coverage = {extras = ["toml"], version = "^6.2"} -pytest = "^6.2.5" +aws-cdk-lib = "^2.20.0" +bandit = "^1.7.1" black = "^21.12b0" +coverage = {extras = ["toml"], version = "^6.2"} flake8 = "^4.0.1" flake8-black = "^0.2.3" +flake8-bugbear = "^22.1.11" flake8-builtins = "^1.5.3" flake8-comprehensions = "^3.7.0" flake8-debugger = "^4.0.0" +flake8-eradicate = "^1.2.0" flake8-fixme = "^1.1.1" flake8-isort = "^4.1.1" flake8-variables-names = "^0.0.4" isort = "^5.10.1" -pytest-cov = "^3.0.0" -pytest-mock = "^3.5.1" +mike = "^0.6.0" +mkdocs-git-revision-date-plugin = "^0.3.1" +mkdocs-material = "^8.2.4" +mypy = "^0.931" pdoc3 = "^0.10.0" +pytest = "^6.2.5" pytest-asyncio = "^0.16.0" -bandit = "^1.7.1" +pytest-cov = "^3.0.0" +pytest-mock = "^3.5.1" +pytest-xdist = "^2.5.0" radon = "^5.1.0" xenon = "^0.8.0" -flake8-eradicate = "^1.2.0" -flake8-bugbear = "^22.1.11" -mkdocs-git-revision-date-plugin = "^0.3.1" -mike = "^0.6.0" -mypy = "^0.931" -mkdocs-material = "^8.2.4" - +retry = "^0.9.2" +"aws-cdk.aws-lambda-python-alpha" = "^2.20.0-alpha.0" [tool.poetry.extras] pydantic = ["pydantic", "email-validator"] [tool.coverage.run] -source = ["aws_lambda_powertools"] -omit = ["tests/*", "aws_lambda_powertools/exceptions/*", "aws_lambda_powertools/utilities/parser/types.py", "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py"] branch = true +omit = [ + "tests/*", + "aws_lambda_powertools/exceptions/*", + "aws_lambda_powertools/utilities/parser/types.py", + "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py", +] +source = ["aws_lambda_powertools"] [tool.coverage.html] directory = "test_report" title = "Lambda Powertools Test Coverage" [tool.coverage.report] -fail_under = 90 exclude_lines = [ - # Have to re-enable the standard pragma - "pragma: no cover", - - # Don't complain about missing debug-only code: - "def __repr__", - "if self.debug", - - # Don't complain if tests don't hit defensive assertion code: - "raise AssertionError", - "raise NotImplementedError", - - # Don't complain if non-runnable code isn't run: - "if 0:", - "if __name__ == .__main__.:", - - # Ignore type function overload - "@overload", + # Have to re-enable the standard pragma + "pragma: no cover", # Don't complain about missing debug-only code: + "def __repr__", + "if self.debug", # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", # Don't complain if non-runnable code isn't run: + "if 0:", + "if __name__ == .__main__.:", # Ignore type function overload + "@overload", ] +fail_under = 90 [tool.isort] -multi_line_output = 3 -include_trailing_comma = true force_grid_wrap = 0 -use_parentheses = true +include_trailing_comma = true line_length = 120 +multi_line_output = 3 skip = 
"example" +use_parentheses = true [tool.black] -line-length = 120 exclude = ''' ( @@ -118,16 +117,20 @@ exclude = ''' | example ) ''' +line-length = 120 [tool.pytest.ini_options] -minversion = "6.0" addopts = "-ra -vv" +markers = [ + "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", + "e2e: marks e2e tests to be deselected (deselect with '-m \"not e2e\"')", +] +minversion = "6.0" testpaths = "./tests" -markers = "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')" [build-system] -requires = ["poetry>=0.12"] build-backend = "poetry.masonry.api" +requires = ["poetry>=0.12"] # NOTE # As of now, Feb 2020, flake8 don't support pyproject diff --git a/tests/e2e/__init__.py b/tests/e2e/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py new file mode 100644 index 00000000000..a5e5a69107d --- /dev/null +++ b/tests/e2e/conftest.py @@ -0,0 +1,124 @@ +import subprocess +import tempfile +import uuid +from pathlib import Path + +import boto3 +import pytest +from aws_cdk import App, BundlingOptions, CfnOutput, DockerVolume, Stack, aws_lambda_python_alpha, aws_logs +from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime, Tracing + + +def get_data(outputs, key): + value = None + for output in outputs: + if output["OutputKey"] == key: + value = output["OutputValue"] + return value + + +def load_handler_file(tmp_filename, handler_filename): + + with open(tmp_filename, mode="wb+") as tmp: + with open(handler_filename, mode="rb") as handler: + for line in handler: + tmp.write(line) + return tmp + + +# Create CDK cloud assembly code +def cdk_infrastructure(handler_file, stack_name, environment_variables, **config): + integration_test_app = App() + stack = Stack(integration_test_app, stack_name) + powertools_layer = LayerVersion.from_layer_version_arn( + stack, + "aws-lambda-powertools", + "arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:15", + ) + # TODO Create only one layer per test suite as it takes 4 additional minutes to deploy stack without cache + # TODO layer creation breaks hot-swap deployment as CDK complains that change contains non-Asset changes. + # powertools_layer = aws_lambda_python_alpha.PythonLayerVersion( + # stack, + # "aws-lambda-powertools", + # layer_version_name="aws-lambda-powertools", + # entry=".", + # compatible_runtimes=runtimes, + # ) + code = Code.from_asset(str(Path(handler_file).parent)) + # powertools_root_dir = "." 
+ # tmp_handler_dir = str(Path(handler_file).parent) + # code = Code.from_asset( + # path=powertools_root_dir, + # bundling=BundlingOptions( + # image=Runtime.PYTHON_3_9.bundling_image, + # volumes=[DockerVolume(container_path=tmp_handler_dir, host_path=tmp_handler_dir)], + # user="root", + # command=[ + # "bash", + # "-c", + # f"pip install poetry && poetry export -f requirements.txt --without-hashes > requirements.txt && pip install -r requirements.txt -t /asset-output/ && rsync -r aws_lambda_powertools /asset-output/ && rsync -r {tmp_handler_dir}/ /asset-output", + # ], + # ), + # ) + + function_python = Function( + stack, + "MyFunction", + runtime=Runtime.PYTHON_3_9, + code=code, + handler=f"{Path(handler_file).stem}.lambda_handler", + layers=[powertools_layer], + log_retention=aws_logs.RetentionDays.ONE_DAY, + environment=environment_variables, + tracing=Tracing.ACTIVE if config.get("tracing") == "ACTIVE" else Tracing.DISABLED, + ) + + CfnOutput(stack, "lambdaArn", value=function_python.function_arn) + integration_test_app.synth() + return integration_test_app + + +# Deploy synthesized code using CDK CLI +def deploy_app(path, stack_name, cf_client): + result = subprocess.run( + [ + "cdk", + "deploy", + "--app", + str(path), + "--require-approval", + "never", + "--hotswap", + ], + capture_output=True, + text=True, + check=True, + ) + print(result.returncode, result.stdout, result.stderr) + + outputs = cf_client.describe_stacks(StackName=stack_name)["Stacks"][0]["Outputs"] + return outputs + + +@pytest.fixture(scope="session") +def deploy_infrastructure(): + cf_client = boto3.Session().client("cloudformation") + # in order to use hotswap we create tmp file that we specify as cdk lambda asset + # and we dynamically change its content + with tempfile.TemporaryDirectory() as tmp_dir: + tmp_filename = f"{tmp_dir}/tmp.py" + stack_name = f"test-lambda-{uuid.uuid4()}" + + def deploy(handler_filename, environment_variables, **config): + load_handler_file(tmp_filename=tmp_filename, handler_filename=handler_filename) + app = cdk_infrastructure( + handler_file=tmp_filename, stack_name=stack_name, environment_variables=environment_variables, **config + ) + + outputs = deploy_app(path=app.outdir, stack_name=stack_name, cf_client=cf_client) + lambda_arn = get_data(outputs=outputs, key="lambdaArn") + return lambda_arn + + yield deploy + # Ensure stack deletion is triggered at the end of the test session + cf_client.delete_stack(StackName=stack_name) diff --git a/tests/e2e/logger/__init__.py b/tests/e2e/logger/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/logger/handlers/basic_handler.py b/tests/e2e/logger/handlers/basic_handler.py new file mode 100644 index 00000000000..6ba1613964f --- /dev/null +++ b/tests/e2e/logger/handlers/basic_handler.py @@ -0,0 +1,17 @@ +import os + +from aws_lambda_powertools import Logger + +logger = Logger() + +MESSAGE = os.environ["MESSAGE"] +ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"] + + +@logger.inject_lambda_context(log_event=True) +def lambda_handler(event, context): + logger.debug(MESSAGE) + logger.info(MESSAGE) + logger.append_keys(**{f"{ADDITIONAL_KEY}": "test"}) + logger.info(MESSAGE) + return "success" diff --git a/tests/e2e/logger/handlers/no_context_handler.py b/tests/e2e/logger/handlers/no_context_handler.py new file mode 100644 index 00000000000..3e4e31306fa --- /dev/null +++ b/tests/e2e/logger/handlers/no_context_handler.py @@ -0,0 +1,13 @@ +import os + +from aws_lambda_powertools import Logger + +logger = Logger() + 
+MESSAGE = os.environ["MESSAGE"] +ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"] + + +def lambda_handler(event, context): + logger.info(MESSAGE) + return "success" diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py new file mode 100644 index 00000000000..c7bfeb2c14c --- /dev/null +++ b/tests/e2e/logger/test_logger.py @@ -0,0 +1,139 @@ +import datetime +import os + +import boto3 +import pytest + +from .. import utils + +dirname = os.path.dirname(__file__) + + +@pytest.fixture(scope="module") +def config(): + return {"MESSAGE": "logger message test", "LOG_LEVEL": "INFO", "ADDITIONAL_KEY": "extra_info"} + + +@pytest.fixture(scope="module") +def deploy_basic_lambda(deploy_infrastructure, config): + lambda_arn = deploy_infrastructure( + handler_filename=f"{dirname}/handlers/basic_handler.py", + environment_variables=config, + ) + epoch = int(datetime.datetime.now().timestamp() * 1000) + result = utils.trigger_lambda(lambda_arn=lambda_arn) + + assert result["Payload"].read() == b'"success"' + return lambda_arn, epoch + + +@pytest.fixture(scope="module") +def deploy_no_context_lambda(deploy_infrastructure, config): + lambda_arn = deploy_infrastructure( + handler_filename=f"{dirname}/handlers/no_context_handler.py", environment_variables=config + ) + + epoch = int(datetime.datetime.now().timestamp() * 1000) + result = utils.trigger_lambda(lambda_arn=lambda_arn) + + assert result["Payload"].read() == b'"success"' + return lambda_arn, epoch + + +@pytest.mark.e2e +def test_basic_lambda_logs_visible(deploy_basic_lambda, config): + filtered_logs = utils.get_logs( + start_time=deploy_basic_lambda[1], + lambda_function_name=deploy_basic_lambda[0].split(":")[-1], + log_client=boto3.client("logs"), + ) + assert any(log.message == config["MESSAGE"] and log.level == config["LOG_LEVEL"] for log in filtered_logs) + + +@pytest.mark.e2e +def test_basic_lambda_no_debug_logs_visible(deploy_basic_lambda, config): + filtered_logs = utils.get_logs( + start_time=deploy_basic_lambda[1], + lambda_function_name=deploy_basic_lambda[0].split(":")[-1], + log_client=boto3.client("logs"), + ) + assert not any(log.message == config["MESSAGE"] and log.level == "DEBUG" for log in filtered_logs) + + +@pytest.mark.e2e +def test_basic_lambda_contextual_data_logged(deploy_basic_lambda): + filtered_logs = utils.get_logs( + start_time=deploy_basic_lambda[1], + lambda_function_name=deploy_basic_lambda[0].split(":")[-1], + log_client=boto3.client("logs"), + ) + assert all( + ( + log.xray_trace_id + and log.function_request_id + and log.function_arn + and log.function_memory_size + and log.function_name + and log.cold_start + ) + for log in filtered_logs + ) + + +@pytest.mark.e2e +def test_basic_lambda_additional_key_persistence_basic_lambda(deploy_basic_lambda, config): + filtered_logs = utils.get_logs( + start_time=deploy_basic_lambda[1], + lambda_function_name=deploy_basic_lambda[0].split(":")[-1], + log_client=boto3.client("logs"), + ) + assert any( + log.extra_info and log.message == config["MESSAGE"] and log.level == config["LOG_LEVEL"] + for log in filtered_logs + ) + + +@pytest.mark.e2e +def test_basic_lambda_empty_event_logged(deploy_basic_lambda): + filtered_logs = utils.get_logs( + start_time=deploy_basic_lambda[1], + lambda_function_name=deploy_basic_lambda[0].split(":")[-1], + log_client=boto3.client("logs"), + ) + assert any(log.message == {} for log in filtered_logs) + + +# Deploy new lambda using cdk hotswap mechanism +@pytest.mark.e2e +def 
test_no_context_lambda_contextual_data_not_logged(deploy_no_context_lambda): + filtered_logs = utils.get_logs( + start_time=deploy_no_context_lambda[1], + lambda_function_name=deploy_no_context_lambda[0].split(":")[-1], + log_client=boto3.client("logs"), + ) + assert not any( + ( + log.xray_trace_id + and log.function_request_id + and log.function_arn + and log.function_memory_size + and log.function_name + and log.cold_start + ) + for log in filtered_logs + ) + + +@pytest.mark.e2e +def test_no_context_lambda_event_not_logged(deploy_no_context_lambda): + filtered_logs = utils.get_logs( + start_time=deploy_no_context_lambda[1], + lambda_function_name=deploy_no_context_lambda[0].split(":")[-1], + log_client=boto3.client("logs"), + ) + assert not any(log.message == {} for log in filtered_logs) + + +### Add tests for cold start and non-cold start executions +### Test errors +### Test child loggers diff --git a/tests/e2e/metrics/__init__.py b/tests/e2e/metrics/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/metrics/handlers/basic_handler.py b/tests/e2e/metrics/handlers/basic_handler.py new file mode 100644 index 00000000000..121cacf7bb3 --- /dev/null +++ b/tests/e2e/metrics/handlers/basic_handler.py @@ -0,0 +1,16 @@ +import os + +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +METRIC_NAMESPACE = os.environ["METRIC_NAMESPACE"] +METRIC_NAME = os.environ["METRIC_NAME"] +SERVICE_NAME = os.environ["SERVICE_NAME"] + +metrics = Metrics(namespace=METRIC_NAMESPACE, service=SERVICE_NAME) + + +@metrics.log_metrics +def lambda_handler(event, context): + metrics.add_metric(name=METRIC_NAME, unit=MetricUnit.Count, value=1) + return "success" diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py new file mode 100644 index 00000000000..407141685f0 --- /dev/null +++ b/tests/e2e/metrics/test_metrics.py @@ -0,0 +1,49 @@ +import datetime +import os +import uuid + +import boto3 +import pytest + +from .. 
import utils + +dirname = os.path.dirname(__file__) + + +@pytest.fixture(scope="module") +def config(): + return { + "METRIC_NAMESPACE": f"powertools-e2e-metric-{uuid.uuid4()}", + "METRIC_NAME": "business-metric", + "SERVICE_NAME": "test-powertools-service", + } + + +@pytest.fixture(scope="module") +def deploy_basic_lambda(deploy_infrastructure, config): + lambda_arn = deploy_infrastructure( + handler_filename=f"{dirname}/handlers/basic_handler.py", + environment_variables=config, + ) + start_date = datetime.datetime.now(datetime.timezone.utc) + result = utils.trigger_lambda(lambda_arn=lambda_arn) + assert result["Payload"].read() == b'"success"' + return start_date + + +@pytest.mark.e2e +def test_basic_lambda_metric_visible(deploy_basic_lambda, config): + start_date = deploy_basic_lambda + end_date = start_date + datetime.timedelta(minutes=5) + + metrics = utils.get_metrics( + start_date=start_date, + end_date=end_date, + namespace=config["METRIC_NAMESPACE"], + metric_name=config["METRIC_NAME"], + service_name=config["SERVICE_NAME"], + cw_client=boto3.client(service_name="cloudwatch"), + ) + assert metrics["Timestamps"] and len(metrics["Timestamps"]) == 1 + assert metrics["Values"] and len(metrics["Values"]) == 1 + assert metrics["Values"][0] == 1 diff --git a/tests/e2e/tracer/__init__.py b/tests/e2e/tracer/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/tracer/handlers/basic_handler.py b/tests/e2e/tracer/handlers/basic_handler.py new file mode 100644 index 00000000000..bd0cbfd0d57 --- /dev/null +++ b/tests/e2e/tracer/handlers/basic_handler.py @@ -0,0 +1,16 @@ +import os + +from aws_lambda_powertools import Tracer + +tracer = Tracer() +tracer = Tracer(service="e2e-tests-app") + +ANNOTATION_KEY = os.environ["ANNOTATION_KEY"] +ANNOTATION_VALUE = os.environ["ANNOTATION_VALUE"] + + +@tracer.capture_lambda_handler +def lambda_handler(event, context): + tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) + tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) + return "success" diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py new file mode 100644 index 00000000000..eaa72c182b3 --- /dev/null +++ b/tests/e2e/tracer/test_tracer.py @@ -0,0 +1,53 @@ +import datetime +import json +import os +import uuid + +import boto3 +import pytest + +from .. 
import utils + +dirname = os.path.dirname(__file__) + + +@pytest.fixture(scope="module") +def config(): + return {"ANNOTATION_KEY": f"e2e-tracer-{uuid.uuid4()}", "ANNOTATION_VALUE": "stored"} + + +@pytest.fixture(scope="module") +def deploy_basic_lambda(deploy_infrastructure, config): + lambda_arn = deploy_infrastructure( + handler_filename=f"{dirname}/handlers/basic_handler.py", environment_variables=config, tracing="ACTIVE" + ) + start_date = datetime.datetime.utcnow() + result = utils.trigger_lambda(lambda_arn=lambda_arn) + assert result["Payload"].read() == b'"success"' + return lambda_arn, start_date + + +@pytest.mark.e2e +def test_basic_lambda_trace_visible(deploy_basic_lambda, config): + start_date = deploy_basic_lambda[1] + end_date = start_date + datetime.timedelta(minutes=5) + + trace = utils.get_traces( + start_date=start_date, + end_date=end_date, + lambda_function_name=deploy_basic_lambda[0].split(":")[-1], + xray_client=boto3.client("xray"), + ) + + for segment in trace["Traces"][0]["Segments"]: + document = json.loads(segment["Document"]) + if document["origin"] == "AWS::Lambda::Function": + for subsegment in document["subsegments"]: + if subsegment["name"] == "Invocation": + print(subsegment) + for x_subsegment in subsegment["subsegments"]: + metadata = x_subsegment["metadata"] + annotation = x_subsegment["annotations"] + + assert metadata["e2e-tests-app"][config["ANNOTATION_KEY"]] == config["ANNOTATION_VALUE"] + assert annotation["Service"] == "e2e-tests-app" diff --git a/tests/e2e/utils.py b/tests/e2e/utils.py new file mode 100644 index 00000000000..84f463782a5 --- /dev/null +++ b/tests/e2e/utils.py @@ -0,0 +1,94 @@ +import json +from typing import Any, Optional, Union + +import boto3 +from pydantic import BaseModel +from retry import retry + + +class Log(BaseModel): + level: str + location: str + message: Union[dict, str] + timestamp: str + service: str + cold_start: Optional[bool] + function_name: Optional[str] + function_memory_size: Optional[str] + function_arn: Optional[str] + function_request_id: Optional[str] + xray_trace_id: Optional[str] + extra_info: Optional[str] + + +@retry(ValueError, delay=1, jitter=1, tries=5) +def get_logs(lambda_function_name: str, log_client: Any, start_time: int): + response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time) + if not response["events"]: + raise ValueError("Empty response from Cloudwatch Logs. Repeating...") + filtered_logs = [] + for event in response["events"]: + try: + message = Log(**json.loads(event["message"])) + except json.decoder.JSONDecodeError: + continue + filtered_logs.append(message) + + return filtered_logs + + +def trigger_lambda(lambda_arn): + lambda_client = boto3.client("lambda") + response = lambda_client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse") + return response + + +@retry(ValueError, delay=1, jitter=1, tries=5) +def get_metrics(namespace, cw_client, start_date, end_date, metric_name, service_name): + + response = cw_client.get_metric_data( + MetricDataQueries=[ + { + "Id": "m1", + "MetricStat": { + "Metric": { + "Namespace": namespace, + "MetricName": metric_name, + "Dimensions": [{"Name": "service", "Value": service_name}], + }, + "Period": 600, + "Stat": "Maximum", + }, + "ReturnData": True, + }, + ], + StartTime=start_date, + EndTime=end_date, + ) + + result = response["MetricDataResults"][0] + if not result["Values"]: + raise ValueError("Empty response from Cloudwatch. 
Repeating...") + return result + + +@retry(ValueError, delay=2, jitter=0.5, tries=10) +def get_traces(lambda_function_name: str, xray_client, start_date, end_date): + paginator = xray_client.get_paginator("get_trace_summaries") + response_iterator = paginator.paginate( + StartTime=start_date, + EndTime=end_date, + TimeRangeType="Event", + Sampling=False, + FilterExpression=f'service("{lambda_function_name}")', + ) + + traces = [trace["TraceSummaries"][0]["Id"] for trace in response_iterator if trace["TraceSummaries"]] + if not traces: + raise ValueError("Empty response from X-RAY. Repeating...") + + trace_details = xray_client.batch_get_traces( + TraceIds=traces, + ) + + return trace_details From 5dde53eaf6e7de252cca2df5e27815c7f77ede20 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Sat, 7 May 2022 11:46:42 +0200 Subject: [PATCH 02/72] Rewrite code to use only cf deployment --- poetry.lock | 1763 ++++++++++++++++------------- pyproject.toml | 8 +- tests/e2e/conftest.py | 175 ++- tests/e2e/logger/test_logger.py | 113 +- tests/e2e/metrics/test_metrics.py | 13 +- tests/e2e/tracer/test_tracer.py | 16 +- tests/e2e/utils.py | 65 ++ 7 files changed, 1184 insertions(+), 969 deletions(-) diff --git a/poetry.lock b/poetry.lock index 507743c89b7..f03c0dd53b5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,45 +1,45 @@ [[package]] -category = "dev" -description = "Atomic file writes." name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.0" [[package]] -category = "dev" -description = "Classes Without Boilerplate" name = "attrs" +version = "21.2.0" +description = "Classes Without Boilerplate" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "21.2.0" [package.extras] -dev = [ - "coverage[toml] (>=5.0.2)", - "hypothesis", - "pympler", - "pytest (>=4.3.0)", - "six", - "mypy", - "pytest-mypy-plugins", - "zope.interface", - "furo", - "sphinx", - "sphinx-notfound-page", - "pre-commit", -] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] [[package]] -category = "main" -description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." +name = "aws-cdk-lib" +version = "2.23.0" +description = "Version 2 of the AWS Cloud Development Kit library" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.57.0,<2.0.0" +publication = ">=0.0.3" + +[[package]] name = "aws-xray-sdk" +version = "2.9.0" +description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." 
+category = "main" optional = false python-versions = "*" -version = "2.9.0" [package.dependencies] botocore = ">=1.11.3" @@ -47,26 +47,26 @@ future = "*" wrapt = "*" [[package]] -category = "dev" -description = "Security oriented static analyser for python code." name = "bandit" +version = "1.7.1" +description = "Security oriented static analyser for python code." +category = "dev" optional = false python-versions = ">=3.5" -version = "1.7.1" [package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} GitPython = ">=1.0.1" PyYAML = ">=5.3.1" -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} stevedore = ">=1.20.0" [[package]] -category = "dev" -description = "The uncompromising code formatter." name = "black" +version = "21.12b0" +description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.6.2" -version = "21.12b0" [package.dependencies] click = ">=7.1.2" @@ -77,8 +77,8 @@ platformdirs = ">=2" tomli = ">=0.2.6,<2.0.0" typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = [ - {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, - {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, + {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, + {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, ] [package.extras] @@ -89,12 +89,12 @@ python2 = ["typed-ast (>=1.4.3)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] -category = "main" -description = "The AWS SDK for Python" name = "boto3" +version = "1.21.44" +description = "The AWS SDK for Python" +category = "main" optional = false python-versions = ">= 3.6" -version = "1.21.44" [package.dependencies] botocore = ">=1.24.44,<1.25.0" @@ -105,12 +105,12 @@ s3transfer = ">=0.5.0,<0.6.0" crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] -category = "main" -description = "Low-level, data-driven core of boto 3." name = "botocore" +version = "1.24.44" +description = "Low-level, data-driven core of boto 3." +category = "main" optional = false python-versions = ">= 3.6" -version = "1.24.44" [package.dependencies] jmespath = ">=0.7.1,<2.0.0" @@ -121,51 +121,90 @@ urllib3 = ">=1.25.4,<1.27" crt = ["awscrt (==0.13.8)"] [[package]] +name = "cattrs" +version = "1.0.0" +description = "Composable complex class support for attrs." category = "dev" -description = "Python package for providing Mozilla's CA Bundle." -name = "certifi" optional = false python-versions = "*" -version = "2021.10.8" + +[package.dependencies] +attrs = ">=17.3" + +[package.extras] +dev = ["bumpversion", "wheel", "watchdog", "flake8", "tox", "coverage", "sphinx", "pytest", "hypothesis", "pendulum"] [[package]] +name = "cattrs" +version = "22.1.0" +description = "Composable complex class support for attrs and dataclasses." category = "dev" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +attrs = ">=20" +exceptiongroup = {version = "*", markers = "python_version <= \"3.10\""} +typing_extensions = {version = "*", markers = "python_version >= \"3.7\" and python_version < \"3.8\""} + +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] name = "charset-normalizer" +version = "2.0.8" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" optional = false python-versions = ">=3.5.0" -version = "2.0.8" [package.extras] unicode_backport = ["unicodedata2"] [[package]] -category = "dev" -description = "Composable command line interface toolkit" name = "click" +version = "8.0.3" +description = "Composable command line interface toolkit" +category = "dev" optional = false python-versions = ">=3.6" -version = "8.0.3" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] -category = "dev" -description = "Cross-platform colored terminal text." name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.4.4" [[package]] +name = "constructs" +version = "10.1.1" +description = "A programming model for software-defined state" category = "dev" -description = "Code coverage measurement for Python" -name = "coverage" optional = false python-versions = ">=3.6" + +[package.dependencies] +jsii = ">=1.57.0,<2.0.0" +publication = ">=0.0.3" + +[[package]] +name = "coverage" version = "6.2" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.6" [package.dependencies] tomli = {version = "*", optional = true, markers = "extra == \"toml\""} @@ -174,66 +213,96 @@ tomli = {version = "*", optional = true, markers = "extra == \"toml\""} toml = ["tomli"] [[package]] -category = "main" -description = "A backport of the dataclasses module for Python 3.6" name = "dataclasses" +version = "0.8" +description = "A backport of the dataclasses module for Python 3.6" +category = "main" optional = false python-versions = ">=3.6, <3.7" -version = "0.8" [[package]] -category = "main" -description = "DNS toolkit" +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] name = "dnspython" +version = "2.1.0" +description = "DNS toolkit" +category = "main" optional = true python-versions = ">=3.6" -version = "2.1.0" [package.extras] -curio = ["curio (>=1.2)", "sniffio (>=1.1)"] dnssec = ["cryptography (>=2.6)"] doh = ["requests", "requests-toolbelt"] idna = ["idna (>=2.1)"] +curio = ["curio (>=1.2)", "sniffio (>=1.1)"] trio = ["trio (>=0.14.0)", "sniffio (>=1.1)"] [[package]] -category = "main" -description = "A robust email syntax and deliverability validation library for Python 2.x/3.x." name = "email-validator" +version = "1.1.3" +description = "A robust email syntax and deliverability validation library for Python 2.x/3.x." +category = "main" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "1.1.3" [package.dependencies] dnspython = ">=1.15.0" idna = ">=2.0.0" [[package]] -category = "dev" -description = "Removes commented-out code." name = "eradicate" +version = "2.0.0" +description = "Removes commented-out code." 
+category = "dev" optional = false python-versions = "*" -version = "2.0.0" [[package]] -category = "main" -description = "Fastest Python implementation of JSON schema" +name = "exceptiongroup" +version = "1.0.0rc5" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +testing = ["pre-commit"] + +[[package]] name = "fastjsonschema" +version = "2.15.3" +description = "Fastest Python implementation of JSON schema" +category = "main" optional = false python-versions = "*" -version = "2.15.3" [package.extras] devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] -category = "dev" -description = "the modular source code checker: pep8 pyflakes and co" name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.6" -version = "4.0.1" [package.dependencies] importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} @@ -242,12 +311,12 @@ pycodestyle = ">=2.8.0,<2.9.0" pyflakes = ">=2.4.0,<2.5.0" [[package]] -category = "dev" -description = "flake8 plugin to call black as a code style validator" name = "flake8-black" +version = "0.2.3" +description = "flake8 plugin to call black as a code style validator" +category = "dev" optional = false python-versions = "*" -version = "0.2.3" [package.dependencies] black = "*" @@ -255,12 +324,12 @@ flake8 = ">=3.0.0" toml = "*" [[package]] -category = "dev" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." name = "flake8-bugbear" +version = "22.4.25" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +category = "dev" optional = false python-versions = ">=3.6" -version = "22.4.25" [package.dependencies] attrs = ">=19.2.0" @@ -270,12 +339,12 @@ flake8 = ">=3.0.0" dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] [[package]] -category = "dev" -description = "Check for python builtins being used as variables or parameters." name = "flake8-builtins" +version = "1.5.3" +description = "Check for python builtins being used as variables or parameters." +category = "dev" optional = false python-versions = "*" -version = "1.5.3" [package.dependencies] flake8 = "*" @@ -284,24 +353,24 @@ flake8 = "*" test = ["coverage", "coveralls", "mock", "pytest", "pytest-cov"] [[package]] -category = "dev" -description = "A flake8 plugin to help you write better list/set/dict comprehensions." name = "flake8-comprehensions" +version = "3.7.0" +description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
+category = "dev" optional = false python-versions = ">=3.6" -version = "3.7.0" [package.dependencies] flake8 = ">=3.0,<3.2.0 || >3.2.0,<5" importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] -category = "dev" -description = "ipdb/pdb statement checker plugin for flake8" name = "flake8-debugger" +version = "4.0.0" +description = "ipdb/pdb statement checker plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" -version = "4.0.0" [package.dependencies] flake8 = ">=3.0" @@ -309,12 +378,12 @@ pycodestyle = "*" six = "*" [[package]] -category = "dev" -description = "Flake8 plugin to find commented out code" name = "flake8-eradicate" +version = "1.2.1" +description = "Flake8 plugin to find commented out code" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "1.2.1" [package.dependencies] attrs = "*" @@ -322,20 +391,20 @@ eradicate = ">=2.0,<3.0" flake8 = ">=3.5,<5" [[package]] -category = "dev" -description = "Check for FIXME, TODO and other temporary developer notes. Plugin for flake8." name = "flake8-fixme" +version = "1.1.1" +description = "Check for FIXME, TODO and other temporary developer notes. Plugin for flake8." +category = "dev" optional = false python-versions = "*" -version = "1.1.1" [[package]] -category = "dev" -description = "flake8 plugin that integrates isort ." name = "flake8-isort" +version = "4.1.1" +description = "flake8 plugin that integrates isort ." +category = "dev" optional = false python-versions = "*" -version = "4.1.1" [package.dependencies] flake8 = ">=3.2.1,<5" @@ -346,28 +415,28 @@ testfixtures = ">=6.8.0,<7" test = ["pytest-cov"] [[package]] -category = "dev" -description = "A flake8 extension that helps to make more readable variables names" name = "flake8-variables-names" +version = "0.0.4" +description = "A flake8 extension that helps to make more readable variables names" +category = "dev" optional = false python-versions = "*" -version = "0.0.4" [[package]] -category = "main" -description = "Clean single-source support for Python 3 and 2" name = "future" +version = "0.18.2" +description = "Clean single-source support for Python 3 and 2" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "0.18.2" [[package]] -category = "dev" -description = "Copy your docs directly to the gh-pages branch." name = "ghp-import" +version = "2.0.2" +description = "Copy your docs directly to the gh-pages branch." 
+category = "dev" optional = false python-versions = "*" -version = "2.0.2" [package.dependencies] python-dateutil = ">=2.8.1" @@ -376,43 +445,43 @@ python-dateutil = ">=2.8.1" dev = ["twine", "markdown", "flake8", "wheel"] [[package]] -category = "dev" -description = "Git Object Database" name = "gitdb" +version = "4.0.9" +description = "Git Object Database" +category = "dev" optional = false python-versions = ">=3.6" -version = "4.0.9" [package.dependencies] smmap = ">=3.0.1,<6" [[package]] -category = "dev" -description = "Python Git Library" name = "gitpython" +version = "3.1.20" +description = "Python Git Library" +category = "dev" optional = false python-versions = ">=3.6" -version = "3.1.20" [package.dependencies] gitdb = ">=4.0.1,<5" typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} [[package]] -category = "main" -description = "Internationalized Domain Names in Applications (IDNA)" name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" -version = "3.3" [[package]] -category = "dev" -description = "Read metadata from Python packages" name = "importlib-metadata" +version = "4.2.0" +description = "Read metadata from Python packages" +category = "dev" optional = false python-versions = ">=3.6" -version = "4.2.0" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} @@ -420,50 +489,52 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = [ - "pytest (>=4.6)", - "pytest-checkdocs (>=2.4)", - "pytest-flake8", - "pytest-cov", - "pytest-enabler (>=1.0.1)", - "packaging", - "pep517", - "pyfakefs", - "flufl.flake8", - "pytest-black (>=0.3.7)", - "pytest-mypy", - "importlib-resources (>=1.3)", -] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] +name = "importlib-resources" +version = "5.4.0" +description = "Read resources from Python packages" category = "dev" -description = "iniconfig: brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[[package]] name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = "*" -version = "1.1.1" [[package]] -category = "dev" -description = "A Python utility / library to sort Python imports." name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.6.1,<4.0" -version = "5.10.1" [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -plugins = ["setuptools"] requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] [[package]] -category = "dev" -description = "A very fast and expressive template engine." 
name = "jinja2" +version = "3.0.3" +description = "A very fast and expressive template engine." +category = "dev" optional = false python-versions = ">=3.6" -version = "3.0.3" [package.dependencies] MarkupSafe = ">=2.0" @@ -472,20 +543,38 @@ MarkupSafe = ">=2.0" i18n = ["Babel (>=2.7)"] [[package]] -category = "main" -description = "JSON Matching Expressions" name = "jmespath" +version = "0.10.0" +description = "JSON Matching Expressions" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "0.10.0" [[package]] +name = "jsii" +version = "1.57.0" +description = "Python client for jsii runtime" category = "dev" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = "~=3.6" + +[package.dependencies] +attrs = ">=21.2,<22.0" +cattrs = [ + {version = ">=1.0.0,<1.1.0", markers = "python_version < \"3.7\""}, + {version = ">=1.8,<22.2", markers = "python_version >= \"3.7\""}, +] +importlib-resources = {version = "*", markers = "python_version < \"3.7\""} +python-dateutil = "*" +typing-extensions = ">=3.7,<5.0" + +[[package]] name = "mako" +version = "1.1.6" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.1.6" [package.dependencies] MarkupSafe = ">=0.9.2" @@ -495,12 +584,12 @@ babel = ["babel"] lingua = ["lingua"] [[package]] -category = "dev" -description = "Create Python CLI apps with little to no effort at all!" name = "mando" +version = "0.6.4" +description = "Create Python CLI apps with little to no effort at all!" +category = "dev" optional = false python-versions = "*" -version = "0.6.4" [package.dependencies] six = "*" @@ -509,47 +598,47 @@ six = "*" restructuredText = ["rst2ansi"] [[package]] -category = "dev" -description = "Python implementation of Markdown." name = "markdown" +version = "3.3.5" +description = "Python implementation of Markdown." +category = "dev" optional = false python-versions = ">=3.6" -version = "3.3.5" [package.extras] testing = ["coverage", "pyyaml"] [[package]] -category = "dev" -description = "Safely add untrusted strings to HTML/XML markup." name = "markupsafe" +version = "2.0.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" optional = false python-versions = ">=3.6" -version = "2.0.1" [[package]] -category = "dev" -description = "McCabe checker, plugin for flake8" name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "0.6.1" [[package]] -category = "dev" -description = "A deep merge function for 🐍." name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." +category = "dev" optional = false python-versions = ">=3.6" -version = "1.3.4" [[package]] -category = "dev" -description = "Manage multiple versions of your MkDocs-powered documentation" name = "mike" +version = "0.6.0" +description = "Manage multiple versions of your MkDocs-powered documentation" +category = "dev" optional = false python-versions = "*" -version = "0.6.0" [package.dependencies] jinja2 = "*" @@ -562,22 +651,22 @@ dev = ["coverage", "flake8 (>=3.0)", "pypandoc (>=1.4)"] test = ["coverage", "flake8 (>=3.0)"] [[package]] -category = "dev" -description = "Project documentation with Markdown." 
name = "mkdocs" +version = "1.2.3" +description = "Project documentation with Markdown." +category = "dev" optional = false python-versions = ">=3.6" -version = "1.2.3" [package.dependencies] -Jinja2 = ">=2.10.1" -Markdown = ">=3.2.1" -PyYAML = ">=3.10" click = ">=3.3" ghp-import = ">=1.0" importlib-metadata = ">=3.10" +Jinja2 = ">=2.10.1" +Markdown = ">=3.2.1" mergedeep = ">=1.3.4" packaging = ">=20.5" +PyYAML = ">=3.10" pyyaml-env-tag = ">=0.1" watchdog = ">=2.0" @@ -585,12 +674,12 @@ watchdog = ">=2.0" i18n = ["babel (>=2.9.0)"] [[package]] -category = "dev" -description = "MkDocs plugin for setting revision date from git per markdown file." name = "mkdocs-git-revision-date-plugin" +version = "0.3.2" +description = "MkDocs plugin for setting revision date from git per markdown file." +category = "dev" optional = false python-versions = ">=3.4" -version = "0.3.2" [package.dependencies] GitPython = "*" @@ -598,12 +687,12 @@ jinja2 = "*" mkdocs = ">=0.17" [[package]] -category = "dev" -description = "A Material Design theme for MkDocs" name = "mkdocs-material" +version = "8.2.7" +description = "A Material Design theme for MkDocs" +category = "dev" optional = false python-versions = ">=3.6" -version = "8.2.7" [package.dependencies] jinja2 = ">=2.11.1,<3.1" @@ -614,20 +703,20 @@ pygments = ">=2.10" pymdown-extensions = ">=9.0" [[package]] -category = "dev" -description = "Extension pack for Python Markdown." name = "mkdocs-material-extensions" +version = "1.0.3" +description = "Extension pack for Python Markdown." +category = "dev" optional = false python-versions = ">=3.6" -version = "1.0.3" [[package]] -category = "dev" -description = "Optional static typing for Python" name = "mypy" +version = "0.950" +description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.6" -version = "0.950" [package.dependencies] mypy-extensions = ">=0.4.3" @@ -641,71 +730,71 @@ python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] -category = "dev" -description = "Experimental type system extensions for programs checked with the mypy typechecker." name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" optional = false python-versions = "*" -version = "0.4.3" [[package]] -category = "dev" -description = "Core utilities for Python packages" name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.6" -version = "21.3" [package.dependencies] pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] -category = "dev" -description = "Utility library for gitignore style pattern matching of file paths." name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "0.9.0" [[package]] -category = "dev" -description = "Python Build Reasonableness" name = "pbr" +version = "5.8.0" +description = "Python Build Reasonableness" +category = "dev" optional = false python-versions = ">=2.6" -version = "5.8.0" [[package]] -category = "dev" -description = "Auto-generate API documentation for Python projects." name = "pdoc3" +version = "0.10.0" +description = "Auto-generate API documentation for Python projects." 
+category = "dev" optional = false python-versions = ">= 3.6" -version = "0.10.0" [package.dependencies] mako = "*" markdown = ">=3.0" [[package]] -category = "dev" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." name = "platformdirs" +version = "2.4.0" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = ">=3.6" -version = "2.4.0" [package.extras] docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] -category = "dev" -description = "plugin and hook calling mechanisms for python" name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.6" -version = "1.0.0" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} @@ -715,28 +804,36 @@ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] +name = "publication" +version = "0.0.3" +description = "Publication helps you maintain public-api-friendly modules by preventing unintentional access to private implementation details via introspection." category = "dev" -description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = "*" + +[[package]] name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "1.11.0" [[package]] -category = "dev" -description = "Python style guide checker" name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.8.0" [[package]] -category = "main" -description = "Data validation and settings management using python 3.6 type hinting" name = "pydantic" +version = "1.9.0" +description = "Data validation and settings management using python 3.6 type hinting" +category = "main" optional = true python-versions = ">=3.6.1" -version = "1.9.0" [package.dependencies] dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} @@ -747,50 +844,50 @@ dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] [[package]] -category = "dev" -description = "passive checker of Python programs" name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.4.0" [[package]] -category = "dev" -description = "Pygments is a syntax highlighting package written in Python." name = "pygments" +version = "2.11.2" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" optional = false python-versions = ">=3.5" -version = "2.11.2" [[package]] -category = "dev" -description = "Extension pack for Python Markdown." name = "pymdown-extensions" +version = "9.1" +description = "Extension pack for Python Markdown." 
+category = "dev" optional = false python-versions = ">=3.6" -version = "9.1" [package.dependencies] Markdown = ">=3.2" [[package]] -category = "dev" -description = "Python parsing module" name = "pyparsing" +version = "3.0.6" +description = "Python parsing module" +category = "dev" optional = false python-versions = ">=3.6" -version = "3.0.6" [package.extras] diagrams = ["jinja2", "railroad-diagrams"] [[package]] -category = "dev" -description = "pytest: simple powerful testing with Python" name = "pytest" +version = "7.0.1" +description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.6" -version = "7.0.1" [package.dependencies] atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -807,12 +904,12 @@ tomli = ">=1.0.0" testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] -category = "dev" -description = "Pytest support for asyncio." name = "pytest-asyncio" +version = "0.16.0" +description = "Pytest support for asyncio." +category = "dev" optional = false python-versions = ">= 3.6" -version = "0.16.0" [package.dependencies] pytest = ">=5.4.0" @@ -821,12 +918,12 @@ pytest = ">=5.4.0" testing = ["coverage", "hypothesis (>=5.7.1)"] [[package]] -category = "dev" -description = "Pytest plugin for measuring coverage." name = "pytest-cov" +version = "3.0.0" +description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.6" -version = "3.0.0" [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} @@ -836,12 +933,24 @@ pytest = ">=4.6" testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] [[package]] +name = "pytest-forked" +version = "1.4.0" +description = "run tests in isolated forked subprocesses" category = "dev" -description = "Thin-wrapper around the mock package for easier use with pytest" -name = "pytest-mock" optional = false python-versions = ">=3.6" + +[package.dependencies] +py = "*" +pytest = ">=3.10" + +[[package]] +name = "pytest-mock" version = "3.6.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" +optional = false +python-versions = ">=3.6" [package.dependencies] pytest = ">=5.0" @@ -850,42 +959,60 @@ pytest = ">=5.0" dev = ["pre-commit", "tox", "pytest-asyncio"] [[package]] -category = "main" -description = "Extensions to the standard Python datetime module" +name = "pytest-xdist" +version = "2.5.0" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" +pytest-forked = "*" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + +[[package]] name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -version = "2.8.2" [package.dependencies] six = ">=1.5" [[package]] -category = "dev" -description = "YAML parser and emitter for Python" name = "pyyaml" +version = "5.4.1" +description = "YAML parser and emitter for Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -version = "5.4.1" [[package]] -category = "dev" -description = "A custom YAML tag for referencing 
environment variables in YAML files. " name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " +category = "dev" optional = false python-versions = ">=3.6" -version = "0.1" [package.dependencies] pyyaml = "*" [[package]] -category = "dev" -description = "Code Metrics in Python" name = "radon" +version = "5.1.0" +description = "Code Metrics in Python" +category = "dev" optional = false python-versions = "*" -version = "5.1.0" [package.dependencies] colorama = {version = ">=0.4.1", markers = "python_version > \"3.4\""} @@ -893,12 +1020,12 @@ future = "*" mando = ">=0.6,<0.7" [[package]] -category = "dev" -description = "Python HTTP for Humans." name = "requests" +version = "2.26.0" +description = "Python HTTP for Humans." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -version = "2.26.0" [package.dependencies] certifi = ">=2017.4.17" @@ -911,12 +1038,24 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] +name = "retry" +version = "0.9.2" +description = "Easy to use retry decorator." category = "dev" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = "*" + +[package.dependencies] +decorator = ">=3.4.2" +py = ">=1.4.26,<2.0.0" + +[[package]] name = "ruamel.yaml" +version = "0.17.17" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "dev" optional = false python-versions = ">=3" -version = "0.17.17" [package.dependencies] "ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.10\""} @@ -926,20 +1065,20 @@ docs = ["ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] -category = "dev" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" name = "ruamel.yaml.clib" +version = "0.2.6" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "dev" optional = false python-versions = ">=3.5" -version = "0.2.6" [[package]] -category = "main" -description = "An Amazon S3 Transfer Manager" name = "s3transfer" +version = "0.5.0" +description = "An Amazon S3 Transfer Manager" +category = "main" optional = false python-versions = ">= 3.6" -version = "0.5.0" [package.dependencies] botocore = ">=1.12.36,<2.0a.0" @@ -948,40 +1087,40 @@ botocore = ">=1.12.36,<2.0a.0" crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] [[package]] -category = "main" -description = "Python 2 and 3 compatibility utilities" name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "1.16.0" [[package]] -category = "dev" -description = "A pure Python implementation of a sliding window memory map manager" name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" optional = false python-versions = ">=3.6" -version = "5.0.0" [[package]] -category = "dev" -description = "Manage dynamic plugins for Python applications" name = "stevedore" +version = "3.5.0" +description = "Manage dynamic plugins for Python applications" +category = "dev" optional = 
false python-versions = ">=3.6" -version = "3.5.0" [package.dependencies] importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] -category = "dev" -description = "A collection of helpers and mock objects for unit tests and doc tests." name = "testfixtures" +version = "6.18.3" +description = "A collection of helpers and mock objects for unit tests and doc tests." +category = "dev" optional = false python-versions = "*" -version = "6.18.3" [package.extras] build = ["setuptools-git", "wheel", "twine"] @@ -989,44 +1128,44 @@ docs = ["sphinx", "zope.component", "sybil", "twisted", "mock", "django (<2)", " test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"] [[package]] -category = "dev" -description = "Python Library for Tom's Obvious, Minimal Language" name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "0.10.2" [[package]] -category = "dev" -description = "A lil' TOML parser" name = "tomli" +version = "1.2.2" +description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.6" -version = "1.2.2" [[package]] -category = "dev" -description = "a fork of Python 2 and 3 ast modules with type comment support" name = "typed-ast" +version = "1.4.3" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" optional = false python-versions = "*" -version = "1.4.3" [[package]] -category = "main" -description = "Backported and Experimental Type Hints for Python 3.6+" name = "typing-extensions" +version = "4.0.1" +description = "Backported and Experimental Type Hints for Python 3.6+" +category = "main" optional = false python-versions = ">=3.6" -version = "4.0.1" [[package]] -category = "main" -description = "HTTP library with thread-safe connection pooling, file post, and more." name = "urllib3" +version = "1.26.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "1.26.7" [package.extras] brotli = ["brotlipy (>=0.6.0)"] @@ -1034,31 +1173,31 @@ secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "cer socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] -category = "dev" -description = "Filesystem events monitoring" name = "watchdog" +version = "2.1.6" +description = "Filesystem events monitoring" +category = "dev" optional = false python-versions = ">=3.6" -version = "2.1.6" [package.extras] watchmedo = ["PyYAML (>=3.10)"] [[package]] -category = "main" -description = "Module for decorators, wrappers and monkey patching." name = "wrapt" +version = "1.13.3" +description = "Module for decorators, wrappers and monkey patching." 
+category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "1.13.3" [[package]] -category = "dev" -description = "Monitor code metrics for Python on your CI server" name = "xenon" +version = "0.9.0" +description = "Monitor code metrics for Python on your CI server" +category = "dev" optional = false python-versions = "*" -version = "0.9.0" [package.dependencies] PyYAML = ">=4.2b1,<7.0" @@ -1066,695 +1205,735 @@ radon = ">=4,<6" requests = ">=2.0,<3.0" [[package]] -category = "dev" -description = "Backport of pathlib-compatible object wrapper for zip files" name = "zipp" +version = "3.6.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" optional = false python-versions = ">=3.6" -version = "3.6.0" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = [ - "pytest (>=4.6)", - "pytest-checkdocs (>=2.4)", - "pytest-flake8", - "pytest-cov", - "pytest-enabler (>=1.0.1)", - "jaraco.itertools", - "func-timeout", - "pytest-black (>=0.3.7)", - "pytest-mypy", -] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] pydantic = ["pydantic", "email-validator"] [metadata] -content-hash = "cf8b7764b84b398aeca74c548ad9872294a4f772ea1b1a93899ac6d70b2ef2b6" lock-version = "1.1" python-versions = "^3.6.2" +content-hash = "2c4ec9fac23dea3893cf3c60d5ed5728d63c4c46252a14ffbee52d30ebaec74f" [metadata.files] atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, +] +aws-cdk-lib = [ + {file = "aws-cdk-lib-2.23.0.tar.gz", hash = "sha256:3e07d1c6b320795d38567be183e56c2125b4c4492589775257aabec3d3e2a384"}, + {file = "aws_cdk_lib-2.23.0-py3-none-any.whl", hash = "sha256:1ec04a146d3364cd0fc4da08e3f8ca25e28df68abaa90641936db17a415ca4bc"}, ] aws-xray-sdk = [ - {file = "aws-xray-sdk-2.9.0.tar.gz", hash = "sha256:b0cd972db218d4d8f7b53ad806fc6184626b924c4997ae58fc9f2a8cd1281568"}, - {file = "aws_xray_sdk-2.9.0-py2.py3-none-any.whl", hash = "sha256:98216b3ac8281b51b59a8703f8ec561c460807d9d0679838f5c0179d381d7e58"}, + {file = "aws-xray-sdk-2.9.0.tar.gz", hash = "sha256:b0cd972db218d4d8f7b53ad806fc6184626b924c4997ae58fc9f2a8cd1281568"}, + {file = "aws_xray_sdk-2.9.0-py2.py3-none-any.whl", hash = "sha256:98216b3ac8281b51b59a8703f8ec561c460807d9d0679838f5c0179d381d7e58"}, ] bandit = [ - {file = "bandit-1.7.1-py3-none-any.whl", hash = 
"sha256:f5acd838e59c038a159b5c621cf0f8270b279e884eadd7b782d7491c02add0d4"}, - {file = "bandit-1.7.1.tar.gz", hash = "sha256:a81b00b5436e6880fa8ad6799bc830e02032047713cbb143a12939ac67eb756c"}, + {file = "bandit-1.7.1-py3-none-any.whl", hash = "sha256:f5acd838e59c038a159b5c621cf0f8270b279e884eadd7b782d7491c02add0d4"}, + {file = "bandit-1.7.1.tar.gz", hash = "sha256:a81b00b5436e6880fa8ad6799bc830e02032047713cbb143a12939ac67eb756c"}, ] black = [ - {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, - {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, + {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, + {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, ] boto3 = [ - {file = "boto3-1.21.44-py3-none-any.whl", hash = "sha256:0789842ca7d722723d7e9fae2158aea6f304c14df08929f9c62b6a277705ff39"}, - {file = "boto3-1.21.44.tar.gz", hash = "sha256:1300661bd4defa42d7e019d515fbfd2984170cf3f5c0bf6bc275fbd9498faf5f"}, + {file = "boto3-1.21.44-py3-none-any.whl", hash = "sha256:0789842ca7d722723d7e9fae2158aea6f304c14df08929f9c62b6a277705ff39"}, + {file = "boto3-1.21.44.tar.gz", hash = "sha256:1300661bd4defa42d7e019d515fbfd2984170cf3f5c0bf6bc275fbd9498faf5f"}, ] botocore = [ - {file = "botocore-1.24.44-py3-none-any.whl", hash = "sha256:ed07772c924984e5b3c1005f7ba4600cebd4169c23307cf6e92cccadf0b5d2e7"}, - {file = "botocore-1.24.44.tar.gz", hash = "sha256:0030a11eac972be46859263820885ba650503622c5acfe58966f482d42cc538d"}, + {file = "botocore-1.24.44-py3-none-any.whl", hash = "sha256:ed07772c924984e5b3c1005f7ba4600cebd4169c23307cf6e92cccadf0b5d2e7"}, + {file = "botocore-1.24.44.tar.gz", hash = "sha256:0030a11eac972be46859263820885ba650503622c5acfe58966f482d42cc538d"}, +] +cattrs = [ + {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, + {file = "cattrs-1.0.0.tar.gz", hash = "sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"}, + {file = "cattrs-22.1.0-py3-none-any.whl", hash = "sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"}, + {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, ] certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.8.tar.gz", hash = "sha256:735e240d9a8506778cd7a453d97e817e536bb1fc29f4f6961ce297b9c7a917b0"}, - {file = "charset_normalizer-2.0.8-py3-none-any.whl", hash = "sha256:83fcdeb225499d6344c8f7f34684c2981270beacc32ede2e669e94f7fa544405"}, + {file = "charset-normalizer-2.0.8.tar.gz", hash = "sha256:735e240d9a8506778cd7a453d97e817e536bb1fc29f4f6961ce297b9c7a917b0"}, + {file = "charset_normalizer-2.0.8-py3-none-any.whl", hash = "sha256:83fcdeb225499d6344c8f7f34684c2981270beacc32ede2e669e94f7fa544405"}, ] click = [ - 
{file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, - {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, + {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, ] colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +constructs = [ + {file = "constructs-10.1.1-py3-none-any.whl", hash = "sha256:c1f3deb196f54e070ded3c92c4339f73ef2b6022d35fb34908c0ebfa7ef8a640"}, + {file = "constructs-10.1.1.tar.gz", hash = "sha256:6ce0dd1352367237b5d7c51a25740482c852735d2a5e067c536acc1657f39ea5"}, ] coverage = [ - {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, - {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, - {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, - {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, - {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, - {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, - {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, - {file = 
"coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, - {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, - {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, - {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, - {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, - {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, - {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, - {file = 
"coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, - {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, - {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, - {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, - {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, - {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, - {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, - {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, + {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, + {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, + {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, + {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, + {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, + {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, + {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, + {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, + {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, + {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, + {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, + {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, + {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash 
= "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, + {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, + {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, + {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, + {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, + {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, + {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, + {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, + {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, + {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, + {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, + {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, + {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, + {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, + {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, + {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, + {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, + {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, + {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, + {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, + {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, + {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, + {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, + {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, + {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, + {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, + {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, + {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, + {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, + {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, + {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, + {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, + {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, + {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, + {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, ] dataclasses = [ - {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, - {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, + {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, + {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, +] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] dnspython = [ - {file = "dnspython-2.1.0-py3-none-any.whl", hash = "sha256:95d12f6ef0317118d2a1a6fc49aac65ffec7eb8087474158f42f26a639135216"}, - {file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"}, + {file = "dnspython-2.1.0-py3-none-any.whl", hash = "sha256:95d12f6ef0317118d2a1a6fc49aac65ffec7eb8087474158f42f26a639135216"}, + {file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"}, ] email-validator = [ - {file = "email_validator-1.1.3-py2.py3-none-any.whl", hash = "sha256:5675c8ceb7106a37e40e2698a57c056756bf3f272cfa8682a4f87ebd95d8440b"}, - {file = "email_validator-1.1.3.tar.gz", hash = "sha256:aa237a65f6f4da067119b7df3f13e89c25c051327b2b5b66dc075f33d62480d7"}, + {file = "email_validator-1.1.3-py2.py3-none-any.whl", hash = 
"sha256:5675c8ceb7106a37e40e2698a57c056756bf3f272cfa8682a4f87ebd95d8440b"}, + {file = "email_validator-1.1.3.tar.gz", hash = "sha256:aa237a65f6f4da067119b7df3f13e89c25c051327b2b5b66dc075f33d62480d7"}, ] eradicate = [ - {file = "eradicate-2.0.0.tar.gz", hash = "sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"}, + {file = "eradicate-2.0.0.tar.gz", hash = "sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"}, +] +exceptiongroup = [ + {file = "exceptiongroup-1.0.0rc5-py3-none-any.whl", hash = "sha256:295a9d7847f9ad08267f47c701a676ec70a64200a360dd49eb513f72209b09f4"}, + {file = "exceptiongroup-1.0.0rc5.tar.gz", hash = "sha256:665422550b9653acd46e9cd35d933f28c5158ca4c058c53619cfa112915cd69e"}, +] +execnet = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, ] fastjsonschema = [ - {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, - {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, + {file = "fastjsonschema-2.15.3-py3-none-any.whl", hash = "sha256:ddb0b1d8243e6e3abb822bd14e447a89f4ab7439342912d590444831fa00b6a0"}, + {file = "fastjsonschema-2.15.3.tar.gz", hash = "sha256:0a572f0836962d844c1fc435e200b2e4f4677e4e6611a2e3bdd01ba697c275ec"}, ] flake8 = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, ] flake8-black = [ - {file = "flake8-black-0.2.3.tar.gz", hash = "sha256:c199844bc1b559d91195ebe8620216f21ed67f2cc1ff6884294c91a0d2492684"}, - {file = "flake8_black-0.2.3-py3-none-any.whl", hash = "sha256:cc080ba5b3773b69ba102b6617a00cc4ecbad8914109690cfda4d565ea435d96"}, + {file = "flake8-black-0.2.3.tar.gz", hash = "sha256:c199844bc1b559d91195ebe8620216f21ed67f2cc1ff6884294c91a0d2492684"}, + {file = "flake8_black-0.2.3-py3-none-any.whl", hash = "sha256:cc080ba5b3773b69ba102b6617a00cc4ecbad8914109690cfda4d565ea435d96"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-22.4.25.tar.gz", hash = "sha256:f7c080563fca75ee6b205d06b181ecba22b802babb96b0b084cc7743d6908a55"}, - {file = "flake8_bugbear-22.4.25-py3-none-any.whl", hash = "sha256:ec374101cddf65bd7a96d393847d74e58d3b98669dbf9768344c39b6290e8bd6"}, + {file = "flake8-bugbear-22.4.25.tar.gz", hash = "sha256:f7c080563fca75ee6b205d06b181ecba22b802babb96b0b084cc7743d6908a55"}, + {file = "flake8_bugbear-22.4.25-py3-none-any.whl", hash = "sha256:ec374101cddf65bd7a96d393847d74e58d3b98669dbf9768344c39b6290e8bd6"}, ] flake8-builtins = [ - {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, - {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"}, + {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, + {file = 
"flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"}, ] flake8-comprehensions = [ - {file = "flake8-comprehensions-3.7.0.tar.gz", hash = "sha256:6b3218b2dde8ac5959c6476cde8f41a79e823c22feb656be2710cd2a3232cef9"}, - {file = "flake8_comprehensions-3.7.0-py3-none-any.whl", hash = "sha256:a5d7aea6315bbbd6fbcb2b4e80bff6a54d1600155e26236e555d0c6fe1d62522"}, + {file = "flake8-comprehensions-3.7.0.tar.gz", hash = "sha256:6b3218b2dde8ac5959c6476cde8f41a79e823c22feb656be2710cd2a3232cef9"}, + {file = "flake8_comprehensions-3.7.0-py3-none-any.whl", hash = "sha256:a5d7aea6315bbbd6fbcb2b4e80bff6a54d1600155e26236e555d0c6fe1d62522"}, ] flake8-debugger = [ - {file = "flake8-debugger-4.0.0.tar.gz", hash = "sha256:e43dc777f7db1481db473210101ec2df2bd39a45b149d7218a618e954177eda6"}, - {file = "flake8_debugger-4.0.0-py3-none-any.whl", hash = "sha256:82e64faa72e18d1bdd0000407502ebb8ecffa7bc027c62b9d4110ce27c091032"}, + {file = "flake8-debugger-4.0.0.tar.gz", hash = "sha256:e43dc777f7db1481db473210101ec2df2bd39a45b149d7218a618e954177eda6"}, + {file = "flake8_debugger-4.0.0-py3-none-any.whl", hash = "sha256:82e64faa72e18d1bdd0000407502ebb8ecffa7bc027c62b9d4110ce27c091032"}, ] flake8-eradicate = [ - {file = "flake8-eradicate-1.2.1.tar.gz", hash = "sha256:e486f8ab7e2dba3667223688e9239158fbf4ecaa88125e2283bcda81171412b7"}, - {file = "flake8_eradicate-1.2.1-py3-none-any.whl", hash = "sha256:00d77faefb64cef18b3c1b48a004c3a2ad663aa3cf85650f422437d25ece6441"}, + {file = "flake8-eradicate-1.2.1.tar.gz", hash = "sha256:e486f8ab7e2dba3667223688e9239158fbf4ecaa88125e2283bcda81171412b7"}, + {file = "flake8_eradicate-1.2.1-py3-none-any.whl", hash = "sha256:00d77faefb64cef18b3c1b48a004c3a2ad663aa3cf85650f422437d25ece6441"}, ] flake8-fixme = [ - {file = "flake8-fixme-1.1.1.tar.gz", hash = "sha256:50cade07d27a4c30d4f12351478df87339e67640c83041b664724bda6d16f33a"}, - {file = "flake8_fixme-1.1.1-py2.py3-none-any.whl", hash = "sha256:226a6f2ef916730899f29ac140bed5d4a17e5aba79f00a0e3ae1eff1997cb1ac"}, + {file = "flake8-fixme-1.1.1.tar.gz", hash = "sha256:50cade07d27a4c30d4f12351478df87339e67640c83041b664724bda6d16f33a"}, + {file = "flake8_fixme-1.1.1-py2.py3-none-any.whl", hash = "sha256:226a6f2ef916730899f29ac140bed5d4a17e5aba79f00a0e3ae1eff1997cb1ac"}, ] flake8-isort = [ - {file = "flake8-isort-4.1.1.tar.gz", hash = "sha256:d814304ab70e6e58859bc5c3e221e2e6e71c958e7005239202fee19c24f82717"}, - {file = "flake8_isort-4.1.1-py3-none-any.whl", hash = "sha256:c4e8b6dcb7be9b71a02e6e5d4196cefcef0f3447be51e82730fb336fff164949"}, + {file = "flake8-isort-4.1.1.tar.gz", hash = "sha256:d814304ab70e6e58859bc5c3e221e2e6e71c958e7005239202fee19c24f82717"}, + {file = "flake8_isort-4.1.1-py3-none-any.whl", hash = "sha256:c4e8b6dcb7be9b71a02e6e5d4196cefcef0f3447be51e82730fb336fff164949"}, ] flake8-variables-names = [ - {file = "flake8_variables_names-0.0.4.tar.gz", hash = "sha256:d6fa0571a807c72940b5773827c5760421ea6f8206595ff0a8ecfa01e42bf2cf"}, + {file = "flake8_variables_names-0.0.4.tar.gz", hash = "sha256:d6fa0571a807c72940b5773827c5760421ea6f8206595ff0a8ecfa01e42bf2cf"}, ] future = [ - {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, + {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] ghp-import = [ - {file = "ghp-import-2.0.2.tar.gz", hash = "sha256:947b3771f11be850c852c64b561c600fdddf794bab363060854c1ee7ad05e071"}, - {file = 
"ghp_import-2.0.2-py3-none-any.whl", hash = "sha256:5f8962b30b20652cdffa9c5a9812f7de6bcb56ec475acac579807719bf242c46"}, + {file = "ghp-import-2.0.2.tar.gz", hash = "sha256:947b3771f11be850c852c64b561c600fdddf794bab363060854c1ee7ad05e071"}, + {file = "ghp_import-2.0.2-py3-none-any.whl", hash = "sha256:5f8962b30b20652cdffa9c5a9812f7de6bcb56ec475acac579807719bf242c46"}, ] gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] gitpython = [ - {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, - {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, + {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, + {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, ] idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, - {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, + {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, + {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, +] +importlib-resources = [ + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, ] iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + {file = "isort-5.10.1-py3-none-any.whl", hash = 
"sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] jinja2 = [ - {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, - {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, ] jmespath = [ - {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, - {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, + {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, + {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, +] +jsii = [ + {file = "jsii-1.57.0-py3-none-any.whl", hash = "sha256:4888091986a9ed8d50b042cc9c35a9564dd54c19e78adb890bf06d9ffac1b325"}, + {file = "jsii-1.57.0.tar.gz", hash = "sha256:ff7a3c51c1a653dd8a4342043b5f8e40b928bc617e3141e0d5d66175d22a754b"}, ] mako = [ - {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, - {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, + {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, + {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, ] mando = [ - {file = "mando-0.6.4-py2.py3-none-any.whl", hash = "sha256:4ce09faec7e5192ffc3c57830e26acba0fd6cd11e1ee81af0d4df0657463bd1c"}, - {file = "mando-0.6.4.tar.gz", hash = "sha256:79feb19dc0f097daa64a1243db578e7674909b75f88ac2220f1c065c10a0d960"}, + {file = "mando-0.6.4-py2.py3-none-any.whl", hash = "sha256:4ce09faec7e5192ffc3c57830e26acba0fd6cd11e1ee81af0d4df0657463bd1c"}, + {file = "mando-0.6.4.tar.gz", hash = "sha256:79feb19dc0f097daa64a1243db578e7674909b75f88ac2220f1c065c10a0d960"}, ] markdown = [ - {file = "Markdown-3.3.5-py3-none-any.whl", hash = "sha256:0d2d09f75cb8d1ffc6770c65c61770b23a61708101f47bda416a002a0edbc480"}, - {file = "Markdown-3.3.5.tar.gz", hash = "sha256:26e9546bfbcde5fcd072bd8f612c9c1b6e2677cb8aadbdf65206674f46dde069"}, + {file = "Markdown-3.3.5-py3-none-any.whl", hash = "sha256:0d2d09f75cb8d1ffc6770c65c61770b23a61708101f47bda416a002a0edbc480"}, + {file = "Markdown-3.3.5.tar.gz", hash = "sha256:26e9546bfbcde5fcd072bd8f612c9c1b6e2677cb8aadbdf65206674f46dde069"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = 
"MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = 
"MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", 
hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = 
"MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, ] mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = 
"sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] mergedeep = [ - {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, - {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, ] mike = [ - {file = "mike-0.6.0-py3-none-any.whl", hash = "sha256:cef9b9c803ff5c3fbb410f51f5ceb00902a9fe16d9fabd93b69c65cf481ab5a1"}, - {file = "mike-0.6.0.tar.gz", hash = "sha256:6d6239de2a60d733da2f34617e9b9a14c4b5437423b47e524f14dc96d6ce5f2f"}, + {file = "mike-0.6.0-py3-none-any.whl", hash = "sha256:cef9b9c803ff5c3fbb410f51f5ceb00902a9fe16d9fabd93b69c65cf481ab5a1"}, + {file = "mike-0.6.0.tar.gz", hash = "sha256:6d6239de2a60d733da2f34617e9b9a14c4b5437423b47e524f14dc96d6ce5f2f"}, ] mkdocs = [ - {file = "mkdocs-1.2.3-py3-none-any.whl", hash = "sha256:a1fa8c2d0c1305d7fc2b9d9f607c71778572a8b110fb26642aa00296c9e6d072"}, - {file = "mkdocs-1.2.3.tar.gz", hash = "sha256:89f5a094764381cda656af4298727c9f53dc3e602983087e1fe96ea1df24f4c1"}, + {file = "mkdocs-1.2.3-py3-none-any.whl", hash = "sha256:a1fa8c2d0c1305d7fc2b9d9f607c71778572a8b110fb26642aa00296c9e6d072"}, + {file = "mkdocs-1.2.3.tar.gz", hash = "sha256:89f5a094764381cda656af4298727c9f53dc3e602983087e1fe96ea1df24f4c1"}, ] mkdocs-git-revision-date-plugin = [ - {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash = "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"}, + {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash = "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"}, ] mkdocs-material = [ - {file = "mkdocs-material-8.2.7.tar.gz", hash = "sha256:3314d94ccc11481b1a3aa4f7babb4fb2bc47daa2fa8ace2463665952116f409b"}, - {file = "mkdocs_material-8.2.7-py2.py3-none-any.whl", hash = "sha256:20c13aa0a54841e1f1c080edb0e3573407884e4abea51ee25573061189bec83e"}, + {file = "mkdocs-material-8.2.7.tar.gz", hash = "sha256:3314d94ccc11481b1a3aa4f7babb4fb2bc47daa2fa8ace2463665952116f409b"}, + {file = "mkdocs_material-8.2.7-py2.py3-none-any.whl", hash = "sha256:20c13aa0a54841e1f1c080edb0e3573407884e4abea51ee25573061189bec83e"}, ] mkdocs-material-extensions = [ - {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, - {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"}, + {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, + {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"}, ] mypy = [ - {file = "mypy-0.950-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b"}, - {file = "mypy-0.950-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0"}, - {file = "mypy-0.950-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e7647df0f8fc947388e6251d728189cfadb3b1e558407f93254e35abc026e22"}, - {file = "mypy-0.950-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eaff8156016487c1af5ffa5304c3e3fd183edcb412f3e9c72db349faf3f6e0eb"}, - {file = "mypy-0.950-cp310-cp310-win_amd64.whl", hash = "sha256:563514c7dc504698fb66bb1cf897657a173a496406f1866afae73ab5b3cdb334"}, - {file = "mypy-0.950-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dd4d670eee9610bf61c25c940e9ade2d0ed05eb44227275cce88701fee014b1f"}, - {file = "mypy-0.950-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca75ecf2783395ca3016a5e455cb322ba26b6d33b4b413fcdedfc632e67941dc"}, - {file = "mypy-0.950-cp36-cp36m-win_amd64.whl", hash = "sha256:6003de687c13196e8a1243a5e4bcce617d79b88f83ee6625437e335d89dfebe2"}, - {file = "mypy-0.950-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c653e4846f287051599ed8f4b3c044b80e540e88feec76b11044ddc5612ffed"}, - {file = "mypy-0.950-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e19736af56947addedce4674c0971e5dceef1b5ec7d667fe86bcd2b07f8f9075"}, - {file = "mypy-0.950-cp37-cp37m-win_amd64.whl", hash = "sha256:ef7beb2a3582eb7a9f37beaf38a28acfd801988cde688760aea9e6cc4832b10b"}, - {file = "mypy-0.950-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0112752a6ff07230f9ec2f71b0d3d4e088a910fdce454fdb6553e83ed0eced7d"}, - {file = "mypy-0.950-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee0a36edd332ed2c5208565ae6e3a7afc0eabb53f5327e281f2ef03a6bc7687a"}, - {file = "mypy-0.950-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77423570c04aca807508a492037abbd72b12a1fb25a385847d191cd50b2c9605"}, - {file = "mypy-0.950-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ce6a09042b6da16d773d2110e44f169683d8cc8687e79ec6d1181a72cb028d2"}, - {file = "mypy-0.950-cp38-cp38-win_amd64.whl", hash = "sha256:5b231afd6a6e951381b9ef09a1223b1feabe13625388db48a8690f8daa9b71ff"}, - {file = "mypy-0.950-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0384d9f3af49837baa92f559d3fa673e6d2652a16550a9ee07fc08c736f5e6f8"}, - {file = "mypy-0.950-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1fdeb0a0f64f2a874a4c1f5271f06e40e1e9779bf55f9567f149466fc7a55038"}, - {file = "mypy-0.950-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:61504b9a5ae166ba5ecfed9e93357fd51aa693d3d434b582a925338a2ff57fd2"}, - {file = "mypy-0.950-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a952b8bc0ae278fc6316e6384f67bb9a396eb30aced6ad034d3a76120ebcc519"}, - {file = "mypy-0.950-cp39-cp39-win_amd64.whl", hash = "sha256:eaea21d150fb26d7b4856766e7addcf929119dd19fc832b22e71d942835201ef"}, - {file = "mypy-0.950-py3-none-any.whl", hash = "sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb"}, - {file = "mypy-0.950.tar.gz", hash = "sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de"}, + {file = "mypy-0.950-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b"}, + {file = "mypy-0.950-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0"}, + 
{file = "mypy-0.950-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e7647df0f8fc947388e6251d728189cfadb3b1e558407f93254e35abc026e22"}, + {file = "mypy-0.950-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eaff8156016487c1af5ffa5304c3e3fd183edcb412f3e9c72db349faf3f6e0eb"}, + {file = "mypy-0.950-cp310-cp310-win_amd64.whl", hash = "sha256:563514c7dc504698fb66bb1cf897657a173a496406f1866afae73ab5b3cdb334"}, + {file = "mypy-0.950-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dd4d670eee9610bf61c25c940e9ade2d0ed05eb44227275cce88701fee014b1f"}, + {file = "mypy-0.950-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca75ecf2783395ca3016a5e455cb322ba26b6d33b4b413fcdedfc632e67941dc"}, + {file = "mypy-0.950-cp36-cp36m-win_amd64.whl", hash = "sha256:6003de687c13196e8a1243a5e4bcce617d79b88f83ee6625437e335d89dfebe2"}, + {file = "mypy-0.950-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c653e4846f287051599ed8f4b3c044b80e540e88feec76b11044ddc5612ffed"}, + {file = "mypy-0.950-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e19736af56947addedce4674c0971e5dceef1b5ec7d667fe86bcd2b07f8f9075"}, + {file = "mypy-0.950-cp37-cp37m-win_amd64.whl", hash = "sha256:ef7beb2a3582eb7a9f37beaf38a28acfd801988cde688760aea9e6cc4832b10b"}, + {file = "mypy-0.950-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0112752a6ff07230f9ec2f71b0d3d4e088a910fdce454fdb6553e83ed0eced7d"}, + {file = "mypy-0.950-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee0a36edd332ed2c5208565ae6e3a7afc0eabb53f5327e281f2ef03a6bc7687a"}, + {file = "mypy-0.950-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77423570c04aca807508a492037abbd72b12a1fb25a385847d191cd50b2c9605"}, + {file = "mypy-0.950-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ce6a09042b6da16d773d2110e44f169683d8cc8687e79ec6d1181a72cb028d2"}, + {file = "mypy-0.950-cp38-cp38-win_amd64.whl", hash = "sha256:5b231afd6a6e951381b9ef09a1223b1feabe13625388db48a8690f8daa9b71ff"}, + {file = "mypy-0.950-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0384d9f3af49837baa92f559d3fa673e6d2652a16550a9ee07fc08c736f5e6f8"}, + {file = "mypy-0.950-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1fdeb0a0f64f2a874a4c1f5271f06e40e1e9779bf55f9567f149466fc7a55038"}, + {file = "mypy-0.950-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:61504b9a5ae166ba5ecfed9e93357fd51aa693d3d434b582a925338a2ff57fd2"}, + {file = "mypy-0.950-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a952b8bc0ae278fc6316e6384f67bb9a396eb30aced6ad034d3a76120ebcc519"}, + {file = "mypy-0.950-cp39-cp39-win_amd64.whl", hash = "sha256:eaea21d150fb26d7b4856766e7addcf929119dd19fc832b22e71d942835201ef"}, + {file = "mypy-0.950-py3-none-any.whl", hash = "sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb"}, + {file = "mypy-0.950.tar.gz", hash = "sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de"}, ] mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = 
"sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] pbr = [ - {file = "pbr-5.8.0-py2.py3-none-any.whl", hash = "sha256:176e8560eaf61e127817ef93d8a844803abb27a4d4637f0ff3bb783129be2e0a"}, - {file = "pbr-5.8.0.tar.gz", hash = "sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"}, + {file = "pbr-5.8.0-py2.py3-none-any.whl", hash = "sha256:176e8560eaf61e127817ef93d8a844803abb27a4d4637f0ff3bb783129be2e0a"}, + {file = "pbr-5.8.0.tar.gz", hash = "sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"}, ] pdoc3 = [ - {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, - {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, + {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, + {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, ] platformdirs = [ - {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, - {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, + {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, + {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, ] pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +publication = [ + {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"}, + {file = "publication-0.0.3.tar.gz", hash = "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4"}, ] py = [ - {file = 
"py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pycodestyle = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] pydantic = [ - {file = "pydantic-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5"}, - {file = "pydantic-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab624700dc145aa809e6f3ec93fb8e7d0f99d9023b713f6a953637429b437d37"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d7da6f1c1049eefb718d43d99ad73100c958a5367d30b9321b092771e96c25"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3c3b035103bd4e2e4a28da9da7ef2fa47b00ee4a9cf4f1a735214c1bcd05e0f6"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3011b975c973819883842c5ab925a4e4298dffccf7782c55ec3580ed17dc464c"}, - {file = "pydantic-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:086254884d10d3ba16da0588604ffdc5aab3f7f09557b998373e885c690dd398"}, - {file = "pydantic-1.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0fe476769acaa7fcddd17cadd172b156b53546ec3614a4d880e5d29ea5fbce65"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8e9dcf1ac499679aceedac7e7ca6d8641f0193c591a2d090282aaf8e9445a46"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1e4c28f30e767fd07f2ddc6f74f41f034d1dd6bc526cd59e63a82fe8bb9ef4c"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c86229333cabaaa8c51cf971496f10318c4734cf7b641f08af0a6fbf17ca3054"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c0727bda6e38144d464daec31dff936a82917f431d9c39c39c60a26567eae3ed"}, - {file = "pydantic-1.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:dee5ef83a76ac31ab0c78c10bd7d5437bfdb6358c95b91f1ba7ff7b76f9996a1"}, - {file = "pydantic-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9c9bdb3af48e242838f9f6e6127de9be7063aad17b32215ccc36a09c5cf1070"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee7e3209db1e468341ef41fe263eb655f67f5c5a76c924044314e139a1103a2"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0b6037175234850ffd094ca77bf60fb54b08b5b22bc85865331dd3bda7a02fa1"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b2571db88c636d862b35090ccf92bf24004393f85c8870a37f42d9f23d13e032"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b5ac0f1c83d31b324e57a273da59197c83d1bb18171e512908fe5dc7278a1d6"}, - {file = "pydantic-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bbbc94d0c94dd80b3340fc4f04fd4d701f4b038ebad72c39693c794fd3bc2d9d"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e0896200b6a40197405af18828da49f067c2fa1f821491bc8f5bde241ef3f7d7"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bdfdadb5994b44bd5579cfa7c9b0e1b0e540c952d56f627eb227851cda9db77"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:574936363cd4b9eed8acdd6b80d0143162f2eb654d96cb3a8ee91d3e64bf4cf9"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c556695b699f648c58373b542534308922c46a1cda06ea47bc9ca45ef5b39ae6"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f947352c3434e8b937e3aa8f96f47bdfe6d92779e44bb3f41e4c213ba6a32145"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5e48ef4a8b8c066c4a31409d91d7ca372a774d0212da2787c0d32f8045b1e034"}, - {file = "pydantic-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:96f240bce182ca7fe045c76bcebfa0b0534a1bf402ed05914a6f1dadff91877f"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:815ddebb2792efd4bba5488bc8fde09c29e8ca3227d27cf1c6990fc830fd292b"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c5b77947b9e85a54848343928b597b4f74fc364b70926b3c4441ff52620640c"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c68c3bc88dbda2a6805e9a142ce84782d3930f8fdd9655430d8576315ad97ce"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a79330f8571faf71bf93667d3ee054609816f10a259a109a0738dac983b23c3"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f5a64b64ddf4c99fe201ac2724daada8595ada0d102ab96d019c1555c2d6441d"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a733965f1a2b4090a5238d40d983dcd78f3ecea221c7af1497b845a9709c1721"}, - {file = "pydantic-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cc6a4cb8a118ffec2ca5fcb47afbacb4f16d0ab8b7350ddea5e8ef7bcc53a16"}, - {file = "pydantic-1.9.0-py3-none-any.whl", hash = "sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3"}, - {file = "pydantic-1.9.0.tar.gz", hash = "sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a"}, + {file = "pydantic-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5"}, + {file = "pydantic-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4"}, + {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab624700dc145aa809e6f3ec93fb8e7d0f99d9023b713f6a953637429b437d37"}, + {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c8d7da6f1c1049eefb718d43d99ad73100c958a5367d30b9321b092771e96c25"}, + {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3c3b035103bd4e2e4a28da9da7ef2fa47b00ee4a9cf4f1a735214c1bcd05e0f6"}, + {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3011b975c973819883842c5ab925a4e4298dffccf7782c55ec3580ed17dc464c"}, + {file = "pydantic-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:086254884d10d3ba16da0588604ffdc5aab3f7f09557b998373e885c690dd398"}, + {file = "pydantic-1.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0fe476769acaa7fcddd17cadd172b156b53546ec3614a4d880e5d29ea5fbce65"}, + {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8e9dcf1ac499679aceedac7e7ca6d8641f0193c591a2d090282aaf8e9445a46"}, + {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1e4c28f30e767fd07f2ddc6f74f41f034d1dd6bc526cd59e63a82fe8bb9ef4c"}, + {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c86229333cabaaa8c51cf971496f10318c4734cf7b641f08af0a6fbf17ca3054"}, + {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c0727bda6e38144d464daec31dff936a82917f431d9c39c39c60a26567eae3ed"}, + {file = "pydantic-1.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:dee5ef83a76ac31ab0c78c10bd7d5437bfdb6358c95b91f1ba7ff7b76f9996a1"}, + {file = "pydantic-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9c9bdb3af48e242838f9f6e6127de9be7063aad17b32215ccc36a09c5cf1070"}, + {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee7e3209db1e468341ef41fe263eb655f67f5c5a76c924044314e139a1103a2"}, + {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b6037175234850ffd094ca77bf60fb54b08b5b22bc85865331dd3bda7a02fa1"}, + {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b2571db88c636d862b35090ccf92bf24004393f85c8870a37f42d9f23d13e032"}, + {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b5ac0f1c83d31b324e57a273da59197c83d1bb18171e512908fe5dc7278a1d6"}, + {file = "pydantic-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bbbc94d0c94dd80b3340fc4f04fd4d701f4b038ebad72c39693c794fd3bc2d9d"}, + {file = "pydantic-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e0896200b6a40197405af18828da49f067c2fa1f821491bc8f5bde241ef3f7d7"}, + {file = "pydantic-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bdfdadb5994b44bd5579cfa7c9b0e1b0e540c952d56f627eb227851cda9db77"}, + {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:574936363cd4b9eed8acdd6b80d0143162f2eb654d96cb3a8ee91d3e64bf4cf9"}, + {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c556695b699f648c58373b542534308922c46a1cda06ea47bc9ca45ef5b39ae6"}, + {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f947352c3434e8b937e3aa8f96f47bdfe6d92779e44bb3f41e4c213ba6a32145"}, + {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5e48ef4a8b8c066c4a31409d91d7ca372a774d0212da2787c0d32f8045b1e034"}, + {file = "pydantic-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:96f240bce182ca7fe045c76bcebfa0b0534a1bf402ed05914a6f1dadff91877f"}, + {file = "pydantic-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:815ddebb2792efd4bba5488bc8fde09c29e8ca3227d27cf1c6990fc830fd292b"}, + {file = "pydantic-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c5b77947b9e85a54848343928b597b4f74fc364b70926b3c4441ff52620640c"}, + {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c68c3bc88dbda2a6805e9a142ce84782d3930f8fdd9655430d8576315ad97ce"}, + {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a79330f8571faf71bf93667d3ee054609816f10a259a109a0738dac983b23c3"}, + {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f5a64b64ddf4c99fe201ac2724daada8595ada0d102ab96d019c1555c2d6441d"}, + {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a733965f1a2b4090a5238d40d983dcd78f3ecea221c7af1497b845a9709c1721"}, + {file = "pydantic-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cc6a4cb8a118ffec2ca5fcb47afbacb4f16d0ab8b7350ddea5e8ef7bcc53a16"}, + {file = "pydantic-1.9.0-py3-none-any.whl", hash = "sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3"}, + {file = "pydantic-1.9.0.tar.gz", hash = "sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a"}, ] pyflakes = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] pygments = [ - {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, - {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, + {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, + {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, ] pymdown-extensions = [ - {file = "pymdown-extensions-9.1.tar.gz", hash = "sha256:74247f2c80f1d9e3c7242abe1c16317da36c6f26c7ad4b8a7f457f0ec20f0365"}, - {file = "pymdown_extensions-9.1-py3-none-any.whl", hash = "sha256:b03e66f91f33af4a6e7a0e20c740313522995f69a03d86316b1449766c473d0e"}, + {file = "pymdown-extensions-9.1.tar.gz", hash = "sha256:74247f2c80f1d9e3c7242abe1c16317da36c6f26c7ad4b8a7f457f0ec20f0365"}, + {file = "pymdown_extensions-9.1-py3-none-any.whl", hash = "sha256:b03e66f91f33af4a6e7a0e20c740313522995f69a03d86316b1449766c473d0e"}, ] pyparsing = [ - {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, - {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, + {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, + {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, ] pytest = [ - {file = "pytest-7.0.1-py3-none-any.whl", hash = "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db"}, - {file = "pytest-7.0.1.tar.gz", hash = 
"sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"}, + {file = "pytest-7.0.1-py3-none-any.whl", hash = "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db"}, + {file = "pytest-7.0.1.tar.gz", hash = "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"}, ] pytest-asyncio = [ - {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"}, - {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"}, + {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"}, + {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"}, ] pytest-cov = [ - {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, - {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] +pytest-forked = [ + {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, + {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, ] pytest-mock = [ - {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, - {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, + {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, + {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, +] +pytest-xdist = [ + {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, + {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] pyyaml = [ - {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, - {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, - {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, - {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, - {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, - {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, - {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, - {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, - {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, - {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, - {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, - {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, - {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, - {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, - {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, - {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, - {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, + {file = 
"PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, + {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, + {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, + {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, + {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, + {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = 
"sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, + {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, + {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, + {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] pyyaml-env-tag = [ - {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, - {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, ] radon = [ - {file = "radon-5.1.0-py2.py3-none-any.whl", hash = "sha256:fa74e018197f1fcb54578af0f675d8b8e2342bd8e0b72bef8197bc4c9e645f36"}, - {file = "radon-5.1.0.tar.gz", hash = "sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"}, + {file = "radon-5.1.0-py2.py3-none-any.whl", hash = "sha256:fa74e018197f1fcb54578af0f675d8b8e2342bd8e0b72bef8197bc4c9e645f36"}, + {file = "radon-5.1.0.tar.gz", hash = "sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"}, ] requests = [ - {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, - {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, + {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, + {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, +] +retry = [ + {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, ] "ruamel.yaml" = [ - {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, - {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, + {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, + {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, ] "ruamel.yaml.clib" = [ - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = 
"sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, - {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = 
"sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, + {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, ] s3transfer = [ - {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, - {file = "s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"}, + {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, + {file = 
"s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"}, ] six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] smmap = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] stevedore = [ - {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, - {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, + {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, + {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, ] testfixtures = [ - {file = "testfixtures-6.18.3-py2.py3-none-any.whl", hash = "sha256:6ddb7f56a123e1a9339f130a200359092bd0a6455e31838d6c477e8729bb7763"}, - {file = "testfixtures-6.18.3.tar.gz", hash = "sha256:2600100ae96ffd082334b378e355550fef8b4a529a6fa4c34f47130905c7426d"}, + {file = "testfixtures-6.18.3-py2.py3-none-any.whl", hash = "sha256:6ddb7f56a123e1a9339f130a200359092bd0a6455e31838d6c477e8729bb7763"}, + {file = "testfixtures-6.18.3.tar.gz", hash = "sha256:2600100ae96ffd082334b378e355550fef8b4a529a6fa4c34f47130905c7426d"}, ] toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tomli = [ - {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, - {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, + {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, + {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, ] typed-ast = [ - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = 
"sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, - {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, - {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, - {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, - {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, - {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, - {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, - {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = 
"sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, - {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, - {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, + {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, + {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] urllib3 = [ - {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, - {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, + {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, + {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, ] watchdog = [ - {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9693f35162dc6208d10b10ddf0458cc09ad70c30ba689d9206e02cd836ce28a3"}, - {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aba5c812f8ee8a3ff3be51887ca2d55fb8e268439ed44110d3846e4229eb0e8b"}, - {file = "watchdog-2.1.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ae38bf8ba6f39d5b83f78661273216e7db5b00f08be7592062cb1fc8b8ba542"}, - {file = "watchdog-2.1.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ad6f1796e37db2223d2a3f302f586f74c72c630b48a9872c1e7ae8e92e0ab669"}, - {file = "watchdog-2.1.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:922a69fa533cb0c793b483becaaa0845f655151e7256ec73630a1b2e9ebcb660"}, - {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b2fcf9402fde2672545b139694284dc3b665fd1be660d73eca6805197ef776a3"}, - {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3386b367e950a11b0568062b70cc026c6f645428a698d33d39e013aaeda4cc04"}, - {file = "watchdog-2.1.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f1c00aa35f504197561060ca4c21d3cc079ba29cf6dd2fe61024c70160c990b"}, - {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b52b88021b9541a60531142b0a451baca08d28b74a723d0c99b13c8c8d48d604"}, - {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:8047da932432aa32c515ec1447ea79ce578d0559362ca3605f8e9568f844e3c6"}, - {file = "watchdog-2.1.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e92c2d33858c8f560671b448205a268096e17870dcf60a9bb3ac7bfbafb7f5f9"}, - {file = "watchdog-2.1.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7d336912853d7b77f9b2c24eeed6a5065d0a0cc0d3b6a5a45ad6d1d05fb8cd8"}, - {file = "watchdog-2.1.6-py3-none-manylinux2014_aarch64.whl", hash = "sha256:cca7741c0fcc765568350cb139e92b7f9f3c9a08c4f32591d18ab0a6ac9e71b6"}, - {file = "watchdog-2.1.6-py3-none-manylinux2014_armv7l.whl", hash = "sha256:25fb5240b195d17de949588628fdf93032ebf163524ef08933db0ea1f99bd685"}, - {file = "watchdog-2.1.6-py3-none-manylinux2014_i686.whl", hash = "sha256:be9be735f827820a06340dff2ddea1fb7234561fa5e6300a62fe7f54d40546a0"}, - {file = "watchdog-2.1.6-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0d19fb2441947b58fbf91336638c2b9f4cc98e05e1045404d7a4cb7cddc7a65"}, - {file = "watchdog-2.1.6-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:3becdb380d8916c873ad512f1701f8a92ce79ec6978ffde92919fd18d41da7fb"}, - {file = "watchdog-2.1.6-py3-none-manylinux2014_s390x.whl", hash = "sha256:ae67501c95606072aafa865b6ed47343ac6484472a2f95490ba151f6347acfc2"}, - {file = "watchdog-2.1.6-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e0f30db709c939cabf64a6dc5babb276e6d823fd84464ab916f9b9ba5623ca15"}, - {file = "watchdog-2.1.6-py3-none-win32.whl", hash = "sha256:e02794ac791662a5eafc6ffeaf9bcc149035a0e48eb0a9d40a8feb4622605a3d"}, - {file = "watchdog-2.1.6-py3-none-win_amd64.whl", hash = "sha256:bd9ba4f332cf57b2c1f698be0728c020399ef3040577cde2939f2e045b39c1e5"}, - {file = "watchdog-2.1.6-py3-none-win_ia64.whl", hash = "sha256:a0f1c7edf116a12f7245be06120b1852275f9506a7d90227648b250755a03923"}, - {file = "watchdog-2.1.6.tar.gz", hash = "sha256:a36e75df6c767cbf46f61a91c70b3ba71811dfa0aca4a324d9407a06a8b7a2e7"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9693f35162dc6208d10b10ddf0458cc09ad70c30ba689d9206e02cd836ce28a3"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aba5c812f8ee8a3ff3be51887ca2d55fb8e268439ed44110d3846e4229eb0e8b"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ae38bf8ba6f39d5b83f78661273216e7db5b00f08be7592062cb1fc8b8ba542"}, + {file = "watchdog-2.1.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ad6f1796e37db2223d2a3f302f586f74c72c630b48a9872c1e7ae8e92e0ab669"}, + {file = "watchdog-2.1.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:922a69fa533cb0c793b483becaaa0845f655151e7256ec73630a1b2e9ebcb660"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b2fcf9402fde2672545b139694284dc3b665fd1be660d73eca6805197ef776a3"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3386b367e950a11b0568062b70cc026c6f645428a698d33d39e013aaeda4cc04"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f1c00aa35f504197561060ca4c21d3cc079ba29cf6dd2fe61024c70160c990b"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b52b88021b9541a60531142b0a451baca08d28b74a723d0c99b13c8c8d48d604"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8047da932432aa32c515ec1447ea79ce578d0559362ca3605f8e9568f844e3c6"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e92c2d33858c8f560671b448205a268096e17870dcf60a9bb3ac7bfbafb7f5f9"}, + {file = 
"watchdog-2.1.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7d336912853d7b77f9b2c24eeed6a5065d0a0cc0d3b6a5a45ad6d1d05fb8cd8"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_aarch64.whl", hash = "sha256:cca7741c0fcc765568350cb139e92b7f9f3c9a08c4f32591d18ab0a6ac9e71b6"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_armv7l.whl", hash = "sha256:25fb5240b195d17de949588628fdf93032ebf163524ef08933db0ea1f99bd685"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_i686.whl", hash = "sha256:be9be735f827820a06340dff2ddea1fb7234561fa5e6300a62fe7f54d40546a0"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0d19fb2441947b58fbf91336638c2b9f4cc98e05e1045404d7a4cb7cddc7a65"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:3becdb380d8916c873ad512f1701f8a92ce79ec6978ffde92919fd18d41da7fb"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_s390x.whl", hash = "sha256:ae67501c95606072aafa865b6ed47343ac6484472a2f95490ba151f6347acfc2"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e0f30db709c939cabf64a6dc5babb276e6d823fd84464ab916f9b9ba5623ca15"}, + {file = "watchdog-2.1.6-py3-none-win32.whl", hash = "sha256:e02794ac791662a5eafc6ffeaf9bcc149035a0e48eb0a9d40a8feb4622605a3d"}, + {file = "watchdog-2.1.6-py3-none-win_amd64.whl", hash = "sha256:bd9ba4f332cf57b2c1f698be0728c020399ef3040577cde2939f2e045b39c1e5"}, + {file = "watchdog-2.1.6-py3-none-win_ia64.whl", hash = "sha256:a0f1c7edf116a12f7245be06120b1852275f9506a7d90227648b250755a03923"}, + {file = "watchdog-2.1.6.tar.gz", hash = "sha256:a36e75df6c767cbf46f61a91c70b3ba71811dfa0aca4a324d9407a06a8b7a2e7"}, ] wrapt = [ - {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"}, - {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"}, - {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"}, - {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"}, - {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"}, - {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"}, - {file = 
"wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"}, - {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"}, - {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"}, - {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"}, - {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"}, - {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"}, - {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"}, - {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"}, - {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"}, - {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"}, - {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"}, - {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"}, - {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"}, - {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"}, - {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"}, - {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"}, - {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"}, - {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"}, - {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"}, - {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"}, - {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = 
"sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"}, - {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"}, - {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"}, - {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"}, - {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"}, - {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"}, - {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"}, - {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"}, - {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"}, - {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"}, - {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"}, - {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"}, - {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"}, - {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"}, - {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"}, - {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"}, + {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"}, + {file = 
"wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"}, + {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"}, + {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"}, + {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"}, + {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"}, + {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"}, + {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"}, + {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"}, + {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"}, + {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"}, + {file = 
"wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"}, + {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"}, + {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"}, + {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"}, + {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"}, + {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"}, + {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"}, + {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"}, + {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"}, + {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"}, ] xenon = [ - {file = "xenon-0.9.0-py2.py3-none-any.whl", hash = "sha256:994c80c7f1c6d40596b600b93734d85a5739208f31895ef99f1e4d362caf9e35"}, - {file = "xenon-0.9.0.tar.gz", hash = "sha256:d2b9cb6c6260f771a432c1e588e51fddb17858f88f73ef641e7532f7a5f58fb8"}, + {file = "xenon-0.9.0-py2.py3-none-any.whl", hash = "sha256:994c80c7f1c6d40596b600b93734d85a5739208f31895ef99f1e4d362caf9e35"}, + {file = "xenon-0.9.0.tar.gz", hash = "sha256:d2b9cb6c6260f771a432c1e588e51fddb17858f88f73ef641e7532f7a5f58fb8"}, ] zipp 
= [ - {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, - {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, ] diff --git a/pyproject.toml b/pyproject.toml index b7bd4695222..5bac979a9d2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,7 @@ python = "^3.6.2" [tool.poetry.dev-dependencies] # 2022-04-21: jmespath was removed, to be re-added once we drop python 3.6. # issue #1148 +aws-cdk-lib = "^2.23.0" bandit = "^1.7.1" black = "^21.12b0" coverage = {extras = ["toml"], version = "^6.2"} @@ -53,7 +54,9 @@ pytest = "^7.0.1" pytest-asyncio = "^0.16.0" pytest-cov = "^3.0.0" pytest-mock = "^3.5.1" +pytest-xdist = "^2.5.0" radon = "^5.1.0" +retry = "^0.9.2" xenon = "^0.9.0" [tool.poetry.extras] @@ -118,7 +121,10 @@ line-length = 120 [tool.pytest.ini_options] addopts = "-ra -vv" -markers = "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')" +markers = [ + "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", + "e2e: marks e2e tests to be deselected (deselect with '-m \"not e2e\"')", +] minversion = "6.0" testpaths = "./tests" diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index a5e5a69107d..1fe21f1cdb3 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,124 +1,95 @@ -import subprocess -import tempfile import uuid -from pathlib import Path import boto3 import pytest -from aws_cdk import App, BundlingOptions, CfnOutput, DockerVolume, Stack, aws_lambda_python_alpha, aws_logs -from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime, Tracing +import yaml +from aws_cdk import App, CfnOutput, RemovalPolicy, Stack, aws_lambda_python_alpha, aws_logs +from aws_cdk.aws_lambda import Code, Function, Runtime, Tracing +from . import utils -def get_data(outputs, key): - value = None - for output in outputs: - if output["OutputKey"] == key: - value = output["OutputValue"] - return value - -def load_handler_file(tmp_filename, handler_filename): - - with open(tmp_filename, mode="wb+") as tmp: - with open(handler_filename, mode="rb") as handler: - for line in handler: - tmp.write(line) - return tmp +def transform_output(outputs): + return {output["OutputKey"]: output["OutputValue"] for output in outputs if output["OutputKey"]} # Create CDK cloud assembly code -def cdk_infrastructure(handler_file, stack_name, environment_variables, **config): +def prepare_infrastructure(handlers_name, handlers_dir, stack_name, environment_variables, **config): integration_test_app = App() stack = Stack(integration_test_app, stack_name) - powertools_layer = LayerVersion.from_layer_version_arn( + powertools_layer = aws_lambda_python_alpha.PythonLayerVersion( stack, "aws-lambda-powertools", - "arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:15", + layer_version_name="aws-lambda-powertools", + entry=".", + compatible_runtimes=[Runtime.PYTHON_3_9], ) - # TODO Create only one layer per test suite as it takes 4 additional minutes to deploy stack without cache - # TODO layer creation breaks hot-swap deployment as CDK complains that change contains non-Asset changes. 
- # powertools_layer = aws_lambda_python_alpha.PythonLayerVersion( - # stack, - # "aws-lambda-powertools", - # layer_version_name="aws-lambda-powertools", - # entry=".", - # compatible_runtimes=runtimes, - # ) - code = Code.from_asset(str(Path(handler_file).parent)) - # powertools_root_dir = "." - # tmp_handler_dir = str(Path(handler_file).parent) - # code = Code.from_asset( - # path=powertools_root_dir, - # bundling=BundlingOptions( - # image=Runtime.PYTHON_3_9.bundling_image, - # volumes=[DockerVolume(container_path=tmp_handler_dir, host_path=tmp_handler_dir)], - # user="root", - # command=[ - # "bash", - # "-c", - # f"pip install poetry && poetry export -f requirements.txt --without-hashes > requirements.txt && pip install -r requirements.txt -t /asset-output/ && rsync -r aws_lambda_powertools /asset-output/ && rsync -r {tmp_handler_dir}/ /asset-output", - # ], - # ), - # ) - - function_python = Function( - stack, - "MyFunction", - runtime=Runtime.PYTHON_3_9, - code=code, - handler=f"{Path(handler_file).stem}.lambda_handler", - layers=[powertools_layer], - log_retention=aws_logs.RetentionDays.ONE_DAY, - environment=environment_variables, - tracing=Tracing.ACTIVE if config.get("tracing") == "ACTIVE" else Tracing.DISABLED, + code = Code.from_asset(handlers_dir) + + for filename in handlers_name: + + function_python = Function( + stack, + f"{filename}-lambda", + runtime=Runtime.PYTHON_3_9, + code=code, + handler=f"{filename}.lambda_handler", + layers=[powertools_layer], + environment=environment_variables, + tracing=Tracing.ACTIVE if config.get("tracing") == "ACTIVE" else Tracing.DISABLED, + ) + + aws_logs.LogGroup( + stack, + f"{filename}-lg", + log_group_name=f"/aws/lambda/{function_python.function_name}", + retention=aws_logs.RetentionDays.ONE_DAY, + removal_policy=RemovalPolicy.DESTROY, + ) + CfnOutput(stack, f"{filename}_arn", value=function_python.function_arn) + return ( + integration_test_app.synth().get_stack_by_name(stack_name).template, + integration_test_app.synth().directory, + integration_test_app.synth().artifacts, ) - CfnOutput(stack, "lambdaArn", value=function_python.function_arn) - integration_test_app.synth() - return integration_test_app - - -# Deploy synthesized code using CDK CLI -def deploy_app(path, stack_name, cf_client): - result = subprocess.run( - [ - "cdk", - "deploy", - "--app", - str(path), - "--require-approval", - "never", - "--hotswap", - ], - capture_output=True, - text=True, - check=True, - ) - print(result.returncode, result.stdout, result.stderr) - outputs = cf_client.describe_stacks(StackName=stack_name)["Stacks"][0]["Outputs"] - return outputs +def deploy_infrastructure(template, asset_root_dir, stack_name, client): + + utils.upload_assets(template, asset_root_dir) + + response = client.create_stack( + StackName=stack_name, + TemplateBody=yaml.dump(template), + TimeoutInMinutes=10, + OnFailure="DO_NOTHING", + Capabilities=["CAPABILITY_IAM"], + ) + waiter = client.get_waiter("stack_create_complete") + waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 2, "MaxAttempts": 50}) + response = client.describe_stacks(StackName=stack_name) + return response["Stacks"][0]["Outputs"] @pytest.fixture(scope="session") -def deploy_infrastructure(): +def deploy(): cf_client = boto3.Session().client("cloudformation") - # in order to use hotswap we create tmp file that we specify as cdk lambda asset - # and we dynamically change its content - with tempfile.TemporaryDirectory() as tmp_dir: - tmp_filename = f"{tmp_dir}/tmp.py" - stack_name = 
f"test-lambda-{uuid.uuid4()}" - - def deploy(handler_filename, environment_variables, **config): - load_handler_file(tmp_filename=tmp_filename, handler_filename=handler_filename) - app = cdk_infrastructure( - handler_file=tmp_filename, stack_name=stack_name, environment_variables=environment_variables, **config - ) - - outputs = deploy_app(path=app.outdir, stack_name=stack_name, cf_client=cf_client) - lambda_arn = get_data(outputs=outputs, key="lambdaArn") - return lambda_arn - - yield deploy - # Ensure stack deletion is triggered at the end of the test session - cf_client.delete_stack(StackName=stack_name) + stack_name = f"test-lambda-{uuid.uuid4()}" + + def deploy(handlers_name, handlers_dir, environment_variables, **config): + + template, asset_root_dir, artifact = prepare_infrastructure( + handlers_name=handlers_name, + handlers_dir=handlers_dir, + stack_name=stack_name, + environment_variables=environment_variables, + **config, + ) + outputs = deploy_infrastructure( + template=template, asset_root_dir=asset_root_dir, stack_name=stack_name, client=cf_client + ) + return transform_output(outputs) + + yield deploy + # Ensure stack deletion is triggered at the end of the test session + cf_client.delete_stack(StackName=stack_name) diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index c7bfeb2c14c..b3f09f3b66c 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -1,5 +1,6 @@ import datetime import os +from functools import lru_cache import boto3 import pytest @@ -15,78 +16,74 @@ def config(): @pytest.fixture(scope="module") -def deploy_basic_lambda(deploy_infrastructure, config): - lambda_arn = deploy_infrastructure( - handler_filename=f"{dirname}/handlers/basic_handler.py", +def deploy_lambdas(deploy, config): + handlers_dir = f"{dirname}/handlers/" + + lambda_arns = deploy( + handlers_name=utils.find_handlers(handlers_dir), + handlers_dir=handlers_dir, environment_variables=config, ) - epoch = int(datetime.datetime.now().timestamp() * 1000) - result = utils.trigger_lambda(lambda_arn=lambda_arn) - assert result["Payload"].read() == b'"success"' - return lambda_arn, epoch + for name, arn in lambda_arns.items(): + utils.trigger_lambda(lambda_arn=arn) + print(f"lambda {name} triggered") + return lambda_arns @pytest.fixture(scope="module") -def deploy_no_context_lambda(deploy_infrastructure, config): - lambda_arn = deploy_infrastructure( - handler_filename=f"{dirname}/handlers/no_context_handler.py", environment_variables=config - ) - - epoch = int(datetime.datetime.now().timestamp() * 1000) - result = utils.trigger_lambda(lambda_arn=lambda_arn) +def trigger_lambdas(deploy_lambdas): + for name, arn in deploy_lambdas.items(): + utils.trigger_lambda(lambda_arn=arn) + print(f"lambda {name} triggered") - assert result["Payload"].read() == b'"success"' - return lambda_arn, epoch +@lru_cache(maxsize=10, typed=False) +def fetch_logs(lambda_arn): + start_time = int(datetime.datetime.now().timestamp() * 1000) + result = utils.trigger_lambda(lambda_arn=lambda_arn) -@pytest.mark.e2e -def test_basic_lambda_logs_visible(deploy_basic_lambda, config): filtered_logs = utils.get_logs( - start_time=deploy_basic_lambda[1], - lambda_function_name=deploy_basic_lambda[0].split(":")[-1], + start_time=start_time, + lambda_function_name=lambda_arn.split(":")[-1], log_client=boto3.client("logs"), ) + return filtered_logs + + +@pytest.mark.e2e +def test_basic_lambda_logs_visible(deploy_lambdas, config): + + filtered_logs = 
fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) + assert any(log.message == config["MESSAGE"] and log.level == config["LOG_LEVEL"] for log in filtered_logs) @pytest.mark.e2e -def test_basic_lambda_no_debug_logs_visible(deploy_basic_lambda, config): - filtered_logs = utils.get_logs( - start_time=deploy_basic_lambda[1], - lambda_function_name=deploy_basic_lambda[0].split(":")[-1], - log_client=boto3.client("logs"), - ) +def test_basic_lambda_no_debug_logs_visible(deploy_lambdas, config): + filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) + assert not any(log.message == config["MESSAGE"] and log.level == "DEBUG" for log in filtered_logs) @pytest.mark.e2e -def test_basic_lambda_contextual_data_logged(deploy_basic_lambda): - filtered_logs = utils.get_logs( - start_time=deploy_basic_lambda[1], - lambda_function_name=deploy_basic_lambda[0].split(":")[-1], - log_client=boto3.client("logs"), - ) - assert all( - ( +def test_basic_lambda_contextual_data_logged(deploy_lambdas): + filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) + for log in filtered_logs: + assert ( log.xray_trace_id and log.function_request_id and log.function_arn and log.function_memory_size and log.function_name - and log.cold_start + and str(log.cold_start) ) - for log in filtered_logs - ) @pytest.mark.e2e -def test_basic_lambda_additional_key_persistence_basic_lambda(deploy_basic_lambda, config): - filtered_logs = utils.get_logs( - start_time=deploy_basic_lambda[1], - lambda_function_name=deploy_basic_lambda[0].split(":")[-1], - log_client=boto3.client("logs"), - ) +def test_basic_lambda_additional_key_persistence_basic_lambda(deploy_lambdas, config): + filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) + assert any( log.extra_info and log.message == config["MESSAGE"] and log.level == config["LOG_LEVEL"] for log in filtered_logs @@ -94,23 +91,16 @@ def test_basic_lambda_additional_key_persistence_basic_lambda(deploy_basic_lambd @pytest.mark.e2e -def test_basic_lambda_empty_event_logged(deploy_basic_lambda): - filtered_logs = utils.get_logs( - start_time=deploy_basic_lambda[1], - lambda_function_name=deploy_basic_lambda[0].split(":")[-1], - log_client=boto3.client("logs"), - ) +def test_basic_lambda_empty_event_logged(deploy_lambdas): + filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) + assert any(log.message == {} for log in filtered_logs) -# Deploy new lambda using cdk hotswap mechanism @pytest.mark.e2e -def test_no_context_lambda_contextual_data_not_logged(deploy_no_context_lambda): - filtered_logs = utils.get_logs( - start_time=deploy_no_context_lambda[1], - lambda_function_name=deploy_no_context_lambda[0].split(":")[-1], - log_client=boto3.client("logs"), - ) +def test_no_context_lambda_contextual_data_not_logged(deploy_lambdas): + filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["nocontexthandlerarn"]) + assert not any( ( log.xray_trace_id @@ -118,19 +108,16 @@ def test_no_context_lambda_contextual_data_not_logged(deploy_no_context_lambda): and log.function_arn and log.function_memory_size and log.function_name - and log.cold_start + and str(log.cold_start) ) for log in filtered_logs ) @pytest.mark.e2e -def test_no_context_lambda_event_not_logged(deploy_no_context_lambda): - filtered_logs = utils.get_logs( - start_time=deploy_no_context_lambda[1], - lambda_function_name=deploy_no_context_lambda[0].split(":")[-1], - log_client=boto3.client("logs"), - ) +def test_no_context_lambda_event_not_logged(deploy_lambdas): + 
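+    # fetch_logs invokes the handler and memoizes its filtered CloudWatch logs
+    # per ARN (lru_cache above), so assertions across tests reuse one invocation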
filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["nocontexthandlerarn"]) + assert not any(log.message == {} for log in filtered_logs) diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index 407141685f0..abcd9ef5545 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -20,9 +20,12 @@ def config(): @pytest.fixture(scope="module") -def deploy_basic_lambda(deploy_infrastructure, config): - lambda_arn = deploy_infrastructure( - handler_filename=f"{dirname}/handlers/basic_handler.py", +def deploy_lambdas(deploy, config): + handlers_dir = f"{dirname}/handlers/" + + lambda_arn = deploy( + handlers_name=utils.find_handlers(handlers_dir), + handlers_dir=handlers_dir, environment_variables=config, ) start_date = datetime.datetime.now(datetime.timezone.utc) @@ -32,8 +35,8 @@ def deploy_basic_lambda(deploy_infrastructure, config): @pytest.mark.e2e -def test_basic_lambda_metric_visible(deploy_basic_lambda, config): - start_date = deploy_basic_lambda +def test_basic_lambda_metric_visible(deploy_lambdas, config): + start_date = deploy_lambdas end_date = start_date + datetime.timedelta(minutes=5) metrics = utils.get_metrics( diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index eaa72c182b3..27d277daa9e 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -17,9 +17,13 @@ def config(): @pytest.fixture(scope="module") -def deploy_basic_lambda(deploy_infrastructure, config): - lambda_arn = deploy_infrastructure( - handler_filename=f"{dirname}/handlers/basic_handler.py", environment_variables=config, tracing="ACTIVE" +def deploy_lambdas(deploy, config): + handlers_dir = f"{dirname}/handlers/" + + lambda_arn = deploy( + handlers_name=utils.find_handlers(handlers_dir), + handlers_dir=handlers_dir, + environment_variables=config, ) start_date = datetime.datetime.utcnow() result = utils.trigger_lambda(lambda_arn=lambda_arn) @@ -28,14 +32,14 @@ def deploy_basic_lambda(deploy_infrastructure, config): @pytest.mark.e2e -def test_basic_lambda_trace_visible(deploy_basic_lambda, config): - start_date = deploy_basic_lambda[1] +def test_basic_lambda_trace_visible(deploy_lambdas, config): + start_date = deploy_lambdas[1] end_date = start_date + datetime.timedelta(minutes=5) trace = utils.get_traces( start_date=start_date, end_date=end_date, - lambda_function_name=deploy_basic_lambda[0].split(":")[-1], + lambda_function_name=deploy_lambdas[0].split(":")[-1], xray_client=boto3.client("xray"), ) diff --git a/tests/e2e/utils.py b/tests/e2e/utils.py index 84f463782a5..733343ac682 100644 --- a/tests/e2e/utils.py +++ b/tests/e2e/utils.py @@ -1,4 +1,8 @@ +import io import json +import os +import zipfile +from pathlib import Path from typing import Any, Optional, Union import boto3 @@ -43,6 +47,17 @@ def trigger_lambda(lambda_arn): return response +def find_handlers(directory): + handlers = [] + for _, _, files in os.walk(directory): + for file in files: + if file.endswith(".py"): + filename = Path(file).stem + handlers.append(filename) + print("handlers", handlers) + return handlers + + @retry(ValueError, delay=1, jitter=1, tries=5) def get_metrics(namespace, cw_client, start_date, end_date, metric_name, service_name): @@ -92,3 +107,53 @@ def get_traces(lambda_function_name: str, xray_client, start_date, end_date): ) return trace_details + + +def get_all_file_paths(directory): + file_paths = [] + for root, directories, files in os.walk(directory): + for filename in files: + 
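+            # record every file under the directory; upload_assets later zips
+            # these paths as-is, preserving the asset's relative layout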
file_paths.append(os.path.join(root, filename)) + return file_paths + + +def upload_assets(template, asset_root_dir): + s3_client = boto3.client("s3") + s3_resource = boto3.resource("s3") + account_id = boto3.client("sts").get_caller_identity()["Account"] + region = boto3.Session().region_name + assets = find_assets(template, account_id, region) + + for s3_key, bucket in assets.items(): + s3_bucket = s3_resource.Bucket(bucket) + if bool(list(s3_bucket.objects.filter(Prefix=s3_key))): + print("object exists, skipping") + continue + + buf = io.BytesIO() + asset_dir = f"{asset_root_dir}/asset.{Path(s3_key).with_suffix('')}" + os.chdir(asset_dir) + files = get_all_file_paths(directory=".") + with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as zf: + for file in files: + zf.write(os.path.join(file)) + buf.seek(0) + s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) + + +def find_assets(template, account_id, region): + assets = {} + for name, resource in template["Resources"].items(): + bucket = None + S3Key = None + + if resource["Properties"].get("Code"): + bucket = resource["Properties"]["Code"]["S3Bucket"] + S3Key = resource["Properties"]["Code"]["S3Key"] + elif resource["Properties"].get("Content"): + bucket = resource["Properties"]["Content"]["S3Bucket"] + S3Key = resource["Properties"]["Content"]["S3Key"] + if S3Key and bucket: + assets[S3Key] = bucket["Fn::Sub"].replace("${AWS::AccountId}", account_id).replace("${AWS::Region}", region) + + return assets From 25c77238bc3a04e80f2e40338f050e7b72893c5c Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Thu, 19 May 2022 11:51:11 +0200 Subject: [PATCH 03/72] Refactor code to use common libs --- poetry.lock | 20 ++- pyproject.toml | 1 + tests/e2e/conftest.py | 104 +++------------ tests/e2e/logger/test_logger.py | 160 ++++++++++++++--------- tests/e2e/metrics/test_metrics.py | 41 ++---- tests/e2e/tracer/test_tracer.py | 40 ++---- tests/e2e/utils/__init__.py | 0 tests/e2e/{utils.py => utils/helpers.py} | 91 ++----------- tests/e2e/utils/infrastructure.py | 149 +++++++++++++++++++++ 9 files changed, 329 insertions(+), 277 deletions(-) create mode 100644 tests/e2e/utils/__init__.py rename tests/e2e/{utils.py => utils/helpers.py} (50%) create mode 100644 tests/e2e/utils/infrastructure.py diff --git a/poetry.lock b/poetry.lock index f03c0dd53b5..809a5bb5a0e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -33,6 +33,20 @@ constructs = ">=10.0.0,<11.0.0" jsii = ">=1.57.0,<2.0.0" publication = ">=0.0.3" +[[package]] +name = "aws-cdk.aws-lambda-python-alpha" +version = "2.23.0a0" +description = "The CDK Construct Library for AWS Lambda in Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +aws-cdk-lib = ">=2.23.0,<3.0.0" +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.57.0,<2.0.0" +publication = ">=0.0.3" + [[package]] name = "aws-xray-sdk" version = "2.9.0" @@ -1222,7 +1236,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "2c4ec9fac23dea3893cf3c60d5ed5728d63c4c46252a14ffbee52d30ebaec74f" +content-hash = "f0aa750ae44c234811d1c59860915845d1ac3c3c7474d5f7ee6e3fbe0d31bfb4" [metadata.files] atomicwrites = [ @@ -1237,6 +1251,10 @@ aws-cdk-lib = [ {file = "aws-cdk-lib-2.23.0.tar.gz", hash = "sha256:3e07d1c6b320795d38567be183e56c2125b4c4492589775257aabec3d3e2a384"}, {file = "aws_cdk_lib-2.23.0-py3-none-any.whl", hash = "sha256:1ec04a146d3364cd0fc4da08e3f8ca25e28df68abaa90641936db17a415ca4bc"}, ] 
+"aws-cdk.aws-lambda-python-alpha" = [ + {file = "aws-cdk.aws-lambda-python-alpha-2.23.0a0.tar.gz", hash = "sha256:1b877ad6ced425e8b5aac1e6a54db8b3ca912f91738aa38b11d2127c1156f7dd"}, + {file = "aws_cdk.aws_lambda_python_alpha-2.23.0a0-py3-none-any.whl", hash = "sha256:782e5fdbda8e1a7ff71bd22c1638ec7e9cdf019a2b1304b6a4a05687b2571e0d"}, +] aws-xray-sdk = [ {file = "aws-xray-sdk-2.9.0.tar.gz", hash = "sha256:b0cd972db218d4d8f7b53ad806fc6184626b924c4997ae58fc9f2a8cd1281568"}, {file = "aws_xray_sdk-2.9.0-py2.py3-none-any.whl", hash = "sha256:98216b3ac8281b51b59a8703f8ec561c460807d9d0679838f5c0179d381d7e58"}, diff --git a/pyproject.toml b/pyproject.toml index 5bac979a9d2..69b6eef9d7a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,7 @@ python = "^3.6.2" # 2022-04-21: jmespath was removed, to be re-added once we drop python 3.6. # issue #1148 aws-cdk-lib = "^2.23.0" +"aws-cdk.aws-lambda-python-alpha" = "2.23.0-alpha.0" bandit = "^1.7.1" black = "^21.12b0" coverage = {extras = ["toml"], version = "^6.2"} diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 1fe21f1cdb3..4e5032657a5 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,95 +1,31 @@ +import datetime import uuid -import boto3 import pytest -import yaml -from aws_cdk import App, CfnOutput, RemovalPolicy, Stack, aws_lambda_python_alpha, aws_logs -from aws_cdk.aws_lambda import Code, Function, Runtime, Tracing -from . import utils +from tests.e2e.utils import helpers, infrastructure -def transform_output(outputs): - return {output["OutputKey"]: output["OutputValue"] for output in outputs if output["OutputKey"]} - - -# Create CDK cloud assembly code -def prepare_infrastructure(handlers_name, handlers_dir, stack_name, environment_variables, **config): - integration_test_app = App() - stack = Stack(integration_test_app, stack_name) - powertools_layer = aws_lambda_python_alpha.PythonLayerVersion( - stack, - "aws-lambda-powertools", - layer_version_name="aws-lambda-powertools", - entry=".", - compatible_runtimes=[Runtime.PYTHON_3_9], - ) - code = Code.from_asset(handlers_dir) - - for filename in handlers_name: - - function_python = Function( - stack, - f"{filename}-lambda", - runtime=Runtime.PYTHON_3_9, - code=code, - handler=f"{filename}.lambda_handler", - layers=[powertools_layer], - environment=environment_variables, - tracing=Tracing.ACTIVE if config.get("tracing") == "ACTIVE" else Tracing.DISABLED, - ) - - aws_logs.LogGroup( - stack, - f"{filename}-lg", - log_group_name=f"/aws/lambda/{function_python.function_name}", - retention=aws_logs.RetentionDays.ONE_DAY, - removal_policy=RemovalPolicy.DESTROY, - ) - CfnOutput(stack, f"{filename}_arn", value=function_python.function_arn) - return ( - integration_test_app.synth().get_stack_by_name(stack_name).template, - integration_test_app.synth().directory, - integration_test_app.synth().artifacts, - ) - - -def deploy_infrastructure(template, asset_root_dir, stack_name, client): - - utils.upload_assets(template, asset_root_dir) - - response = client.create_stack( - StackName=stack_name, - TemplateBody=yaml.dump(template), - TimeoutInMinutes=10, - OnFailure="DO_NOTHING", - Capabilities=["CAPABILITY_IAM"], - ) - waiter = client.get_waiter("stack_create_complete") - waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 2, "MaxAttempts": 50}) - response = client.describe_stacks(StackName=stack_name) - return response["Stacks"][0]["Outputs"] - - -@pytest.fixture(scope="session") -def deploy(): - cf_client = boto3.Session().client("cloudformation") 
+@pytest.fixture(scope="module") +def execute_lambda(config, request): stack_name = f"test-lambda-{uuid.uuid4()}" + test_dir = request.fspath.dirname + handlers_dir = f"{test_dir}/handlers/" + + infra = infrastructure.Infrastructure( + stack_name=stack_name, + handlers_dir=handlers_dir, + config=config["parameters"], + environment_variables=config["environment_variables"], + ) - def deploy(handlers_name, handlers_dir, environment_variables, **config): + lambda_arns = infra.deploy() + execution_time = datetime.datetime.utcnow() - template, asset_root_dir, artifact = prepare_infrastructure( - handlers_name=handlers_name, - handlers_dir=handlers_dir, - stack_name=stack_name, - environment_variables=environment_variables, - **config, - ) - outputs = deploy_infrastructure( - template=template, asset_root_dir=asset_root_dir, stack_name=stack_name, client=cf_client - ) - return transform_output(outputs) + for name, arn in lambda_arns.items(): + helpers.trigger_lambda(lambda_arn=arn, client=infra.lambda_client) + print(f"lambda {name} triggered") - yield deploy + yield {"arns": lambda_arns, "execution_time": execution_time} # Ensure stack deletion is triggered at the end of the test session - cf_client.delete_stack(StackName=stack_name) + infra.delete() diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index b3f09f3b66c..5a2910547ac 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -1,74 +1,72 @@ -import datetime -import os -from functools import lru_cache - import boto3 import pytest -from .. import utils - -dirname = os.path.dirname(__file__) +from ..utils import helpers @pytest.fixture(scope="module") def config(): - return {"MESSAGE": "logger message test", "LOG_LEVEL": "INFO", "ADDITIONAL_KEY": "extra_info"} + return { + "parameters": {}, + "environment_variables": { + "MESSAGE": "logger message test", + "LOG_LEVEL": "INFO", + "ADDITIONAL_KEY": "extra_info", + }, + } -@pytest.fixture(scope="module") -def deploy_lambdas(deploy, config): - handlers_dir = f"{dirname}/handlers/" - - lambda_arns = deploy( - handlers_name=utils.find_handlers(handlers_dir), - handlers_dir=handlers_dir, - environment_variables=config, +@pytest.mark.e2e +def test_basic_lambda_logs_visible(execute_lambda, config): + # GIVEN + lambda_arn = execute_lambda["arns"]["basichandlerarn"] + timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs( + lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client ) - for name, arn in lambda_arns.items(): - utils.trigger_lambda(lambda_arn=arn) - print(f"lambda {name} triggered") - return lambda_arns - - -@pytest.fixture(scope="module") -def trigger_lambdas(deploy_lambdas): - for name, arn in deploy_lambdas.items(): - utils.trigger_lambda(lambda_arn=arn) - print(f"lambda {name} triggered") - - -@lru_cache(maxsize=10, typed=False) -def fetch_logs(lambda_arn): - start_time = int(datetime.datetime.now().timestamp() * 1000) - result = utils.trigger_lambda(lambda_arn=lambda_arn) - - filtered_logs = utils.get_logs( - start_time=start_time, - lambda_function_name=lambda_arn.split(":")[-1], - log_client=boto3.client("logs"), + # THEN + assert any( + log.message == config["environment_variables"]["MESSAGE"] + and log.level == config["environment_variables"]["LOG_LEVEL"] + for log in filtered_logs ) - return filtered_logs @pytest.mark.e2e -def test_basic_lambda_logs_visible(deploy_lambdas, config): - - 
filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) +def test_basic_lambda_no_debug_logs_visible(execute_lambda, config): + # GIVEN + lambda_arn = execute_lambda["arns"]["basichandlerarn"] + timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs( + lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client + ) - assert any(log.message == config["MESSAGE"] and log.level == config["LOG_LEVEL"] for log in filtered_logs) + # THEN + assert not any( + log.message == config["environment_variables"]["MESSAGE"] and log.level == "DEBUG" for log in filtered_logs + ) @pytest.mark.e2e -def test_basic_lambda_no_debug_logs_visible(deploy_lambdas, config): - filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) - - assert not any(log.message == config["MESSAGE"] and log.level == "DEBUG" for log in filtered_logs) - +def test_basic_lambda_contextual_data_logged(execute_lambda): + # GIVEN + lambda_arn = execute_lambda["arns"]["basichandlerarn"] + timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs( + lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client + ) -@pytest.mark.e2e -def test_basic_lambda_contextual_data_logged(deploy_lambdas): - filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) + # THEN for log in filtered_logs: assert ( log.xray_trace_id @@ -81,26 +79,58 @@ def test_basic_lambda_contextual_data_logged(deploy_lambdas): @pytest.mark.e2e -def test_basic_lambda_additional_key_persistence_basic_lambda(deploy_lambdas, config): - filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) +def test_basic_lambda_additional_key_persistence_basic_lambda(execute_lambda, config): + # GIVEN + lambda_arn = execute_lambda["arns"]["basichandlerarn"] + timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs( + lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client + ) + + # THEN assert any( - log.extra_info and log.message == config["MESSAGE"] and log.level == config["LOG_LEVEL"] + log.extra_info + and log.message == config["environment_variables"]["MESSAGE"] + and log.level == config["environment_variables"]["LOG_LEVEL"] for log in filtered_logs ) @pytest.mark.e2e -def test_basic_lambda_empty_event_logged(deploy_lambdas): - filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["basichandlerarn"]) +def test_basic_lambda_empty_event_logged(execute_lambda): + + # GIVEN + lambda_arn = execute_lambda["arns"]["basichandlerarn"] + timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + cw_client = boto3.client("logs") + # WHEN + filtered_logs = helpers.get_logs( + lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client + ) + + # THEN assert any(log.message == {} for log in filtered_logs) @pytest.mark.e2e -def test_no_context_lambda_contextual_data_not_logged(deploy_lambdas): - filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["nocontexthandlerarn"]) +def test_no_context_lambda_contextual_data_not_logged(execute_lambda): + + # GIVEN + lambda_arn = execute_lambda["arns"]["nocontexthandlerarn"] + timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + cw_client = boto3.client("logs") + + # WHEN + 
filtered_logs = helpers.get_logs( + lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client + ) + # THEN assert not any( ( log.xray_trace_id @@ -115,9 +145,19 @@ def test_no_context_lambda_contextual_data_not_logged(deploy_lambdas): @pytest.mark.e2e -def test_no_context_lambda_event_not_logged(deploy_lambdas): - filtered_logs = fetch_logs(lambda_arn=deploy_lambdas["nocontexthandlerarn"]) +def test_no_context_lambda_event_not_logged(execute_lambda): + + # GIVEN + lambda_arn = execute_lambda["arns"]["nocontexthandlerarn"] + timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + cw_client = boto3.client("logs") + + # WHEN + filtered_logs = helpers.get_logs( + lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client + ) + # THEN assert not any(log.message == {} for log in filtered_logs) diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index abcd9ef5545..31be11ec7cf 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -1,50 +1,35 @@ import datetime -import os import uuid import boto3 import pytest -from .. import utils - -dirname = os.path.dirname(__file__) +from ..utils import helpers @pytest.fixture(scope="module") def config(): return { - "METRIC_NAMESPACE": f"powertools-e2e-metric-{uuid.uuid4()}", - "METRIC_NAME": "business-metric", - "SERVICE_NAME": "test-powertools-service", + "parameters": {}, + "environment_variables": { + "METRIC_NAMESPACE": f"powertools-e2e-metric-{uuid.uuid4()}", + "METRIC_NAME": "business-metric", + "SERVICE_NAME": "test-powertools-service", + }, } -@pytest.fixture(scope="module") -def deploy_lambdas(deploy, config): - handlers_dir = f"{dirname}/handlers/" - - lambda_arn = deploy( - handlers_name=utils.find_handlers(handlers_dir), - handlers_dir=handlers_dir, - environment_variables=config, - ) - start_date = datetime.datetime.now(datetime.timezone.utc) - result = utils.trigger_lambda(lambda_arn=lambda_arn) - assert result["Payload"].read() == b'"success"' - return start_date - - @pytest.mark.e2e -def test_basic_lambda_metric_visible(deploy_lambdas, config): - start_date = deploy_lambdas +def test_basic_lambda_metric_visible(execute_lambda, config): + start_date = execute_lambda["execution_time"] end_date = start_date + datetime.timedelta(minutes=5) - metrics = utils.get_metrics( + metrics = helpers.get_metrics( start_date=start_date, end_date=end_date, - namespace=config["METRIC_NAMESPACE"], - metric_name=config["METRIC_NAME"], - service_name=config["SERVICE_NAME"], + namespace=config["environment_variables"]["METRIC_NAMESPACE"], + metric_name=config["environment_variables"]["METRIC_NAME"], + service_name=config["environment_variables"]["SERVICE_NAME"], cw_client=boto3.client(service_name="cloudwatch"), ) assert metrics["Timestamps"] and len(metrics["Timestamps"]) == 1 diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index 27d277daa9e..6478c72ebb9 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -1,45 +1,31 @@ import datetime import json -import os import uuid import boto3 import pytest -from .. 
import utils - -dirname = os.path.dirname(__file__) +from ..utils import helpers @pytest.fixture(scope="module") def config(): - return {"ANNOTATION_KEY": f"e2e-tracer-{uuid.uuid4()}", "ANNOTATION_VALUE": "stored"} - - -@pytest.fixture(scope="module") -def deploy_lambdas(deploy, config): - handlers_dir = f"{dirname}/handlers/" - - lambda_arn = deploy( - handlers_name=utils.find_handlers(handlers_dir), - handlers_dir=handlers_dir, - environment_variables=config, - ) - start_date = datetime.datetime.utcnow() - result = utils.trigger_lambda(lambda_arn=lambda_arn) - assert result["Payload"].read() == b'"success"' - return lambda_arn, start_date + return { + "parameters": {"tracing": "ACTIVE"}, + "environment_variables": {"ANNOTATION_KEY": f"e2e-tracer-{uuid.uuid4()}", "ANNOTATION_VALUE": "stored"}, + } @pytest.mark.e2e -def test_basic_lambda_trace_visible(deploy_lambdas, config): - start_date = deploy_lambdas[1] +def test_basic_lambda_trace_visible(execute_lambda, config): + lambda_arn = execute_lambda["arns"]["basichandlerarn"] + start_date = execute_lambda["execution_time"] end_date = start_date + datetime.timedelta(minutes=5) - trace = utils.get_traces( + trace = helpers.get_traces( start_date=start_date, end_date=end_date, - lambda_function_name=deploy_lambdas[0].split(":")[-1], + lambda_function_name=lambda_arn.split(":")[-1], xray_client=boto3.client("xray"), ) @@ -48,10 +34,12 @@ def test_basic_lambda_trace_visible(deploy_lambdas, config): if document["origin"] == "AWS::Lambda::Function": for subsegment in document["subsegments"]: if subsegment["name"] == "Invocation": - print(subsegment) for x_subsegment in subsegment["subsegments"]: metadata = x_subsegment["metadata"] annotation = x_subsegment["annotations"] - assert metadata["e2e-tests-app"][config["ANNOTATION_KEY"]] == config["ANNOTATION_VALUE"] + assert ( + metadata["e2e-tests-app"][config["environment_variables"]["ANNOTATION_KEY"]] + == config["environment_variables"]["ANNOTATION_VALUE"] + ) assert annotation["Service"] == "e2e-tests-app" diff --git a/tests/e2e/utils/__init__.py b/tests/e2e/utils/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/utils.py b/tests/e2e/utils/helpers.py similarity index 50% rename from tests/e2e/utils.py rename to tests/e2e/utils/helpers.py index 733343ac682..0af6002b707 100644 --- a/tests/e2e/utils.py +++ b/tests/e2e/utils/helpers.py @@ -1,15 +1,12 @@ -import io import json -import os -import zipfile -from pathlib import Path +from functools import lru_cache from typing import Any, Optional, Union -import boto3 from pydantic import BaseModel from retry import retry +# Helper methods && Class class Log(BaseModel): level: str location: str @@ -25,8 +22,14 @@ class Log(BaseModel): extra_info: Optional[str] -@retry(ValueError, delay=1, jitter=1, tries=5) -def get_logs(lambda_function_name: str, log_client: Any, start_time: int): +def trigger_lambda(lambda_arn, client): + response = client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse") + return response + + +@lru_cache(maxsize=10, typed=False) +@retry(ValueError, delay=1, jitter=1, tries=10) +def get_logs(lambda_function_name: str, log_client: Any, start_time: int, **kwargs): response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time) if not response["events"]: raise ValueError("Empty response from Cloudwatch Logs. 
Repeating...") @@ -41,26 +44,9 @@ def get_logs(lambda_function_name: str, log_client: Any, start_time: int): return filtered_logs -def trigger_lambda(lambda_arn): - lambda_client = boto3.client("lambda") - response = lambda_client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse") - return response - - -def find_handlers(directory): - handlers = [] - for _, _, files in os.walk(directory): - for file in files: - if file.endswith(".py"): - filename = Path(file).stem - handlers.append(filename) - print("handlers", handlers) - return handlers - - -@retry(ValueError, delay=1, jitter=1, tries=5) +@lru_cache(maxsize=10, typed=False) +@retry(ValueError, delay=1, jitter=1, tries=10) def get_metrics(namespace, cw_client, start_date, end_date, metric_name, service_name): - response = cw_client.get_metric_data( MetricDataQueries=[ { @@ -80,14 +66,13 @@ def get_metrics(namespace, cw_client, start_date, end_date, metric_name, service StartTime=start_date, EndTime=end_date, ) - result = response["MetricDataResults"][0] if not result["Values"]: raise ValueError("Empty response from Cloudwatch. Repeating...") return result -@retry(ValueError, delay=2, jitter=0.5, tries=10) +@retry(ValueError, delay=1, jitter=1, tries=10) def get_traces(lambda_function_name: str, xray_client, start_date, end_date): paginator = xray_client.get_paginator("get_trace_summaries") response_iterator = paginator.paginate( @@ -107,53 +92,3 @@ def get_traces(lambda_function_name: str, xray_client, start_date, end_date): ) return trace_details - - -def get_all_file_paths(directory): - file_paths = [] - for root, directories, files in os.walk(directory): - for filename in files: - file_paths.append(os.path.join(root, filename)) - return file_paths - - -def upload_assets(template, asset_root_dir): - s3_client = boto3.client("s3") - s3_resource = boto3.resource("s3") - account_id = boto3.client("sts").get_caller_identity()["Account"] - region = boto3.Session().region_name - assets = find_assets(template, account_id, region) - - for s3_key, bucket in assets.items(): - s3_bucket = s3_resource.Bucket(bucket) - if bool(list(s3_bucket.objects.filter(Prefix=s3_key))): - print("object exists, skipping") - continue - - buf = io.BytesIO() - asset_dir = f"{asset_root_dir}/asset.{Path(s3_key).with_suffix('')}" - os.chdir(asset_dir) - files = get_all_file_paths(directory=".") - with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as zf: - for file in files: - zf.write(os.path.join(file)) - buf.seek(0) - s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) - - -def find_assets(template, account_id, region): - assets = {} - for name, resource in template["Resources"].items(): - bucket = None - S3Key = None - - if resource["Properties"].get("Code"): - bucket = resource["Properties"]["Code"]["S3Bucket"] - S3Key = resource["Properties"]["Code"]["S3Key"] - elif resource["Properties"].get("Content"): - bucket = resource["Properties"]["Content"]["S3Bucket"] - S3Key = resource["Properties"]["Content"]["S3Key"] - if S3Key and bucket: - assets[S3Key] = bucket["Fn::Sub"].replace("${AWS::AccountId}", account_id).replace("${AWS::Region}", region) - - return assets diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py new file mode 100644 index 00000000000..5e0fedd297f --- /dev/null +++ b/tests/e2e/utils/infrastructure.py @@ -0,0 +1,149 @@ +import io +import os +import zipfile +from pathlib import Path + +import boto3 +import yaml +from aws_cdk import App, CfnOutput, RemovalPolicy, Stack, 
aws_lambda_python_alpha, aws_logs +from aws_cdk.aws_lambda import Code, Function, Runtime, Tracing + + +class Infrastructure: + def __init__(self, stack_name: str, handlers_dir: str, config: dict, environment_variables: dict) -> None: + session = boto3.Session(profile_name="aws-mploski-root") + self.s3_client = session.client("s3") + self.lambda_client = session.client("lambda") + self.cf_client = session.client("cloudformation") + self.s3_resource = session.resource("s3") + self.account_id = session.client("sts").get_caller_identity()["Account"] + self.region = boto3.Session().region_name + self.stack_name = stack_name + self.handlers_dir = handlers_dir + self.config = config + self.environment_variables = environment_variables + + def deploy(self) -> dict: + + handlers = self._find_files(directory=self.handlers_dir, only_py=True) + + template, asset_root_dir, artifact = self._prepare_stack( + handlers=handlers, + handlers_dir=self.handlers_dir, + stack_name=self.stack_name, + environment_variables=self.environment_variables, + **self.config, + ) + self._upload_assets(template, asset_root_dir) + + response = self._deploy_stack(self.stack_name, template) + + return self._transform_output(response["Stacks"][0]["Outputs"]) + + def delete(self): + self.cf_client.delete_stack(StackName=self.stack_name) + + def _find_files(self, directory, only_py=False) -> list: + file_paths = [] + for root, _, files in os.walk(directory): + for filename in files: + if only_py: + if filename.endswith(".py"): + file_paths.append(os.path.join(root, filename)) + else: + file_paths.append(os.path.join(root, filename)) + return file_paths + + # Create CDK cloud assembly code + def _prepare_stack(self, handlers, handlers_dir, stack_name, environment_variables, **config): + integration_test_app = App() + stack = Stack(integration_test_app, stack_name) + powertools_layer = aws_lambda_python_alpha.PythonLayerVersion( + stack, + "aws-lambda-powertools", + layer_version_name="aws-lambda-powertools", + entry=".", + compatible_runtimes=[Runtime.PYTHON_3_9], + ) + code = Code.from_asset(handlers_dir) + + for filename_path in handlers: + filename = Path(filename_path).stem + function_python = Function( + stack, + f"{filename}-lambda", + runtime=Runtime.PYTHON_3_9, + code=code, + handler=f"{filename}.lambda_handler", + layers=[powertools_layer], + environment=environment_variables, + tracing=Tracing.ACTIVE if config.get("tracing") == "ACTIVE" else Tracing.DISABLED, + ) + + aws_logs.LogGroup( + stack, + f"{filename}-lg", + log_group_name=f"/aws/lambda/{function_python.function_name}", + retention=aws_logs.RetentionDays.ONE_DAY, + removal_policy=RemovalPolicy.DESTROY, + ) + CfnOutput(stack, f"{filename}_arn", value=function_python.function_arn) + return ( + integration_test_app.synth().get_stack_by_name(stack_name).template, + integration_test_app.synth().directory, + integration_test_app.synth().artifacts, + ) + + def _upload_assets(self, template, asset_root_dir): + + assets = self._find_assets(template, self.account_id, self.region) + + for s3_key, bucket in assets.items(): + s3_bucket = self.s3_resource.Bucket(bucket) + if bool(list(s3_bucket.objects.filter(Prefix=s3_key))): + print("object exists, skipping") + continue + + buf = io.BytesIO() + asset_dir = f"{asset_root_dir}/asset.{Path(s3_key).with_suffix('')}" + os.chdir(asset_dir) + files = self._find_files(directory=".") + with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as zf: + for file in files: + zf.write(os.path.join(file)) + buf.seek(0) + 
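+            # rewind the in-memory zip so upload_fileobj streams it from the first byte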
self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) + + def _deploy_stack(self, stack_name, template): + response = self.cf_client.create_stack( + StackName=stack_name, + TemplateBody=yaml.dump(template), + TimeoutInMinutes=10, + OnFailure="DO_NOTHING", + Capabilities=["CAPABILITY_IAM"], + ) + waiter = self.cf_client.get_waiter("stack_create_complete") + waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 2, "MaxAttempts": 50}) + response = self.cf_client.describe_stacks(StackName=stack_name) + return response + + def _find_assets(self, template, account_id, region): + assets = {} + for name, resource in template["Resources"].items(): + bucket = None + S3Key = None + + if resource["Properties"].get("Code"): + bucket = resource["Properties"]["Code"]["S3Bucket"] + S3Key = resource["Properties"]["Code"]["S3Key"] + elif resource["Properties"].get("Content"): + bucket = resource["Properties"]["Content"]["S3Bucket"] + S3Key = resource["Properties"]["Content"]["S3Key"] + if S3Key and bucket: + assets[S3Key] = ( + bucket["Fn::Sub"].replace("${AWS::AccountId}", account_id).replace("${AWS::Region}", region) + ) + return assets + + def _transform_output(self, outputs): + return {output["OutputKey"]: output["OutputValue"] for output in outputs if output["OutputKey"]} From 081b7d9720636b1f58c32b395f8fe8f9c0c21d82 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Thu, 19 May 2022 13:22:13 +0200 Subject: [PATCH 04/72] Enable deploying stack with concrete python version --- .github/workflows/run-e2e-tests.yml | 9 ++++++--- tests/e2e/utils/infrastructure.py | 14 ++++++++++++-- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 592213c2f39..d0e311042d2 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -16,17 +16,20 @@ jobs: permissions: id-token: write # needed to interact with GitHub's OIDC Token endpoint. contents: read + strategy: + matrix: + version: ["3.8", "3.9", "3.10"] steps: - name: "Checkout" uses: actions/checkout@v3 ######################### # Release new version ######################### - - name: "Use Python 3" + - name: "Use Python" uses: actions/setup-python@v3 with: - python-version: "3.x" # Version range or exact version of a Python version to use, using SemVer's version range syntax - architecture: "x64" # optional x64 or x86. 
Defaults to x64 if not specified
+          python-version: ${{ matrix.version }}
+          architecture: "x64"
       - name: Install dependencies
         run: make dev
       - name: Configure AWS credentials
diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py
index 5e0fedd297f..2ca188de12d 100644
--- a/tests/e2e/utils/infrastructure.py
+++ b/tests/e2e/utils/infrastructure.py
@@ -1,6 +1,8 @@
 import io
 import os
+import sys
 import zipfile
+from enum import Enum
 from pathlib import Path
 
 import boto3
@@ -8,6 +10,14 @@
 from aws_cdk import App, CfnOutput, RemovalPolicy, Stack, aws_lambda_python_alpha, aws_logs
 from aws_cdk.aws_lambda import Code, Function, Runtime, Tracing
 
+PYTHON_RUNTIME_VERSION = f"V{''.join(map(str, sys.version_info[:2]))}"
+
+
+class PythonVersion(Enum):
+    V37 = Runtime.PYTHON_3_7
+    V38 = Runtime.PYTHON_3_8
+    V39 = Runtime.PYTHON_3_9
+
 
 class Infrastructure:
     def __init__(self, stack_name: str, handlers_dir: str, config: dict, environment_variables: dict) -> None:
@@ -63,7 +73,7 @@ def _prepare_stack(self, handlers, handlers_dir, stack_name, environment_variabl
             "aws-lambda-powertools",
             layer_version_name="aws-lambda-powertools",
             entry=".",
-            compatible_runtimes=[Runtime.PYTHON_3_9],
+            compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value],
         )
         code = Code.from_asset(handlers_dir)
 
@@ -72,7 +82,7 @@ def _prepare_stack(self, handlers, handlers_dir, stack_name, environment_variabl
             function_python = Function(
                 stack,
                 f"{filename}-lambda",
-                runtime=Runtime.PYTHON_3_9,
+                runtime=PythonVersion[PYTHON_RUNTIME_VERSION].value,
                 code=code,
                 handler=f"{filename}.lambda_handler",
                 layers=[powertools_layer],

From 7d34af12fe759bf03b4de99206c2c0b52bca22ed Mon Sep 17 00:00:00 2001
From: Michal Ploski
Date: Mon, 13 Jun 2022 09:55:52 +0200
Subject: [PATCH 05/72] Remove alpha dependency from CDK.
Fix package creation --- Makefile | 2 +- poetry.lock | 20 +------ pyproject.toml | 1 - tests/e2e/conftest.py | 16 ++++-- tests/e2e/logger/test_logger.py | 24 ++++---- tests/e2e/metrics/test_metrics.py | 5 +- tests/e2e/tracer/test_tracer.py | 3 +- tests/e2e/utils/Dockerfile | 14 +++++ tests/e2e/utils/helpers.py | 11 ++-- tests/e2e/utils/infrastructure.py | 95 +++++++++++++++++++------------ tests/e2e/utils/py.typed | 0 11 files changed, 113 insertions(+), 78 deletions(-) create mode 100644 tests/e2e/utils/Dockerfile create mode 100644 tests/e2e/utils/py.typed diff --git a/Makefile b/Makefile index 6312416d7a5..719c4a63484 100644 --- a/Makefile +++ b/Makefile @@ -25,7 +25,7 @@ unit-test: poetry run pytest tests/unit e2e-test: - poetry run pytest -n 3 --dist loadscope -rP --durations=0 --durations-min=1 tests/e2e + poetry run pytest -n 3 --dist loadscope --durations=0 --durations-min=1 tests/e2e coverage-html: poetry run pytest -m "not (perf or e2e)" --cov=aws_lambda_powertools --cov-report=html diff --git a/poetry.lock b/poetry.lock index 809a5bb5a0e..f03c0dd53b5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -33,20 +33,6 @@ constructs = ">=10.0.0,<11.0.0" jsii = ">=1.57.0,<2.0.0" publication = ">=0.0.3" -[[package]] -name = "aws-cdk.aws-lambda-python-alpha" -version = "2.23.0a0" -description = "The CDK Construct Library for AWS Lambda in Python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -aws-cdk-lib = ">=2.23.0,<3.0.0" -constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.57.0,<2.0.0" -publication = ">=0.0.3" - [[package]] name = "aws-xray-sdk" version = "2.9.0" @@ -1236,7 +1222,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "f0aa750ae44c234811d1c59860915845d1ac3c3c7474d5f7ee6e3fbe0d31bfb4" +content-hash = "2c4ec9fac23dea3893cf3c60d5ed5728d63c4c46252a14ffbee52d30ebaec74f" [metadata.files] atomicwrites = [ @@ -1251,10 +1237,6 @@ aws-cdk-lib = [ {file = "aws-cdk-lib-2.23.0.tar.gz", hash = "sha256:3e07d1c6b320795d38567be183e56c2125b4c4492589775257aabec3d3e2a384"}, {file = "aws_cdk_lib-2.23.0-py3-none-any.whl", hash = "sha256:1ec04a146d3364cd0fc4da08e3f8ca25e28df68abaa90641936db17a415ca4bc"}, ] -"aws-cdk.aws-lambda-python-alpha" = [ - {file = "aws-cdk.aws-lambda-python-alpha-2.23.0a0.tar.gz", hash = "sha256:1b877ad6ced425e8b5aac1e6a54db8b3ca912f91738aa38b11d2127c1156f7dd"}, - {file = "aws_cdk.aws_lambda_python_alpha-2.23.0a0-py3-none-any.whl", hash = "sha256:782e5fdbda8e1a7ff71bd22c1638ec7e9cdf019a2b1304b6a4a05687b2571e0d"}, -] aws-xray-sdk = [ {file = "aws-xray-sdk-2.9.0.tar.gz", hash = "sha256:b0cd972db218d4d8f7b53ad806fc6184626b924c4997ae58fc9f2a8cd1281568"}, {file = "aws_xray_sdk-2.9.0-py2.py3-none-any.whl", hash = "sha256:98216b3ac8281b51b59a8703f8ec561c460807d9d0679838f5c0179d381d7e58"}, diff --git a/pyproject.toml b/pyproject.toml index 69b6eef9d7a..5bac979a9d2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,6 @@ python = "^3.6.2" # 2022-04-21: jmespath was removed, to be re-added once we drop python 3.6. 
# issue #1148 aws-cdk-lib = "^2.23.0" -"aws-cdk.aws-lambda-python-alpha" = "2.23.0-alpha.0" bandit = "^1.7.1" black = "^21.12b0" coverage = {extras = ["toml"], version = "^6.2"} diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 4e5032657a5..0db34008d69 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,13 +1,23 @@ import datetime import uuid +from typing import Generator, TypedDict import pytest +from e2e.utils import helpers, infrastructure -from tests.e2e.utils import helpers, infrastructure + +class LambdaConfig(TypedDict): + parameters: dict + environment_variables: dict[str, str] + + +class LambdaExecution(TypedDict): + arns: dict[str, str] + execution_time: datetime.datetime @pytest.fixture(scope="module") -def execute_lambda(config, request): +def execute_lambda(config, request) -> Generator[LambdaExecution, None, None]: stack_name = f"test-lambda-{uuid.uuid4()}" test_dir = request.fspath.dirname handlers_dir = f"{test_dir}/handlers/" @@ -24,8 +34,6 @@ def execute_lambda(config, request): for name, arn in lambda_arns.items(): helpers.trigger_lambda(lambda_arn=arn, client=infra.lambda_client) - print(f"lambda {name} triggered") - yield {"arns": lambda_arns, "execution_time": execution_time} # Ensure stack deletion is triggered at the end of the test session infra.delete() diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 5a2910547ac..e325be018ee 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -1,11 +1,14 @@ +from typing import TypedDict + import boto3 import pytest +from .. import conftest from ..utils import helpers @pytest.fixture(scope="module") -def config(): +def config() -> conftest.LambdaConfig: return { "parameters": {}, "environment_variables": { @@ -17,7 +20,7 @@ def config(): @pytest.mark.e2e -def test_basic_lambda_logs_visible(execute_lambda, config): +def test_basic_lambda_logs_visible(execute_lambda: conftest.LambdaExecution, config: conftest.LambdaConfig): # GIVEN lambda_arn = execute_lambda["arns"]["basichandlerarn"] timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) @@ -25,7 +28,7 @@ def test_basic_lambda_logs_visible(execute_lambda, config): # WHEN filtered_logs = helpers.get_logs( - lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client + lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client, run="first" ) # THEN @@ -37,7 +40,7 @@ def test_basic_lambda_logs_visible(execute_lambda, config): @pytest.mark.e2e -def test_basic_lambda_no_debug_logs_visible(execute_lambda, config): +def test_basic_lambda_no_debug_logs_visible(execute_lambda: conftest.LambdaExecution, config: conftest.LambdaConfig): # GIVEN lambda_arn = execute_lambda["arns"]["basichandlerarn"] timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) @@ -55,7 +58,7 @@ def test_basic_lambda_no_debug_logs_visible(execute_lambda, config): @pytest.mark.e2e -def test_basic_lambda_contextual_data_logged(execute_lambda): +def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.LambdaExecution): # GIVEN lambda_arn = execute_lambda["arns"]["basichandlerarn"] timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) @@ -79,9 +82,10 @@ def test_basic_lambda_contextual_data_logged(execute_lambda): @pytest.mark.e2e -def test_basic_lambda_additional_key_persistence_basic_lambda(execute_lambda, config): +def test_basic_lambda_additional_key_persistence_basic_lambda( + execute_lambda: 
conftest.LambdaExecution, config: conftest.LambdaConfig +): # GIVEN - lambda_arn = execute_lambda["arns"]["basichandlerarn"] timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) cw_client = boto3.client("logs") @@ -101,7 +105,7 @@ def test_basic_lambda_additional_key_persistence_basic_lambda(execute_lambda, co @pytest.mark.e2e -def test_basic_lambda_empty_event_logged(execute_lambda): +def test_basic_lambda_empty_event_logged(execute_lambda: conftest.LambdaExecution): # GIVEN lambda_arn = execute_lambda["arns"]["basichandlerarn"] @@ -118,7 +122,7 @@ def test_basic_lambda_empty_event_logged(execute_lambda): @pytest.mark.e2e -def test_no_context_lambda_contextual_data_not_logged(execute_lambda): +def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.LambdaExecution): # GIVEN lambda_arn = execute_lambda["arns"]["nocontexthandlerarn"] @@ -145,7 +149,7 @@ def test_no_context_lambda_contextual_data_not_logged(execute_lambda): @pytest.mark.e2e -def test_no_context_lambda_event_not_logged(execute_lambda): +def test_no_context_lambda_event_not_logged(execute_lambda: conftest.LambdaExecution): # GIVEN lambda_arn = execute_lambda["arns"]["nocontexthandlerarn"] diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index 31be11ec7cf..89158498766 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -4,11 +4,12 @@ import boto3 import pytest +from .. import conftest from ..utils import helpers @pytest.fixture(scope="module") -def config(): +def config() -> conftest.LambdaConfig: return { "parameters": {}, "environment_variables": { @@ -20,7 +21,7 @@ def config(): @pytest.mark.e2e -def test_basic_lambda_metric_visible(execute_lambda, config): +def test_basic_lambda_metric_visible(execute_lambda: conftest.LambdaExecution, config: conftest.LambdaConfig): start_date = execute_lambda["execution_time"] end_date = start_date + datetime.timedelta(minutes=5) diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index 6478c72ebb9..b4d3bb2251f 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -5,6 +5,7 @@ import boto3 import pytest +from .. import conftest from ..utils import helpers @@ -17,7 +18,7 @@ def config(): @pytest.mark.e2e -def test_basic_lambda_trace_visible(execute_lambda, config): +def test_basic_lambda_trace_visible(execute_lambda: conftest.LambdaExecution, config: conftest.LambdaConfig): lambda_arn = execute_lambda["arns"]["basichandlerarn"] start_date = execute_lambda["execution_time"] end_date = start_date + datetime.timedelta(minutes=5) diff --git a/tests/e2e/utils/Dockerfile b/tests/e2e/utils/Dockerfile new file mode 100644 index 00000000000..69bcca5033c --- /dev/null +++ b/tests/e2e/utils/Dockerfile @@ -0,0 +1,14 @@ +# The correct AWS SAM build image based on the runtime of the function will be +# passed as build arg. The default allows to do `docker build .` when testing. 
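+# infrastructure.py overrides IMAGE at synth time through
+# DockerImage.from_build(..., build_args={"IMAGE": <the runtime's bundling image>}).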
+ARG IMAGE=public.ecr.aws/sam/build-python3.7 +FROM $IMAGE + +ARG PIP_INDEX_URL +ARG PIP_EXTRA_INDEX_URL +ARG HTTPS_PROXY + +# Upgrade pip (required by cryptography v3.4 and above, which is a dependency of poetry) +RUN pip install --upgrade pip +RUN pip install pipenv poetry + +CMD [ "python" ] \ No newline at end of file diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py index 0af6002b707..399c2096ecc 100644 --- a/tests/e2e/utils/helpers.py +++ b/tests/e2e/utils/helpers.py @@ -1,4 +1,5 @@ import json +from datetime import datetime from functools import lru_cache from typing import Any, Optional, Union @@ -22,14 +23,14 @@ class Log(BaseModel): extra_info: Optional[str] -def trigger_lambda(lambda_arn, client): +def trigger_lambda(lambda_arn: str, client: Any): response = client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse") return response @lru_cache(maxsize=10, typed=False) @retry(ValueError, delay=1, jitter=1, tries=10) -def get_logs(lambda_function_name: str, log_client: Any, start_time: int, **kwargs): +def get_logs(lambda_function_name: str, log_client: Any, start_time: int, **kwargs: dict): response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time) if not response["events"]: raise ValueError("Empty response from Cloudwatch Logs. Repeating...") @@ -46,7 +47,9 @@ def get_logs(lambda_function_name: str, log_client: Any, start_time: int, **kwar @lru_cache(maxsize=10, typed=False) @retry(ValueError, delay=1, jitter=1, tries=10) -def get_metrics(namespace, cw_client, start_date, end_date, metric_name, service_name): +def get_metrics( + namespace: str, cw_client: Any, start_date: datetime, end_date: datetime, metric_name: str, service_name: str +): response = cw_client.get_metric_data( MetricDataQueries=[ { @@ -73,7 +76,7 @@ def get_metrics(namespace, cw_client, start_date, end_date, metric_name, service @retry(ValueError, delay=1, jitter=1, tries=10) -def get_traces(lambda_function_name: str, xray_client, start_date, end_date): +def get_traces(lambda_function_name: str, xray_client: Any, start_date: datetime, end_date: datetime): paginator = xray_client.get_paginator("get_trace_summaries") response_iterator = paginator.paginate( StartTime=start_date, diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 2ca188de12d..8c4298a98fe 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -4,24 +4,26 @@ import zipfile from enum import Enum from pathlib import Path +from typing import Any import boto3 import yaml -from aws_cdk import App, CfnOutput, RemovalPolicy, Stack, aws_lambda_python_alpha, aws_logs -from aws_cdk.aws_lambda import Code, Function, Runtime, Tracing +from aws_cdk import App, AssetStaging, BundlingOptions, CfnOutput, DockerImage, RemovalPolicy, Stack, aws_logs +from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime, Tracing PYTHON_RUNTIME_VERSION = f"V{''.join(map(str, sys.version_info[:2]))}" class PythonVersion(Enum): - V37 = Runtime.PYTHON_3_7 - V38 = Runtime.PYTHON_3_8 - V39 = Runtime.PYTHON_3_9 + V36 = {"runtime": Runtime.PYTHON_3_6, "image": Runtime.PYTHON_3_6.bundling_image.image} + V37 = {"runtime": Runtime.PYTHON_3_7, "image": Runtime.PYTHON_3_7.bundling_image.image} + V38 = {"runtime": Runtime.PYTHON_3_8, "image": Runtime.PYTHON_3_8.bundling_image.image} + V39 = {"runtime": Runtime.PYTHON_3_9, "image": Runtime.PYTHON_3_9.bundling_image.image} class Infrastructure: def __init__(self, stack_name: 
str, handlers_dir: str, config: dict, environment_variables: dict) -> None: - session = boto3.Session(profile_name="aws-mploski-root") + session = boto3.Session() self.s3_client = session.client("s3") self.lambda_client = session.client("lambda") self.cf_client = session.client("cloudformation") @@ -33,11 +35,9 @@ def __init__(self, stack_name: str, handlers_dir: str, config: dict, environment self.config = config self.environment_variables = environment_variables - def deploy(self) -> dict: - + def deploy(self) -> dict[str, str]: handlers = self._find_files(directory=self.handlers_dir, only_py=True) - - template, asset_root_dir, artifact = self._prepare_stack( + template, asset_root_dir = self.prepare_stack( handlers=handlers, handlers_dir=self.handlers_dir, stack_name=self.stack_name, @@ -53,28 +53,13 @@ def deploy(self) -> dict: def delete(self): self.cf_client.delete_stack(StackName=self.stack_name) - def _find_files(self, directory, only_py=False) -> list: - file_paths = [] - for root, _, files in os.walk(directory): - for filename in files: - if only_py: - if filename.endswith(".py"): - file_paths.append(os.path.join(root, filename)) - else: - file_paths.append(os.path.join(root, filename)) - return file_paths - # Create CDK cloud assembly code - def _prepare_stack(self, handlers, handlers_dir, stack_name, environment_variables, **config): + def prepare_stack( + self, handlers: list[str], handlers_dir: str, stack_name: str, environment_variables: dict, **config: dict + ): integration_test_app = App() stack = Stack(integration_test_app, stack_name) - powertools_layer = aws_lambda_python_alpha.PythonLayerVersion( - stack, - "aws-lambda-powertools", - layer_version_name="aws-lambda-powertools", - entry=".", - compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value], - ) + powertools_layer = self._create_layer(stack) code = Code.from_asset(handlers_dir) for filename_path in handlers: @@ -82,7 +67,7 @@ def _prepare_stack(self, handlers, handlers_dir, stack_name, environment_variabl function_python = Function( stack, f"{filename}-lambda", - runtime=PythonVersion[PYTHON_RUNTIME_VERSION].value, + runtime=PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"], code=code, handler=f"{filename}.lambda_handler", layers=[powertools_layer], @@ -101,10 +86,47 @@ def _prepare_stack(self, handlers, handlers_dir, stack_name, environment_variabl return ( integration_test_app.synth().get_stack_by_name(stack_name).template, integration_test_app.synth().directory, - integration_test_app.synth().artifacts, ) - def _upload_assets(self, template, asset_root_dir): + def _find_files(self, directory: str, only_py: bool = False) -> list: + file_paths = [] + for root, _, files in os.walk(directory): + for filename in files: + if only_py: + if filename.endswith(".py"): + file_paths.append(os.path.join(root, filename)) + else: + file_paths.append(os.path.join(root, filename)) + return file_paths + + def _create_layer(self, stack): + output_dir = Path(AssetStaging.BUNDLING_OUTPUT_DIR, "python") + input_dir = Path(AssetStaging.BUNDLING_INPUT_DIR, "aws_lambda_powertools") + powertools_layer = LayerVersion( + stack, + "aws-lambda-powertools", + layer_version_name="aws-lambda-powertools", + compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]], + code=Code.from_asset( + path=".", + exclude=["*.pyc"], + bundling=BundlingOptions( + image=DockerImage.from_build( + str(Path(__file__).parent), + build_args={"IMAGE": PythonVersion[PYTHON_RUNTIME_VERSION].value["image"]}, + ), + command=[ + 
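+                    # one shell pipeline: export the locked dependencies,
+                    # pip-install them into the layer staging dir, then copy in
+                    # the aws_lambda_powertools package itself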
"bash", + "-c", + f"poetry export --with-credentials --format requirements.txt --output requirements.txt && pip install -r requirements.txt -t {output_dir} && cp -R {input_dir} {output_dir}", + ], + ), + ), + ) + + return powertools_layer + + def _upload_assets(self, template: dict, asset_root_dir: str): assets = self._find_assets(template, self.account_id, self.region) @@ -124,7 +146,7 @@ def _upload_assets(self, template, asset_root_dir): buf.seek(0) self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) - def _deploy_stack(self, stack_name, template): + def _deploy_stack(self, stack_name: str, template: Any): response = self.cf_client.create_stack( StackName=stack_name, TemplateBody=yaml.dump(template), @@ -137,9 +159,9 @@ def _deploy_stack(self, stack_name, template): response = self.cf_client.describe_stacks(StackName=stack_name) return response - def _find_assets(self, template, account_id, region): + def _find_assets(self, template: dict, account_id: str, region: str): assets = {} - for name, resource in template["Resources"].items(): + for _, resource in template["Resources"].items(): bucket = None S3Key = None @@ -153,7 +175,8 @@ def _find_assets(self, template, account_id, region): assets[S3Key] = ( bucket["Fn::Sub"].replace("${AWS::AccountId}", account_id).replace("${AWS::Region}", region) ) + return assets - def _transform_output(self, outputs): + def _transform_output(self, outputs: dict): return {output["OutputKey"]: output["OutputValue"] for output in outputs if output["OutputKey"]} diff --git a/tests/e2e/utils/py.typed b/tests/e2e/utils/py.typed new file mode 100644 index 00000000000..e69de29bb2d From 2cb4c40da83dc9963662f755fcef794f56dfd977 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Mon, 13 Jun 2022 22:48:26 +0200 Subject: [PATCH 06/72] Fix different python runtimes errors --- .github/workflows/run-e2e-tests.yml | 8 +------- tests/e2e/conftest.py | 14 +++++++++++--- tests/e2e/logger/test_logger.py | 2 -- tests/e2e/utils/infrastructure.py | 27 ++++++++++++++------------- 4 files changed, 26 insertions(+), 25 deletions(-) diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index d0e311042d2..3620944661b 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -6,19 +6,13 @@ env: E2E_TESTS_PATH: tests/e2e/ jobs: run: - ######################### - # Force Github action to run only a single job at a time (based on the group name) - # This is to prevent "race-condition" in building e2e tests infrastructure - ######################### - concurrency: - group: e2e-tests runs-on: ubuntu-latest permissions: id-token: write # needed to interact with GitHub's OIDC Token endpoint. 
contents: read strategy: matrix: - version: ["3.8", "3.9", "3.10"] + version: ["3.6", "3.7", "3.8", "3.9"] steps: - name: "Checkout" uses: actions/checkout@v3 diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 0db34008d69..8d2a147f51a 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,6 +1,14 @@ import datetime +import sys import uuid -from typing import Generator, TypedDict + +# We only need typing_extensions for python versions <3.8 +if sys.version_info >= (3, 8): + from typing import TypedDict +else: + from typing_extensions import TypedDict + +from typing import Dict, Generator import pytest from e2e.utils import helpers, infrastructure @@ -8,11 +16,11 @@ class LambdaConfig(TypedDict): parameters: dict - environment_variables: dict[str, str] + environment_variables: Dict[str, str] class LambdaExecution(TypedDict): - arns: dict[str, str] + arns: Dict[str, str] execution_time: datetime.datetime diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index e325be018ee..402c7b3e080 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -1,5 +1,3 @@ -from typing import TypedDict - import boto3 import pytest diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 8c4298a98fe..26dac696eca 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -4,7 +4,7 @@ import zipfile from enum import Enum from pathlib import Path -from typing import Any +from typing import Dict, List import boto3 import yaml @@ -29,13 +29,13 @@ def __init__(self, stack_name: str, handlers_dir: str, config: dict, environment self.cf_client = session.client("cloudformation") self.s3_resource = session.resource("s3") self.account_id = session.client("sts").get_caller_identity()["Account"] - self.region = boto3.Session().region_name + self.region = session.region_name self.stack_name = stack_name self.handlers_dir = handlers_dir self.config = config self.environment_variables = environment_variables - def deploy(self) -> dict[str, str]: + def deploy(self) -> Dict[str, str]: handlers = self._find_files(directory=self.handlers_dir, only_py=True) template, asset_root_dir = self.prepare_stack( handlers=handlers, @@ -55,7 +55,7 @@ def delete(self): # Create CDK cloud assembly code def prepare_stack( - self, handlers: list[str], handlers_dir: str, stack_name: str, environment_variables: dict, **config: dict + self, handlers: List[str], handlers_dir: str, stack_name: str, environment_variables: dict, **config: dict ): integration_test_app = App() stack = Stack(integration_test_app, stack_name) @@ -99,9 +99,9 @@ def _find_files(self, directory: str, only_py: bool = False) -> list: file_paths.append(os.path.join(root, filename)) return file_paths - def _create_layer(self, stack): - output_dir = Path(AssetStaging.BUNDLING_OUTPUT_DIR, "python") - input_dir = Path(AssetStaging.BUNDLING_INPUT_DIR, "aws_lambda_powertools") + def _create_layer(self, stack: Stack): + output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python") + input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools") powertools_layer = LayerVersion( stack, "aws-lambda-powertools", @@ -109,7 +109,6 @@ def _create_layer(self, stack): compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]], code=Code.from_asset( path=".", - exclude=["*.pyc"], bundling=BundlingOptions( image=DockerImage.from_build( str(Path(__file__).parent), @@ -118,12 +117,14 @@ def _create_layer(self, stack): 
command=[ "bash", "-c", - f"poetry export --with-credentials --format requirements.txt --output requirements.txt && pip install -r requirements.txt -t {output_dir} && cp -R {input_dir} {output_dir}", + f"poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\ + pip install -r /tmp/requirements.txt -t {output_dir} &&\ + cp -R {input_dir} {output_dir} &&\ + find {output_dir}/ -regex '^.*\\(__pycache__\\|\\.py[co]\\)$' -delete", ], ), ), ) - return powertools_layer def _upload_assets(self, template: dict, asset_root_dir: str): @@ -146,16 +147,16 @@ def _upload_assets(self, template: dict, asset_root_dir: str): buf.seek(0) self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) - def _deploy_stack(self, stack_name: str, template: Any): + def _deploy_stack(self, stack_name: str, template: dict): response = self.cf_client.create_stack( StackName=stack_name, TemplateBody=yaml.dump(template), TimeoutInMinutes=10, - OnFailure="DO_NOTHING", + OnFailure="ROLLBACK", Capabilities=["CAPABILITY_IAM"], ) waiter = self.cf_client.get_waiter("stack_create_complete") - waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 2, "MaxAttempts": 50}) + waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 10, "MaxAttempts": 50}) response = self.cf_client.describe_stacks(StackName=stack_name) return response From bb0080a316fdd7ad23f640fce50c3575a8514aa5 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Mon, 13 Jun 2022 23:44:21 +0200 Subject: [PATCH 07/72] Remove cdk cli installation --- Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/Makefile b/Makefile index 719c4a63484..d11a567ea58 100644 --- a/Makefile +++ b/Makefile @@ -8,7 +8,6 @@ dev: pip install --upgrade pip pre-commit poetry poetry install --extras "pydantic" pre-commit install - npm install -g aws-cdk format: poetry run isort aws_lambda_powertools tests From 62ddaf77c011b2c4f85c5c7330340a406f80b8c1 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Tue, 14 Jun 2022 00:00:33 +0200 Subject: [PATCH 08/72] Fix conflicts --- poetry.lock | 204 ++++++++++++++++++++++++++++++++----------------- pyproject.toml | 128 ++++++++++++++++--------------- 2 files changed, 200 insertions(+), 132 deletions(-) diff --git a/poetry.lock b/poetry.lock index f03c0dd53b5..02220b76823 100644 --- a/poetry.lock +++ b/poetry.lock @@ -245,8 +245,8 @@ trio = ["trio (>=0.14.0)", "sniffio (>=1.1)"] [[package]] name = "email-validator" -version = "1.1.3" -description = "A robust email syntax and deliverability validation library for Python 2.x/3.x." +version = "1.2.1" +description = "A robust email syntax and deliverability validation library." 
category = "main" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" @@ -265,7 +265,7 @@ python-versions = "*" [[package]] name = "exceptiongroup" -version = "1.0.0rc5" +version = "1.0.0rc8" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false @@ -712,7 +712,7 @@ python-versions = ">=3.6" [[package]] name = "mypy" -version = "0.950" +version = "0.961" description = "Optional static typing for Python" category = "dev" optional = false @@ -729,6 +729,50 @@ dmypy = ["psutil (>=4.0)"] python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] +[[package]] +name = "mypy-boto3-appconfig" +version = "1.24.0" +description = "Type annotations for boto3.AppConfig 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "mypy-boto3-dynamodb" +version = "1.24.0" +description = "Type annotations for boto3.DynamoDB 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "mypy-boto3-secretsmanager" +version = "1.24.0" +description = "Type annotations for boto3.SecretsManager 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[[package]] +name = "mypy-boto3-ssm" +version = "1.24.0" +description = "Type annotations for boto3.SSM 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -829,8 +873,8 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pydantic" -version = "1.9.0" -description = "Data validation and settings management using python 3.6 type hinting" +version = "1.9.1" +description = "Data validation and settings management using python type hints" category = "main" optional = true python-versions = ">=3.6.1" @@ -1153,7 +1197,7 @@ python-versions = "*" [[package]] name = "typing-extensions" -version = "4.0.1" +version = "4.1.1" description = "Backported and Experimental Type Hints for Python 3.6+" category = "main" optional = false @@ -1222,7 +1266,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "2c4ec9fac23dea3893cf3c60d5ed5728d63c4c46252a14ffbee52d30ebaec74f" +content-hash = "e457c68bd754118733c7ad1c54d389f4aa3b06164d947fae5d682566e202b776" [metadata.files] atomicwrites = [ @@ -1345,15 +1389,15 @@ dnspython = [ {file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"}, ] email-validator = [ - {file = "email_validator-1.1.3-py2.py3-none-any.whl", hash = "sha256:5675c8ceb7106a37e40e2698a57c056756bf3f272cfa8682a4f87ebd95d8440b"}, - {file = "email_validator-1.1.3.tar.gz", hash = "sha256:aa237a65f6f4da067119b7df3f13e89c25c051327b2b5b66dc075f33d62480d7"}, + {file = "email_validator-1.2.1-py2.py3-none-any.whl", hash = "sha256:c8589e691cf73eb99eed8d10ce0e9cbb05a0886ba920c8bcb7c82873f4c5789c"}, + {file = "email_validator-1.2.1.tar.gz", hash = "sha256:6757aea012d40516357c0ac2b1a4c31219ab2f899d26831334c5d069e8b6c3d8"}, ] eradicate = [ {file = "eradicate-2.0.0.tar.gz", hash = 
"sha256:27434596f2c5314cc9b31410c93d8f7e8885747399773cd088d3adea647a60c8"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.0rc5-py3-none-any.whl", hash = "sha256:295a9d7847f9ad08267f47c701a676ec70a64200a360dd49eb513f72209b09f4"}, - {file = "exceptiongroup-1.0.0rc5.tar.gz", hash = "sha256:665422550b9653acd46e9cd35d933f28c5158ca4c058c53619cfa112915cd69e"}, + {file = "exceptiongroup-1.0.0rc8-py3-none-any.whl", hash = "sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035"}, + {file = "exceptiongroup-1.0.0rc8.tar.gz", hash = "sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a"}, ] execnet = [ {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, @@ -1560,29 +1604,45 @@ mkdocs-material-extensions = [ {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"}, ] mypy = [ - {file = "mypy-0.950-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b"}, - {file = "mypy-0.950-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0"}, - {file = "mypy-0.950-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e7647df0f8fc947388e6251d728189cfadb3b1e558407f93254e35abc026e22"}, - {file = "mypy-0.950-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eaff8156016487c1af5ffa5304c3e3fd183edcb412f3e9c72db349faf3f6e0eb"}, - {file = "mypy-0.950-cp310-cp310-win_amd64.whl", hash = "sha256:563514c7dc504698fb66bb1cf897657a173a496406f1866afae73ab5b3cdb334"}, - {file = "mypy-0.950-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dd4d670eee9610bf61c25c940e9ade2d0ed05eb44227275cce88701fee014b1f"}, - {file = "mypy-0.950-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca75ecf2783395ca3016a5e455cb322ba26b6d33b4b413fcdedfc632e67941dc"}, - {file = "mypy-0.950-cp36-cp36m-win_amd64.whl", hash = "sha256:6003de687c13196e8a1243a5e4bcce617d79b88f83ee6625437e335d89dfebe2"}, - {file = "mypy-0.950-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c653e4846f287051599ed8f4b3c044b80e540e88feec76b11044ddc5612ffed"}, - {file = "mypy-0.950-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e19736af56947addedce4674c0971e5dceef1b5ec7d667fe86bcd2b07f8f9075"}, - {file = "mypy-0.950-cp37-cp37m-win_amd64.whl", hash = "sha256:ef7beb2a3582eb7a9f37beaf38a28acfd801988cde688760aea9e6cc4832b10b"}, - {file = "mypy-0.950-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0112752a6ff07230f9ec2f71b0d3d4e088a910fdce454fdb6553e83ed0eced7d"}, - {file = "mypy-0.950-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee0a36edd332ed2c5208565ae6e3a7afc0eabb53f5327e281f2ef03a6bc7687a"}, - {file = "mypy-0.950-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77423570c04aca807508a492037abbd72b12a1fb25a385847d191cd50b2c9605"}, - {file = "mypy-0.950-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ce6a09042b6da16d773d2110e44f169683d8cc8687e79ec6d1181a72cb028d2"}, - {file = "mypy-0.950-cp38-cp38-win_amd64.whl", hash = "sha256:5b231afd6a6e951381b9ef09a1223b1feabe13625388db48a8690f8daa9b71ff"}, - {file = "mypy-0.950-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:0384d9f3af49837baa92f559d3fa673e6d2652a16550a9ee07fc08c736f5e6f8"}, - {file = "mypy-0.950-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1fdeb0a0f64f2a874a4c1f5271f06e40e1e9779bf55f9567f149466fc7a55038"}, - {file = "mypy-0.950-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:61504b9a5ae166ba5ecfed9e93357fd51aa693d3d434b582a925338a2ff57fd2"}, - {file = "mypy-0.950-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a952b8bc0ae278fc6316e6384f67bb9a396eb30aced6ad034d3a76120ebcc519"}, - {file = "mypy-0.950-cp39-cp39-win_amd64.whl", hash = "sha256:eaea21d150fb26d7b4856766e7addcf929119dd19fc832b22e71d942835201ef"}, - {file = "mypy-0.950-py3-none-any.whl", hash = "sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb"}, - {file = "mypy-0.950.tar.gz", hash = "sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de"}, + {file = "mypy-0.961-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0"}, + {file = "mypy-0.961-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15"}, + {file = "mypy-0.961-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3"}, + {file = "mypy-0.961-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e"}, + {file = "mypy-0.961-cp310-cp310-win_amd64.whl", hash = "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24"}, + {file = "mypy-0.961-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723"}, + {file = "mypy-0.961-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b"}, + {file = "mypy-0.961-cp36-cp36m-win_amd64.whl", hash = "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d"}, + {file = "mypy-0.961-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813"}, + {file = "mypy-0.961-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e"}, + {file = "mypy-0.961-cp37-cp37m-win_amd64.whl", hash = "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a"}, + {file = "mypy-0.961-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6"}, + {file = "mypy-0.961-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6"}, + {file = "mypy-0.961-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d"}, + {file = "mypy-0.961-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b"}, + {file = "mypy-0.961-cp38-cp38-win_amd64.whl", hash = "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569"}, + {file = "mypy-0.961-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932"}, + 
{file = "mypy-0.961-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5"}, + {file = "mypy-0.961-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648"}, + {file = "mypy-0.961-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950"}, + {file = "mypy-0.961-cp39-cp39-win_amd64.whl", hash = "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56"}, + {file = "mypy-0.961-py3-none-any.whl", hash = "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66"}, + {file = "mypy-0.961.tar.gz", hash = "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492"}, +] +mypy-boto3-appconfig = [ + {file = "mypy-boto3-appconfig-1.24.0.tar.gz", hash = "sha256:3bb38c2819b78c72fd9c031058edf5e547ad549d58e052928a4f397823a51dbd"}, + {file = "mypy_boto3_appconfig-1.24.0-py3-none-any.whl", hash = "sha256:ca53b0b9606f13257dd0feb800d36531f2eba54f46bd9db7765f69baf9583485"}, +] +mypy-boto3-dynamodb = [ + {file = "mypy-boto3-dynamodb-1.24.0.tar.gz", hash = "sha256:a7de204a173dffbee972357a69bf5e59fda169a587017e0d3c5446676342aa2e"}, + {file = "mypy_boto3_dynamodb-1.24.0-py3-none-any.whl", hash = "sha256:866f0f8ae44e266ea051f57179bf40132d8e89e6fa23abab6e71421b3c0cd794"}, +] +mypy-boto3-secretsmanager = [ + {file = "mypy-boto3-secretsmanager-1.24.0.tar.gz", hash = "sha256:6680c322df031b08ef79fcdb8ffdfb08d57d4925392f641348336926dc5c6b2e"}, + {file = "mypy_boto3_secretsmanager-1.24.0-py3-none-any.whl", hash = "sha256:7da281c49ae91e60fdbcd0015379ae4cc9dc9ff911836ee78a2652310e09f53e"}, +] +mypy-boto3-ssm = [ + {file = "mypy-boto3-ssm-1.24.0.tar.gz", hash = "sha256:bab58398947c3627a4e7610cd0f57b525c12fd1d0a6bb862400b6af0a4e684fc"}, + {file = "mypy_boto3_ssm-1.24.0-py3-none-any.whl", hash = "sha256:1f17055abb8d70f25e6ece2ef4c0dc74d585744c25a3a833c2985d74165ac0c6"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, @@ -1625,41 +1685,41 @@ pycodestyle = [ {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] pydantic = [ - {file = "pydantic-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5"}, - {file = "pydantic-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab624700dc145aa809e6f3ec93fb8e7d0f99d9023b713f6a953637429b437d37"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d7da6f1c1049eefb718d43d99ad73100c958a5367d30b9321b092771e96c25"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3c3b035103bd4e2e4a28da9da7ef2fa47b00ee4a9cf4f1a735214c1bcd05e0f6"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3011b975c973819883842c5ab925a4e4298dffccf7782c55ec3580ed17dc464c"}, - {file = "pydantic-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:086254884d10d3ba16da0588604ffdc5aab3f7f09557b998373e885c690dd398"}, - {file = "pydantic-1.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:0fe476769acaa7fcddd17cadd172b156b53546ec3614a4d880e5d29ea5fbce65"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8e9dcf1ac499679aceedac7e7ca6d8641f0193c591a2d090282aaf8e9445a46"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1e4c28f30e767fd07f2ddc6f74f41f034d1dd6bc526cd59e63a82fe8bb9ef4c"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c86229333cabaaa8c51cf971496f10318c4734cf7b641f08af0a6fbf17ca3054"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c0727bda6e38144d464daec31dff936a82917f431d9c39c39c60a26567eae3ed"}, - {file = "pydantic-1.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:dee5ef83a76ac31ab0c78c10bd7d5437bfdb6358c95b91f1ba7ff7b76f9996a1"}, - {file = "pydantic-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9c9bdb3af48e242838f9f6e6127de9be7063aad17b32215ccc36a09c5cf1070"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee7e3209db1e468341ef41fe263eb655f67f5c5a76c924044314e139a1103a2"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b6037175234850ffd094ca77bf60fb54b08b5b22bc85865331dd3bda7a02fa1"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b2571db88c636d862b35090ccf92bf24004393f85c8870a37f42d9f23d13e032"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b5ac0f1c83d31b324e57a273da59197c83d1bb18171e512908fe5dc7278a1d6"}, - {file = "pydantic-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bbbc94d0c94dd80b3340fc4f04fd4d701f4b038ebad72c39693c794fd3bc2d9d"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e0896200b6a40197405af18828da49f067c2fa1f821491bc8f5bde241ef3f7d7"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bdfdadb5994b44bd5579cfa7c9b0e1b0e540c952d56f627eb227851cda9db77"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:574936363cd4b9eed8acdd6b80d0143162f2eb654d96cb3a8ee91d3e64bf4cf9"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c556695b699f648c58373b542534308922c46a1cda06ea47bc9ca45ef5b39ae6"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f947352c3434e8b937e3aa8f96f47bdfe6d92779e44bb3f41e4c213ba6a32145"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5e48ef4a8b8c066c4a31409d91d7ca372a774d0212da2787c0d32f8045b1e034"}, - {file = "pydantic-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:96f240bce182ca7fe045c76bcebfa0b0534a1bf402ed05914a6f1dadff91877f"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:815ddebb2792efd4bba5488bc8fde09c29e8ca3227d27cf1c6990fc830fd292b"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c5b77947b9e85a54848343928b597b4f74fc364b70926b3c4441ff52620640c"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c68c3bc88dbda2a6805e9a142ce84782d3930f8fdd9655430d8576315ad97ce"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a79330f8571faf71bf93667d3ee054609816f10a259a109a0738dac983b23c3"}, - 
{file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f5a64b64ddf4c99fe201ac2724daada8595ada0d102ab96d019c1555c2d6441d"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a733965f1a2b4090a5238d40d983dcd78f3ecea221c7af1497b845a9709c1721"}, - {file = "pydantic-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cc6a4cb8a118ffec2ca5fcb47afbacb4f16d0ab8b7350ddea5e8ef7bcc53a16"}, - {file = "pydantic-1.9.0-py3-none-any.whl", hash = "sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3"}, - {file = "pydantic-1.9.0.tar.gz", hash = "sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a"}, + {file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"}, + {file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"}, + {file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"}, + {file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"}, + {file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"}, + {file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"}, + {file = 
"pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"}, + {file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"}, + {file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"}, + {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"}, + {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"}, ] pyflakes = [ {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, @@ -1844,8 +1904,8 @@ typed-ast = [ {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ - {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, - {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, + {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, ] urllib3 = [ {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, diff --git a/pyproject.toml 
b/pyproject.toml index 5bac979a9d2..d25f7dca944 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,104 +1,116 @@ [tool.poetry] -authors = ["Amazon Web Services"] -classifiers = [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Natural Language :: English", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", -] +name = "aws_lambda_powertools" +version = "1.26.1" description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." +authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] +classifiers=[ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", +] +repository="https://github.com/awslabs/aws-lambda-powertools-python" +readme = "README.md" keywords = ["aws_lambda_powertools", "aws", "tracing", "logging", "lambda", "powertools", "feature_flags", "idempotency", "middleware"] license = "MIT-0" -name = "aws_lambda_powertools" -readme = "README.md" -repository = "https://github.com/awslabs/aws-lambda-powertools-python" -version = "1.25.10" [tool.poetry.dependencies] +python = "^3.6.2" aws-xray-sdk = "^2.8.0" -boto3 = "^1.18" -email-validator = {version = "*", optional = true} fastjsonschema = "^2.14.5" -pydantic = {version = "^1.8.2", optional = true} -python = "^3.6.2" +boto3 = "^1.18" +pydantic = {version = "^1.8.2", optional = true } +email-validator = {version = "*", optional = true } [tool.poetry.dev-dependencies] # 2022-04-21: jmespath was removed, to be re-added once we drop python 3.6. 
# issue #1148 -aws-cdk-lib = "^2.23.0" -bandit = "^1.7.1" -black = "^21.12b0" coverage = {extras = ["toml"], version = "^6.2"} +pytest = "^7.0.1" +black = "^21.12b0" flake8 = "^4.0.1" flake8-black = "^0.2.3" -flake8-bugbear = "^22.4.25" flake8-builtins = "^1.5.3" flake8-comprehensions = "^3.7.0" flake8-debugger = "^4.0.0" -flake8-eradicate = "^1.2.1" flake8-fixme = "^1.1.1" flake8-isort = "^4.1.1" flake8-variables-names = "^0.0.4" isort = "^5.10.1" -mike = "^0.6.0" -mkdocs-git-revision-date-plugin = "^0.3.2" -mkdocs-material = "^8.2.7" -mypy = "^0.950" -pdoc3 = "^0.10.0" -pytest = "^7.0.1" -pytest-asyncio = "^0.16.0" pytest-cov = "^3.0.0" pytest-mock = "^3.5.1" -pytest-xdist = "^2.5.0" +pdoc3 = "^0.10.0" +pytest-asyncio = "^0.16.0" +bandit = "^1.7.1" radon = "^5.1.0" -retry = "^0.9.2" xenon = "^0.9.0" +flake8-eradicate = "^1.2.1" +flake8-bugbear = "^22.4.25" +mkdocs-git-revision-date-plugin = "^0.3.2" +mike = "^0.6.0" +mypy = "^0.961" +mkdocs-material = "^8.2.7" +mypy-boto3-secretsmanager = "^1.24.0" +mypy-boto3-ssm = "^1.24.0" +mypy-boto3-appconfig = "^1.24.0" +mypy-boto3-dynamodb = "^1.24.0" +retry = "^0.9.2" +pytest-xdist = "^2.5.0" +aws-cdk-lib = "^2.23.0" + [tool.poetry.extras] pydantic = ["pydantic", "email-validator"] [tool.coverage.run] -branch = true -omit = [ - "tests/*", - "aws_lambda_powertools/exceptions/*", - "aws_lambda_powertools/utilities/parser/types.py", - "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py", -] source = ["aws_lambda_powertools"] +omit = ["tests/*", "aws_lambda_powertools/exceptions/*", "aws_lambda_powertools/utilities/parser/types.py", "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py"] +branch = true [tool.coverage.html] directory = "test_report" title = "Lambda Powertools Test Coverage" [tool.coverage.report] +fail_under = 90 exclude_lines = [ - # Have to re-enable the standard pragma - "pragma: no cover", # Don't complain about missing debug-only code: - "def __repr__", - "if self.debug", # Don't complain if tests don't hit defensive assertion code: - "raise AssertionError", - "raise NotImplementedError", # Don't complain if non-runnable code isn't run: - "if 0:", - "if __name__ == .__main__.:", # Ignore type function overload - "@overload", + # Have to re-enable the standard pragma + "pragma: no cover", + + # Don't complain about missing debug-only code: + "def __repr__", + "if self.debug", + + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", + + # Don't complain if non-runnable code isn't run: + "if 0:", + "if __name__ == .__main__.:", + + # Ignore runtime type checking + "if TYPE_CHECKING:", + + # Ignore type function overload + "@overload", ] -fail_under = 90 [tool.isort] -force_grid_wrap = 0 +multi_line_output = 3 include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true line_length = 120 -multi_line_output = 3 skip = "example" -use_parentheses = true [tool.black] +line-length = 120 exclude = ''' ( @@ -117,20 +129,16 @@ exclude = ''' | example ) ''' -line-length = 120 [tool.pytest.ini_options] -addopts = "-ra -vv" -markers = [ - "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", - "e2e: marks e2e tests to be deselected (deselect with '-m \"not e2e\"')", -] minversion = "6.0" +addopts = "-ra -vv" testpaths = "./tests" +markers = "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')" [build-system] -build-backend = "poetry.masonry.api" requires = ["poetry>=0.12"] +build-backend = 
"poetry.masonry.api" # NOTE # As of now, Feb 2020, flake8 don't support pyproject From 885d5ea1587f4e2f45e589986d4f9d1336f53dc5 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Tue, 14 Jun 2022 20:40:38 +0200 Subject: [PATCH 09/72] Drop python 3.6 support. Use current secret from github actions --- .github/workflows/run-e2e-tests.yml | 6 +++--- tests/e2e/utils/infrastructure.py | 1 - 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 3620944661b..bc74cecfafc 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -12,7 +12,7 @@ jobs: contents: read strategy: matrix: - version: ["3.6", "3.7", "3.8", "3.9"] + version: ["3.7", "3.8", "3.9"] steps: - name: "Checkout" uses: actions/checkout@v3 @@ -27,9 +27,9 @@ jobs: - name: Install dependencies run: make dev - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1.6.1 + uses: aws-actions/configure-aws-credentials@v1 with: - role-to-assume: ${{ secrets.E2E_DEPLOYMENT_ROLE_ARN }} + role-to-assume: ${{ secrets.AWS_TEST_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Test run: make e2e-test diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 26dac696eca..65f0bdd3475 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -15,7 +15,6 @@ class PythonVersion(Enum): - V36 = {"runtime": Runtime.PYTHON_3_6, "image": Runtime.PYTHON_3_6.bundling_image.image} V37 = {"runtime": Runtime.PYTHON_3_7, "image": Runtime.PYTHON_3_7.bundling_image.image} V38 = {"runtime": Runtime.PYTHON_3_8, "image": Runtime.PYTHON_3_8.bundling_image.image} V39 = {"runtime": Runtime.PYTHON_3_9, "image": Runtime.PYTHON_3_9.bundling_image.image} From 2b43d3b876a29c29d032cddbd2a9c539fad20eb4 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Tue, 14 Jun 2022 21:42:17 +0200 Subject: [PATCH 10/72] Fix PR checks errors --- pyproject.toml | 5 ++++- tests/e2e/conftest.py | 2 +- tests/e2e/logger/test_logger.py | 5 ----- tests/e2e/utils/Dockerfile | 2 +- tests/e2e/utils/infrastructure.py | 6 +++--- 5 files changed, 9 insertions(+), 11 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4c788b3bd08..a0985282d23 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -133,7 +133,10 @@ exclude = ''' minversion = "6.0" addopts = "-ra -vv" testpaths = "./tests" -markers = "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')" +markers = [ + "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", + "e2e: marks e2e tests to be deselected (deselect with '-m \"not e2e\"')", +] [build-system] requires = ["poetry>=0.12"] diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 8d2a147f51a..0bbaa68dc51 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -40,7 +40,7 @@ def execute_lambda(config, request) -> Generator[LambdaExecution, None, None]: lambda_arns = infra.deploy() execution_time = datetime.datetime.utcnow() - for name, arn in lambda_arns.items(): + for _, arn in lambda_arns.items(): helpers.trigger_lambda(lambda_arn=arn, client=infra.lambda_client) yield {"arns": lambda_arns, "execution_time": execution_time} # Ensure stack deletion is triggered at the end of the test session diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 402c7b3e080..4a997258e44 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -161,8 +161,3 @@ 
def test_no_context_lambda_event_not_logged(execute_lambda: conftest.LambdaExecu
 
     # THEN
     assert not any(log.message == {} for log in filtered_logs)
-
-
-### Add tests for cold start and non-cold start executions
-### Test errors
-### Test child loggers
diff --git a/tests/e2e/utils/Dockerfile b/tests/e2e/utils/Dockerfile
index 69bcca5033c..462d9c645fb 100644
--- a/tests/e2e/utils/Dockerfile
+++ b/tests/e2e/utils/Dockerfile
@@ -11,4 +11,4 @@ ARG HTTPS_PROXY
 RUN pip install --upgrade pip
 RUN pip install pipenv poetry
 
-CMD [ "python" ]
\ No newline at end of file
+CMD [ "python" ]
diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py
index 65f0bdd3475..bc6037dfd42 100644
--- a/tests/e2e/utils/infrastructure.py
+++ b/tests/e2e/utils/infrastructure.py
@@ -139,10 +139,10 @@ def _upload_assets(self, template: dict, asset_root_dir: str):
             buf = io.BytesIO()
             asset_dir = f"{asset_root_dir}/asset.{Path(s3_key).with_suffix('')}"
             os.chdir(asset_dir)
-            files = self._find_files(directory=".")
+            asset_files = self._find_files(directory=".")
             with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as zf:
-                for file in files:
-                    zf.write(os.path.join(file))
+                for asset_file in asset_files:
+                    zf.write(os.path.join(asset_file))
             buf.seek(0)
             self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key)

From add97ca2c01336513f959831dcf14b3a5b6e959c Mon Sep 17 00:00:00 2001
From: Michal Ploski
Date: Tue, 14 Jun 2022 22:02:51 +0200
Subject: [PATCH 11/72] Add note about testing to maintainers document

---
 MAINTAINERS.md | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index a706b94c6ab..f64d0299315 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -13,6 +13,7 @@
   - [Triage Bug Reports](#triage-bug-reports)
   - [Triage RFCs](#triage-rfcs)
   - [Releasing a new version](#releasing-a-new-version)
+    - [Run end to end tests](#run-end-to-end-tests)
     - [Changelog generation](#changelog-generation)
     - [Bumping the version](#bumping-the-version)
     - [Drafting release notes](#drafting-release-notes)
@@ -171,7 +172,15 @@ Some examples using our initial and new RFC templates: #92, #94, #95, #991, #122
 
 Firstly, make sure you are using the `develop` branch and it is up to date with the origin.
 
-There are three main steps to release a new version: Changelog generation, version bumping, and drafting release notes.
+There are four main steps to release a new version: Run end to end tests, Changelog generation, version bumping, and drafting release notes.
+
+#### Run end to end tests
+
+You can run tests either from your local machine's shell by exporting the `AWS_PROFILE` environment variable and running `make e2e-test`.
+You may also do this directly from GitHub Console. Go to the Actions tab and choose the `run-e2e-tests` workflow from the workflow list. Pick the branch and run the tests.
+
+> **Q: What if there's an error?**
+To be agreed

From ec777f028eba38f1cde1247aeceb1f765062c539 Mon Sep 17 00:00:00 2001
From: mploski
Date: Sun, 19 Jun 2022 22:32:30 +0200
Subject: [PATCH 12/72] Update .github/workflows/run-e2e-tests.yml

Co-authored-by: Heitor Lessa
---
 .github/workflows/run-e2e-tests.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml
index bc74cecfafc..b05c26c2995 100644
--- a/.github/workflows/run-e2e-tests.yml
+++ b/.github/workflows/run-e2e-tests.yml
@@ -8,7 +8,7 @@ jobs:
   run:
     runs-on: ubuntu-latest
    permissions:
-      id-token: write # needed to interact with GitHub's OIDC Token endpoint.
+      id-token: write # needed to request JWT with GitHub's OIDC Token endpoint. docs: https://bit.ly/3MNgQO9
       contents: read
     strategy:
       matrix:

From f0030e0a9487f20b214d309f45221a6887a0f04c Mon Sep 17 00:00:00 2001
From: mploski
Date: Sun, 19 Jun 2022 22:32:38 +0200
Subject: [PATCH 13/72] Update .github/workflows/run-e2e-tests.yml

Co-authored-by: Heitor Lessa
---
 .github/workflows/run-e2e-tests.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml
index b05c26c2995..a79e5248299 100644
--- a/.github/workflows/run-e2e-tests.yml
+++ b/.github/workflows/run-e2e-tests.yml
@@ -1,6 +1,6 @@
 name: run-e2e-tests
 on:
-  workflow_dispatch: {}
+  workflow_dispatch:
 env:
   AWS_DEFAULT_REGION: us-east-1
   E2E_TESTS_PATH: tests/e2e/

From 003cd2b5bc68314dde56669dc3e618019254180e Mon Sep 17 00:00:00 2001
From: mploski
Date: Sun, 19 Jun 2022 22:33:26 +0200
Subject: [PATCH 14/72] Update MAINTAINERS.md

Co-authored-by: Heitor Lessa
---
 MAINTAINERS.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index f64d0299315..86f7bd62cb4 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -177,7 +177,7 @@ There are four main steps to release a new version: Run end to end tests, Change
 #### Run end to end tests
 
 You can run tests either from your local machine's shell by exporting the `AWS_PROFILE` environment variable and running `make e2e-test`.
-You may also do this directly from GitHub Console. Go to the Actions tab and choose the `run-e2e-tests` workflow from the workflow list. Pick the branch and run the tests.
+You may also do this directly from GitHub Console. Run the [run-e2e-tests workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/run-e2e-tests.yml) and pick the branch you want to run tests against.
 
 > **Q: What if there's an error?**
 To be agreed

From d58d3d545ce3860b6187b3b825ddb35c469ed5dc Mon Sep 17 00:00:00 2001
From: mploski
Date: Sun, 19 Jun 2022 22:36:25 +0200
Subject: [PATCH 15/72] Update tests/e2e/utils/helpers.py

Co-authored-by: Heitor Lessa
---
 tests/e2e/utils/helpers.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py
index 399c2096ecc..21e266bd4cc 100644
--- a/tests/e2e/utils/helpers.py
+++ b/tests/e2e/utils/helpers.py
@@ -30,7 +30,7 @@ def trigger_lambda(lambda_arn: str, client: Any):
 
 @lru_cache(maxsize=10, typed=False)
 @retry(ValueError, delay=1, jitter=1, tries=10)
-def get_logs(lambda_function_name: str, log_client: Any, start_time: int, **kwargs: dict):
+def get_logs(lambda_function_name: str, log_client: Any, start_time: int, **kwargs: dict) -> List[Log]:
     response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time)
     if not response["events"]:
         raise ValueError("Empty response from Cloudwatch Logs. Repeating...")

From 4fd86479975e2aa81a1f2e06db949685a05379de Mon Sep 17 00:00:00 2001
From: Michal Ploski
Date: Thu, 23 Jun 2022 20:56:05 +0200
Subject: [PATCH 16/72] Move end to end subsection to its own section under
 maintainer responsibilities

---
 MAINTAINERS.md | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 86f7bd62cb4..9e6c529af8b 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -166,15 +166,9 @@ When necessary, be upfront that the time to review, approve, and implement a RFC
 
 Some examples using our initial and new RFC templates: #92, #94, #95, #991, #1226
 
-### Releasing a new version
-
-> TODO: This is an area we want to increase automation while keeping communication at human level.
+### Run end to end tests on merges to the develop branch
-Firstly, make sure you are using the `develop` branch and it is up to date with the origin.
-
-There are four main steps to release a new version: Run end to end tests, Changelog generation, version bumping, and drafting release notes.
-
-#### Run end to end tests
+In order to run end to end tests you need to install the CDK CLI first and bootstrap your account with the `cdk bootstrap` command. For additional details, follow the [documentation](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html).
 
 You can run tests either from your local machine's shell by exporting the `AWS_PROFILE` environment variable and running `make e2e-test`.
 You may also do this directly from GitHub Console. Run the [run-e2e-tests workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/run-e2e-tests.yml) and pick the branch you want to run tests against.
 
@@ -182,6 +176,14 @@ You may also do this directly from GitHub Console. Run the [run-e2e-tests workfl
 > **Q: What if there's an error?**
 To be agreed
 
+### Releasing a new version
+
+> TODO: This is an area we want to increase automation while keeping communication at human level.
+
+Firstly, make sure you are using the `develop` branch and it is up to date with the origin.
+
+There are three main steps to release a new version: Changelog generation, version bumping, and drafting release notes.
+
 #### Changelog generation
 
 You can pre-generate a temporary CHANGELOG using `make changelog`. This will generate a `TMP_CHANGELOG.md` with all staged changes under the `unreleased` section.
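The `get_logs` helper touched in PATCH 15 above is the backbone of the suite's log assertions: CloudWatch Logs ingests events with a short delay, so the helper raises `ValueError` on an empty `filter_log_events` page and lets the `retry` decorator poll again, while `lru_cache` memoizes the parsed result per function and start time. A minimal self-contained sketch of that pattern (assuming the PyPI `retry` package pinned in pyproject.toml, and a plain-dict log model instead of the suite's pydantic `Log` class):

```python
import json
from functools import lru_cache
from typing import Any, List

from retry import retry


@lru_cache(maxsize=10, typed=False)
@retry(ValueError, delay=1, jitter=1, tries=10)
def get_logs(lambda_function_name: str, log_client: Any, start_time: int) -> List[dict]:
    # An empty page right after an invocation usually means CloudWatch has not
    # ingested the events yet; raising ValueError triggers another retry attempt.
    response = log_client.filter_log_events(
        logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time
    )
    if not response["events"]:
        raise ValueError("Empty response from CloudWatch Logs. Repeating...")
    # Skip the runtime's plain-text START/END/REPORT lines; Powertools handlers
    # emit structured JSON, so the remaining messages parse into dicts.
    return [
        json.loads(event["message"])
        for event in response["events"]
        if event["message"].startswith("{")
    ]
```

Because `lru_cache` wraps the retry-decorated function, repeated assertions against the same function and start time reuse the first successful fetch instead of re-polling CloudWatch.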
From b8e0eab3a09592a472bebdef5c11603e33b12439 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Thu, 23 Jun 2022 21:12:20 +0200 Subject: [PATCH 17/72] Remove unnecessary f-string from logger key modification --- tests/e2e/logger/handlers/basic_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e/logger/handlers/basic_handler.py b/tests/e2e/logger/handlers/basic_handler.py index 6ba1613964f..34d7fb4678a 100644 --- a/tests/e2e/logger/handlers/basic_handler.py +++ b/tests/e2e/logger/handlers/basic_handler.py @@ -12,6 +12,6 @@ def lambda_handler(event, context): logger.debug(MESSAGE) logger.info(MESSAGE) - logger.append_keys(**{f"{ADDITIONAL_KEY}": "test"}) + logger.append_keys(**{ADDITIONAL_KEY: "test"}) logger.info(MESSAGE) return "success" From 29e8ddce8752e5c82d0aab9c9c85efd87f59faa8 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Fri, 24 Jun 2022 13:00:15 +0200 Subject: [PATCH 18/72] Decouple infra creation step from lambda triggering --- tests/e2e/conftest.py | 39 ++++++++++++---- .../e2e/logger/handlers/no_context_handler.py | 1 + tests/e2e/logger/test_logger.py | 46 ++++++++++--------- tests/e2e/metrics/test_metrics.py | 4 +- tests/e2e/tracer/test_tracer.py | 6 +-- tests/e2e/utils/helpers.py | 2 +- 6 files changed, 60 insertions(+), 38 deletions(-) diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 0bbaa68dc51..672db2da279 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,6 +1,9 @@ import datetime import sys import uuid +from dataclasses import dataclass + +import boto3 # We only need typing_extensions for python versions <3.8 if sys.version_info >= (3, 8): @@ -8,7 +11,7 @@ else: from typing_extensions import TypedDict -from typing import Dict, Generator +from typing import Dict, Generator, Optional import pytest from e2e.utils import helpers, infrastructure @@ -19,13 +22,26 @@ class LambdaConfig(TypedDict): environment_variables: Dict[str, str] -class LambdaExecution(TypedDict): +@dataclass +class InfrastructureOutput: arns: Dict[str, str] execution_time: datetime.datetime + def get_lambda_arns(self) -> Dict[str, str]: + return self.arns + + def get_lambda_arn(self, name: str) -> Optional[str]: + return self.arns.get(name) + + def get_lambda_execution_time(self) -> datetime.datetime: + return self.execution_time + + def get_lambda_execution_time_timestamp(self) -> int: + return int(self.execution_time.timestamp() * 1000) + @pytest.fixture(scope="module") -def execute_lambda(config, request) -> Generator[LambdaExecution, None, None]: +def create_infrastructure(config, request) -> Generator[Dict[str, str], None, None]: stack_name = f"test-lambda-{uuid.uuid4()}" test_dir = request.fspath.dirname handlers_dir = f"{test_dir}/handlers/" @@ -36,12 +52,15 @@ def execute_lambda(config, request) -> Generator[LambdaExecution, None, None]: config=config["parameters"], environment_variables=config["environment_variables"], ) + yield infra.deploy() + infra.delete() - lambda_arns = infra.deploy() - execution_time = datetime.datetime.utcnow() - for _, arn in lambda_arns.items(): - helpers.trigger_lambda(lambda_arn=arn, client=infra.lambda_client) - yield {"arns": lambda_arns, "execution_time": execution_time} - # Ensure stack deletion is triggered at the end of the test session - infra.delete() +@pytest.fixture(scope="module") +def execute_lambda(create_infrastructure) -> InfrastructureOutput: + execution_time = datetime.datetime.utcnow() + session = boto3.Session() + client = session.client("lambda") + for _, arn in 
create_infrastructure.items(): + helpers.trigger_lambda(lambda_arn=arn, client=client) + return InfrastructureOutput(arns=create_infrastructure, execution_time=execution_time) diff --git a/tests/e2e/logger/handlers/no_context_handler.py b/tests/e2e/logger/handlers/no_context_handler.py index 3e4e31306fa..1347ba98d81 100644 --- a/tests/e2e/logger/handlers/no_context_handler.py +++ b/tests/e2e/logger/handlers/no_context_handler.py @@ -10,4 +10,5 @@ def lambda_handler(event, context): logger.info(MESSAGE) + logger.append_keys(**{ADDITIONAL_KEY: "test"}) return "success" diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 4a997258e44..6bff0a9aae9 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -18,15 +18,15 @@ def config() -> conftest.LambdaConfig: @pytest.mark.e2e -def test_basic_lambda_logs_visible(execute_lambda: conftest.LambdaExecution, config: conftest.LambdaConfig): +def test_basic_lambda_logs_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): # GIVEN - lambda_arn = execute_lambda["arns"]["basichandlerarn"] - timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN filtered_logs = helpers.get_logs( - lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client, run="first" + lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client ) # THEN @@ -38,10 +38,12 @@ def test_basic_lambda_logs_visible(execute_lambda: conftest.LambdaExecution, con @pytest.mark.e2e -def test_basic_lambda_no_debug_logs_visible(execute_lambda: conftest.LambdaExecution, config: conftest.LambdaConfig): +def test_basic_lambda_no_debug_logs_visible( + execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig +): # GIVEN - lambda_arn = execute_lambda["arns"]["basichandlerarn"] - timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN @@ -56,10 +58,10 @@ def test_basic_lambda_no_debug_logs_visible(execute_lambda: conftest.LambdaExecu @pytest.mark.e2e -def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.LambdaExecution): +def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - lambda_arn = execute_lambda["arns"]["basichandlerarn"] - timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN @@ -81,11 +83,11 @@ def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.LambdaExec @pytest.mark.e2e def test_basic_lambda_additional_key_persistence_basic_lambda( - execute_lambda: conftest.LambdaExecution, config: conftest.LambdaConfig + execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig ): # GIVEN - lambda_arn = execute_lambda["arns"]["basichandlerarn"] - timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") 
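     # (filter_log_events expects startTime in epoch milliseconds, which is what
     # get_lambda_execution_time_timestamp returns)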
# WHEN @@ -103,11 +105,11 @@ def test_basic_lambda_additional_key_persistence_basic_lambda( @pytest.mark.e2e -def test_basic_lambda_empty_event_logged(execute_lambda: conftest.LambdaExecution): +def test_basic_lambda_empty_event_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - lambda_arn = execute_lambda["arns"]["basichandlerarn"] - timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN @@ -120,11 +122,11 @@ def test_basic_lambda_empty_event_logged(execute_lambda: conftest.LambdaExecutio @pytest.mark.e2e -def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.LambdaExecution): +def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - lambda_arn = execute_lambda["arns"]["nocontexthandlerarn"] - timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + lambda_arn = execute_lambda.get_lambda_arn(name="nocontexthandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN @@ -147,11 +149,11 @@ def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.L @pytest.mark.e2e -def test_no_context_lambda_event_not_logged(execute_lambda: conftest.LambdaExecution): +def test_no_context_lambda_event_not_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - lambda_arn = execute_lambda["arns"]["nocontexthandlerarn"] - timestamp = int(execute_lambda["execution_time"].timestamp() * 1000) + lambda_arn = execute_lambda.get_lambda_arn(name="nocontexthandlerarn") + timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index 89158498766..aafb25ff986 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -21,8 +21,8 @@ def config() -> conftest.LambdaConfig: @pytest.mark.e2e -def test_basic_lambda_metric_visible(execute_lambda: conftest.LambdaExecution, config: conftest.LambdaConfig): - start_date = execute_lambda["execution_time"] +def test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): + start_date = execute_lambda.get_lambda_execution_time() end_date = start_date + datetime.timedelta(minutes=5) metrics = helpers.get_metrics( diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index b4d3bb2251f..23aa77f41e1 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -18,9 +18,9 @@ def config(): @pytest.mark.e2e -def test_basic_lambda_trace_visible(execute_lambda: conftest.LambdaExecution, config: conftest.LambdaConfig): - lambda_arn = execute_lambda["arns"]["basichandlerarn"] - start_date = execute_lambda["execution_time"] +def test_basic_lambda_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): + lambda_arn = execute_lambda.get_lambda_arns()["basichandlerarn"] + start_date = execute_lambda.get_lambda_execution_time() end_date = start_date + datetime.timedelta(minutes=5) trace = helpers.get_traces( diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py index 21e266bd4cc..9dd2ed9f89e 100644 --- a/tests/e2e/utils/helpers.py +++ b/tests/e2e/utils/helpers.py @@ -1,7 +1,7 @@ import json from datetime 
import datetime from functools import lru_cache -from typing import Any, Optional, Union +from typing import Any, List, Optional, Union from pydantic import BaseModel from retry import retry From be1a1bb6214ce78f013232d4ec3d58669b7c495f Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Tue, 28 Jun 2022 00:29:22 +0200 Subject: [PATCH 19/72] Implement strategy pattern to generalise infrastructure deployment --- tests/e2e/conftest.py | 9 +- tests/e2e/logger/test_logger.py | 41 ++++---- tests/e2e/tracer/test_tracer.py | 2 +- tests/e2e/utils/infrastructure.py | 166 +++++++++++++++++------------- 4 files changed, 117 insertions(+), 101 deletions(-) diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 672db2da279..2e0adfe4f13 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -46,13 +46,8 @@ def create_infrastructure(config, request) -> Generator[Dict[str, str], None, No test_dir = request.fspath.dirname handlers_dir = f"{test_dir}/handlers/" - infra = infrastructure.Infrastructure( - stack_name=stack_name, - handlers_dir=handlers_dir, - config=config["parameters"], - environment_variables=config["environment_variables"], - ) - yield infra.deploy() + infra = infrastructure.Infrastructure(stack_name=stack_name, handlers_dir=handlers_dir, config=config) + yield infra.deploy(Stack=infrastructure.InfrastructureStack) infra.delete() diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 6bff0a9aae9..dab860f2ccf 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -60,6 +60,16 @@ def test_basic_lambda_no_debug_logs_visible( @pytest.mark.e2e def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN + + required_keys = ( + "xray_trace_id", + "function_request_id", + "function_arn", + "function_memory_size", + "function_name", + "cold_start", + ) + lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") @@ -70,15 +80,7 @@ def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.Infrastruc ) # THEN - for log in filtered_logs: - assert ( - log.xray_trace_id - and log.function_request_id - and log.function_arn - and log.function_memory_size - and log.function_name - and str(log.cold_start) - ) + assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) @pytest.mark.e2e @@ -125,6 +127,15 @@ def test_basic_lambda_empty_event_logged(execute_lambda: conftest.Infrastructure def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN + + required_missing_keys = ( + "function_request_id", + "function_arn", + "function_memory_size", + "function_name", + "cold_start", + ) + lambda_arn = execute_lambda.get_lambda_arn(name="nocontexthandlerarn") timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") @@ -135,17 +146,7 @@ def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.I ) # THEN - assert not any( - ( - log.xray_trace_id - and log.function_request_id - and log.function_arn - and log.function_memory_size - and log.function_name - and str(log.cold_start) - ) - for log in filtered_logs - ) + assert not any(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_missing_keys) @pytest.mark.e2e diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index 
23aa77f41e1..ef2381da239 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -19,7 +19,7 @@ def config(): @pytest.mark.e2e def test_basic_lambda_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): - lambda_arn = execute_lambda.get_lambda_arns()["basichandlerarn"] + lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") start_date = execute_lambda.get_lambda_execution_time() end_date = start_date + datetime.timedelta(minutes=5) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index bc6037dfd42..7c708652a55 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -2,9 +2,10 @@ import os import sys import zipfile +from abc import ABC, abstractmethod from enum import Enum from pathlib import Path -from typing import Dict, List +from typing import Dict, List, Tuple, Type import boto3 import yaml @@ -20,46 +21,60 @@ class PythonVersion(Enum): V39 = {"runtime": Runtime.PYTHON_3_9, "image": Runtime.PYTHON_3_9.bundling_image.image} -class Infrastructure: - def __init__(self, stack_name: str, handlers_dir: str, config: dict, environment_variables: dict) -> None: - session = boto3.Session() - self.s3_client = session.client("s3") - self.lambda_client = session.client("lambda") - self.cf_client = session.client("cloudformation") - self.s3_resource = session.resource("s3") - self.account_id = session.client("sts").get_caller_identity()["Account"] - self.region = session.region_name +class InfrastructureStackInterface(ABC): + @abstractmethod + def synthesize() -> Tuple[dict, str]: + ... + + @abstractmethod + def __call__() -> Tuple[dict, str]: + ... + + +class InfrastructureStack(InfrastructureStackInterface): + def __init__(self, handlers_dir: str, stack_name: str, config: dict) -> None: self.stack_name = stack_name self.handlers_dir = handlers_dir self.config = config - self.environment_variables = environment_variables - - def deploy(self) -> Dict[str, str]: - handlers = self._find_files(directory=self.handlers_dir, only_py=True) - template, asset_root_dir = self.prepare_stack( - handlers=handlers, - handlers_dir=self.handlers_dir, - stack_name=self.stack_name, - environment_variables=self.environment_variables, - **self.config, - ) - self._upload_assets(template, asset_root_dir) - - response = self._deploy_stack(self.stack_name, template) - return self._transform_output(response["Stacks"][0]["Outputs"]) + def _create_layer(self, stack: Stack): + output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python") + input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools") + powertools_layer = LayerVersion( + stack, + "aws-lambda-powertools", + layer_version_name="aws-lambda-powertools", + compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]], + code=Code.from_asset( + path=".", + bundling=BundlingOptions( + image=DockerImage.from_build( + str(Path(__file__).parent), + build_args={"IMAGE": PythonVersion[PYTHON_RUNTIME_VERSION].value["image"]}, + ), + command=[ + "bash", + "-c", + f"export PYTHONPYCACHEPREFIX='/tmp/.cache/pycache/';\ + poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\ + pip install -r /tmp/requirements.txt -t {output_dir} &&\ + cp -R {input_dir} {output_dir} &&\ + find {output_dir}/ -regex '^.*\\(__pycache__\\|\\.py[co]\\)$' -delete", + ], + ), + ), + ) + return powertools_layer - def delete(self): - 
self.cf_client.delete_stack(StackName=self.stack_name) + def _find_handlers(self, directory: str) -> list: + for root, _, files in os.walk(directory): + return [os.path.join(root, filename) for filename in files if filename.endswith(".py")] - # Create CDK cloud assembly code - def prepare_stack( - self, handlers: List[str], handlers_dir: str, stack_name: str, environment_variables: dict, **config: dict - ): + def synthesize(self, handlers: List[str]) -> Tuple[dict, str]: integration_test_app = App() - stack = Stack(integration_test_app, stack_name) + stack = Stack(integration_test_app, self.stack_name) powertools_layer = self._create_layer(stack) - code = Code.from_asset(handlers_dir) + code = Code.from_asset(self.handlers_dir) for filename_path in handlers: filename = Path(filename_path).stem @@ -70,8 +85,10 @@ def prepare_stack( code=code, handler=f"{filename}.lambda_handler", layers=[powertools_layer], - environment=environment_variables, - tracing=Tracing.ACTIVE if config.get("tracing") == "ACTIVE" else Tracing.DISABLED, + environment=self.config.get("environment_variables"), + tracing=Tracing.ACTIVE + if self.config.get("parameters", {}).get("tracing") == "ACTIVE" + else Tracing.DISABLED, ) aws_logs.LogGroup( @@ -83,48 +100,40 @@ def prepare_stack( ) CfnOutput(stack, f"{filename}_arn", value=function_python.function_arn) return ( - integration_test_app.synth().get_stack_by_name(stack_name).template, + integration_test_app.synth().get_stack_by_name(self.stack_name).template, integration_test_app.synth().directory, ) - def _find_files(self, directory: str, only_py: bool = False) -> list: - file_paths = [] - for root, _, files in os.walk(directory): - for filename in files: - if only_py: - if filename.endswith(".py"): - file_paths.append(os.path.join(root, filename)) - else: - file_paths.append(os.path.join(root, filename)) - return file_paths + def __call__(self) -> Tuple[dict, str]: + handlers = self._find_handlers(directory=self.handlers_dir) + return self.synthesize(handlers=handlers) - def _create_layer(self, stack: Stack): - output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python") - input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools") - powertools_layer = LayerVersion( - stack, - "aws-lambda-powertools", - layer_version_name="aws-lambda-powertools", - compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]], - code=Code.from_asset( - path=".", - bundling=BundlingOptions( - image=DockerImage.from_build( - str(Path(__file__).parent), - build_args={"IMAGE": PythonVersion[PYTHON_RUNTIME_VERSION].value["image"]}, - ), - command=[ - "bash", - "-c", - f"poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\ - pip install -r /tmp/requirements.txt -t {output_dir} &&\ - cp -R {input_dir} {output_dir} &&\ - find {output_dir}/ -regex '^.*\\(__pycache__\\|\\.py[co]\\)$' -delete", - ], - ), - ), - ) - return powertools_layer + +class Infrastructure: + def __init__(self, stack_name: str, handlers_dir: str, config: dict) -> None: + session = boto3.Session() + self.s3_client = session.client("s3") + self.lambda_client = session.client("lambda") + self.cf_client = session.client("cloudformation") + self.s3_resource = session.resource("s3") + self.account_id = session.client("sts").get_caller_identity()["Account"] + self.region = session.region_name + self.stack_name = stack_name + self.handlers_dir = handlers_dir + self.config = config + + def deploy(self, Stack: Type[InfrastructureStackInterface]) -> 
Dict[str, str]: + + stack = Stack(handlers_dir=self.handlers_dir, stack_name=self.stack_name, config=self.config) + template, asset_root_dir = stack() + self._upload_assets(template, asset_root_dir) + + response = self._deploy_stack(self.stack_name, template) + + return self._transform_output(response["Stacks"][0]["Outputs"]) + + def delete(self): + self.cf_client.delete_stack(StackName=self.stack_name) def _upload_assets(self, template: dict, asset_root_dir: str): @@ -146,6 +155,17 @@ def _upload_assets(self, template: dict, asset_root_dir: str): buf.seek(0) self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) + def _find_files(self, directory: str, only_py: bool = False) -> list: + file_paths = [] + for root, _, files in os.walk(directory): + for filename in files: + if only_py: + if filename.endswith(".py"): + file_paths.append(os.path.join(root, filename)) + else: + file_paths.append(os.path.join(root, filename)) + return file_paths + def _deploy_stack(self, stack_name: str, template: dict): response = self.cf_client.create_stack( StackName=stack_name, From 3923fc2437475ad5f89e72f1a228d7d6cd788083 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Wed, 29 Jun 2022 08:51:20 +0200 Subject: [PATCH 20/72] Create helper function for fetching trace metadata --- tests/e2e/logger/test_logger.py | 2 -- tests/e2e/metrics/handlers/basic_handler.py | 4 +-- tests/e2e/metrics/test_metrics.py | 14 ++++++--- tests/e2e/tracer/handlers/basic_handler.py | 15 +++++++-- tests/e2e/tracer/test_tracer.py | 33 ++++++++++++-------- tests/e2e/utils/helpers.py | 34 +++++++++++++++++++-- tests/e2e/utils/infrastructure.py | 13 +++----- 7 files changed, 79 insertions(+), 36 deletions(-) diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index dab860f2ccf..e1cc980d816 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -60,7 +60,6 @@ def test_basic_lambda_no_debug_logs_visible( @pytest.mark.e2e def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - required_keys = ( "xray_trace_id", "function_request_id", @@ -127,7 +126,6 @@ def test_basic_lambda_empty_event_logged(execute_lambda: conftest.Infrastructure def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - required_missing_keys = ( "function_request_id", "function_arn", diff --git a/tests/e2e/metrics/handlers/basic_handler.py b/tests/e2e/metrics/handlers/basic_handler.py index 121cacf7bb3..dd2f486d980 100644 --- a/tests/e2e/metrics/handlers/basic_handler.py +++ b/tests/e2e/metrics/handlers/basic_handler.py @@ -3,11 +3,9 @@ from aws_lambda_powertools import Metrics from aws_lambda_powertools.metrics import MetricUnit -METRIC_NAMESPACE = os.environ["METRIC_NAMESPACE"] METRIC_NAME = os.environ["METRIC_NAME"] -SERVICE_NAME = os.environ["SERVICE_NAME"] -metrics = Metrics(namespace=METRIC_NAMESPACE, service=SERVICE_NAME) +metrics = Metrics() @metrics.log_metrics diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index aafb25ff986..810d63941a8 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -13,26 +13,30 @@ def config() -> conftest.LambdaConfig: return { "parameters": {}, "environment_variables": { - "METRIC_NAMESPACE": f"powertools-e2e-metric-{uuid.uuid4()}", - "METRIC_NAME": "business-metric", - "SERVICE_NAME": "test-powertools-service", + "POWERTOOLS_METRICS_NAMESPACE": "powertools-e2e-metric", + 
"POWERTOOLS_SERVICE_NAME": f"test-powertools-service", + "METRIC_NAME": f"business-metric-{uuid.uuid4()}", }, } @pytest.mark.e2e def test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): + # GIVEN start_date = execute_lambda.get_lambda_execution_time() end_date = start_date + datetime.timedelta(minutes=5) + # WHEN metrics = helpers.get_metrics( start_date=start_date, end_date=end_date, - namespace=config["environment_variables"]["METRIC_NAMESPACE"], + namespace=config["environment_variables"]["POWERTOOLS_METRICS_NAMESPACE"], metric_name=config["environment_variables"]["METRIC_NAME"], - service_name=config["environment_variables"]["SERVICE_NAME"], + service_name=config["environment_variables"]["POWERTOOLS_SERVICE_NAME"], cw_client=boto3.client(service_name="cloudwatch"), ) + + # THEN assert metrics["Timestamps"] and len(metrics["Timestamps"]) == 1 assert metrics["Values"] and len(metrics["Values"]) == 1 assert metrics["Values"][0] == 1 diff --git a/tests/e2e/tracer/handlers/basic_handler.py b/tests/e2e/tracer/handlers/basic_handler.py index bd0cbfd0d57..44436921d72 100644 --- a/tests/e2e/tracer/handlers/basic_handler.py +++ b/tests/e2e/tracer/handlers/basic_handler.py @@ -1,16 +1,25 @@ +import asyncio import os from aws_lambda_powertools import Tracer +from aws_lambda_powertools.utilities.typing import LambdaContext -tracer = Tracer() tracer = Tracer(service="e2e-tests-app") ANNOTATION_KEY = os.environ["ANNOTATION_KEY"] ANNOTATION_VALUE = os.environ["ANNOTATION_VALUE"] +ANNOTATION_ASYNC_VALUE = os.environ["ANNOTATION_ASYNC_VALUE"] @tracer.capture_lambda_handler -def lambda_handler(event, context): - tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) +def lambda_handler(event: dict, context: LambdaContext): + # tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) + return asyncio.run(collect_payment()) + + +@tracer.capture_method +async def collect_payment() -> str: + # tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE) + tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE) return "success" diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index ef2381da239..48e3274bca7 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -13,16 +13,22 @@ def config(): return { "parameters": {"tracing": "ACTIVE"}, - "environment_variables": {"ANNOTATION_KEY": f"e2e-tracer-{uuid.uuid4()}", "ANNOTATION_VALUE": "stored"}, + "environment_variables": { + "ANNOTATION_KEY": f"e2e-tracer-{uuid.uuid4()}", + "ANNOTATION_VALUE": "stored", + "ANNOTATION_ASYNC_VALUE": "payments", + }, } @pytest.mark.e2e -def test_basic_lambda_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): +def test_basic_lambda_async_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): + # GIVEN lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") start_date = execute_lambda.get_lambda_execution_time() end_date = start_date + datetime.timedelta(minutes=5) + # WHEN trace = helpers.get_traces( start_date=start_date, end_date=end_date, @@ -30,17 +36,20 @@ def test_basic_lambda_trace_visible(execute_lambda: conftest.InfrastructureOutpu xray_client=boto3.client("xray"), ) - for segment in trace["Traces"][0]["Segments"]: - document = json.loads(segment["Document"]) - if document["origin"] == "AWS::Lambda::Function": 
- for subsegment in document["subsegments"]: - if subsegment["name"] == "Invocation": - for x_subsegment in subsegment["subsegments"]: - metadata = x_subsegment["metadata"] - annotation = x_subsegment["annotations"] + # THEN + info = helpers.find_trace_additional_info(trace=trace) + print(info) + handler_trace_segment = [trace_segment for trace_segment in info if trace_segment.name == "## lambda_handler"][0] + collect_payment_trace_segment = [ + trace_segment for trace_segment in info if trace_segment.name == "## collect_payment" + ][0] + assert handler_trace_segment.annotations["Service"] == "e2e-tests-app" assert ( - metadata["e2e-tests-app"][config["environment_variables"]["ANNOTATION_KEY"]] + handler_trace_segment.metadata["e2e-tests-app"][config["environment_variables"]["ANNOTATION_KEY"]] == config["environment_variables"]["ANNOTATION_VALUE"] ) - assert annotation["Service"] == "e2e-tests-app" + assert ( + collect_payment_trace_segment.metadata["e2e-tests-app"][config["environment_variables"]["ANNOTATION_KEY"]] + == config["environment_variables"]["ANNOTATION_ASYNC_VALUE"] + ) diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py index 9dd2ed9f89e..e19024f05ad 100644 --- a/tests/e2e/utils/helpers.py +++ b/tests/e2e/utils/helpers.py @@ -1,7 +1,7 @@ import json from datetime import datetime from functools import lru_cache -from typing import Any, List, Optional, Union +from typing import Any, Dict, List, Optional, Union from pydantic import BaseModel from retry import retry @@ -23,6 +23,12 @@ class Log(BaseModel): extra_info: Optional[str] +class TraceSegment(BaseModel): + name: str + metadata: Dict = {} + annotations: Dict = {} + + def trigger_lambda(lambda_arn: str, client: Any): response = client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse") return response @@ -76,7 +82,7 @@ def get_metrics( @retry(ValueError, delay=1, jitter=1, tries=10) -def get_traces(lambda_function_name: str, xray_client: Any, start_date: datetime, end_date: datetime): +def get_traces(lambda_function_name: str, xray_client: Any, start_date: datetime, end_date: datetime) -> Dict: paginator = xray_client.get_paginator("get_trace_summaries") response_iterator = paginator.paginate( StartTime=start_date, @@ -95,3 +101,27 @@ def get_traces(lambda_function_name: str, xray_client: Any, start_date: datetime ) return trace_details + + +def find_trace_additional_info(trace: Dict) -> List[TraceSegment]: + info = [] + for segment in trace["Traces"][0]["Segments"]: + document = json.loads(segment["Document"]) + if document["origin"] == "AWS::Lambda::Function": + for subsegment in document["subsegments"]: + if subsegment["name"] == "Invocation": + find_meta(segment=subsegment, result=info) + return info + + +def find_meta(segment: dict, result: List): + for x_subsegment in segment["subsegments"]: + result.append( + TraceSegment( + name=x_subsegment["name"], + metadata=x_subsegment.get("metadata", {}), + annotations=x_subsegment.get("annotations", {}), + ) + ) + if x_subsegment.get("subsegments"): + find_meta(segment=x_subsegment, result=result) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 7c708652a55..54682055f5b 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -55,11 +55,10 @@ def _create_layer(self, stack: Stack): command=[ "bash", "-c", - f"export PYTHONPYCACHEPREFIX='/tmp/.cache/pycache/';\ - poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\ + f"poetry export 
--with-credentials --format requirements.txt --output /tmp/requirements.txt &&\ pip install -r /tmp/requirements.txt -t {output_dir} &&\ cp -R {input_dir} {output_dir} &&\ - find {output_dir}/ -regex '^.*\\(__pycache__\\|\\.py[co]\\)$' -delete", + find {output_dir}/ -regex '^.*__pycache__.*' -delete", ], ), ), @@ -155,15 +154,11 @@ def _upload_assets(self, template: dict, asset_root_dir: str): buf.seek(0) self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) - def _find_files(self, directory: str, only_py: bool = False) -> list: + def _find_files(self, directory: str) -> list: file_paths = [] for root, _, files in os.walk(directory): for filename in files: - if only_py: - if filename.endswith(".py"): - file_paths.append(os.path.join(root, filename)) - else: - file_paths.append(os.path.join(root, filename)) + file_paths.append(os.path.join(root, filename)) return file_paths def _deploy_stack(self, stack_name: str, template: dict): From 9afdf3d393d45b6860e7e67980661785ac0a0e1e Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Wed, 29 Jun 2022 08:51:20 +0200 Subject: [PATCH 21/72] Create helper function for fetching trace metadata --- tests/e2e/logger/test_logger.py | 2 -- tests/e2e/metrics/handlers/basic_handler.py | 4 +-- tests/e2e/metrics/test_metrics.py | 14 +++++--- tests/e2e/tracer/handlers/basic_handler.py | 15 ++++++-- tests/e2e/tracer/test_tracer.py | 39 ++++++++++++--------- tests/e2e/utils/helpers.py | 34 ++++++++++++++++-- tests/e2e/utils/infrastructure.py | 13 +++---- 7 files changed, 81 insertions(+), 40 deletions(-) diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index dab860f2ccf..e1cc980d816 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -60,7 +60,6 @@ def test_basic_lambda_no_debug_logs_visible( @pytest.mark.e2e def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - required_keys = ( "xray_trace_id", "function_request_id", @@ -127,7 +126,6 @@ def test_basic_lambda_empty_event_logged(execute_lambda: conftest.Infrastructure def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - required_missing_keys = ( "function_request_id", "function_arn", diff --git a/tests/e2e/metrics/handlers/basic_handler.py b/tests/e2e/metrics/handlers/basic_handler.py index 121cacf7bb3..dd2f486d980 100644 --- a/tests/e2e/metrics/handlers/basic_handler.py +++ b/tests/e2e/metrics/handlers/basic_handler.py @@ -3,11 +3,9 @@ from aws_lambda_powertools import Metrics from aws_lambda_powertools.metrics import MetricUnit -METRIC_NAMESPACE = os.environ["METRIC_NAMESPACE"] METRIC_NAME = os.environ["METRIC_NAME"] -SERVICE_NAME = os.environ["SERVICE_NAME"] -metrics = Metrics(namespace=METRIC_NAMESPACE, service=SERVICE_NAME) +metrics = Metrics() @metrics.log_metrics diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index aafb25ff986..810d63941a8 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -13,26 +13,30 @@ def config() -> conftest.LambdaConfig: return { "parameters": {}, "environment_variables": { - "METRIC_NAMESPACE": f"powertools-e2e-metric-{uuid.uuid4()}", - "METRIC_NAME": "business-metric", - "SERVICE_NAME": "test-powertools-service", + "POWERTOOLS_METRICS_NAMESPACE": "powertools-e2e-metric", + "POWERTOOLS_SERVICE_NAME": f"test-powertools-service", + "METRIC_NAME": f"business-metric-{uuid.uuid4()}", }, } @pytest.mark.e2e def 
test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): + # GIVEN start_date = execute_lambda.get_lambda_execution_time() end_date = start_date + datetime.timedelta(minutes=5) + # WHEN metrics = helpers.get_metrics( start_date=start_date, end_date=end_date, - namespace=config["environment_variables"]["METRIC_NAMESPACE"], + namespace=config["environment_variables"]["POWERTOOLS_METRICS_NAMESPACE"], metric_name=config["environment_variables"]["METRIC_NAME"], - service_name=config["environment_variables"]["SERVICE_NAME"], + service_name=config["environment_variables"]["POWERTOOLS_SERVICE_NAME"], cw_client=boto3.client(service_name="cloudwatch"), ) + + # THEN assert metrics["Timestamps"] and len(metrics["Timestamps"]) == 1 assert metrics["Values"] and len(metrics["Values"]) == 1 assert metrics["Values"][0] == 1 diff --git a/tests/e2e/tracer/handlers/basic_handler.py b/tests/e2e/tracer/handlers/basic_handler.py index bd0cbfd0d57..44436921d72 100644 --- a/tests/e2e/tracer/handlers/basic_handler.py +++ b/tests/e2e/tracer/handlers/basic_handler.py @@ -1,16 +1,25 @@ +import asyncio import os from aws_lambda_powertools import Tracer +from aws_lambda_powertools.utilities.typing import LambdaContext -tracer = Tracer() tracer = Tracer(service="e2e-tests-app") ANNOTATION_KEY = os.environ["ANNOTATION_KEY"] ANNOTATION_VALUE = os.environ["ANNOTATION_VALUE"] +ANNOTATION_ASYNC_VALUE = os.environ["ANNOTATION_ASYNC_VALUE"] @tracer.capture_lambda_handler -def lambda_handler(event, context): - tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) +def lambda_handler(event: dict, context: LambdaContext): + # tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) + return asyncio.run(collect_payment()) + + +@tracer.capture_method +async def collect_payment() -> str: + # tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE) + tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE) return "success" diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index ef2381da239..13c2bfb5902 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -13,16 +13,22 @@ def config(): return { "parameters": {"tracing": "ACTIVE"}, - "environment_variables": {"ANNOTATION_KEY": f"e2e-tracer-{uuid.uuid4()}", "ANNOTATION_VALUE": "stored"}, + "environment_variables": { + "ANNOTATION_KEY": f"e2e-tracer-{uuid.uuid4()}", + "ANNOTATION_VALUE": "stored", + "ANNOTATION_ASYNC_VALUE": "payments", + }, } @pytest.mark.e2e -def test_basic_lambda_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): +def test_basic_lambda_async_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): + # GIVEN lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") start_date = execute_lambda.get_lambda_execution_time() end_date = start_date + datetime.timedelta(minutes=5) + # WHEN trace = helpers.get_traces( start_date=start_date, end_date=end_date, @@ -30,17 +36,18 @@ def test_basic_lambda_trace_visible(execute_lambda: conftest.InfrastructureOutpu xray_client=boto3.client("xray"), ) - for segment in trace["Traces"][0]["Segments"]: - document = json.loads(segment["Document"]) - if document["origin"] == "AWS::Lambda::Function": - for subsegment in document["subsegments"]: - if subsegment["name"] == "Invocation": - for x_subsegment in 
subsegment["subsegments"]: - metadata = x_subsegment["metadata"] - annotation = x_subsegment["annotations"] - - assert ( - metadata["e2e-tests-app"][config["environment_variables"]["ANNOTATION_KEY"]] - == config["environment_variables"]["ANNOTATION_VALUE"] - ) - assert annotation["Service"] == "e2e-tests-app" + # THEN + info = helpers.find_trace_additional_info(trace=trace) + print(info) + handler_trace_segment = [trace_segment for trace_segment in info if trace_segment.name == "## lambda_handler"][0] + collect_payment_trace_segment = [ + trace_segment for trace_segment in info if trace_segment.name == "## collect_payment" + ][0] + + annotation_key = config["environment_variables"]["ANNOTATION_KEY"] + expected_value = config["environment_variables"]["ANNOTATION_VALUE"] + expected_async_value = config["environment_variables"]["ANNOTATION_ASYNC_VALUE"] + + assert handler_trace_segment.annotations["Service"] == "e2e-tests-app" + assert handler_trace_segment.metadata["e2e-tests-app"][annotation_key] == expected_value + assert collect_payment_trace_segment.metadata["e2e-tests-app"][annotation_key] == expected_async_value diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py index 9dd2ed9f89e..e19024f05ad 100644 --- a/tests/e2e/utils/helpers.py +++ b/tests/e2e/utils/helpers.py @@ -1,7 +1,7 @@ import json from datetime import datetime from functools import lru_cache -from typing import Any, List, Optional, Union +from typing import Any, Dict, List, Optional, Union from pydantic import BaseModel from retry import retry @@ -23,6 +23,12 @@ class Log(BaseModel): extra_info: Optional[str] +class TraceSegment(BaseModel): + name: str + metadata: Dict = {} + annotations: Dict = {} + + def trigger_lambda(lambda_arn: str, client: Any): response = client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse") return response @@ -76,7 +82,7 @@ def get_metrics( @retry(ValueError, delay=1, jitter=1, tries=10) -def get_traces(lambda_function_name: str, xray_client: Any, start_date: datetime, end_date: datetime): +def get_traces(lambda_function_name: str, xray_client: Any, start_date: datetime, end_date: datetime) -> Dict: paginator = xray_client.get_paginator("get_trace_summaries") response_iterator = paginator.paginate( StartTime=start_date, @@ -95,3 +101,27 @@ def get_traces(lambda_function_name: str, xray_client: Any, start_date: datetime ) return trace_details + + +def find_trace_additional_info(trace: Dict) -> List[TraceSegment]: + info = [] + for segment in trace["Traces"][0]["Segments"]: + document = json.loads(segment["Document"]) + if document["origin"] == "AWS::Lambda::Function": + for subsegment in document["subsegments"]: + if subsegment["name"] == "Invocation": + find_meta(segment=subsegment, result=info) + return info + + +def find_meta(segment: dict, result: List): + for x_subsegment in segment["subsegments"]: + result.append( + TraceSegment( + name=x_subsegment["name"], + metadata=x_subsegment.get("metadata", {}), + annotations=x_subsegment.get("annotations", {}), + ) + ) + if x_subsegment.get("subsegments"): + find_meta(segment=x_subsegment, result=result) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 7c708652a55..54682055f5b 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -55,11 +55,10 @@ def _create_layer(self, stack: Stack): command=[ "bash", "-c", - f"export PYTHONPYCACHEPREFIX='/tmp/.cache/pycache/';\ - poetry export --with-credentials --format requirements.txt --output 
/tmp/requirements.txt &&\ + f"poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\ pip install -r /tmp/requirements.txt -t {output_dir} &&\ cp -R {input_dir} {output_dir} &&\ - find {output_dir}/ -regex '^.*\\(__pycache__\\|\\.py[co]\\)$' -delete", + find {output_dir}/ -regex '^.*__pycache__.*' -delete", ], ), ), @@ -155,15 +154,11 @@ def _upload_assets(self, template: dict, asset_root_dir: str): buf.seek(0) self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) - def _find_files(self, directory: str, only_py: bool = False) -> list: + def _find_files(self, directory: str) -> list: file_paths = [] for root, _, files in os.walk(directory): for filename in files: - if only_py: - if filename.endswith(".py"): - file_paths.append(os.path.join(root, filename)) - else: - file_paths.append(os.path.join(root, filename)) + file_paths.append(os.path.join(root, filename)) return file_paths def _deploy_stack(self, stack_name: str, template: dict): From b6e47fe01e675a714a2978bce0c90aaf2710c890 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Fri, 1 Jul 2022 21:00:59 +0200 Subject: [PATCH 22/72] Add comment to Dockerfile --- tests/e2e/utils/Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/e2e/utils/Dockerfile b/tests/e2e/utils/Dockerfile index 462d9c645fb..eccfe2c6dfd 100644 --- a/tests/e2e/utils/Dockerfile +++ b/tests/e2e/utils/Dockerfile @@ -1,3 +1,5 @@ +# Image used by CDK's LayerVersion construct to create Lambda Layer with Powertools +# library code. # The correct AWS SAM build image based on the runtime of the function will be # passed as build arg. The default allows to do `docker build .` when testing. ARG IMAGE=public.ecr.aws/sam/build-python3.7 From 82673dbdc3452d974183e8d5a1ce38ac04777cfb Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Fri, 1 Jul 2022 21:30:45 +0200 Subject: [PATCH 23/72] Add typing for AWS services clients --- poetry.lock | 47 +++++++++++++++++++++++++++++++++++++- pyproject.toml | 3 +++ tests/e2e/utils/helpers.py | 18 +++++++++++---- 3 files changed, 62 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 02220b76823..d6e8fddc7b5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -740,6 +740,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-cloudwatch" +version = "1.24.0" +description = "Type annotations for boto3.CloudWatch 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-boto3-dynamodb" version = "1.24.0" @@ -751,6 +762,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-lambda" +version = "1.24.0" +description = "Type annotations for boto3.Lambda 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-boto3-secretsmanager" version = "1.24.0" @@ -773,6 +795,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-xray" +version = "1.24.0" +description = "Type annotations for boto3.XRay 1.24.0 service generated with mypy-boto3-builder 7.6.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = 
"mypy-extensions" version = "0.4.3" @@ -1266,7 +1299,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "e457c68bd754118733c7ad1c54d389f4aa3b06164d947fae5d682566e202b776" +content-hash = "1cf5f5fe83351bd9213cf42054c156e6d9b868f10ff88c548bf2072eabce078b" [metadata.files] atomicwrites = [ @@ -1632,10 +1665,18 @@ mypy-boto3-appconfig = [ {file = "mypy-boto3-appconfig-1.24.0.tar.gz", hash = "sha256:3bb38c2819b78c72fd9c031058edf5e547ad549d58e052928a4f397823a51dbd"}, {file = "mypy_boto3_appconfig-1.24.0-py3-none-any.whl", hash = "sha256:ca53b0b9606f13257dd0feb800d36531f2eba54f46bd9db7765f69baf9583485"}, ] +mypy-boto3-cloudwatch = [ + {file = "mypy-boto3-cloudwatch-1.24.0.tar.gz", hash = "sha256:d19cd71aa07ecc69c1e2f9691af6a81bf1d65267ad4be1f9486bf683370727a5"}, + {file = "mypy_boto3_cloudwatch-1.24.0-py3-none-any.whl", hash = "sha256:82dac27b1dd0ad8969fedf874ea4713b36d37fe04229f7fdaaecf4addb59d4bd"}, +] mypy-boto3-dynamodb = [ {file = "mypy-boto3-dynamodb-1.24.0.tar.gz", hash = "sha256:a7de204a173dffbee972357a69bf5e59fda169a587017e0d3c5446676342aa2e"}, {file = "mypy_boto3_dynamodb-1.24.0-py3-none-any.whl", hash = "sha256:866f0f8ae44e266ea051f57179bf40132d8e89e6fa23abab6e71421b3c0cd794"}, ] +mypy-boto3-lambda = [ + {file = "mypy-boto3-lambda-1.24.0.tar.gz", hash = "sha256:ab425f941d0d50a2b8a20cc13cebe03c3097b122259bf00e7b295d284814bd6f"}, + {file = "mypy_boto3_lambda-1.24.0-py3-none-any.whl", hash = "sha256:a286a464513adf50847bda8573f2dc7adc348234827d1ac0200e610ee9a09b80"}, +] mypy-boto3-secretsmanager = [ {file = "mypy-boto3-secretsmanager-1.24.0.tar.gz", hash = "sha256:6680c322df031b08ef79fcdb8ffdfb08d57d4925392f641348336926dc5c6b2e"}, {file = "mypy_boto3_secretsmanager-1.24.0-py3-none-any.whl", hash = "sha256:7da281c49ae91e60fdbcd0015379ae4cc9dc9ff911836ee78a2652310e09f53e"}, @@ -1644,6 +1685,10 @@ mypy-boto3-ssm = [ {file = "mypy-boto3-ssm-1.24.0.tar.gz", hash = "sha256:bab58398947c3627a4e7610cd0f57b525c12fd1d0a6bb862400b6af0a4e684fc"}, {file = "mypy_boto3_ssm-1.24.0-py3-none-any.whl", hash = "sha256:1f17055abb8d70f25e6ece2ef4c0dc74d585744c25a3a833c2985d74165ac0c6"}, ] +mypy-boto3-xray = [ + {file = "mypy-boto3-xray-1.24.0.tar.gz", hash = "sha256:fbe211b7601684a2d4defa2f959286f1441027c15044c0c0013257e22307778a"}, + {file = "mypy_boto3_xray-1.24.0-py3-none-any.whl", hash = "sha256:6b9bc96e7924215fe833fe0d732d5e3ce98f7739b373432b9735a9905f867171"}, +] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, diff --git a/pyproject.toml b/pyproject.toml index a0985282d23..f19ab328689 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,9 @@ fastjsonschema = "^2.14.5" boto3 = "^1.18" pydantic = {version = "^1.8.2", optional = true } email-validator = {version = "*", optional = true } +mypy-boto3-cloudwatch = "^1.24.0" +mypy-boto3-lambda = "^1.24.0" +mypy-boto3-xray = "^1.24.0" [tool.poetry.dev-dependencies] # 2022-04-21: jmespath was removed, to be re-added once we drop python 3.6. 
diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py index e19024f05ad..2695d76089d 100644 --- a/tests/e2e/utils/helpers.py +++ b/tests/e2e/utils/helpers.py @@ -1,8 +1,11 @@ import json from datetime import datetime from functools import lru_cache -from typing import Any, Dict, List, Optional, Union +from typing import Dict, List, Optional, Union +from mypy_boto3_cloudwatch.client import CloudWatchClient +from mypy_boto3_lambda.client import LambdaClient +from mypy_boto3_xray.client import XRayClient from pydantic import BaseModel from retry import retry @@ -29,14 +32,14 @@ class TraceSegment(BaseModel): annotations: Dict = {} -def trigger_lambda(lambda_arn: str, client: Any): +def trigger_lambda(lambda_arn: str, client: LambdaClient): response = client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse") return response @lru_cache(maxsize=10, typed=False) @retry(ValueError, delay=1, jitter=1, tries=10) -def get_logs(lambda_function_name: str, log_client: Any, start_time: int, **kwargs: dict) -> List[Log]: +def get_logs(lambda_function_name: str, log_client: CloudWatchClient, start_time: int, **kwargs: dict) -> List[Log]: response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time) if not response["events"]: raise ValueError("Empty response from Cloudwatch Logs. Repeating...") @@ -54,7 +57,12 @@ def get_logs(lambda_function_name: str, log_client: Any, start_time: int, **kwar @lru_cache(maxsize=10, typed=False) @retry(ValueError, delay=1, jitter=1, tries=10) def get_metrics( - namespace: str, cw_client: Any, start_date: datetime, end_date: datetime, metric_name: str, service_name: str + namespace: str, + cw_client: CloudWatchClient, + start_date: datetime, + end_date: datetime, + metric_name: str, + service_name: str, ): response = cw_client.get_metric_data( MetricDataQueries=[ @@ -82,7 +90,7 @@ def get_metrics( @retry(ValueError, delay=1, jitter=1, tries=10) -def get_traces(lambda_function_name: str, xray_client: Any, start_date: datetime, end_date: datetime) -> Dict: +def get_traces(lambda_function_name: str, xray_client: XRayClient, start_date: datetime, end_date: datetime) -> Dict: paginator = xray_client.get_paginator("get_trace_summaries") response_iterator = paginator.paginate( StartTime=start_date, From 303404132977d863acd082b04b50daccdcd7e6de Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Fri, 1 Jul 2022 21:53:51 +0200 Subject: [PATCH 24/72] Unified types --- tests/e2e/utils/infrastructure.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 54682055f5b..f4e6520abb3 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -65,7 +65,7 @@ def _create_layer(self, stack: Stack): ) return powertools_layer - def _find_handlers(self, directory: str) -> list: + def _find_handlers(self, directory: str) -> List: for root, _, files in os.walk(directory): return [os.path.join(root, filename) for filename in files if filename.endswith(".py")] @@ -154,7 +154,7 @@ def _upload_assets(self, template: dict, asset_root_dir: str): buf.seek(0) self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) - def _find_files(self, directory: str) -> list: + def _find_files(self, directory: str) -> List: file_paths = [] for root, _, files in os.walk(directory): for filename in files: From 0c49b66a122abb1889c29f50faf2f4ddde7cefdd Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Sat, 2 
Jul 2022 22:10:13 +0200 Subject: [PATCH 25/72] Add PR suggestions --- pyproject.toml | 1 - tests/e2e/logger/test_logger.py | 7 ------- tests/e2e/metrics/test_metrics.py | 3 +-- tests/e2e/tracer/handlers/basic_handler.py | 2 -- tests/e2e/tracer/test_tracer.py | 5 ++--- tests/e2e/utils/helpers.py | 22 ++++++++-------------- tests/e2e/utils/infrastructure.py | 4 ++-- 7 files changed, 13 insertions(+), 31 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f19ab328689..1c82b46c3b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -138,7 +138,6 @@ addopts = "-ra -vv" testpaths = "./tests" markers = [ "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", - "e2e: marks e2e tests to be deselected (deselect with '-m \"not e2e\"')", ] [build-system] diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index e1cc980d816..156877b8f35 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -17,7 +17,6 @@ def config() -> conftest.LambdaConfig: } -@pytest.mark.e2e def test_basic_lambda_logs_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): # GIVEN lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") @@ -37,7 +36,6 @@ def test_basic_lambda_logs_visible(execute_lambda: conftest.InfrastructureOutput ) -@pytest.mark.e2e def test_basic_lambda_no_debug_logs_visible( execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig ): @@ -57,7 +55,6 @@ def test_basic_lambda_no_debug_logs_visible( ) -@pytest.mark.e2e def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN required_keys = ( @@ -82,7 +79,6 @@ def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.Infrastruc assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) -@pytest.mark.e2e def test_basic_lambda_additional_key_persistence_basic_lambda( execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig ): @@ -105,7 +101,6 @@ def test_basic_lambda_additional_key_persistence_basic_lambda( ) -@pytest.mark.e2e def test_basic_lambda_empty_event_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN @@ -122,7 +117,6 @@ def test_basic_lambda_empty_event_logged(execute_lambda: conftest.Infrastructure assert any(log.message == {} for log in filtered_logs) -@pytest.mark.e2e def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN @@ -147,7 +141,6 @@ def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.I assert not any(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_missing_keys) -@pytest.mark.e2e def test_no_context_lambda_event_not_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index 810d63941a8..76c1b25285f 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -14,13 +14,12 @@ def config() -> conftest.LambdaConfig: "parameters": {}, "environment_variables": { "POWERTOOLS_METRICS_NAMESPACE": "powertools-e2e-metric", - "POWERTOOLS_SERVICE_NAME": f"test-powertools-service", + "POWERTOOLS_SERVICE_NAME": "test-powertools-service", "METRIC_NAME": f"business-metric-{uuid.uuid4()}", }, } -@pytest.mark.e2e def test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): # GIVEN start_date 
= execute_lambda.get_lambda_execution_time()
diff --git a/tests/e2e/tracer/handlers/basic_handler.py b/tests/e2e/tracer/handlers/basic_handler.py
index 44436921d72..557397ea62a 100644
--- a/tests/e2e/tracer/handlers/basic_handler.py
+++ b/tests/e2e/tracer/handlers/basic_handler.py
@@ -13,13 +13,11 @@

 @tracer.capture_lambda_handler
 def lambda_handler(event: dict, context: LambdaContext):
-    # tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_VALUE)
     tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_VALUE)
     return asyncio.run(collect_payment())


 @tracer.capture_method
 async def collect_payment() -> str:
-    # tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE)
     tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE)
     return "success"
diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py
index 13c2bfb5902..57e954121e2 100644
--- a/tests/e2e/tracer/test_tracer.py
+++ b/tests/e2e/tracer/test_tracer.py
@@ -1,5 +1,4 @@
 import datetime
-import json
 import uuid

 import boto3
@@ -21,18 +20,18 @@ def config():
     }


-@pytest.mark.e2e
 def test_basic_lambda_async_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig):
     # GIVEN
     lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn")
     start_date = execute_lambda.get_lambda_execution_time()
     end_date = start_date + datetime.timedelta(minutes=5)
+    trace_filter_expression = f'service("{lambda_arn.split(":")[-1]}")'

     # WHEN
     trace = helpers.get_traces(
         start_date=start_date,
         end_date=end_date,
-        lambda_function_name=lambda_arn.split(":")[-1],
+        filter_expression=trace_filter_expression,
         xray_client=boto3.client("xray"),
     )

diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py
index 2695d76089d..f38deb40853 100644
--- a/tests/e2e/utils/helpers.py
+++ b/tests/e2e/utils/helpers.py
@@ -55,33 +55,27 @@ def get_logs(lambda_function_name: str, log_client: CloudWatchClient, start_time


 @lru_cache(maxsize=10, typed=False)
-@retry(ValueError, delay=1, jitter=1, tries=10)
+@retry(ValueError, delay=1, jitter=1, tries=20)
 def get_metrics(
     namespace: str,
     cw_client: CloudWatchClient,
     start_date: datetime,
-    end_date: datetime,
     metric_name: str,
     service_name: str,
+    end_date: Optional[datetime] = None,
 ):
     response = cw_client.get_metric_data(
         MetricDataQueries=[
             {
                 "Id": "m1",
-                "MetricStat": {
-                    "Metric": {
-                        "Namespace": namespace,
-                        "MetricName": metric_name,
-                        "Dimensions": [{"Name": "service", "Value": service_name}],
-                    },
-                    "Period": 600,
-                    "Stat": "Maximum",
-                },
+                "Expression": f'SELECT MAX("{metric_name}") from SCHEMA("{namespace}",service) \
+                    where service=\'{service_name}\'',
                 "ReturnData": True,
+                "Period": 600,
             },
         ],
         StartTime=start_date,
-        EndTime=end_date,
+        EndTime=end_date if end_date else datetime.utcnow(),
     )
     result = response["MetricDataResults"][0]
     if not result["Values"]:
@@ -90,14 +84,14 @@ def get_metrics(


 @retry(ValueError, delay=1, jitter=1, tries=10)
-def get_traces(lambda_function_name: str, xray_client: XRayClient, start_date: datetime, end_date: datetime) -> Dict:
+def get_traces(filter_expression: str, xray_client: XRayClient, start_date: datetime, end_date: datetime) -> Dict:
     paginator = xray_client.get_paginator("get_trace_summaries")
     response_iterator = paginator.paginate(
         StartTime=start_date,
         EndTime=end_date,
         TimeRangeType="Event",
         Sampling=False,
-        FilterExpression=f'service("{lambda_function_name}")',
+        FilterExpression=filter_expression,
     )
    traces = [trace["TraceSummaries"][0]["Id"] for trace in response_iterator if
trace["TraceSummaries"]] diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index f4e6520abb3..df54b561114 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -55,10 +55,10 @@ def _create_layer(self, stack: Stack): command=[ "bash", "-c", - f"poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\ + rf"poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\ pip install -r /tmp/requirements.txt -t {output_dir} &&\ cp -R {input_dir} {output_dir} &&\ - find {output_dir}/ -regex '^.*__pycache__.*' -delete", + find {output_dir}/ -type d -name __pycache__ -prune -exec rm -rf {{}} \;", ], ), ), From 6c1a4d2c9a23cd1c56d58ae5b12a62b3164be8ac Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 4 Jul 2022 13:44:34 +0200 Subject: [PATCH 26/72] fix(logger): preserve std keys when using custom formatters (#1264) --- aws_lambda_powertools/logging/formatter.py | 5 ++++ aws_lambda_powertools/logging/logger.py | 16 +++++++--- tests/functional/test_logger.py | 34 ++++++++++++++++++++++ 3 files changed, 51 insertions(+), 4 deletions(-) diff --git a/aws_lambda_powertools/logging/formatter.py b/aws_lambda_powertools/logging/formatter.py index becfc9de85c..1f01015051c 100644 --- a/aws_lambda_powertools/logging/formatter.py +++ b/aws_lambda_powertools/logging/formatter.py @@ -1,3 +1,4 @@ +import inspect import json import logging import os @@ -286,3 +287,7 @@ def _strip_none_records(records: Dict[str, Any]) -> Dict[str, Any]: JsonFormatter = LambdaPowertoolsFormatter # alias to previous formatter + + +# Fetch current and future parameters from PowertoolsFormatter that should be reserved +RESERVED_FORMATTER_CUSTOM_KEYS: List[str] = inspect.getfullargspec(LambdaPowertoolsFormatter).args[1:] diff --git a/aws_lambda_powertools/logging/logger.py b/aws_lambda_powertools/logging/logger.py index 157d53adf7e..f70224cabae 100644 --- a/aws_lambda_powertools/logging/logger.py +++ b/aws_lambda_powertools/logging/logger.py @@ -12,7 +12,7 @@ from ..shared.functions import resolve_env_var_choice, resolve_truthy_env_var_choice from .exceptions import InvalidLoggerSamplingRateError from .filters import SuppressFilter -from .formatter import BasePowertoolsFormatter, LambdaPowertoolsFormatter +from .formatter import RESERVED_FORMATTER_CUSTOM_KEYS, BasePowertoolsFormatter, LambdaPowertoolsFormatter from .lambda_context import build_lambda_context_model logger = logging.getLogger(__name__) @@ -82,7 +82,7 @@ class Logger(logging.Logger): # lgtm [py/missing-call-to-init] datefmt: str, optional String directives (strftime) to format log timestamp using `time`, by default it uses RFC 3339. - use_datetime_directive: str, optional + use_datetime_directive: bool, optional Interpret `datefmt` as a format string for `datetime.datetime.strftime`, rather than `time.strftime`. 
@@ -368,7 +368,7 @@ def registered_handler(self) -> logging.Handler: return handlers[0] @property - def registered_formatter(self) -> PowertoolsFormatter: + def registered_formatter(self) -> BasePowertoolsFormatter: """Convenience property to access logger formatter""" return self.registered_handler.formatter # type: ignore @@ -395,7 +395,15 @@ def structure_logs(self, append: bool = False, **keys): is_logger_preconfigured = getattr(self._logger, "init", False) if not is_logger_preconfigured: formatter = self.logger_formatter or LambdaPowertoolsFormatter(**log_keys) # type: ignore - return self.registered_handler.setFormatter(formatter) + self.registered_handler.setFormatter(formatter) + + # when using a custom Lambda Powertools Formatter + # standard and custom keys that are not Powertools Formatter parameters should be appended + # and custom keys that might happen to be Powertools Formatter parameters should be discarded + # this prevents adding them as custom keys, for example, `json_default=` + # see https://github.com/awslabs/aws-lambda-powertools-python/issues/1263 + custom_keys = {k: v for k, v in log_keys.items() if k not in RESERVED_FORMATTER_CUSTOM_KEYS} + return self.registered_formatter.append_keys(**custom_keys) # Mode 2 (legacy) if append: diff --git a/tests/functional/test_logger.py b/tests/functional/test_logger.py index c76faa7cde5..c8b3dc61755 100644 --- a/tests/functional/test_logger.py +++ b/tests/functional/test_logger.py @@ -625,6 +625,40 @@ def handler(event, context): assert all(k in second_log for k in lambda_context_keys) +def test_logger_custom_formatter_has_standard_and_custom_keys(stdout, service_name, lambda_context): + class CustomFormatter(LambdaPowertoolsFormatter): + ... + + # GIVEN a Logger is initialized with a custom formatter + logger = Logger(service=service_name, stream=stdout, logger_formatter=CustomFormatter(), my_key="value") + + # WHEN a lambda function is decorated with logger + @logger.inject_lambda_context + def handler(event, context): + logger.info("Hello") + + handler({}, lambda_context) + + standard_keys = ( + "level", + "location", + "message", + "timestamp", + "service", + "cold_start", + "function_name", + "function_memory_size", + "function_arn", + "function_request_id", + ) + + log = capture_logging_output(stdout) + + # THEN all standard keys should be available + assert all(k in log for k in standard_keys) + assert "my_key" in log + + def test_logger_custom_handler(lambda_context, service_name, tmp_path): # GIVEN a Logger is initialized with a FileHandler log_file = tmp_path / "log.json" From c6de9c1e56889cbff1b68850e04a0f9eebbbbdd3 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 4 Jul 2022 14:46:37 +0200 Subject: [PATCH 27/72] fix(ci): checkout project before validating related issue workflow --- .github/workflows/on_merged_pr.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml index 97029740cdb..70f43daab3b 100644 --- a/.github/workflows/on_merged_pr.yml +++ b/.github/workflows/on_merged_pr.yml @@ -18,6 +18,7 @@ jobs: issues: write # required for new scoped token pull-requests: write # required for new scoped token steps: + - uses: actions/checkout@v3 - name: "Label PR related issue for release" uses: actions/github-script@v6 with: From 0f1f1cdb06de9355dcd4af3bac355fcdedac8df7 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 4 Jul 2022 17:05:32 +0200 Subject: [PATCH 28/72] chore(layers): bump to 22 for 1.26.3 --- docs/index.md | 58 
+++++++++++++++++++++++++-------------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/docs/index.md b/docs/index.md index cc3d437334e..5396ccb3f2e 100644 --- a/docs/index.md +++ b/docs/index.md @@ -15,7 +15,7 @@ A suite of utilities for AWS Lambda functions to ease adopting best practices su Powertools is available in the following formats: -* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:21**](#){: .copyMe}:clipboard: +* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:22**](#){: .copyMe}:clipboard: * **PyPi**: **`pip install aws-lambda-powertools`** ???+ hint "Support this project by using Lambda Layers :heart:" @@ -33,23 +33,23 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: | Region | Layer ARN | | ---------------- | -------------------------------------------------------------------------------------------------------- | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:21](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: 
.copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:22](#){: .copyMe}:clipboard: | ??? question "Can't find our Lambda Layer for your preferred AWS region?" You can use [Serverless Application Repository (SAR)](#sar) method, our [CDK Layer Construct](https://github.com/aws-samples/cdk-lambda-powertools-python-layer){target="_blank"}, or PyPi like you normally would for any other library. 
@@ -63,7 +63,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:21 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:22 ``` === "Serverless framework" @@ -73,7 +73,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:21 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:22 ``` === "CDK" @@ -89,7 +89,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:21" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:22" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -138,7 +138,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:21"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:22"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -157,7 +157,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:21 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:22 ❯ amplify push -y @@ -168,7 +168,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:21 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:22 ? Do you want to edit the local lambda function now? No ``` @@ -176,7 +176,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Change {region} to your AWS region, e.g. `eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:21 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:22 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key. 
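As an aside, fetching and downloading that pre-signed URL programmatically might look like the sketch below. It is not part of this change: the region, the layer version `22`, and the output filename are assumptions, and the URL is returned under the `Content.Location` key of the API response.

```python
import urllib.request

import boto3

region = "us-east-1"  # assumption: use your preferred AWS region
arn = f"arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:22"

# Equivalent of the `aws lambda get-layer-version-by-arn` call shown above
client = boto3.client("lambda", region_name=region)
location = client.get_layer_version_by_arn(Arn=arn)["Content"]["Location"]

# Pre-signed URLs are short-lived, so download right away
urllib.request.urlretrieve(location, "powertools-layer.zip")
```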
@@ -214,7 +214,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, Properties: Location: ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer - SemanticVersion: 1.25.10 # change to latest semantic version available in SAR + SemanticVersion: 1.26.3 # change to latest semantic version available in SAR MyLambdaFunction: Type: AWS::Serverless::Function @@ -242,7 +242,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, Location: ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer # Find latest from github.com/awslabs/aws-lambda-powertools-python/releases - SemanticVersion: 1.25.10 + SemanticVersion: 1.26.3 ``` === "CDK" @@ -252,7 +252,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, POWERTOOLS_BASE_NAME = 'AWSLambdaPowertools' # Find latest from github.com/awslabs/aws-lambda-powertools-python/releases - POWERTOOLS_VER = '1.23.0' + POWERTOOLS_VER = '1.26.3' POWERTOOLS_ARN = 'arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer' class SampleApp(core.Construct): @@ -316,7 +316,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, variable "aws_powertools_version" { type = string - default = "1.20.2" + default = "1.26.3" description = "The AWS Powertools release version" } From 89d33d376e54d7a414af23a533290a960da6f3d7 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 4 Jul 2022 17:10:45 +0200 Subject: [PATCH 29/72] chore(ci): limits concurrency for docs workflow --- .github/workflows/python_docs.yml | 4 ++ .github/workflows/rebuild_latest_docs.yml | 88 ++++++++++++----------- 2 files changed, 49 insertions(+), 43 deletions(-) diff --git a/.github/workflows/python_docs.yml b/.github/workflows/python_docs.yml index 3a6e15e5431..d7ae6c2cc52 100644 --- a/.github/workflows/python_docs.yml +++ b/.github/workflows/python_docs.yml @@ -12,6 +12,10 @@ on: jobs: docs: + # Force Github action to run only a single job at a time (based on the group name) + # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` + concurrency: + group: on-docs-build runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml index b185556f2ff..92cb8a3635d 100644 --- a/.github/workflows/rebuild_latest_docs.yml +++ b/.github/workflows/rebuild_latest_docs.yml @@ -7,56 +7,58 @@ name: Rebuild latest docs # 2. Use the latest version released under Releases e.g. v1.22.0 # 3. Set `Build and publish docs only` field to `true` - on: workflow_dispatch: inputs: latest_published_version: - description: 'Latest PyPi published version to rebuild latest docs for, e.g. v1.22.0' - default: 'v1.22.0' + description: "Latest PyPi published version to rebuild latest docs for, e.g. 
v1.22.0" + default: "v1.22.0" required: true - jobs: release: + # Force Github action to run only a single job at a time (based on the group name) + # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` + concurrency: + group: on-docs-rebuild runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - - name: Set release notes tag - run: | - RELEASE_TAG_VERSION=${{ github.event.inputs.latest_published_version }} - echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV - - name: Ensure new version is also set in pyproject and CHANGELOG - run: | - grep --regexp "${RELEASE_TAG_VERSION}" CHANGELOG.md - grep --regexp "version \= \"${RELEASE_TAG_VERSION}\"" pyproject.toml - - name: Install dependencies - run: make dev - - name: Setup doc deploy - run: | - git config --global user.name Docs deploy - git config --global user.email aws-devax-open-source@amazon.com - - name: Build docs website and API reference - run: | - make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest" - poetry run mike set-default --push latest - - name: Release API docs to release version - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api - - name: Release API docs to latest - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: latest/api + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + - name: Set release notes tag + run: | + RELEASE_TAG_VERSION=${{ github.event.inputs.latest_published_version }} + echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV + - name: Ensure new version is also set in pyproject and CHANGELOG + run: | + grep --regexp "${RELEASE_TAG_VERSION}" CHANGELOG.md + grep --regexp "version \= \"${RELEASE_TAG_VERSION}\"" pyproject.toml + - name: Install dependencies + run: make dev + - name: Setup doc deploy + run: | + git config --global user.name Docs deploy + git config --global user.email aws-devax-open-source@amazon.com + - name: Build docs website and API reference + run: | + make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest" + poetry run mike set-default --push latest + - name: Release API docs to release version + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api + - name: Release API docs to latest + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: latest/api From c0bb85f341a4e98a5a19e1f27f824166974b66a8 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 5 Jul 2022 12:16:35 +0200 Subject: [PATCH 30/72] fix(ci): regex to catch combination of related issues workflow --- .github/scripts/label_related_issue.js | 31 +++++++++++++------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js index 1953412ae41..8f851056cb3 100644 --- a/.github/scripts/label_related_issue.js +++ b/.github/scripts/label_related_issue.js @@ -4,27 +4,26 @@ module.exports = async ({github, context}) 
=> {
    const releaseLabel = process.env.RELEASE_LABEL;
    const maintainersTeam = process.env.MAINTAINERS_TEAM

-   const RELATED_ISSUE_REGEX = /Issue number:.+(\d)/
+   const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<relatedIssueNumber>\d+)/;

-   const matcher = new RegExp(RELATED_ISSUE_REGEX)
-   const isMatch = matcher.exec(prBody)
-   if (isMatch != null) {
-     let relatedIssueNumber = isMatch[1]
-     console.info(`Auto-labeling related issue ${relatedIssueNumber} for release`)
-
-     return await github.rest.issues.addLabels({
-       issue_number: relatedIssueNumber,
-       owner: context.repo.owner,
-       repo: context.repo.repo,
-       labels: [releaseLabel]
-     })
-   } else {
-     let msg = `${maintainersTeam} No related issues found. Please ensure '${releaseLabel}' label is applied before releasing.`;
+   const isMatch = RELATED_ISSUE_REGEX.exec(prBody);
+   if (!isMatch) {
+     core.setFailed(`Unable to find related issue for PR number ${prNumber}.\n\n Body details: ${prBody}`);
      return await github.rest.issues.createComment({
        owner: context.repo.owner,
        repo: context.repo.repo,
-       body: msg,
+       body: `${maintainersTeam} No related issues found. Please ensure '${releaseLabel}' label is applied before releasing.`,
        issue_number: prNumber,
      });
    }
+
+   const { groups: {relatedIssueNumber} } = isMatch
+
+   core.info(`Auto-labeling related issue ${relatedIssueNumber} for release`)
+   return await github.rest.issues.addLabels({
+     issue_number: relatedIssueNumber,
+     owner: context.repo.owner,
+     repo: context.repo.repo,
+     labels: [releaseLabel]
+   })
 }

From 84eadd180a6dccfc78683602682bee7ac7bc4e44 Mon Sep 17 00:00:00 2001
From: mploski
Date: Thu, 7 Jul 2022 11:47:01 +0200
Subject: [PATCH 31/72] Update MAINTAINERS.md

Co-authored-by: Heitor Lessa

---
 MAINTAINERS.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 9e6c529af8b..70c137fbd31 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -166,7 +166,7 @@ When necessary, be upfront that the time to review, approve, and implement a RFC

 Some examples using our initial and new RFC templates: #92, #94, #95, #991, #1226

-### Run end to end tests on merges to Develop branch
+### Run end to end tests

 In order to run end to end tests you need to install CDK CLI first and bootstrap your account with `cdk bootstrap` command. For additional details follow [documentation](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html).

From 9203232e0ee92b4b5f1d73c53143b8b5210b07c8 Mon Sep 17 00:00:00 2001
From: mploski
Date: Thu, 7 Jul 2022 11:47:27 +0200
Subject: [PATCH 32/72] Update MAINTAINERS.md

Co-authored-by: Heitor Lessa

---
 MAINTAINERS.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 70c137fbd31..cd2ec64b6a5 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -182,7 +182,7 @@ To be agreed

 Firstly, make sure you are using the `develop` branch and it is up to date with the origin.

-There are four main steps to release a new version: Changelog generation, version bumping, and drafting release notes.
+There are three main steps to release a new version: Changelog generation, version bumping, and drafting release notes.
#### Changelog generation

From f98df4c58520e3ba1bce8fff76672efa165cb28f Mon Sep 17 00:00:00 2001
From: mploski
Date: Thu, 7 Jul 2022 11:49:04 +0200
Subject: [PATCH 33/72] Update MAINTAINERS.md

Co-authored-by: Heitor Lessa

---
 MAINTAINERS.md | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index cd2ec64b6a5..b84675f02f0 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -170,8 +170,9 @@ Some examples using our initial and new RFC templates: #92, #94, #95, #991, #122

 In order to run end to end tests you need to install CDK CLI first and bootstrap your account with `cdk bootstrap` command. For additional details follow [documentation](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html).

-You can run tests either from your local machine's shell by exporting `AWS_PROFILE` environment variable and running `make e2e tests`.
-You may also do this directly from GitHub Console. Run the [run-e2e-tests workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/run-e2e-tests.yml) and pick the branch you want to run tests against.
+To run locally, export the `AWS_PROFILE` environment variable and run `make e2e-test`. To run from GitHub Actions, use [run-e2e-tests workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/run-e2e-tests.yml) and pick the branch you want to run tests against.
+
+**NOTE**: E2E tests are run as part of each merge to `develop` branch.

 > **Q: What if there's an error?**

 To be agreed

From 936839ecc60efa33e6ee57763ea037580c22bd24 Mon Sep 17 00:00:00 2001
From: mploski
Date: Thu, 7 Jul 2022 11:50:56 +0200
Subject: [PATCH 34/72] Update tests/e2e/utils/infrastructure.py

Co-authored-by: Heitor Lessa

---
 tests/e2e/utils/infrastructure.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py
index 54682055f5b..3cd97808589 100644
--- a/tests/e2e/utils/infrastructure.py
+++ b/tests/e2e/utils/infrastructure.py
@@ -21,7 +21,7 @@ class PythonVersion(Enum):
     V39 = {"runtime": Runtime.PYTHON_3_9, "image": Runtime.PYTHON_3_9.bundling_image.image}


-class InfrastructureStackInterface(ABC):
+class BaseInfrastructureStack(ABC):
     @abstractmethod
     def synthesize() -> Tuple[dict, str]:
         ...
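For context, a concrete e2e stack might implement the renamed `BaseInfrastructureStack` interface along the lines of the sketch below. This is purely illustrative and not part of the patch: the `LoggerStack` name and its return values are hypothetical, and `self` is added to `synthesize` for idiomatic Python.

```python
from abc import ABC, abstractmethod
from typing import Tuple


class BaseInfrastructureStack(ABC):
    @abstractmethod
    def synthesize(self) -> Tuple[dict, str]:
        ...


class LoggerStack(BaseInfrastructureStack):
    """Hypothetical concrete stack for the Logger e2e suite."""

    def synthesize(self) -> Tuple[dict, str]:
        # A real implementation would run CDK synthesis and return the rendered
        # CloudFormation template plus the directory holding bundled Lambda assets.
        template: dict = {"Resources": {}}
        asset_root_dir = "cdk.out"
        return template, asset_root_dir
```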
From 1f570a62aaffbb7c4c4434404c7086ebede2ab37 Mon Sep 17 00:00:00 2001 From: Alexander Melnyk Date: Thu, 7 Jul 2022 12:54:00 +0200 Subject: [PATCH 35/72] chore(layers): add release pipeline in GitHub Actions (#1278) * chore: add layer project * reduce to 1 region for dev * chore: shorter name for the workflow * fix ignore markdown lint for now * fix: more f strings * ignore mdlint * add reusable workflow for both beta and prod * Update layer/layer/canary/app.py Co-authored-by: Heitor Lessa * Update layer/layer/canary/app.py Co-authored-by: Heitor Lessa * readme review * rephrase canary stack ssm parameter usage * add default RELEASE_TAG_VERSION assignment based on the input (release or manual trigger) * add reference to layer docs * wording * move version trackign arn to canary stack * remove outdated npm caching, add release tag resolution for manual workflow trigger * review: fix layer name and remove dependencies from reusable workflow * remove debug statement, add default working dir * pin versions and hashes for requirements with pip-compile * rename reusable workflow * pass artefact name to the reusable workflow to prevent potential future conflicts Co-authored-by: Heitor Lessa --- .github/workflows/publish_layer.yml | 80 +++++++++++++++ .../workflows/reusable_deploy_layer_stack.yml | 87 ++++++++++++++++ layer/.gitignore | 10 ++ layer/README.md | 27 +++++ layer/app.py | 23 +++++ layer/cdk.json | 35 +++++++ layer/layer/__init__.py | 0 layer/layer/canary/app.py | 99 +++++++++++++++++++ layer/layer/canary_stack.py | 75 ++++++++++++++ layer/layer/layer_stack.py | 19 ++++ layer/requirements-dev.txt | 2 + layer/requirements.txt | 76 ++++++++++++++ 12 files changed, 533 insertions(+) create mode 100644 .github/workflows/publish_layer.yml create mode 100644 .github/workflows/reusable_deploy_layer_stack.yml create mode 100644 layer/.gitignore create mode 100644 layer/README.md create mode 100644 layer/app.py create mode 100644 layer/cdk.json create mode 100644 layer/layer/__init__.py create mode 100644 layer/layer/canary/app.py create mode 100644 layer/layer/canary_stack.py create mode 100644 layer/layer/layer_stack.py create mode 100644 layer/requirements-dev.txt create mode 100644 layer/requirements.txt diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml new file mode 100644 index 00000000000..d4001e0bdce --- /dev/null +++ b/.github/workflows/publish_layer.yml @@ -0,0 +1,80 @@ +name: Deploy layer to all regions + +permissions: + id-token: write + contents: read + +on: + workflow_dispatch: + inputs: + latest_published_version: + description: "Latest PyPi published version to rebuild latest docs for, e.g. 
v1.22.0" + default: "v1.22.0" + required: true + workflow_run: + workflows: [ "Publish to PyPi" ] + types: + - completed + + +jobs: + build-layer: + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./layer + steps: + - name: checkout + uses: actions/checkout@v2 + - name: Setup Node.js + uses: actions/setup-node@v2 + with: + node-version: '16.12' + cache: 'npm' + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + cache: 'pip' + - name: Set release notes tag + run: | + RELEASE_INPUT=${{ inputs.latest_published_version }} + GITHUB_EVENT_RELEASE_TAG=${{ github.event.release.tag_name }} + RELEASE_TAG_VERSION=${GITHUB_EVENT_RELEASE_TAG:-$RELEASE_INPUT} + echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV + - name: install cdk and deps + run: | + npm install -g aws-cdk@2.29.0 + cdk --version + - name: install deps + run: | + pip install -r requirements.txt + - name: CDK build + run: cdk synth --context version=$RELEASE_TAG_VERSION -o cdk.out + - name: zip output + run: zip -r cdk.out.zip cdk.out + - name: Archive CDK artifacts + uses: actions/upload-artifact@v3 + with: + name: cdk-layer-artefact + path: cdk.out.zip + + deploy-beta: + needs: + - build-layer + uses: ./.github/workflows/reusable_deploy_layer_stack.yml + with: + stage: "BETA" + artifact-name: "cdk-layer-artefact" + secrets: + target-account: ${{ secrets.LAYERS_BETA_ACCOUNT }} + + deploy-prod: + needs: + - deploy-beta + uses: ./.github/workflows/reusable_deploy_layer_stack.yml + with: + stage: "PROD" + artifact-name: "cdk-layer-artefact" + secrets: + target-account: ${{ secrets.LAYERS_PROD_ACCOUNT }} diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml new file mode 100644 index 00000000000..f31449e7ba6 --- /dev/null +++ b/.github/workflows/reusable_deploy_layer_stack.yml @@ -0,0 +1,87 @@ +name: Deploy cdk stack + +permissions: + id-token: write + contents: read + +on: + workflow_call: + inputs: + stage: + required: true + type: string + artefact-name: + required: true + type: string + secrets: + target-account: + required: true + +jobs: + deploy-cdk-stack: + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./layer + strategy: + fail-fast: false + matrix: + region: [ + "af-south-1", + # "eu-central-1", + # "us-east-1", + # "us-east-2", + # "us-west-1", + # "us-west-2", + # "ap-east-1", + # "ap-south-1", + # "ap-northeast-1", + # "ap-northeast-2", + # "ap-southeast-1", + # "ap-southeast-2", + # "ca-central-1", + # "eu-west-1", + # "eu-west-2", + # "eu-west-3", + # "eu-south-1", + # "eu-north-1", + # "sa-east-1", + # "ap-southeast-3", + # "ap-northeast-3", + # "me-south-1" + ] + steps: + - name: checkout + uses: actions/checkout@v2 + - name: aws credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-region: ${{ matrix.region }} + role-to-assume: arn:aws:iam::${{ secrets.target-account }}:role/${{ secrets.AWS_GITHUB_OIDC_ROLE }} + - name: Setup Node.js + uses: actions/setup-node@v2 + with: + node-version: '16.12' + cache: 'npm' + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + cache: 'pip' + - name: install cdk and deps + run: | + npm install -g aws-cdk@2.29.0 + cdk --version + - name: install deps + run: | + pip install -r requirements.txt + - name: Download artifact + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.artefact-name }} + - name: unzip artefact + run: unzip cdk.out.zip + - name: CDK Deploy Layer + run: cdk deploy 
--app cdk.out --context region=${{ matrix.region }} 'LayerStack ' --require-approval never --verbose
+      - name: CDK Deploy Canary
+        run: cdk deploy --app cdk.out --context region=${{ matrix.region}} --parameters DeployStage="${{ input.stage }}" 'CanaryStack' --require-approval never --verbose
diff --git a/layer/.gitignore b/layer/.gitignore
new file mode 100644
index 00000000000..37833f8beb2
--- /dev/null
+++ b/layer/.gitignore
@@ -0,0 +1,10 @@
+*.swp
+package-lock.json
+__pycache__
+.pytest_cache
+.venv
+*.egg-info
+
+# CDK asset staging directory
+.cdk.staging
+cdk.out
diff --git a/layer/README.md b/layer/README.md
new file mode 100644
index 00000000000..99da0083ffc
--- /dev/null
+++ b/layer/README.md
@@ -0,0 +1,27 @@
+
+# CDK Powertools layer
+
+This is a CDK project to build and deploy AWS Lambda Powertools [Lambda layer](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-concepts.html#gettingstarted-concepts-layer) to multiple commercial regions.
+
+## Build the layer
+
+To build the layer construct you need to provide the Powertools version that is [available in PyPi](https://pypi.org/project/aws-lambda-powertools/).
+You can pass it as a context variable when running `synth` or `deploy`:
+
+```shell
+cdk synth --context version=1.25.1
+```
+
+## Canary stack
+
+We use a canary stack to verify that the deployment is successful and we can use the layer by adding it to a newly created Lambda function.
+The canary is deployed after the layer construct. Because the layer ARN is created during the deploy, we need to pass this information asynchronously via an SSM parameter.
+To achieve that we use SSM parameter store to pass the layer ARN to the canary.
+The layer stack writes the layer ARN after the deployment as an SSM parameter and the canary stack reads this information and adds the layer to the function.
+
+## Version tracking
+
+AWS Lambda versions Lambda layers by incrementing a number at the end of the ARN.
+This makes it challenging to know which Powertools version a layer contains.
+For better tracking of the ARNs and the corresponding version we need to keep track of which Powertools version was deployed to which layer.
+To achieve that we created two components. First, we created a version tracking app which receives events via EventBridge. Second, after a successful canary deployment we send the layer ARN, Powertools version, and the region to this EventBridge event bus.
diff --git a/layer/app.py b/layer/app.py
new file mode 100644
index 00000000000..78e99b17654
--- /dev/null
+++ b/layer/app.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+
+import aws_cdk as cdk
+
+from layer.canary_stack import CanaryStack
+from layer.layer_stack import LayerStack
+
+app = cdk.App()
+
+POWERTOOLS_VERSION: str = app.node.try_get_context("version")
+SSM_PARAM_LAYER_ARN: str = "/layers/powertools-layer-arn"
+
+if not POWERTOOLS_VERSION:
+    raise ValueError(
+        "Please set the version for Powertools by passing the '--context=version:<version>' parameter to the CDK "
+        "synth step."
+ ) + +LayerStack(app, "LayerStack", powertools_version=POWERTOOLS_VERSION, ssm_paramter_layer_arn=SSM_PARAM_LAYER_ARN) + +CanaryStack(app, "CanaryStack", powertools_version=POWERTOOLS_VERSION, ssm_paramter_layer_arn=SSM_PARAM_LAYER_ARN) + +app.synth() diff --git a/layer/cdk.json b/layer/cdk.json new file mode 100644 index 00000000000..c120c5f4765 --- /dev/null +++ b/layer/cdk.json @@ -0,0 +1,35 @@ +{ + "app": "python3 app.py", + "watch": { + "include": [ + "**" + ], + "exclude": [ + "README.md", + "cdk*.json", + "requirements*.txt", + "source.bat", + "**/__init__.py", + "python/__pycache__", + "tests" + ] + }, + "context": { + "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true, + "@aws-cdk/core:stackRelativeExports": true, + "@aws-cdk/aws-rds:lowercaseDbIdentifier": true, + "@aws-cdk/aws-lambda:recognizeVersionProps": true, + "@aws-cdk/aws-lambda:recognizeLayerVersion": true, + "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, + "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, + "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, + "@aws-cdk/core:checkSecretUsage": true, + "@aws-cdk/aws-iam:minimizePolicies": true, + "@aws-cdk/core:validateSnapshotRemovalPolicy": true, + "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, + "@aws-cdk/core:target-partitions": [ + "aws", + "aws-cn" + ] + } +} diff --git a/layer/layer/__init__.py b/layer/layer/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/layer/layer/canary/app.py b/layer/layer/canary/app.py new file mode 100644 index 00000000000..31db94dd92b --- /dev/null +++ b/layer/layer/canary/app.py @@ -0,0 +1,99 @@ +import datetime +import json +import os +from importlib.metadata import version + +import boto3 + +from aws_lambda_powertools import Logger, Metrics, Tracer + +logger = Logger(service="version-track") +tracer = Tracer() +metrics = Metrics(namespace="powertools-layer-canary", service="PowertoolsLayerCanary") + +layer_arn = os.getenv("POWERTOOLS_LAYER_ARN") +powertools_version = os.getenv("POWERTOOLS_VERSION") +stage = os.getenv("LAYER_PIPELINE_STAGE") +event_bus_arn = os.getenv("VERSION_TRACKING_EVENT_BUS_ARN") + + +def handler(event): + logger.info("Running checks") + check_envs() + verify_powertools_version() + send_notification() + return True + + +@logger.inject_lambda_context(log_event=True) +def on_event(event, context): + request_type = event["RequestType"] + # we handle only create events, because we recreate the canary on each run + if request_type == "Create": + return on_create(event) + + return "Nothing to be processed" + + +def on_create(event): + props = event["ResourceProperties"] + logger.info("create new resource with properties %s" % props) + handler(event) + + +def check_envs(): + logger.info('Checking required envs ["POWERTOOLS_LAYER_ARN", "AWS_REGION", "STAGE"]') + if not layer_arn: + raise ValueError("POWERTOOLS_LAYER_ARN is not set. Aborting...") + if not powertools_version: + raise ValueError("POWERTOOLS_VERSION is not set. Aborting...") + if not stage: + raise ValueError("LAYER_PIPELINE_STAGE is not set. Aborting...") + if not event_bus_arn: + raise ValueError("VERSION_TRACKING_EVENT_BUS_ARN is not set. Aborting...") + logger.info("All envs configured, continue...") + + +def verify_powertools_version() -> None: + """ + fetches the version that we import from the powertools layer and compares + it with expected version set in environment variable, which we pass during deployment. 
+    :raise ValueError if the expected version is not the same as the version we get from the layer
+    """
+    logger.info("Checking Powertools version in library...")
+    current_version = version("aws_lambda_powertools")
+    if powertools_version != current_version:
+        raise ValueError(
+            f'Expected powertools version is "{powertools_version}", but layer contains version "{current_version}"'
+        )
+    logger.info(f"Current Powertools version is: {current_version}")
+
+
+def send_notification():
+    """
+    sends an event to version tracking event bridge
+    """
+    event = {
+        "Time": datetime.datetime.now(),
+        "Source": "powertools.layer.canary",
+        "EventBusName": event_bus_arn,
+        "DetailType": "deployment",
+        "Detail": json.dumps(
+            {
+                "id": "powertools-python",
+                "stage": stage,
+                "region": os.environ["AWS_REGION"],
+                "version": powertools_version,
+                "layerArn": layer_arn,
+            }
+        ),
+    }
+
+    logger.info(f"sending notification event: {event}")
+
+    client = boto3.client("events", region_name="eu-central-1")
+    resp = client.put_events(Entries=[event])
+    logger.info(resp)
+    if resp["FailedEntryCount"] != 0:
+        logger.error(resp)
+        raise ValueError("Failed to send deployment notification to version tracking")
diff --git a/layer/layer/canary_stack.py b/layer/layer/canary_stack.py
new file mode 100644
index 00000000000..15bc80214d3
--- /dev/null
+++ b/layer/layer/canary_stack.py
@@ -0,0 +1,75 @@
+import uuid
+
+from aws_cdk import CfnParameter, CustomResource, Duration, Stack
+from aws_cdk.aws_iam import Effect, ManagedPolicy, PolicyStatement, Role, ServicePrincipal
+from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime
+from aws_cdk.aws_logs import RetentionDays
+from aws_cdk.aws_ssm import StringParameter
+from aws_cdk.custom_resources import Provider
+from constructs import Construct
+
+
+class CanaryStack(Stack):
+    def __init__(
+        self,
+        scope: Construct,
+        construct_id: str,
+        powertools_version: str,
+        ssm_paramter_layer_arn: str,
+        **kwargs,
+    ) -> None:
+        super().__init__(scope, construct_id, **kwargs)
+
+        VERSION_TRACKING_EVENT_BUS_ARN: str = (
+            "arn:aws:events:eu-central-1:027876851704:event-bus/VersionTrackingEventBus"
+        )
+
+        layer_arn = StringParameter.from_string_parameter_attributes(
+            self, "LayerVersionArnParam", parameter_name=ssm_paramter_layer_arn
+        ).string_value
+
+        layer = LayerVersion.from_layer_version_arn(self, "PowertoolsLayer", layer_version_arn=layer_arn)
+        deploy_stage = CfnParameter(self, "DeployStage", description="Deployment stage for canary").value_as_string
+
+        execution_role = Role(self, "LambdaExecutionRole", assumed_by=ServicePrincipal("lambda.amazonaws.com"))
+
+        execution_role.add_managed_policy(
+            ManagedPolicy.from_aws_managed_policy_name("service-role/AWSLambdaBasicExecutionRole")
+        )
+
+        execution_role.add_to_policy(
+            PolicyStatement(effect=Effect.ALLOW, actions=["lambda:GetFunction"], resources=["*"])
+        )
+
+        canary_lambda = Function(
+            self,
+            "CanaryLambdaFunction",
+            function_name="CanaryLambdaFunction",
+            code=Code.from_asset("layer/canary"),
+            handler="app.on_event",
+            layers=[layer],
+            memory_size=512,
+            timeout=Duration.seconds(10),
+            runtime=Runtime.PYTHON_3_9,
+            log_retention=RetentionDays.ONE_MONTH,
+            role=execution_role,
+            environment={
+                "POWERTOOLS_VERSION": powertools_version,
+                "POWERTOOLS_LAYER_ARN": layer_arn,
+                "VERSION_TRACKING_EVENT_BUS_ARN": VERSION_TRACKING_EVENT_BUS_ARN,
+                "LAYER_PIPELINE_STAGE": deploy_stage,
+            },
+        )
+
+        canary_lambda.add_to_role_policy(
+            PolicyStatement(
+                effect=Effect.ALLOW, actions=["events:PutEvents"],
resources=[VERSION_TRACKING_EVENT_BUS_ARN] + ) + ) + + # custom resource provider configuration + provider = Provider( + self, "CanaryCustomResource", on_event_handler=canary_lambda, log_retention=RetentionDays.ONE_MONTH + ) + # force to recreate resource on each deployment with randomized name + CustomResource(self, f"CanaryTrigger-{str(uuid.uuid4())[0:7]}", service_token=provider.service_token) diff --git a/layer/layer/layer_stack.py b/layer/layer/layer_stack.py new file mode 100644 index 00000000000..8b32de9c206 --- /dev/null +++ b/layer/layer/layer_stack.py @@ -0,0 +1,19 @@ +from aws_cdk import Stack +from aws_cdk.aws_ssm import StringParameter +from cdk_lambda_powertools_python_layer import LambdaPowertoolsLayer +from constructs import Construct + + +class LayerStack(Stack): + def __init__( + self, scope: Construct, construct_id: str, powertools_version: str, ssm_paramter_layer_arn: str, **kwargs + ) -> None: + super().__init__(scope, construct_id, **kwargs) + + layer = LambdaPowertoolsLayer( + self, "Layer", layer_version_name="AWSLambdaPowertoolsPython", version=powertools_version + ) + + layer.add_permission("PublicLayerAccess", account_id="*") + + StringParameter(self, "VersionArn", parameter_name=ssm_paramter_layer_arn, string_value=layer.layer_version_arn) diff --git a/layer/requirements-dev.txt b/layer/requirements-dev.txt new file mode 100644 index 00000000000..f3ec7d732b5 --- /dev/null +++ b/layer/requirements-dev.txt @@ -0,0 +1,2 @@ +pytest==6.2.5 +boto3==1.24.22 diff --git a/layer/requirements.txt b/layer/requirements.txt new file mode 100644 index 00000000000..0484892d321 --- /dev/null +++ b/layer/requirements.txt @@ -0,0 +1,76 @@ +# +# This file is autogenerated by pip-compile with python 3.9 +# To update, run: +# +# pip-compile --generate-hashes requirements.txt +# +attrs==21.4.0 \ + --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ + --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd + # via + # -r requirements.txt + # cattrs + # jsii +aws-cdk-lib==2.29.0 \ + --hash=sha256:4f852105cafd28a2bbd9bd2c6d24a2e1ab503bba923fd49a1782390b235af999 \ + --hash=sha256:53a78788219d9bf3a998211223225b34a10f066124e2812adcd40fd0a2058572 + # via + # -r requirements.txt + # cdk-lambda-powertools-python-layer +cattrs==22.1.0 \ + --hash=sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6 \ + --hash=sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364 + # via + # -r requirements.txt + # jsii +cdk-lambda-powertools-python-layer==2.0.48 \ + --hash=sha256:7bdd5a196e74b48d403223722f2838d1d10064d02e960a5565482cc0b7aad18d \ + --hash=sha256:9afeacea31eba14d67360db71af385c654c9e0af9b29a0d4e0922b52f862ae03 + # via -r requirements.txt +constructs==10.1.43 \ + --hash=sha256:69fd6da574c9506f44ca61e112af7d5db08ebb29b4bedc67b6d200b616f4abce \ + --hash=sha256:f37e8c3432f94f403b50bf69476bea55719bcc3fa0d3a0e60bf0975dfe492867 + # via + # -r requirements.txt + # aws-cdk-lib + # cdk-lambda-powertools-python-layer +exceptiongroup==1.0.0rc8 \ + --hash=sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a \ + --hash=sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035 + # via + # -r requirements.txt + # cattrs +jsii==1.61.0 \ + --hash=sha256:542a72cd1a144d36fa530dc359b5295b82d9e7ecdd76d5c7b4b61195f132a746 \ + --hash=sha256:b2899f24bcc95ce009bc256558c81cde8cff9f830eddbe9b0d581c40558a1ff0 + # via + # -r requirements.txt + # aws-cdk-lib + # cdk-lambda-powertools-python-layer + # 
constructs +publication==0.0.3 \ + --hash=sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6 \ + --hash=sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4 + # via + # -r requirements.txt + # aws-cdk-lib + # cdk-lambda-powertools-python-layer + # constructs +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via + # -r requirements.txt + # jsii +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # -r requirements.txt + # python-dateutil +typing-extensions==4.3.0 \ + --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via + # -r requirements.txt + # jsii From 577a410ab3bcde7d2017eedd59bb4f37ccd2687f Mon Sep 17 00:00:00 2001 From: Alexander Melnyk Date: Thu, 7 Jul 2022 13:05:41 +0200 Subject: [PATCH 36/72] fix: typo in input for layer workflow --- .github/workflows/publish_layer.yml | 4 ++-- .github/workflows/reusable_deploy_layer_stack.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index d4001e0bdce..7192bf8b6ce 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -65,7 +65,7 @@ jobs: uses: ./.github/workflows/reusable_deploy_layer_stack.yml with: stage: "BETA" - artifact-name: "cdk-layer-artefact" + artefact-name: "cdk-layer-artefact" secrets: target-account: ${{ secrets.LAYERS_BETA_ACCOUNT }} @@ -75,6 +75,6 @@ jobs: uses: ./.github/workflows/reusable_deploy_layer_stack.yml with: stage: "PROD" - artifact-name: "cdk-layer-artefact" + artefact-name: "cdk-layer-artefact" secrets: target-account: ${{ secrets.LAYERS_PROD_ACCOUNT }} diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml index f31449e7ba6..ffeadf43045 100644 --- a/.github/workflows/reusable_deploy_layer_stack.yml +++ b/.github/workflows/reusable_deploy_layer_stack.yml @@ -84,4 +84,4 @@ jobs: - name: CDK Deploy Layer run: cdk deploy --app cdk.out --context region=${{ matrix.region }} 'LayerStack ' --require-approval never --verbose - name: CDK Deploy Canary - run: cdk deploy --app cdk.out --context region=${{ matrix.region}} --parameters DeployStage="${{ input.stage }}" 'CanaryStack' --require-approval never --verbose + run: cdk deploy --app cdk.out --context region=${{ matrix.region}} --parameters DeployStage="${{ inputs.stage }}" 'CanaryStack' --require-approval never --verbose From e0b3b58d9774d5203d6370a2a16968026f751fb2 Mon Sep 17 00:00:00 2001 From: Alexander Melnyk Date: Thu, 7 Jul 2022 13:15:30 +0200 Subject: [PATCH 37/72] fix: no need to cache npm since we only install cdk cli and don't have .lock files --- .github/workflows/publish_layer.yml | 1 - .github/workflows/reusable_deploy_layer_stack.yml | 1 - 2 files changed, 2 deletions(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index 7192bf8b6ce..c69e8cb5b9e 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -30,7 +30,6 @@ jobs: uses: actions/setup-node@v2 with: node-version: '16.12' - cache: 'npm' - name: Setup python uses: actions/setup-python@v4 with: diff --git 
a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml index ffeadf43045..2923c3efdbb 100644 --- a/.github/workflows/reusable_deploy_layer_stack.yml +++ b/.github/workflows/reusable_deploy_layer_stack.yml @@ -62,7 +62,6 @@ jobs: uses: actions/setup-node@v2 with: node-version: '16.12' - cache: 'npm' - name: Setup python uses: actions/setup-python@v4 with: From 925c14583d0815fec22dc606d350baef4dbd3d56 Mon Sep 17 00:00:00 2001 From: Alexander Melnyk Date: Thu, 7 Jul 2022 13:43:49 +0200 Subject: [PATCH 38/72] fix: add entire ARN role instead of account and role name --- .github/workflows/publish_layer.yml | 4 ++-- .github/workflows/reusable_deploy_layer_stack.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index c69e8cb5b9e..df20241c331 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -66,7 +66,7 @@ jobs: stage: "BETA" artefact-name: "cdk-layer-artefact" secrets: - target-account: ${{ secrets.LAYERS_BETA_ACCOUNT }} + target-account-role: arn:aws:iam::${{ secrets.LAYERS_BETA_ACCOUNT }}:role/${{ secrets.AWS_GITHUB_OIDC_ROLE }} deploy-prod: needs: @@ -76,4 +76,4 @@ jobs: stage: "PROD" artefact-name: "cdk-layer-artefact" secrets: - target-account: ${{ secrets.LAYERS_PROD_ACCOUNT }} + target-account-role: arn:aws:iam::${{ secrets.LAYERS_PROD_ACCOUNT }}:role/${{ secrets.AWS_GITHUB_OIDC_ROLE }} diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml index 2923c3efdbb..506cae96809 100644 --- a/.github/workflows/reusable_deploy_layer_stack.yml +++ b/.github/workflows/reusable_deploy_layer_stack.yml @@ -14,7 +14,7 @@ on: required: true type: string secrets: - target-account: + target-account-role: required: true jobs: @@ -57,7 +57,7 @@ jobs: uses: aws-actions/configure-aws-credentials@v1 with: aws-region: ${{ matrix.region }} - role-to-assume: arn:aws:iam::${{ secrets.target-account }}:role/${{ secrets.AWS_GITHUB_OIDC_ROLE }} + role-to-assume: ${{ secrets.target-account-role }} - name: Setup Node.js uses: actions/setup-node@v2 with: From 5bcd0260e8e793e55258cc1a795845656b3acfad Mon Sep 17 00:00:00 2001 From: Alexander Melnyk Date: Thu, 7 Jul 2022 15:47:53 +0200 Subject: [PATCH 39/72] fix: path to artefact --- .github/workflows/publish_layer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index df20241c331..b4814fbb98a 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -56,7 +56,7 @@ jobs: uses: actions/upload-artifact@v3 with: name: cdk-layer-artefact - path: cdk.out.zip + path: layer/cdk.out.zip deploy-beta: needs: From 46c1754ac073b7e0b9151d07ce26eec4ba7033dc Mon Sep 17 00:00:00 2001 From: Alexander Melnyk Date: Thu, 7 Jul 2022 16:03:44 +0200 Subject: [PATCH 40/72] fix: unzip the right artifact name --- .github/workflows/reusable_deploy_layer_stack.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml index 506cae96809..31f8830931d 100644 --- a/.github/workflows/reusable_deploy_layer_stack.yml +++ b/.github/workflows/reusable_deploy_layer_stack.yml @@ -79,7 +79,7 @@ jobs: with: name: ${{ inputs.artefact-name }} - name: unzip artefact - run: unzip cdk.out.zip + run: unzip ${{ inputs.artefact-name }} - 
name: CDK Deploy Layer run: cdk deploy --app cdk.out --context region=${{ matrix.region }} 'LayerStack ' --require-approval never --verbose - name: CDK Deploy Canary From 1170e7674d8d09fb7938dae79d332371e85e2329 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Thu, 7 Jul 2022 16:47:43 +0200 Subject: [PATCH 41/72] docs(event-handler): snippets split, improved, and lint (#1279) --- docs/core/event_handler/api_gateway.md | 1166 +++-------------- docs/core/logger.md | 3 + docs/core/metrics.md | 3 + docs/core/tracer.md | 3 + examples/event_handler_rest/sam/template.yaml | 56 + .../src/accessing_request_details.py | 40 + .../src/accessing_request_details_headers.py | 30 + .../src/assert_http_response.py | 28 + .../src/assert_http_response_module.py | 27 + .../src/binary_responses.json | 8 + .../src/binary_responses.py | 27 + .../src/binary_responses_logo.svg | 14 + .../src/binary_responses_output.json | 8 + .../src/compressing_responses.json | 8 + .../src/compressing_responses.py | 28 + .../src/compressing_responses_output.json | 9 + .../src/custom_api_mapping.json | 5 + .../src/custom_api_mapping.py | 20 + .../src/custom_serializer.py | 58 + examples/event_handler_rest/src/debug_mode.py | 28 + .../src/dynamic_routes.json | 5 + .../event_handler_rest/src/dynamic_routes.py | 27 + .../src/dynamic_routes_catch_all.json | 5 + .../src/dynamic_routes_catch_all.py | 21 + .../src/exception_handling.py | 43 + .../src/fine_grained_responses.py | 36 + .../src/fine_grained_responses_output.json | 9 + .../src/getting_started_alb_api_resolver.py | 28 + .../src/getting_started_http_api_resolver.py | 28 + .../getting_started_rest_api_resolver.json | 58 + .../src/getting_started_rest_api_resolver.py | 28 + ...ting_started_rest_api_resolver_output.json | 8 + .../event_handler_rest/src/http_methods.json | 6 + .../event_handler_rest/src/http_methods.py | 28 + .../src/http_methods_multiple.py | 29 + .../src/not_found_routes.py | 35 + .../src/raising_http_errors.py | 59 + .../event_handler_rest/src/setting_cors.py | 44 + .../src/setting_cors_output.json | 10 + .../event_handler_rest/src/split_route.py | 18 + .../src/split_route_module.py | 33 + .../src/split_route_prefix.py | 19 + .../src/split_route_prefix_module.py | 36 + 43 files changed, 1164 insertions(+), 1018 deletions(-) create mode 100644 examples/event_handler_rest/sam/template.yaml create mode 100644 examples/event_handler_rest/src/accessing_request_details.py create mode 100644 examples/event_handler_rest/src/accessing_request_details_headers.py create mode 100644 examples/event_handler_rest/src/assert_http_response.py create mode 100644 examples/event_handler_rest/src/assert_http_response_module.py create mode 100644 examples/event_handler_rest/src/binary_responses.json create mode 100644 examples/event_handler_rest/src/binary_responses.py create mode 100644 examples/event_handler_rest/src/binary_responses_logo.svg create mode 100644 examples/event_handler_rest/src/binary_responses_output.json create mode 100644 examples/event_handler_rest/src/compressing_responses.json create mode 100644 examples/event_handler_rest/src/compressing_responses.py create mode 100644 examples/event_handler_rest/src/compressing_responses_output.json create mode 100644 examples/event_handler_rest/src/custom_api_mapping.json create mode 100644 examples/event_handler_rest/src/custom_api_mapping.py create mode 100644 examples/event_handler_rest/src/custom_serializer.py create mode 100644 examples/event_handler_rest/src/debug_mode.py create mode 100644 
examples/event_handler_rest/src/dynamic_routes.json create mode 100644 examples/event_handler_rest/src/dynamic_routes.py create mode 100644 examples/event_handler_rest/src/dynamic_routes_catch_all.json create mode 100644 examples/event_handler_rest/src/dynamic_routes_catch_all.py create mode 100644 examples/event_handler_rest/src/exception_handling.py create mode 100644 examples/event_handler_rest/src/fine_grained_responses.py create mode 100644 examples/event_handler_rest/src/fine_grained_responses_output.json create mode 100644 examples/event_handler_rest/src/getting_started_alb_api_resolver.py create mode 100644 examples/event_handler_rest/src/getting_started_http_api_resolver.py create mode 100644 examples/event_handler_rest/src/getting_started_rest_api_resolver.json create mode 100644 examples/event_handler_rest/src/getting_started_rest_api_resolver.py create mode 100644 examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json create mode 100644 examples/event_handler_rest/src/http_methods.json create mode 100644 examples/event_handler_rest/src/http_methods.py create mode 100644 examples/event_handler_rest/src/http_methods_multiple.py create mode 100644 examples/event_handler_rest/src/not_found_routes.py create mode 100644 examples/event_handler_rest/src/raising_http_errors.py create mode 100644 examples/event_handler_rest/src/setting_cors.py create mode 100644 examples/event_handler_rest/src/setting_cors_output.json create mode 100644 examples/event_handler_rest/src/split_route.py create mode 100644 examples/event_handler_rest/src/split_route_module.py create mode 100644 examples/event_handler_rest/src/split_route_prefix.py create mode 100644 examples/event_handler_rest/src/split_route_prefix_module.py diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index cf99b615a80..9db219e994e 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -8,14 +8,14 @@ Event handler for Amazon API Gateway REST and HTTP APIs, and Application Loader ## Key Features * Lightweight routing to reduce boilerplate for API Gateway REST/HTTP API and ALB -* Seamless support for CORS, binary and Gzip compression -* Integrates with [Data classes utilities](../../utilities/data_classes.md){target="_blank"} to easily access event and identity information -* Built-in support for Decimals JSON encoding -* Support for dynamic path expressions -* Router to allow for splitting up the handler across multiple files +* Support for CORS, binary and Gzip compression, Decimals JSON encoding and bring your own JSON serializer +* Built-in integration with [Event Source Data Classes utilities](../../utilities/data_classes.md){target="_blank"} for self-documented event schema ## Getting started +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}. + ### Required resources You must have an existing [API Gateway Proxy integration](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html){target="_blank"} or [ALB](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html){target="_blank"} configured to invoke your Lambda function. 
@@ -25,54 +25,14 @@ This is the sample infrastructure for API Gateway we are using for the examples ???+ info "There is no additional permissions or dependencies required to use this utility." ```yaml title="AWS Serverless Application Model (SAM) example" -AWSTemplateFormatVersion: "2010-09-09" -Transform: AWS::Serverless-2016-10-31 -Description: Hello world event handler API Gateway - -Globals: - Api: - TracingEnabled: true - Cors: # see CORS section - AllowOrigin: "'https://example.com'" - AllowHeaders: "'Content-Type,Authorization,X-Amz-Date'" - MaxAge: "'300'" - BinaryMediaTypes: # see Binary responses section - - "*~1*" # converts to */* for any binary type - Function: - Timeout: 5 - Runtime: python3.8 - Tracing: Active - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 - POWERTOOLS_LOGGER_LOG_EVENT: true - POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication - POWERTOOLS_SERVICE_NAME: my_api-service - -Resources: - ApiFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: api_handler/ - Description: API handler function - Events: - ApiEvent: - Type: Api - Properties: - # NOTE: this is a catch-all rule to simply the documentation. - # explicit routes and methods are recommended for prod instead - # for example, Path: /hello, Method: GET - Path: /{proxy+} # Send requests on any path to the lambda function - Method: ANY # Send requests using any http method to the lambda function +--8<-- "examples/event_handler_rest/sam/template.yaml" ``` ### Event Resolvers Before you decorate your functions to handle a given path and HTTP method(s), you need to initialize a resolver. -A resolver will handle request resolution, include [one or more routers](#split-routes-with-router), and give you access to the current event via typed properties. +A resolver will handle request resolution, including [one or more routers](#split-routes-with-router), and give you access to the current event via typed properties. For resolvers, we provide: `APIGatewayRestResolver`, `APIGatewayHttpResolver`, and `ALBResolver`. @@ -83,113 +43,29 @@ For resolvers, we provide: `APIGatewayRestResolver`, `APIGatewayHttpResolver`, a When using Amazon API Gateway REST API to front your Lambda functions, you can use `APIGatewayRestResolver`. -Here's an example on how we can handle the `/hello` path. +Here's an example on how we can handle the `/todos` path. ???+ info We automatically serialize `Dict` responses as JSON, trim whitespace for compact responses, and set content-type to `application/json`. === "app.py" - ```python hl_lines="3 7 9 12 18" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - @app.get("/hello") - @tracer.capture_method - def get_hello_universe(): - return {"message": "hello universe"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="5 11 14 28" + --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver.py" ``` -=== "hello_event.json" + +=== "Request" This utility uses `path` and `httpMethod` to route to the right function. This helps make unit tests and local invocation easier too. 
```json hl_lines="4-5" - { - "body": "hello", - "resource": "/hello", - "path": "/hello", - "httpMethod": "GET", - "isBase64Encoded": false, - "queryStringParameters": { - "foo": "bar" - }, - "multiValueQueryStringParameters": {}, - "pathParameters": { - "hello": "/hello" - }, - "stageVariables": {}, - "headers": { - "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", - "Accept-Encoding": "gzip, deflate, sdch", - "Accept-Language": "en-US,en;q=0.8", - "Cache-Control": "max-age=0", - "CloudFront-Forwarded-Proto": "https", - "CloudFront-Is-Desktop-Viewer": "true", - "CloudFront-Is-Mobile-Viewer": "false", - "CloudFront-Is-SmartTV-Viewer": "false", - "CloudFront-Is-Tablet-Viewer": "false", - "CloudFront-Viewer-Country": "US", - "Host": "1234567890.execute-api.us-east-1.amazonaws.com", - "Upgrade-Insecure-Requests": "1", - "User-Agent": "Custom User Agent String", - "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", - "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", - "X-Forwarded-For": "127.0.0.1, 127.0.0.2", - "X-Forwarded-Port": "443", - "X-Forwarded-Proto": "https" - }, - "multiValueHeaders": {}, - "requestContext": { - "accountId": "123456789012", - "resourceId": "123456", - "stage": "Prod", - "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", - "requestTime": "25/Jul/2020:12:34:56 +0000", - "requestTimeEpoch": 1428582896000, - "identity": { - "cognitoIdentityPoolId": null, - "accountId": null, - "cognitoIdentityId": null, - "caller": null, - "accessKey": null, - "sourceIp": "127.0.0.1", - "cognitoAuthenticationType": null, - "cognitoAuthenticationProvider": null, - "userArn": null, - "userAgent": "Custom User Agent String", - "user": null - }, - "path": "/Prod/hello", - "resourcePath": "/hello", - "httpMethod": "POST", - "apiId": "1234567890", - "protocol": "HTTP/1.1" - } - } + --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver.json" ``` -=== "response.json" +=== "Response" ```json - { - "statusCode": 200, - "headers": { - "Content-Type": "application/json" - }, - "body": "{\"message\":\"hello universe\"}", - "isBase64Encoded": false - } + --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json" ``` #### API Gateway HTTP API @@ -199,477 +75,166 @@ When using Amazon API Gateway HTTP API to front your Lambda functions, you can u ???+ note Using HTTP API v1 payload? Use `APIGatewayRestResolver` instead. `APIGatewayHttpResolver` defaults to v2 payload. -Here's an example on how we can handle the `/hello` path. - -```python hl_lines="3 7" title="Using HTTP API resolver" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import APIGatewayHttpResolver - -tracer = Tracer() -logger = Logger() -app = APIGatewayHttpResolver() - -@app.get("/hello") -@tracer.capture_method -def get_hello_universe(): - return {"message": "hello universe"} - -# You can continue to use other utilities just as before -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_HTTP) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="5 11" title="Using HTTP API resolver" +--8<-- "examples/event_handler_rest/src/getting_started_http_api_resolver.py" ``` #### Application Load Balancer -When using Amazon Application Load Balancer to front your Lambda functions, you can use `ALBResolver`. 
-
-```python hl_lines="3 7" title="Using ALB resolver"
-from aws_lambda_powertools import Logger, Tracer
-from aws_lambda_powertools.logging import correlation_paths
-from aws_lambda_powertools.event_handler import ALBResolver
-
-tracer = Tracer()
-logger = Logger()
-app = ALBResolver()
+When using Amazon Application Load Balancer (ALB) to front your Lambda functions, you can use `ALBResolver`.

-@app.get("/hello")
-@tracer.capture_method
-def get_hello_universe():
-    return {"message": "hello universe"}
-
-# You can continue to use other utilities just as before
-@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPLICATION_LOAD_BALANCER)
-@tracer.capture_lambda_handler
-def lambda_handler(event, context):
-    return app.resolve(event, context)
+```python hl_lines="5 11" title="Using ALB resolver"
+--8<-- "examples/event_handler_rest/src/getting_started_alb_api_resolver.py"
```

### Dynamic routes

-You can use `/path/{dynamic_value}` when configuring dynamic URL paths. This allows you to define such dynamic value as part of your function signature.
-
-=== "app.py"
-
-    ```python hl_lines="9 11"
-    from aws_lambda_powertools import Logger, Tracer
-    from aws_lambda_powertools.logging import correlation_paths
-    from aws_lambda_powertools.event_handler import APIGatewayRestResolver
-
-    tracer = Tracer()
-    logger = Logger()
-    app = APIGatewayRestResolver()
-
-    @app.get("/hello/<name>")
-    @tracer.capture_method
-    def get_hello_you(name):
-        return {"message": f"hello {name}"}
-
-    # You can continue to use other utilities just as before
-    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
-    @tracer.capture_lambda_handler
-    def lambda_handler(event, context):
-        return app.resolve(event, context)
-    ```
-
-=== "sample_request.json"
+You can use `/todos/<todo_id>` to configure dynamic URL paths, where `<todo_id>` will be resolved at runtime.

-    ```json
-    {
-        "resource": "/hello/{name}",
-        "path": "/hello/lessa",
-        "httpMethod": "GET",
-        ...
-    }
-    ```
+Each dynamic route you set must be part of your function signature. This allows us to call your function using keyword arguments when matching your dynamic route.

-#### Nested routes
-
-You can also nest paths as configured earlier in [our sample infrastructure](#required-resources): `/{message}/{name}`.
+???+ note
+    For brevity, we will only include the necessary keys for each sample request for the example to work.

=== "app.py"

-    ```python hl_lines="9 11"
-    from aws_lambda_powertools import Logger, Tracer
-    from aws_lambda_powertools.logging import correlation_paths
-    from aws_lambda_powertools.event_handler import APIGatewayRestResolver
-
-    tracer = Tracer()
-    logger = Logger()
-    app = APIGatewayRestResolver()
-
-    @app.get("/<message>/<name>")
-    @tracer.capture_method
-    def get_message(message, name):
-        return {"message": f"{message}, {name}"}
-
-    # You can continue to use other utilities just as before
-    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
-    @tracer.capture_lambda_handler
-    def lambda_handler(event, context):
-        return app.resolve(event, context)
+    ```python hl_lines="14 16"
+    --8<-- "examples/event_handler_rest/src/dynamic_routes.py"
    ```

-=== "sample_request.json"
+=== "Request"

    ```json
-    {
-        "resource": "/{message}/{name}",
-        "path": "/hi/michael",
-        "httpMethod": "GET",
-        ...
-    }
+    --8<-- "examples/event_handler_rest/src/dynamic_routes.json"
    ```

+???+ tip
+    You can also nest dynamic paths, for example `/todos/<todo_id>/<todo_status>`.
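For illustration, here is a minimal, self-contained sketch of the nested dynamic route mentioned in the tip above; the handler name and returned fields are ours for illustration only, not part of the bundled examples:

```python title="Nested dynamic routes (illustrative sketch)"
from aws_lambda_powertools.event_handler import APIGatewayRestResolver
from aws_lambda_powertools.utilities.typing import LambdaContext

app = APIGatewayRestResolver()


@app.get("/todos/<todo_id>/<todo_status>")
def get_todo_by_status(todo_id: str, todo_status: str):
    # each dynamic value is passed as a keyword argument, always as str
    return {"todo_id": todo_id, "status": todo_status}


def lambda_handler(event: dict, context: LambdaContext) -> dict:
    return app.resolve(event, context)
```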
+ #### Catch-all routes ???+ note We recommend having explicit routes whenever possible; use catch-all routes sparingly. -You can use a regex string to handle an arbitrary number of paths within a request, for example `.+`. +You can use a [regex](https://docs.python.org/3/library/re.html#regular-expression-syntax){target="_blank"} string to handle an arbitrary number of paths within a request, for example `.+`. You can also combine nested paths with greedy regex to catch in between routes. ???+ warning - We will choose the more explicit registered route that match incoming event. + We choose the most explicit registered route that matches an incoming event. === "app.py" - ```python hl_lines="5" - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - app = APIGatewayRestResolver() - - @app.get(".+") - def catch_any_route_after_any(): - return {"path_received": app.current_event.path} - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="11" + --8<-- "examples/event_handler_rest/src/dynamic_routes_catch_all.py" ``` -=== "sample_request.json" +=== "Request" ```json - { - "resource": "/any/route/should/work", - "path": "/any/route/should/work", - "httpMethod": "GET", - ... - } + --8<-- "examples/event_handler_rest/src/dynamic_routes_catch_all.json" ``` ### HTTP Methods -You can use named decorators to specify the HTTP method that should be handled in your functions. As well as the -`get` method already shown above, you can use `post`, `put`, `patch`, `delete`, and `patch`. +You can use named decorators to specify the HTTP method that should be handled in your functions. That is, `app.`, where the HTTP method could be `get`, `post`, `put`, `patch`, `delete`, and `options`. === "app.py" - ```python hl_lines="9-10" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - # Only POST HTTP requests to the path /hello will route to this function - @app.post("/hello") - @tracer.capture_method - def get_hello_you(): - name = app.current_event.json_body.get("name") - return {"message": f"hello {name}"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="14 17" + --8<-- "examples/event_handler_rest/src/http_methods.py" ``` -=== "sample_request.json" +=== "Request" ```json - { - "resource": "/hello/{name}", - "path": "/hello/lessa", - "httpMethod": "GET", - ... - } - ``` - -If you need to accept multiple HTTP methods in a single function, you can use the `route` method and pass a list of -HTTP methods. 
- -=== "app.py" - - ```python hl_lines="9-10" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - # PUT and POST HTTP requests to the path /hello will route to this function - @app.route("/hello", method=["PUT", "POST"]) - @tracer.capture_method - def get_hello_you(): - name = app.current_event.json_body.get("name") - return {"message": f"hello {name}"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + --8<-- "examples/event_handler_rest/src/http_methods.json" ``` -=== "sample_request.json" +If you need to accept multiple HTTP methods in a single function, you can use the `route` method and pass a list of HTTP methods. - ```json - { - "resource": "/hello/{name}", - "path": "/hello/lessa", - "httpMethod": "GET", - ... - } - ``` +```python hl_lines="15" title="Handling multiple HTTP Methods" +--8<-- "examples/event_handler_rest/src/http_methods_multiple.py" +``` ???+ note - It is usually better to have separate functions for each HTTP method, as the functionality tends to differ depending on which method is used. + It is generally better to have separate functions for each HTTP method, as the functionality tends to differ depending on which method is used. ### Accessing request details -By integrating with [Data classes utilities](../../utilities/data_classes.md){target="_blank"}, you have access to request details, Lambda context and also some convenient methods. +Event Handler integrates with [Event Source Data Classes utilities](../../utilities/data_classes.md){target="_blank"}, and it exposes their respective resolver request details and convenient methods under `app.current_event`. -These are made available in the response returned when instantiating `APIGatewayRestResolver`, for example `app.current_event` and `app.lambda_context`. +That is why you see `app.resolve(event, context)` in every example. This allows Event Handler to resolve requests, and expose data like `app.lambda_context` and `app.current_event`. #### Query strings and payload -Within `app.current_event` property, you can access query strings as dictionary via `query_string_parameters`, or by name via `get_query_string_value` method. - -You can access the raw payload via `body` property, or if it's a JSON string you can quickly deserialize it via `json_body` property. +Within `app.current_event` property, you can access all available query strings as a dictionary via `query_string_parameters`, or a specific one via `get_query_string_value` method. -```python hl_lines="7-9 11" title="Accessing query strings, JSON payload, and raw payload" -from aws_lambda_powertools.event_handler import APIGatewayRestResolver +You can access the raw payload via `body` property, or if it's a JSON string you can quickly deserialize it via `json_body` property - like the earlier example in the [HTTP Methods](#http-methods) section. 
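As a quick sketch of the `json_body` property (the `/todos` POST route and the `title` field below are hypothetical, not part of the bundled examples):

```python title="Deserializing a JSON payload (illustrative sketch)"
from aws_lambda_powertools.event_handler import APIGatewayRestResolver
from aws_lambda_powertools.utilities.typing import LambdaContext

app = APIGatewayRestResolver()


@app.post("/todos")
def create_todo():
    # json_body deserializes the raw JSON string body into a dict
    todo: dict = app.current_event.json_body
    return {"received_title": todo.get("title", "")}


def lambda_handler(event: dict, context: LambdaContext) -> dict:
    return app.resolve(event, context)
```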
-app = APIGatewayRestResolver() - -@app.get("/hello") -def get_hello_you(): - query_strings_as_dict = app.current_event.query_string_parameters - json_payload = app.current_event.json_body - payload = app.current_event.body - - name = app.current_event.get_query_string_value(name="name", default_value="") - return {"message": f"hello {name}"} - -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="19 24" title="Accessing query strings and raw payload" +--8<-- "examples/event_handler_rest/src/accessing_request_details.py" ``` #### Headers Similarly to [Query strings](#query-strings-and-payload), you can access headers as dictionary via `app.current_event.headers`, or by name via `get_header_value`. -```python hl_lines="7-8" title="Accessing HTTP Headers" -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -app = APIGatewayRestResolver() - -@app.get("/hello") -def get_hello_you(): - headers_as_dict = app.current_event.headers - name = app.current_event.get_header_value(name="X-Name", default_value="") - - return {"message": f"hello {name}"} - -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="19" title="Accessing HTTP Headers" +--8<-- "examples/event_handler_rest/src/accessing_request_details_headers.py" ``` ### Handling not found routes By default, we return `404` for any unmatched route. -You can use **`not_found`** decorator to override this behaviour, and return a custom **`Response`**. - -```python hl_lines="11 13 16" title="Handling not found" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import content_types -from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response -from aws_lambda_powertools.event_handler.exceptions import NotFoundError - -tracer = Tracer() -logger = Logger() -app = APIGatewayRestResolver() - -@app.not_found -@tracer.capture_method -def handle_not_found_errors(exc: NotFoundError) -> Response: - # Return 418 upon 404 errors - logger.info(f"Not found route: {app.current_event.path}") - return Response( - status_code=418, - content_type=content_types.TEXT_PLAIN, - body="I'm a teapot!" - ) - - -@app.get("/catch/me/if/you/can") -@tracer.capture_method -def catch_me_if_you_can(): - return {"message": "oh hey"} - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +You can use **`not_found`** decorator to override this behavior, and return a custom **`Response`**. + +```python hl_lines="14 18" title="Handling not found" +--8<-- "examples/event_handler_rest/src/not_found_routes.py" ``` ### Exception handling You can use **`exception_handler`** decorator with any Python exception. This allows you to handle a common exception outside your route, for example validation errors. 
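As a minimal sketch of the pattern (the exception type, route, and message below are placeholders), the registered handler receives the raised exception and returns a regular `Response`:

```python title="Exception handler (illustrative sketch)"
from aws_lambda_powertools.event_handler import APIGatewayRestResolver, content_types
from aws_lambda_powertools.event_handler.api_gateway import Response
from aws_lambda_powertools.utilities.typing import LambdaContext

app = APIGatewayRestResolver()


@app.exception_handler(ValueError)
def handle_value_error(ex: ValueError):  # runs whenever a route raises ValueError
    return Response(
        status_code=400,
        content_type=content_types.TEXT_PLAIN,
        body=str(ex),
    )


@app.get("/todos")
def get_todos():
    raise ValueError("todos are not available right now")


def lambda_handler(event: dict, context: LambdaContext) -> dict:
    return app.resolve(event, context)
```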
-```python hl_lines="10 15" title="Exception handling" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import content_types -from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - -tracer = Tracer() -logger = Logger() -app = APIGatewayRestResolver() - -@app.exception_handler(ValueError) -def handle_value_error(ex: ValueError): - metadata = {"path": app.current_event.path} - logger.error(f"Malformed request: {ex}", extra=metadata) - - return Response( - status_code=400, - content_type=content_types.TEXT_PLAIN, - body="Invalid request", - ) - - -@app.get("/hello") -@tracer.capture_method -def hello_name(): - name = app.current_event.get_query_string_value(name="name") - if name is not None: - raise ValueError("name query string must be present") - return {"message": f"hello {name}"} - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="14 15" title="Exception handling" +--8<-- "examples/event_handler_rest/src/exception_handling.py" ``` ### Raising HTTP errors -You can easily raise any HTTP Error back to the client using `ServiceError` exception. +You can easily raise any HTTP Error back to the client using `ServiceError` exception. This ensures your Lambda function doesn't fail but return the correct HTTP response signalling the error. ???+ info If you need to send custom headers, use [Response](#fine-grained-responses) class instead. -Additionally, we provide pre-defined errors for the most popular ones such as HTTP 400, 401, 404, 500. - -```python hl_lines="4-10 20 25 30 35 39" title="Raising common HTTP Status errors (4xx, 5xx)" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import APIGatewayRestResolver -from aws_lambda_powertools.event_handler.exceptions import ( - BadRequestError, - InternalServerError, - NotFoundError, - ServiceError, - UnauthorizedError, -) - -tracer = Tracer() -logger = Logger() - -app = APIGatewayRestResolver() - -@app.get(rule="/bad-request-error") -def bad_request_error(): - # HTTP 400 - raise BadRequestError("Missing required parameter") - -@app.get(rule="/unauthorized-error") -def unauthorized_error(): - # HTTP 401 - raise UnauthorizedError("Unauthorized") - -@app.get(rule="/not-found-error") -def not_found_error(): - # HTTP 404 - raise NotFoundError - -@app.get(rule="/internal-server-error") -def internal_server_error(): - # HTTP 500 - raise InternalServerError("Internal server error") - -@app.get(rule="/service-error", cors=True) -def service_error(): - raise ServiceError(502, "Something went wrong!") - # alternatively - # from http import HTTPStatus - # raise ServiceError(HTTPStatus.BAD_GATEWAY.value, "Something went wrong) - -def handler(event, context): - return app.resolve(event, context) +We provide pre-defined errors for the most popular ones such as HTTP 400, 401, 404, 500. + +```python hl_lines="6-11 23 28 33 38 43" title="Raising common HTTP Status errors (4xx, 5xx)" +--8<-- "examples/event_handler_rest/src/raising_http_errors.py" ``` ### Custom Domain API Mappings -When using Custom Domain API Mappings feature, you must use **`strip_prefixes`** param in the `APIGatewayRestResolver` constructor. 
- -Scenario: You have a custom domain `api.mydomain.dev` and set an API Mapping `payment` to forward requests to your Payments API, the path argument will be `/payment/`. +When using [Custom Domain API Mappings feature](https://docs.aws.amazon.com/apigateway/latest/developerguide/rest-api-mappings.html){target="_blank"}, you must use **`strip_prefixes`** param in the `APIGatewayRestResolver` constructor. -This will lead to a HTTP 404 despite having your Lambda configured correctly. See the example below on how to account for this change. - -=== "app.py" +**Scenario**: You have a custom domain `api.mydomain.dev`. Then you set `/payment` API Mapping to forward any payment requests to your Payments API. - ```python hl_lines="7" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver +**Challenge**: This means your `path` value for any API requests will always contain `/payment/`, leading to HTTP 404 as Event Handler is trying to match what's after `payment/`. This gets further complicated with an [arbitrary level of nesting](https://github.com/awslabs/aws-lambda-powertools-roadmap/issues/34). - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver(strip_prefixes=["/payment"]) +To address this API Gateway behavior, we use `strip_prefixes` parameter to account for these prefixes that are now injected into the path regardless of which type of API Gateway you're using. - @app.get("/subscriptions/") - @tracer.capture_method - def get_subscription(subscription): - return {"subscription_id": subscription} +=== "app.py" - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="8" + --8<-- "examples/event_handler_rest/src/custom_api_mapping.py" ``` -=== "sample_request.json" +=== "Request" ```json - { - "resource": "/subscriptions/{subscription}", - "path": "/payment/subscriptions/123", - "httpMethod": "GET", - ... - } + --8<-- "examples/event_handler_rest/src/custom_api_mapping.json" ``` ???+ note @@ -685,67 +250,21 @@ You can configure CORS at the `APIGatewayRestResolver` constructor via `cors` pa This will ensure that CORS headers are always returned as part of the response when your functions match the path invoked. -=== "app.py" - - ```python hl_lines="9 11" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, CORSConfig - - tracer = Tracer() - logger = Logger() - - cors_config = CORSConfig(allow_origin="https://example.com", max_age=300) - app = APIGatewayRestResolver(cors=cors_config) - - @app.get("/hello/") - @tracer.capture_method - def get_hello_you(name): - return {"message": f"hello {name}"} - - @app.get("/hello", cors=False) # optionally exclude CORS from response, if needed - @tracer.capture_method - def get_hello_no_cors_needed(): - return {"message": "hello, no CORS needed for this path ;)"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) - ``` +???+ tip + Optionally disable CORS on a per path basis with `cors=False` parameter. 
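For illustration, a minimal sketch combining a global `CORSConfig` with that per-path opt-out; the routes are hypothetical, while the constructor values mirror the example that follows:

```python title="Opting a route out of CORS (illustrative sketch)"
from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, CORSConfig
from aws_lambda_powertools.utilities.typing import LambdaContext

cors_config = CORSConfig(allow_origin="https://example.com", max_age=300)
app = APIGatewayRestResolver(cors=cors_config)


@app.get("/todos")
def get_todos():
    return {"todos": []}  # CORS headers are added to this response


@app.get("/healthcheck", cors=False)  # this route opts out of the global CORS config
def healthcheck():
    return {"status": "OK"}


def lambda_handler(event: dict, context: LambdaContext) -> dict:
    return app.resolve(event, context)
```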
-=== "response.json" +=== "app.py" - ```json - { - "statusCode": 200, - "headers": { - "Content-Type": "application/json", - "Access-Control-Allow-Origin": "https://www.example.com", - "Access-Control-Allow-Headers": "Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key" - }, - "body": "{\"message\":\"hello lessa\"}", - "isBase64Encoded": false - } + ```python hl_lines="5 11-12 34" + --8<-- "examples/event_handler_rest/src/setting_cors.py" ``` -=== "response_no_cors.json" +=== "Response" ```json - { - "statusCode": 200, - "headers": { - "Content-Type": "application/json" - }, - "body": "{\"message\":\"hello lessa\"}", - "isBase64Encoded": false - } + --8<-- "examples/event_handler_rest/src/setting_cors_output.json" ``` -???+ tip - Optionally disable CORS on a per path basis with `cors=False` parameter. - #### Pre-flight Pre-flight (OPTIONS) calls are typically handled at the API Gateway level as per [our sample infrastructure](#required-resources), no Lambda integration necessary. However, ALB expects you to handle pre-flight requests. @@ -773,40 +292,15 @@ You can use the `Response` class to have full control over the response, for exa === "app.py" - ```python hl_lines="11-16" - import json - from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - - app = APIGatewayRestResolver() - - @app.get("/hello") - def get_hello_you(): - payload = json.dumps({"message": "I'm a teapot"}) - custom_headers = {"X-Custom": "X-Value"} - - return Response( - status_code=418, - content_type="application/json", - body=payload, - headers=custom_headers, - ) - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="7 24-28" + --8<-- "examples/event_handler_rest/src/fine_grained_responses.py" ``` -=== "response.json" +=== "Response" ```json - { - "body": "{\"message\":\"I\'m a teapot\"}", - "headers": { - "Content-Type": "application/json", - "X-Custom": "X-Value" - }, - "isBase64Encoded": false, - "statusCode": 418 - } + --8<-- "examples/event_handler_rest/src/fine_grained_responses_output.json" + ``` ### Compress @@ -817,44 +311,20 @@ You can compress with gzip and base64 encode your responses via `compress` param === "app.py" - ```python hl_lines="5 7" - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - app = APIGatewayRestResolver() - - @app.get("/hello", compress=True) - def get_hello_you(): - return {"message": "hello universe"} - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="14" + --8<-- "examples/event_handler_rest/src/compressing_responses.py" ``` -=== "sample_request.json" +=== "Request" ```json - { - "headers": { - "Accept-Encoding": "gzip" - }, - "httpMethod": "GET", - "path": "/hello", - ... 
- } + --8<-- "examples/event_handler_rest/src/compressing_responses.json" ``` -=== "response.json" +=== "Response" ```json - { - "body": "H4sIAAAAAAACE6tWyk0tLk5MT1WyUspIzcnJVyjNyyxLLSpOVaoFANha8kEcAAAA", - "headers": { - "Content-Encoding": "gzip", - "Content-Type": "application/json" - }, - "isBase64Encoded": true, - "statusCode": 200 - } + --8<-- "examples/event_handler_rest/src/compressing_responses_output.json" ``` ### Binary responses @@ -868,89 +338,26 @@ Like `compress` feature, the client must send the `Accept` header with the corre === "app.py" - ```python hl_lines="4 7 11" - import os - from pathlib import Path - - from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - - app = APIGatewayRestResolver() - logo_file: bytes = Path(os.getenv("LAMBDA_TASK_ROOT") + "/logo.svg").read_bytes() - - @app.get("/logo") - def get_logo(): - return Response(status_code=200, content_type="image/svg+xml", body=logo_file) - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="14 20" + --8<-- "examples/event_handler_rest/src/binary_responses.py" ``` === "logo.svg" ```xml - - - - - - - - - - - - + --8<-- "examples/event_handler_rest/src/binary_responses_logo.svg" ``` -=== "sample_request.json" + +=== "Request" ```json - { - "headers": { - "Accept": "image/svg+xml" - }, - "httpMethod": "GET", - "path": "/logo", - ... - } + --8<-- "examples/event_handler_rest/src/binary_responses.json" ``` -=== "response.json" +=== "Response" ```json - { - "body": "H4sIAAAAAAACE3VXa2scRxD87ID/w+byKTCzN899yFZMLBLHYEMg4K9BHq0l4c2duDudZIf891TVrPwiMehmd+fR3dXV1eOnz+7/mpvjtNtfbzenK9+6VTNtyvbienN5uro9vLPD6tlPj797+r21zYtpM+3OD9vdSfPzxfbt1Lyc59v9QZ8aP7au9ab5482L5pf7m+3u0Pw+317al5um1cc31chJ07XONc9vr+eLxv3YNNby/P3x8ks3/Kq5vjhdvTr/MO3+xAu83OxPV1eHw83Jen13d9fexXa7u1wH59wam5clJ/fz9eb9fy304ziuNYulpyt3c79qPtTx8XePmuP1dPd8y4nGNdGlxg9h1ewPH+bpdDVtzt/Ok317Xt5f7ra3m4uTzXTXfLHyicyf7G/OC5bf7Kb9tDtOKwXGI5rDhxtMHKb7w7rs95x41O4P7u931/N88sOv+vfkn/rV66vd3c7TyXScNtuLiydlvr75+su3O5+uZYkmL3n805vzw1VT5vM9cIOpVQM8Xw9dm0yHn+JMbHvj+IoRiJuhHYtrBxPagPfBpLbDmmD6NuB7NpxzWttpDG3EKd46vAfr29HE2XZtxMYABx4VzIxY2VmvnaMN2jkW642zAdPZRkyms76DndGZPpthgEt9MvB0wEJM91gacUpsvc3c3eO4sYXJHuf52A42jNjEp2qXRzjrMzaENtngLGOwCS4krO7xzXscoIeR4WFLNpFbEo7GNrhdOhkEGElrgUyCx3gokQYAHMOLxjvFVY1XVDNQy0AKkx4PgPSIjcALv8QDf0He9NZ3BaEFhTdgInESMPKBMwAemzxTZT1zgFP5vRekOJTg8zucquEvCULsXOx1hjY5bWKuAh1fFkbuIGABa71+4cuRcMHfuiboMB6Kw8gGW5mQtDUwBa1f4s/Kd6+1iD8oplyIvq9oebEFYBOKsXi+ORNEJBKLbBhaXzIcZ0YGbgMF9IAkdG9I4Y/N65RhaYCLi+morPSipK8RMlmdIgahbFR+s2UF+Gpe3ieip6/kayCbkHpYRUp6QgH6MGFEgLuiFQHbviLO/DkdEGkbk4ljsawtR7J1zIAFk0aTioBBpIQYbmWNJArqKQlXxh9UoSQXjZxFIGoGFmzSPM/8FD+w8IDNmxG+l1pwlr5Ey/rwzP1gay1mG5Ykj6/GrpoIRZOMYqR3GiudHijAFJPJiePVCGBr2mIlE0bEUKpIMFrQwjCEcQabB4pOmJVyPolCYWEnYJZVyU+VE4JrQC56cPWtpfSVHfhkJD60RDy6foYyRNv1NZlCXoh/YwM05C7rEU0sitKERehqrLkiYCrhvcSO53VFrzxeAqB0UxHzbMFPb/q+1ltVRoITiTnNKRWm0ownRlbpFUu/iI5uYRMEoMb/kLt+yR3BSq98xtkQXElWl5h1yg6nvcz5SrVFta1UHTz3v4koIEzIVPgRKlkkc44ykipJsip7kVMWdICDFPBMMoOwUhlbRb23NX/UjqHYesi4sK2OmDhaWpLKiE1YzxbCsUhATZUlb2q7iBX7Kj/Kc80atEz66yWyXorhGTIkRqnrSURu8fWhdNIFKT7B8UnNJPIUwYLgLVHkOD7knC4rjNpFeturrBRRbmtHkpTh5VVIncmBnYlpjhT3HhMUd1urK0rQE7AE14goJdFRWBYZHyUIcLLm3AuhwF5qO7Zg4B+KTodiJCaSOMN4SXbRC+pR1Vs8FEZGOcnCtKvNvnC/aoiKj2+dekO1GdS4VMfAQo2++KXOonIgf5ifoo6hOkm6EFDP8pItNXvVpFNdxiNErThVXG1UQXHEz/eEYWk/jEmCRcyyaKtWKbVSr1YNc6rytcLnq6AORazytbMa9nqOutgYdUPmGL72nyKmlzxMVcjpPLPdE7cC1MlQQkpyZHasjPbRFVpJ+mNPqlcln6Tekk5lg7cd/9CbJMkkXFInSmrcw4PHQS1p0HZSANa6s8CqNiN/Qh7hI0vVfK7aj6u1Lnq67n173/P
1vhd6Nf+ETgJLgSyjjYGpj2SVD3JM96PM+xRRZYcMtV8NJHKn3bW+pUydGMFg1CMelUSIgjwj4nGUVULDxxJJM1zvsM/q0uZ5TQggwFnoRanI9h76gcSJDPYLz5dA/y/EgXnygRcGostStqFXv0KdD7qP6MYUTKVXr1uhEzty8QP5plqDXbZuk1mtuUZGv3jtg8JIFKHTJrt6H9AduN4TAE6q95qzMEikMmkVRq+bKQXrC0cfUrdm7h5+8b8YjP8Cgadmu5INAAA=", - "headers": { - "Content-Type": "image/svg+xml" - }, - "isBase64Encoded": true, - "statusCode": 200 - } + --8<-- "examples/event_handler_rest/src/binary_responses_output.json" ``` ### Debug mode @@ -964,326 +371,88 @@ This will enable full tracebacks errors in the response, print request and respo It's best to use for local development only! -```python hl_lines="3" title="Enabling debug mode" -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -app = APIGatewayRestResolver(debug=True) - -@app.get("/hello") -def get_hello_universe(): - return {"message": "hello universe"} - -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="11" title="Enabling debug mode" +--8<-- "examples/event_handler_rest/src/debug_mode.py" ``` ### Custom serializer You can instruct API Gateway handler to use a custom serializer to best suit your needs, for example take into account Enums when serializing. -```python hl_lines="21-22 26" title="Using a custom JSON serializer for responses" -import json -from enum import Enum -from json import JSONEncoder -from typing import Dict - -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -class CustomEncoder(JSONEncoder): - """Your customer json encoder""" - def default(self, obj): - if isinstance(obj, Enum): - return obj.value - try: - iterable = iter(obj) - except TypeError: - pass - else: - return sorted(iterable) - return JSONEncoder.default(self, obj) - -def custom_serializer(obj) -> str: - """Your custom serializer function APIGatewayRestResolver will use""" - return json.dumps(obj, cls=CustomEncoder) - -# Assigning your custom serializer -app = APIGatewayRestResolver(serializer=custom_serializer) - -class Color(Enum): - RED = 1 - BLUE = 2 - -@app.get("/colors") -def get_color() -> Dict: - return { - # Color.RED will be serialized to 1 as expected now - "color": Color.RED, - "variations": {"light", "dark"}, - } +```python hl_lines="35 40" title="Using a custom JSON serializer for responses" +--8<-- "examples/event_handler_rest/src/custom_serializer.py" ``` ### Split routes with Router As you grow the number of routes a given Lambda function should handle, it is natural to split routes into separate files to ease maintenance - That's where the `Router` feature is useful. -Let's assume you have `app.py` as your Lambda function entrypoint and routes in `users.py`, this is how you'd use the `Router` feature. +Let's assume you have `app.py` as your Lambda function entrypoint and routes in `todos.py`, this is how you'd use the `Router` feature. -=== "users.py" +=== "todos.py" We import **Router** instead of **APIGatewayRestResolver**; syntax wise is exactly the same. 
- ```python hl_lines="5 8 12 15 21" - import itertools - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import Router - - logger = Logger(child=True) - router = Router() - USERS = {"user1": "details_here", "user2": "details_here", "user3": "details_here"} - - - @router.get("/users") - def get_users() -> Dict: - # /users?limit=1 - pagination_limit = router.current_event.get_query_string_value(name="limit", default_value=10) - - logger.info(f"Fetching the first {pagination_limit} users...") - ret = dict(itertools.islice(USERS.items(), int(pagination_limit))) - return {"items": [ret]} - - @router.get("/users/") - def get_user(username: str) -> Dict: - logger.info(f"Fetching username {username}") - return {"details": USERS.get(username, {})} - - # many other related /users routing + ```python hl_lines="5 13 16 25 28" + --8<-- "examples/event_handler_rest/src/split_route_module.py" ``` === "app.py" We use `include_router` method and include all user routers registered in the `router` global object. - ```python hl_lines="7 10-11" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.utilities.typing import LambdaContext - - import users - - logger = Logger() - app = APIGatewayRestResolver() - app.include_router(users.router) - - - def lambda_handler(event: Dict, context: LambdaContext): - return app.resolve(event, context) + ```python hl_lines="11" + --8<-- "examples/event_handler_rest/src/split_route.py" ``` #### Route prefix -In the previous example, `users.py` routes had a `/users` prefix. This might grow over time and become repetitive. +In the previous example, `todos.py` routes had a `/todos` prefix. This might grow over time and become repetitive. -When necessary, you can set a prefix when including a router object. This means you could remove `/users` prefix in `users.py` altogether. +When necessary, you can set a prefix when including a router object. This means you could remove `/todos` prefix in `todos.py` altogether. === "app.py" - ```python hl_lines="9" - from typing import Dict - - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.utilities.typing import LambdaContext - - import users - - app = APIGatewayRestResolver() - app.include_router(users.router, prefix="/users") # prefix '/users' to any route in `users.router` - - - def lambda_handler(event: Dict, context: LambdaContext): - return app.resolve(event, context) + ```python hl_lines="12" + --8<-- "examples/event_handler_rest/src/split_route_prefix.py" ``` -=== "users.py" - - ```python hl_lines="11 15" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import Router - - logger = Logger(child=True) - router = Router() - USERS = {"user1": "details", "user2": "details", "user3": "details"} - +=== "todos.py" - @router.get("/") # /users, when we set the prefix in app.py - def get_users() -> Dict: - ... - - @router.get("/") - def get_user(username: str) -> Dict: - ... - - # many other related /users routing + ```python hl_lines="13 25" + --8<-- "examples/event_handler_rest/src/split_route_prefix_module.py" ``` #### Sample layout -This sample project contains a Users function with two distinct set of routes, `/users` and `/health`. 
The layout optimizes for code sharing, no custom build tooling, and it uses [Lambda Layers](../../index.md#lambda-layer) to install Lambda Powertools. - -=== "Project layout" - - ```python hl_lines="1 8 10 12-15" - . - ├── Pipfile # project app & dev dependencies; poetry, pipenv, etc. - ├── Pipfile.lock - ├── README.md - ├── src - │ ├── __init__.py - │ ├── requirements.txt # sam build detect it automatically due to CodeUri: src, e.g. pipenv lock -r > src/requirements.txt - │ └── users - │ ├── __init__.py - │ ├── main.py # this will be our users Lambda fn; it could be split in folders if we want separate fns same code base - │ └── routers # routers module - │ ├── __init__.py - │ ├── health.py # /users routes, e.g. from routers import users; users.router - │ └── users.py # /users routes, e.g. from .routers import users; users.router - ├── template.yml # SAM template.yml, CodeUri: src, Handler: users.main.lambda_handler - └── tests +This is a sample project layout for a monolithic function with routes split in different files (`/todos`, `/health`). + +```shell hl_lines="4 7 10 12-13" title="Sample project layout" +. +├── pyproject.toml # project app & dev dependencies; poetry, pipenv, etc. +├── poetry.lock +├── src +│ ├── __init__.py +│ ├── requirements.txt # sam build detect it automatically due to CodeUri: src. poetry export --format src/requirements.txt +│ └── todos +│ ├── __init__.py +│ ├── main.py # this will be our todos Lambda fn; it could be split in folders if we want separate fns same code base +│ └── routers # routers module +│ ├── __init__.py +│ ├── health.py # /health routes. from routers import todos; health.router +│ └── todos.py # /todos routes. from .routers import todos; todos.router +├── template.yml # SAM. CodeUri: src, Handler: todos.main.lambda_handler +└── tests + ├── __init__.py + ├── unit + │ ├── __init__.py + │ └── test_todos.py # unit tests for the todos router + │ └── test_health.py # unit tests for the health router + └── functional ├── __init__.py - ├── unit - │ ├── __init__.py - │ └── test_users.py # unit tests for the users router - │ └── test_health.py # unit tests for the health router - └── functional - ├── __init__.py - ├── conftest.py # pytest fixtures for the functional tests - └── test_main.py # functional tests for the main lambda handler - ``` - -=== "template.yml" - - ```yaml hl_lines="22-23" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: Example service with multiple routes - Globals: - Function: - Timeout: 10 - MemorySize: 512 - Runtime: python3.9 - Tracing: Active - Architectures: - - x86_64 - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_LOGGER_LOG_EVENT: true - POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication - POWERTOOLS_SERVICE_NAME: users - Resources: - UsersService: - Type: AWS::Serverless::Function - Properties: - Handler: users.main.lambda_handler - CodeUri: src - Layers: - # Latest version: https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:4 - Events: - ByUser: - Type: Api - Properties: - Path: /users/{name} - Method: GET - AllUsers: - Type: Api - Properties: - Path: /users - Method: GET - HealthCheck: - Type: Api - Properties: - Path: /status - Method: GET - Outputs: - UsersApiEndpoint: - Description: "API Gateway endpoint URL for Prod environment for Users Function" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod" - 
AllUsersURL: - Description: "URL to fetch all registered users" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/users" - ByUserURL: - Description: "URL to retrieve details by user" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/users/test" - UsersServiceFunctionArn: - Description: "Users Lambda Function ARN" - Value: !GetAtt UsersService.Arn - ``` - -=== "src/users/main.py" - - ```python hl_lines="8 14-15" - from typing import Dict - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.logging.correlation_paths import APPLICATION_LOAD_BALANCER - from aws_lambda_powertools.utilities.typing import LambdaContext - - from .routers import health, users - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - app.include_router(health.router) - app.include_router(users.router) - - - @logger.inject_lambda_context(correlation_id_path=API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event: Dict, context: LambdaContext): - return app.resolve(event, context) - ``` - -=== "src/users/routers/health.py" - - ```python hl_lines="4 6-7 10" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import Router - - router = Router() - logger = Logger(child=True) - - - @router.get("/status") - def health() -> Dict: - logger.debug("Health check called") - return {"status": "OK"} - ``` - -=== "tests/functional/test_users.py" - - ```python hl_lines="3" - import json - - from src.users import main # follows namespace package from root - - - def test_lambda_handler(apigw_event, lambda_context): - ret = main.lambda_handler(apigw_event, lambda_context) - expected = json.dumps({"message": "hello universe"}, separators=(",", ":")) - - assert ret["statusCode"] == 200 - assert ret["body"] == expected - ``` + ├── conftest.py # pytest fixtures for the functional tests + └── test_main.py # functional tests for the main lambda handler +``` ### Considerations @@ -1342,53 +511,14 @@ You can test your routes by passing a proxy event request where `path` and `http === "test_app.py" - ```python hl_lines="18-24" - from dataclasses import dataclass - - import pytest - import app - - @pytest.fixture - def lambda_context(): - @dataclass - class LambdaContext: - function_name: str = "test" - memory_limit_in_mb: int = 128 - invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" - aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" - - return LambdaContext() - - def test_lambda_handler(lambda_context): - minimal_event = { - "path": "/hello", - "httpMethod": "GET", - "requestContext": { # correlation ID - "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef" - } - } - - app.lambda_handler(minimal_event, lambda_context) + ```python hl_lines="21-24" + --8<-- "examples/event_handler_rest/src/assert_http_response.py" ``` === "app.py" ```python - from aws_lambda_powertools import Logger - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - logger = Logger() - app = APIGatewayRestResolver() # API Gateway REST API (v1) - - @app.get("/hello") - def get_hello_universe(): - return {"message": "hello universe"} - - # You can continue to use other utilities just as before - 
@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - def lambda_handler(event, context): - return app.resolve(event, context) + --8<-- "examples/event_handler_rest/src/assert_http_response_module.py" ``` ## FAQ diff --git a/docs/core/logger.md b/docs/core/logger.md index 23d57e251b9..b09cc6c85d3 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -14,6 +14,9 @@ Logger provides an opinionated logger with output structured as JSON. ## Getting started +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}. + Logger requires two settings: | Setting | Description | Environment variable | Constructor parameter | diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 24a8f1e6fda..713a53b193c 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -28,6 +28,9 @@ If you're new to Amazon CloudWatch, there are two terminologies you must be awar ## Getting started +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}. + Metric has two global settings that will be used across all metrics emitted: | Setting | Description | Environment variable | Constructor parameter | diff --git a/docs/core/tracer.md b/docs/core/tracer.md index c8037eff241..7664231cc31 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -16,6 +16,9 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github. ## Getting started +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/awslabs/aws-lambda-powertools-python/tree/develop/examples){target="_blank"}. + ### Permissions Before your use this utility, your AWS Lambda function [must have permissions](https://docs.aws.amazon.com/lambda/latest/dg/services-xray.html#services-xray-permissions) to send traces to AWS X-Ray. diff --git a/examples/event_handler_rest/sam/template.yaml b/examples/event_handler_rest/sam/template.yaml new file mode 100644 index 00000000000..f9837e729a5 --- /dev/null +++ b/examples/event_handler_rest/sam/template.yaml @@ -0,0 +1,56 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Hello world event handler API Gateway + +Globals: + Api: + TracingEnabled: true + Cors: # see CORS section + AllowOrigin: "'https://example.com'" + AllowHeaders: "'Content-Type,Authorization,X-Amz-Date'" + MaxAge: "'300'" + BinaryMediaTypes: # see Binary responses section + - "*~1*" # converts to */* for any binary type + Function: + Timeout: 5 + Runtime: python3.8 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 + POWERTOOLS_LOGGER_LOG_EVENT: true + POWERTOOLS_SERVICE_NAME: example + +Resources: + ApiFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: api_handler/ + Description: API handler function + Events: + AnyApiEvent: + Type: Api + Properties: + # NOTE: this is a catch-all rule to simplify the documentation. 
+ # explicit routes and methods are recommended for prod instead (see below) + Path: /{proxy+} # Send requests on any path to the lambda function + Method: ANY # Send requests using any http method to the lambda function + + + # GetAllTodos: + # Type: Api + # Properties: + # Path: /todos + # Method: GET + # GetTodoById: + # Type: Api + # Properties: + # Path: /todos/{todo_id} + # Method: GET + # CreateTodo: + # Type: Api + # Properties: + # Path: /todos + # Method: POST diff --git a/examples/event_handler_rest/src/accessing_request_details.py b/examples/event_handler_rest/src/accessing_request_details.py new file mode 100644 index 00000000000..9929b601db0 --- /dev/null +++ b/examples/event_handler_rest/src/accessing_request_details.py @@ -0,0 +1,40 @@ +from typing import Optional + +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todo_id: str = app.current_event.get_query_string_value(name="id", default_value="") + # alternatively + _: Optional[str] = app.current_event.query_string_parameters.get("id") + + # Payload + _: Optional[str] = app.current_event.body # raw str | None + + endpoint = "https://jsonplaceholder.typicode.com/todos" + if todo_id: + endpoint = f"{endpoint}/{todo_id}" + + todos: Response = requests.get(endpoint) + todos.raise_for_status() + + return {"todos": todos.json()} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/accessing_request_details_headers.py b/examples/event_handler_rest/src/accessing_request_details_headers.py new file mode 100644 index 00000000000..f6bfb88c869 --- /dev/null +++ b/examples/event_handler_rest/src/accessing_request_details_headers.py @@ -0,0 +1,30 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + endpoint = "https://jsonplaceholder.typicode.com/todos" + + api_key: str = app.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="") + todos: Response = requests.get(endpoint, headers={"X-Api-Key": api_key}) + todos.raise_for_status() + + return {"todos": todos.json()} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/assert_http_response.py b/examples/event_handler_rest/src/assert_http_response.py new file mode 100644 index 00000000000..95d56599288 --- /dev/null +++ 
b/examples/event_handler_rest/src/assert_http_response.py @@ -0,0 +1,28 @@ +from dataclasses import dataclass + +import assert_http_response_module +import pytest + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:123456789012:function:test" + aws_request_id: str = "da658bd3-2d6f-4e7b-8ec2-937234644fdc" + + return LambdaContext() + + +def test_lambda_handler(lambda_context): + minimal_event = { + "path": "/todos", + "httpMethod": "GET", + "requestContext": {"requestId": "227b78aa-779d-47d4-a48e-ce62120393b8"}, # correlation ID + } + + ret = assert_http_response_module.lambda_handler(minimal_event, lambda_context) + assert ret["statusCode"] == 200 + assert ret["body"] != "" diff --git a/examples/event_handler_rest/src/assert_http_response_module.py b/examples/event_handler_rest/src/assert_http_response_module.py new file mode 100644 index 00000000000..ea5d839fb72 --- /dev/null +++ b/examples/event_handler_rest/src/assert_http_response_module.py @@ -0,0 +1,27 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/binary_responses.json b/examples/event_handler_rest/src/binary_responses.json new file mode 100644 index 00000000000..fcdf86dfebe --- /dev/null +++ b/examples/event_handler_rest/src/binary_responses.json @@ -0,0 +1,8 @@ +{ + "headers": { + "Accept": "image/svg+xml" + }, + "resource": "/logo", + "path": "/logo", + "httpMethod": "GET" +} diff --git a/examples/event_handler_rest/src/binary_responses.py b/examples/event_handler_rest/src/binary_responses.py new file mode 100644 index 00000000000..00c027937b8 --- /dev/null +++ b/examples/event_handler_rest/src/binary_responses.py @@ -0,0 +1,27 @@ +import os +from pathlib import Path + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() + + +app = APIGatewayRestResolver() +logo_file: bytes = Path(os.getenv("LAMBDA_TASK_ROOT") + "/logo.svg").read_bytes() + + +@app.get("/logo") +@tracer.capture_method +def get_logo(): + return Response(status_code=200, content_type="image/svg+xml", body=logo_file) + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git 
a/examples/event_handler_rest/src/binary_responses_logo.svg b/examples/event_handler_rest/src/binary_responses_logo.svg new file mode 100644 index 00000000000..fccb29e01ed --- /dev/null +++ b/examples/event_handler_rest/src/binary_responses_logo.svg @@ -0,0 +1,14 @@ + + + AWS Lambda + + + + + + + + + + + diff --git a/examples/event_handler_rest/src/binary_responses_output.json b/examples/event_handler_rest/src/binary_responses_output.json new file mode 100644 index 00000000000..0938dee6811 --- /dev/null +++ b/examples/event_handler_rest/src/binary_responses_output.json @@ -0,0 +1,8 @@ +{ + "body": "PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB3aWR0aD0iMjU2cHgiIGhlaWdodD0iMjU2cHgiIHZpZXdCb3g9IjAgMCAyNTYgMjU2IiB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiIHByZXNlcnZlQXNwZWN0UmF0aW89InhNaWRZTWlkIj4KICAgIDx0aXRsZT5BV1MgTGFtYmRhPC90aXRsZT4KICAgIDxkZWZzPgogICAgICAgIDxsaW5lYXJHcmFkaWVudCB4MT0iMCUiIHkxPSIxMDAlIiB4Mj0iMTAwJSIgeTI9IjAlIiBpZD0ibGluZWFyR3JhZGllbnQtMSI+CiAgICAgICAgICAgIDxzdG9wIHN0b3AtY29sb3I9IiNDODUxMUIiIG9mZnNldD0iMCUiPjwvc3RvcD4KICAgICAgICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI0ZGOTkwMCIgb2Zmc2V0PSIxMDAlIj48L3N0b3A+CiAgICAgICAgPC9saW5lYXJHcmFkaWVudD4KICAgIDwvZGVmcz4KICAgIDxnPgogICAgICAgIDxyZWN0IGZpbGw9InVybCgjbGluZWFyR3JhZGllbnQtMSkiIHg9IjAiIHk9IjAiIHdpZHRoPSIyNTYiIGhlaWdodD0iMjU2Ij48L3JlY3Q+CiAgICAgICAgPHBhdGggZD0iTTg5LjYyNDExMjYsMjExLjIgTDQ5Ljg5MDMyNzcsMjExLjIgTDkzLjgzNTQ4MzIsMTE5LjM0NzIgTDExMy43NDcyOCwxNjAuMzM5MiBMODkuNjI0MTEyNiwyMTEuMiBaIE05Ni43MDI5MzU3LDExMC41Njk2IEM5Ni4xNjQwODU4LDEwOS40NjU2IDk1LjA0MTQ4MTMsMTA4Ljc2NDggOTMuODE2MjM4NCwxMDguNzY0OCBMOTMuODA2NjE2MywxMDguNzY0OCBDOTIuNTcxNzUxNCwxMDguNzY4IDkxLjQ0OTE0NjYsMTA5LjQ3NTIgOTAuOTE5OTE4NywxMTAuNTg1NiBMNDEuOTEzNDIwOCwyMTMuMDIwOCBDNDEuNDM4NzE5NywyMTQuMDEyOCA0MS41MDYwNzU4LDIxNS4xNzc2IDQyLjA5NjI0NTEsMjE2LjEwODggQzQyLjY3OTk5OTQsMjE3LjAzNjggNDMuNzA2MzgwNSwyMTcuNiA0NC44MDY1MzMxLDIxNy42IEw5MS42NTQ0MjMsMjE3LjYgQzkyLjg5NTcwMjcsMjE3LjYgOTQuMDIxNTE0OSwyMTYuODg2NCA5NC41NTM5NTAxLDIxNS43Njk2IEwxMjAuMjAzODU5LDE2MS42ODk2IEMxMjAuNjE3NjE5LDE2MC44MTI4IDEyMC42MTQ0MTIsMTU5Ljc5ODQgMTIwLjE4NzgyMiwxNTguOTI4IEw5Ni43MDI5MzU3LDExMC41Njk2IFogTTIwNy45ODUxMTcsMjExLjIgTDE2OC41MDc5MjgsMjExLjIgTDEwNS4xNzM3ODksNzguNjI0IEMxMDQuNjQ0NTYxLDc3LjUxMDQgMTAzLjUxNTU0MSw3Ni44IDEwMi4yNzc0NjksNzYuOCBMNzYuNDQ3OTQzLDc2LjggTDc2LjQ3NjgwOTksNDQuOCBMMTI3LjEwMzA2Niw0NC44IEwxOTAuMTQ1MzI4LDE3Ny4zNzI4IEMxOTAuNjc0NTU2LDE3OC40ODY0IDE5MS44MDM1NzUsMTc5LjIgMTkzLjA0MTY0NywxNzkuMiBMMjA3Ljk4NTExNywxNzkuMiBMMjA3Ljk4NTExNywyMTEuMiBaIE0yMTEuMTkyNTU4LDE3Mi44IEwxOTUuMDcxOTU4LDE3Mi44IEwxMzIuMDI5Njk2LDQwLjIyNzIgQzEzMS41MDA0NjgsMzkuMTEzNiAxMzAuMzcxNDQ5LDM4LjQgMTI5LjEzMDE2OSwzOC40IEw3My4yNzI1NzYsMzguNCBDNzEuNTA1Mjc1OCwzOC40IDcwLjA2ODM0MjEsMzkuODMwNCA3MC4wNjUxMzQ0LDQxLjU5NjggTDcwLjAyOTg1MjgsNzkuOTk2OCBDNzAuMDI5ODUyOCw4MC44NDggNzAuMzYzNDI2Niw4MS42NjA4IDcwLjk2OTYzMyw4Mi4yNjI0IEM3MS41Njk0MjQ2LDgyLjg2NCA3Mi4zODQxMTQ2LDgzLjIgNzMuMjM3Mjk0MSw4My4yIEwxMDAuMjUzNTczLDgzLjIgTDE2My41OTA5MiwyMTUuNzc2IEMxNjQuMTIzMzU1LDIxNi44ODk2IDE2NS4yNDU5NiwyMTcuNiAxNjYuNDg0MDMyLDIxNy42IEwyMTEuMTkyNTU4LDIxNy42IEMyMTIuOTY2Mjc0LDIxNy42IDIxNC40LDIxNi4xNjY0IDIxNC40LDIxNC40IEwyMTQuNCwxNzYgQzIxNC40LDE3NC4yMzM2IDIxMi45NjYyNzQsMTcyLjggMjExLjE5MjU1OCwxNzIuOCBMMjExLjE5MjU1OCwxNzIuOCBaIiBmaWxsPSIjRkZGRkZGIj48L3BhdGg+CiAgICA8L2c+Cjwvc3ZnPg==", + "headers": { + "Content-Type": "image/svg+xml" + }, + "isBase64Encoded": true, + "statusCode": 200 +} diff --git a/examples/event_handler_rest/src/compressing_responses.json 
b/examples/event_handler_rest/src/compressing_responses.json new file mode 100644 index 00000000000..f706df20d58 --- /dev/null +++ b/examples/event_handler_rest/src/compressing_responses.json @@ -0,0 +1,8 @@ +{ + "headers": { + "Accept-Encoding": "gzip" + }, + "resource": "/todos", + "path": "/todos", + "httpMethod": "GET" +} diff --git a/examples/event_handler_rest/src/compressing_responses.py b/examples/event_handler_rest/src/compressing_responses.py new file mode 100644 index 00000000000..1af4b9a58b2 --- /dev/null +++ b/examples/event_handler_rest/src/compressing_responses.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos", compress=True) +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/compressing_responses_output.json b/examples/event_handler_rest/src/compressing_responses_output.json new file mode 100644 index 00000000000..0836b3aa726 --- /dev/null +++ b/examples/event_handler_rest/src/compressing_responses_output.json @@ -0,0 +1,9 @@ +{ + "statusCode": 200, + "headers": { + "Content-Type": "application/json", + "Content-Encoding": "gzip" + }, + "body": "H4sIAAAAAAACE42STU4DMQyFrxJl3QXln96AMyAW7sSDLCVxiJ0Kqerd8TCCUOgii1EmP/783pOPXjmw+N3L0TfB+hz8brvxtC5KGtHvfMCIkzZx0HT5MPmNnziViIr2dIYoeNr8Q1x3xHsjcVadIbkZJoq2RXU8zzQROLseQ9505NzeCNQdMJNBE+UmY4zbzjAJhWtlZ57sB84BWtul+rteH2HPlVgWARwjqXkxpklK5gmEHAQqJBMtFsGVygcKmNVRjG0wxvuzGF2L0dpVUOKMC3bfJNjJgWMrCuZk7cUp02AiD72D6WKHHwUDKbiJs6AZ0VZXKOUx4uNvzdxT+E4mLcMA+6G8nzrLQkaxkNEVrFKW2VGbJCoCY7q2V3+tiv5kGThyxfTecDWbgGz/NfYXhL6ePgF9PnFdPgMAAA==", + "isBase64Encoded": true +} diff --git a/examples/event_handler_rest/src/custom_api_mapping.json b/examples/event_handler_rest/src/custom_api_mapping.json new file mode 100644 index 00000000000..eb1d68afbf9 --- /dev/null +++ b/examples/event_handler_rest/src/custom_api_mapping.json @@ -0,0 +1,5 @@ +{ + "resource": "/subscriptions/{subscription}", + "path": "/payment/subscriptions/123", + "httpMethod": "GET" +} diff --git a/examples/event_handler_rest/src/custom_api_mapping.py b/examples/event_handler_rest/src/custom_api_mapping.py new file mode 100644 index 00000000000..0b180d54f01 --- /dev/null +++ b/examples/event_handler_rest/src/custom_api_mapping.py @@ -0,0 +1,20 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver(strip_prefixes=["/payment"]) + + +@app.get("/subscriptions/") +@tracer.capture_method +def get_subscription(subscription): + return {"subscription_id": subscription} 
+ + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/custom_serializer.py b/examples/event_handler_rest/src/custom_serializer.py new file mode 100644 index 00000000000..cfb8cefd2d9 --- /dev/null +++ b/examples/event_handler_rest/src/custom_serializer.py @@ -0,0 +1,58 @@ +import json +from dataclasses import asdict, dataclass, is_dataclass +from json import JSONEncoder + +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@dataclass +class Todo: + userId: str + id: str # noqa: A003 VNE003 "id" field is reserved + title: str + completed: bool + + +class DataclassCustomEncoder(JSONEncoder): + """A custom JSON encoder to serialize dataclass obj""" + + def default(self, obj): + # Only called for values that aren't JSON serializable + # where `obj` will be an instance of Todo in this example + return asdict(obj) if is_dataclass(obj) else super().default(obj) + + +def custom_serializer(obj) -> str: + """Your custom serializer function APIGatewayRestResolver will use""" + return json.dumps(obj, separators=(",", ":"), cls=DataclassCustomEncoder) + + +app = APIGatewayRestResolver(serializer=custom_serializer) + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + ret: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + ret.raise_for_status() + todos = [Todo(**todo) for todo in ret.json()] + + # for brevity, we'll limit to the first 10 only + return {"todos": todos[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/debug_mode.py b/examples/event_handler_rest/src/debug_mode.py new file mode 100644 index 00000000000..47ffb8905eb --- /dev/null +++ b/examples/event_handler_rest/src/debug_mode.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver(debug=True) + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/dynamic_routes.json b/examples/event_handler_rest/src/dynamic_routes.json new file mode 100644 
index 00000000000..23e8261d283
--- /dev/null
+++ b/examples/event_handler_rest/src/dynamic_routes.json
@@ -0,0 +1,5 @@
+{
+  "resource": "/todos/{id}",
+  "path": "/todos/1",
+  "httpMethod": "GET"
+}
diff --git a/examples/event_handler_rest/src/dynamic_routes.py b/examples/event_handler_rest/src/dynamic_routes.py
new file mode 100644
index 00000000000..2ee2dc21044
--- /dev/null
+++ b/examples/event_handler_rest/src/dynamic_routes.py
@@ -0,0 +1,27 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+
+
+@app.get("/todos/<todo_id>")
+@tracer.capture_method
+def get_todo_by_id(todo_id: str):  # value comes as str
+    todos: Response = requests.get(f"https://jsonplaceholder.typicode.com/todos/{todo_id}")
+    todos.raise_for_status()
+
+    return {"todos": todos.json()}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/dynamic_routes_catch_all.json b/examples/event_handler_rest/src/dynamic_routes_catch_all.json
new file mode 100644
index 00000000000..c9395f23027
--- /dev/null
+++ b/examples/event_handler_rest/src/dynamic_routes_catch_all.json
@@ -0,0 +1,5 @@
+{
+  "resource": "/{proxy+}",
+  "path": "/any/route/should/work",
+  "httpMethod": "GET"
+}
diff --git a/examples/event_handler_rest/src/dynamic_routes_catch_all.py b/examples/event_handler_rest/src/dynamic_routes_catch_all.py
new file mode 100644
index 00000000000..f615f2a8dee
--- /dev/null
+++ b/examples/event_handler_rest/src/dynamic_routes_catch_all.py
@@ -0,0 +1,21 @@
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+
+
+@app.get(".+")
+@tracer.capture_method
+def catch_any_route_get_method():
+    return {"path_received": app.current_event.path}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/exception_handling.py b/examples/event_handler_rest/src/exception_handling.py
new file mode 100644
index 00000000000..fdac8589299
--- /dev/null
+++ b/examples/event_handler_rest/src/exception_handling.py
@@ -0,0 +1,42 @@
+import requests
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+
+
+@app.exception_handler(ValueError)
+def handle_invalid_limit_qs(ex: ValueError):  # receives exception raised
+    metadata = {"path": app.current_event.path, "query_strings": app.current_event.query_string_parameters}
+    logger.error(f"Malformed request: {ex}", extra=metadata)
+
+    return Response(
+        status_code=400,
+        content_type=content_types.TEXT_PLAIN,
+        body="Invalid request parameters.",
+    )
+
+
+@app.get("/todos")
+@tracer.capture_method
+def get_todos():
+    # educational purpose only: we should receive a `ValueError`
+    # if a query string value for `limit` cannot be coerced to int
+    max_results: int = int(app.current_event.get_query_string_value(name="limit", default_value=0))
+
+    todos: requests.Response = requests.get(f"https://jsonplaceholder.typicode.com/todos?limit={max_results}")
+    todos.raise_for_status()
+
+    return {"todos": todos.json()}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/fine_grained_responses.py b/examples/event_handler_rest/src/fine_grained_responses.py
new file mode 100644
index 00000000000..3e477160307
--- /dev/null
+++ b/examples/event_handler_rest/src/fine_grained_responses.py
@@ -0,0 +1,36 @@
+from http import HTTPStatus
+from uuid import uuid4
+
+import requests
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+
+
+@app.get("/todos")
+@tracer.capture_method
+def get_todos():
+    todos: requests.Response = requests.get("https://jsonplaceholder.typicode.com/todos")
+    todos.raise_for_status()
+
+    custom_headers = {"X-Transaction-Id": f"{uuid4()}"}
+
+    return Response(
+        status_code=HTTPStatus.OK.value,  # 200
+        content_type=content_types.APPLICATION_JSON,
+        body=todos.json()[:10],
+        headers=custom_headers,
+    )
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/fine_grained_responses_output.json b/examples/event_handler_rest/src/fine_grained_responses_output.json
new file mode 100644
index 00000000000..c3d58098e80
--- /dev/null
+++ b/examples/event_handler_rest/src/fine_grained_responses_output.json
@@ -0,0 +1,9 @@
+{
+  "statusCode": 200,
+  "headers": {
+    "Content-Type": "application/json",
+    "X-Transaction-Id": "3490eea9-791b-47a0-91a4-326317db61a9"
+  },
+  "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci
enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", + "isBase64Encoded": false +} diff --git a/examples/event_handler_rest/src/getting_started_alb_api_resolver.py b/examples/event_handler_rest/src/getting_started_alb_api_resolver.py new file mode 100644 index 00000000000..612823625ec --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_alb_api_resolver.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import ALBResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = ALBResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPLICATION_LOAD_BALANCER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/getting_started_http_api_resolver.py b/examples/event_handler_rest/src/getting_started_http_api_resolver.py new file mode 100644 index 00000000000..e976ef4169f --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_http_api_resolver.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayHttpResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayHttpResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_HTTP) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/getting_started_rest_api_resolver.json b/examples/event_handler_rest/src/getting_started_rest_api_resolver.json new file mode 100644 index 00000000000..92d3e40f139 --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_rest_api_resolver.json @@ -0,0 +1,58 @@ +{ + "body": "", + "resource": "/todos", + "path": "/todos", + "httpMethod": "GET", + "isBase64Encoded": false, + "queryStringParameters": {}, + "multiValueQueryStringParameters": {}, + "pathParameters": {}, + "stageVariables": {}, + "headers": { + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", + "Accept-Encoding": "gzip, deflate, sdch", + "Accept-Language": "en-US,en;q=0.8", + "Cache-Control": "max-age=0", + "CloudFront-Forwarded-Proto": "https", + "CloudFront-Is-Desktop-Viewer": "true", + 
"CloudFront-Is-Mobile-Viewer": "false", + "CloudFront-Is-SmartTV-Viewer": "false", + "CloudFront-Is-Tablet-Viewer": "false", + "CloudFront-Viewer-Country": "US", + "Host": "1234567890.execute-api.us-east-1.amazonaws.com", + "Upgrade-Insecure-Requests": "1", + "User-Agent": "Custom User Agent String", + "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", + "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", + "X-Forwarded-For": "127.0.0.1, 127.0.0.2", + "X-Forwarded-Port": "443", + "X-Forwarded-Proto": "https" + }, + "multiValueHeaders": {}, + "requestContext": { + "accountId": "123456789012", + "resourceId": "123456", + "stage": "Prod", + "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", + "requestTime": "25/Jul/2020:12:34:56 +0000", + "requestTimeEpoch": 1428582896000, + "identity": { + "cognitoIdentityPoolId": null, + "accountId": null, + "cognitoIdentityId": null, + "caller": null, + "accessKey": null, + "sourceIp": "127.0.0.1", + "cognitoAuthenticationType": null, + "cognitoAuthenticationProvider": null, + "userArn": null, + "userAgent": "Custom User Agent String", + "user": null + }, + "path": "/Prod/todos", + "resourcePath": "/todos", + "httpMethod": "GET", + "apiId": "1234567890", + "protocol": "HTTP/1.1" + } +} diff --git a/examples/event_handler_rest/src/getting_started_rest_api_resolver.py b/examples/event_handler_rest/src/getting_started_rest_api_resolver.py new file mode 100644 index 00000000000..3b30b5810f2 --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_rest_api_resolver.py @@ -0,0 +1,28 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json b/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json new file mode 100644 index 00000000000..2ef3714531f --- /dev/null +++ b/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json @@ -0,0 +1,8 @@ +{ + "statusCode": 200, + "headers": { + "Content-Type": "application/json" + }, + "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo 
expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}",
+  "isBase64Encoded": false
+}
diff --git a/examples/event_handler_rest/src/http_methods.json b/examples/event_handler_rest/src/http_methods.json
new file mode 100644
index 00000000000..e0f775d72df
--- /dev/null
+++ b/examples/event_handler_rest/src/http_methods.json
@@ -0,0 +1,6 @@
+{
+  "resource": "/todos",
+  "path": "/todos",
+  "httpMethod": "POST",
+  "body": "{\"title\": \"foo\", \"userId\": 1, \"completed\": false}"
+}
diff --git a/examples/event_handler_rest/src/http_methods.py b/examples/event_handler_rest/src/http_methods.py
new file mode 100644
index 00000000000..47eb1499a38
--- /dev/null
+++ b/examples/event_handler_rest/src/http_methods.py
@@ -0,0 +1,28 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+
+
+@app.post("/todos")
+@tracer.capture_method
+def create_todo():
+    todo_data: dict = app.current_event.json_body  # deserialize json str to dict
+    todo: Response = requests.post("https://jsonplaceholder.typicode.com/todos", data=todo_data)
+    todo.raise_for_status()
+
+    return {"todo": todo.json()}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/http_methods_multiple.py b/examples/event_handler_rest/src/http_methods_multiple.py
new file mode 100644
index 00000000000..a482c96d80f
--- /dev/null
+++ b/examples/event_handler_rest/src/http_methods_multiple.py
@@ -0,0 +1,29 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+
+
+# PUT and POST HTTP requests to the path /todos will route to this function
+@app.route("/todos", method=["PUT", "POST"])
+@tracer.capture_method
+def create_todo():
+    todo_data: dict = app.current_event.json_body  # deserialize json str to dict
+    todo: Response = requests.post("https://jsonplaceholder.typicode.com/todos", data=todo_data)
+    todo.raise_for_status()
+
+    return {"todo": todo.json()}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/not_found_routes.py b/examples/event_handler_rest/src/not_found_routes.py
new file mode 100644
index 00000000000..889880292c0
--- /dev/null
+++ b/examples/event_handler_rest/src/not_found_routes.py
@@ -0,0 +1,35
@@ +import requests + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types +from aws_lambda_powertools.event_handler.exceptions import NotFoundError +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.not_found +@tracer.capture_method +def handle_not_found_errors(exc: NotFoundError) -> Response: + logger.info(f"Not found route: {app.current_event.path}") + return Response(status_code=418, content_type=content_types.TEXT_PLAIN, body="I'm a teapot!") + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: requests.Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/raising_http_errors.py b/examples/event_handler_rest/src/raising_http_errors.py new file mode 100644 index 00000000000..97e7cc5048f --- /dev/null +++ b/examples/event_handler_rest/src/raising_http_errors.py @@ -0,0 +1,59 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.event_handler.exceptions import ( + BadRequestError, + InternalServerError, + NotFoundError, + ServiceError, + UnauthorizedError, +) +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get(rule="/bad-request-error") +def bad_request_error(): + raise BadRequestError("Missing required parameter") # HTTP 400 + + +@app.get(rule="/unauthorized-error") +def unauthorized_error(): + raise UnauthorizedError("Unauthorized") # HTTP 401 + + +@app.get(rule="/not-found-error") +def not_found_error(): + raise NotFoundError # HTTP 404 + + +@app.get(rule="/internal-server-error") +def internal_server_error(): + raise InternalServerError("Internal server error") # HTTP 500 + + +@app.get(rule="/service-error", cors=True) +def service_error(): + raise ServiceError(502, "Something went wrong!") + + +@app.get("/todos") +@tracer.capture_method +def get_todos(): + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + return {"todos": todos.json()[:10]} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_rest/src/setting_cors.py b/examples/event_handler_rest/src/setting_cors.py new file mode 100644 index 00000000000..101e013e552 --- /dev/null +++ b/examples/event_handler_rest/src/setting_cors.py @@ -0,0 +1,44 @@ +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from 
aws_lambda_powertools.event_handler import APIGatewayRestResolver, CORSConfig
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+cors_config = CORSConfig(allow_origin="https://example.com", max_age=300)
+app = APIGatewayRestResolver(cors=cors_config)
+
+
+@app.get("/todos")
+@tracer.capture_method
+def get_todos():
+    todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos")
+    todos.raise_for_status()
+
+    # for brevity, we'll limit to the first 10 only
+    return {"todos": todos.json()[:10]}
+
+
+@app.get("/todos/<todo_id>")
+@tracer.capture_method
+def get_todo_by_id(todo_id: str):  # value comes as str
+    todos: Response = requests.get(f"https://jsonplaceholder.typicode.com/todos/{todo_id}")
+    todos.raise_for_status()
+
+    return {"todos": todos.json()}
+
+
+@app.get("/healthcheck", cors=False)  # optionally removes CORS for a given route
+@tracer.capture_method
+def am_i_alive():
+    return {"am_i_alive": "yes"}
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/setting_cors_output.json b/examples/event_handler_rest/src/setting_cors_output.json
new file mode 100644
index 00000000000..ca86e892d38
--- /dev/null
+++ b/examples/event_handler_rest/src/setting_cors_output.json
@@ -0,0 +1,10 @@
+{
+  "statusCode": 200,
+  "headers": {
+    "Content-Type": "application/json",
+    "Access-Control-Allow-Origin": "https://example.com",
+    "Access-Control-Allow-Headers": "Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key"
+  },
+  "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}",
+  "isBase64Encoded": false
+}
diff --git a/examples/event_handler_rest/src/split_route.py b/examples/event_handler_rest/src/split_route.py
new file mode 100644
index 00000000000..6c0933ea08e
--- /dev/null
+++ b/examples/event_handler_rest/src/split_route.py
@@ -0,0 +1,18 @@
+import split_route_module
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+app.include_router(split_route_module.router)
+
+
+# You can continue to use other utilities just as before
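+# Editorial aside (assumption, not part of the original example): the
+# include_router call above merges every route registered on
+# split_route_module's Router into this resolver, so a "GET /todos" event
+# resolved here is handled by get_todos defined in split_route_module.py below.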
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/split_route_module.py b/examples/event_handler_rest/src/split_route_module.py
new file mode 100644
index 00000000000..eeb696ede56
--- /dev/null
+++ b/examples/event_handler_rest/src/split_route_module.py
@@ -0,0 +1,33 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Tracer
+from aws_lambda_powertools.event_handler.api_gateway import Router
+
+tracer = Tracer()
+router = Router()
+
+endpoint = "https://jsonplaceholder.typicode.com/todos"
+
+
+@router.get("/todos")
+@tracer.capture_method
+def get_todos():
+    api_key: str = router.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="")
+
+    todos: Response = requests.get(endpoint, headers={"X-Api-Key": api_key})
+    todos.raise_for_status()
+
+    # for brevity, we'll limit to the first 10 only
+    return {"todos": todos.json()[:10]}
+
+
+@router.get("/todos/<todo_id>")
+@tracer.capture_method
+def get_todo_by_id(todo_id: str):  # value comes as str
+    api_key: str = router.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="")
+
+    todos: Response = requests.get(f"{endpoint}/{todo_id}", headers={"X-Api-Key": api_key})
+    todos.raise_for_status()
+
+    return {"todos": todos.json()}
diff --git a/examples/event_handler_rest/src/split_route_prefix.py b/examples/event_handler_rest/src/split_route_prefix.py
new file mode 100644
index 00000000000..01129c80148
--- /dev/null
+++ b/examples/event_handler_rest/src/split_route_prefix.py
@@ -0,0 +1,19 @@
+import split_route_module
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = APIGatewayRestResolver()
+# prefix '/todos' to any route in `split_route_module.router`
+app.include_router(split_route_module.router, prefix="/todos")
+
+
+# You can continue to use other utilities just as before
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_rest/src/split_route_prefix_module.py b/examples/event_handler_rest/src/split_route_prefix_module.py
new file mode 100644
index 00000000000..b4035282776
--- /dev/null
+++ b/examples/event_handler_rest/src/split_route_prefix_module.py
@@ -0,0 +1,36 @@
+import requests
+from requests import Response
+
+from aws_lambda_powertools import Tracer
+from aws_lambda_powertools.event_handler.api_gateway import Router
+
+tracer = Tracer()
+router = Router()
+
+endpoint = "https://jsonplaceholder.typicode.com/todos"
+
+
+@router.get("/")
+@tracer.capture_method
+def get_todos():
+    api_key: str = router.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="")
+
+    todos: Response = requests.get(endpoint, headers={"X-Api-Key": api_key})
+    todos.raise_for_status()
+
+    # for brevity, we'll limit to the first 10 only
+    return {"todos": todos.json()[:10]}
+
+
+@router.get("/<todo_id>")
+@tracer.capture_method
+def get_todo_by_id(todo_id: str):  # value comes as str
+    api_key: str = router.current_event.get_header_value(name="X-Api-Key", case_sensitive=True, default_value="")
+
+    todos: Response = requests.get(f"{endpoint}/{todo_id}", headers={"X-Api-Key": api_key})
+    todos.raise_for_status()
+
+    return {"todos": todos.json()}
+
+
+# many more routes

From a34d34d066e8e5fb086f831d3f382aa20f23f005 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Thu, 7 Jul 2022 16:52:02 +0200
Subject: [PATCH 42/72] chore(ci): fix reference error in related_issue

---
 .github/scripts/label_related_issue.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js
index 8f851056cb3..fb47a7bc4b0 100644
--- a/.github/scripts/label_related_issue.js
+++ b/.github/scripts/label_related_issue.js
@@ -6,7 +6,7 @@ module.exports = async ({github, context}) => {
   const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<issue>\d+)/;
-  const isMatch = RELATED_ISSUE_REGEX.exec(body);
+  const isMatch = RELATED_ISSUE_REGEX.exec(prBody);
   if (!isMatch) {
     core.setFailed(`Unable to find related issue for PR number ${prNumber}.\n\n Body details: ${prBody}`);
     return await github.rest.issues.createComment({

From ad86b1018af1275d8d74f7299cb3aa214d9a8979 Mon Sep 17 00:00:00 2001
From: Alexander Melnyk
Date: Thu, 7 Jul 2022 17:03:43 +0200
Subject: [PATCH 43/72] fix: download artefact into the layer dir

---
 .github/workflows/reusable_deploy_layer_stack.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml
index 31f8830931d..6ac646f2bd8 100644
--- a/.github/workflows/reusable_deploy_layer_stack.yml
+++ b/.github/workflows/reusable_deploy_layer_stack.yml
@@ -78,6 +78,7 @@ jobs:
         uses: actions/download-artifact@v3
         with:
           name: ${{ inputs.artefact-name }}
+          path: layer
       - name: unzip artefact
         run: unzip ${{ inputs.artefact-name }}
       - name: CDK Deploy Layer

From 1962f192917413d308fa26e8faa81e61d66fd135 Mon Sep 17 00:00:00 2001
From: Alexander Melnyk
Date: Thu, 7 Jul 2022 17:46:28 +0200
Subject: [PATCH 44/72] fix: match the name of the cdk synth from the build phase

---
 .github/workflows/reusable_deploy_layer_stack.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml
index 6ac646f2bd8..69f182c4189 100644
--- a/.github/workflows/reusable_deploy_layer_stack.yml
+++ b/.github/workflows/reusable_deploy_layer_stack.yml
@@ -80,7 +80,7 @@ jobs:
           name: ${{ inputs.artefact-name }}
           path: layer
       - name: unzip artefact
-        run: unzip ${{ inputs.artefact-name }}
+        run: unzip cdk.out.zip
       - name: CDK Deploy Layer
         run: cdk deploy --app cdk.out --context region=${{ matrix.region }} 'LayerStack ' --require-approval never --verbose
       - name: CDK Deploy Canary

From 2ded8a6481836da7bdccf7a005fbe31bdf9dbd68 Mon Sep 17 00:00:00 2001
From: Alexander Melnyk
Date: Thu, 7 Jul 2022 17:58:45 +0200
Subject: [PATCH 45/72] fix: sigh, yes a whitespace character breaks the build

---
 .github/workflows/reusable_deploy_layer_stack.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml
index 69f182c4189..f03cb117d6f 100644
--- a/.github/workflows/reusable_deploy_layer_stack.yml
+++ b/.github/workflows/reusable_deploy_layer_stack.yml
@@ -82,6 +82,6 @@
       - name: unzip artefact
         run: unzip cdk.out.zip
       - name: CDK Deploy Layer
-        run: cdk deploy --app cdk.out
--context region=${{ matrix.region }} 'LayerStack ' --require-approval never --verbose + run: cdk deploy --app cdk.out --context region=${{ matrix.region }} 'LayerStack' --require-approval never --verbose - name: CDK Deploy Canary run: cdk deploy --app cdk.out --context region=${{ matrix.region}} --parameters DeployStage="${{ inputs.stage }}" 'CanaryStack' --require-approval never --verbose From fef70b0f98951b91969c510853df24351c9a0f0b Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 8 Jul 2022 16:00:48 +0200 Subject: [PATCH 46/72] docs(core): match code snippet name with filename (#1286) --- docs/core/event_handler/api_gateway.md | 62 +++++----- docs/core/logger.md | 111 +++++++++--------- docs/core/metrics.md | 34 +++--- docs/core/tracer.md | 16 +-- ...son => append_and_remove_keys_output.json} | 0 ....json => set_correlation_id_jmespath.json} | 0 ...nt.json => set_correlation_id_method.json} | 0 examples/tracer/src/tracer_reuse.py | 2 +- ...euse_payment.py => tracer_reuse_module.py} | 0 9 files changed, 115 insertions(+), 110 deletions(-) rename examples/logger/src/{append_and_remove_keys.json => append_and_remove_keys_output.json} (100%) rename examples/logger/src/{set_correlation_id_jmespath_event.json => set_correlation_id_jmespath.json} (100%) rename examples/logger/src/{set_correlation_id_method_event.json => set_correlation_id_method.json} (100%) rename examples/tracer/src/{tracer_reuse_payment.py => tracer_reuse_module.py} (100%) diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 9db219e994e..6d8f441d661 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -48,13 +48,13 @@ Here's an example on how we can handle the `/todos` path. ???+ info We automatically serialize `Dict` responses as JSON, trim whitespace for compact responses, and set content-type to `application/json`. -=== "app.py" +=== "getting_started_rest_api_resolver.py" ```python hl_lines="5 11 14 28" --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver.py" ``` -=== "Request" +=== "getting_started_rest_api_resolver.json" This utility uses `path` and `httpMethod` to route to the right function. This helps make unit tests and local invocation easier too. @@ -62,7 +62,7 @@ Here's an example on how we can handle the `/todos` path. --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver.json" ``` -=== "Response" +=== "getting_started_rest_api_resolver_output.json" ```json --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json" @@ -96,13 +96,13 @@ Each dynamic route you set must be part of your function signature. This allows ???+ note For brevity, we will only include the necessary keys for each sample request for the example to work. -=== "app.py" +=== "dynamic_routes.py" ```python hl_lines="14 16" --8<-- "examples/event_handler_rest/src/dynamic_routes.py" ``` -=== "Request" +=== "dynamic_routes.json" ```json --8<-- "examples/event_handler_rest/src/dynamic_routes.json" @@ -123,13 +123,13 @@ You can also combine nested paths with greedy regex to catch in between routes. ???+ warning We choose the most explicit registered route that matches an incoming event. 
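(Editorial aside, not part of the original docs: a minimal sketch of that precedence rule, assuming the resolver behavior described in the warning above — the route names are illustrative.)

```python
from aws_lambda_powertools.event_handler import APIGatewayRestResolver

app = APIGatewayRestResolver()


@app.get("/todos/<todo_id>")
def get_todo_by_id(todo_id: str):
    # explicit rule: chosen for "GET /todos/1" even though ".+" also matches
    return {"todo_id": todo_id}


@app.get(".+")
def catch_any_route_get_method():
    # greedy regex rule: used only when no more explicit rule matches
    return {"path_received": app.current_event.path}
```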
-=== "app.py" +=== "dynamic_routes_catch_all.py" ```python hl_lines="11" --8<-- "examples/event_handler_rest/src/dynamic_routes_catch_all.py" ``` -=== "Request" +=== "dynamic_routes_catch_all.json" ```json --8<-- "examples/event_handler_rest/src/dynamic_routes_catch_all.json" @@ -139,13 +139,13 @@ You can also combine nested paths with greedy regex to catch in between routes. You can use named decorators to specify the HTTP method that should be handled in your functions. That is, `app.`, where the HTTP method could be `get`, `post`, `put`, `patch`, `delete`, and `options`. -=== "app.py" +=== "http_methods.py" ```python hl_lines="14 17" --8<-- "examples/event_handler_rest/src/http_methods.py" ``` -=== "Request" +=== "http_methods.json" ```json --8<-- "examples/event_handler_rest/src/http_methods.json" @@ -225,13 +225,13 @@ When using [Custom Domain API Mappings feature](https://docs.aws.amazon.com/apig To address this API Gateway behavior, we use `strip_prefixes` parameter to account for these prefixes that are now injected into the path regardless of which type of API Gateway you're using. -=== "app.py" +=== "custom_api_mapping.py" ```python hl_lines="8" --8<-- "examples/event_handler_rest/src/custom_api_mapping.py" ``` -=== "Request" +=== "custom_api_mapping.json" ```json --8<-- "examples/event_handler_rest/src/custom_api_mapping.json" @@ -253,13 +253,13 @@ This will ensure that CORS headers are always returned as part of the response w ???+ tip Optionally disable CORS on a per path basis with `cors=False` parameter. -=== "app.py" +=== "setting_cors.py" ```python hl_lines="5 11-12 34" --8<-- "examples/event_handler_rest/src/setting_cors.py" ``` -=== "Response" +=== "setting_cors_output.json" ```json --8<-- "examples/event_handler_rest/src/setting_cors_output.json" @@ -290,13 +290,13 @@ For convenience, these are the default values when using `CORSConfig` to enable You can use the `Response` class to have full control over the response, for example you might want to add additional headers or set a custom Content-type. -=== "app.py" +=== "fine_grained_responses.py" ```python hl_lines="7 24-28" --8<-- "examples/event_handler_rest/src/fine_grained_responses.py" ``` -=== "Response" +=== "fine_grained_responses_output.json" ```json --8<-- "examples/event_handler_rest/src/fine_grained_responses_output.json" @@ -309,19 +309,19 @@ You can compress with gzip and base64 encode your responses via `compress` param ???+ warning The client must send the `Accept-Encoding` header, otherwise a normal response will be sent. -=== "app.py" +=== "compressing_responses.py" ```python hl_lines="14" --8<-- "examples/event_handler_rest/src/compressing_responses.py" ``` -=== "Request" +=== "compressing_responses.json" ```json --8<-- "examples/event_handler_rest/src/compressing_responses.json" ``` -=== "Response" +=== "compressing_responses_output.json" ```json --8<-- "examples/event_handler_rest/src/compressing_responses_output.json" @@ -336,25 +336,25 @@ Like `compress` feature, the client must send the `Accept` header with the corre ???+ warning This feature requires API Gateway to configure binary media types, see [our sample infrastructure](#required-resources) for reference. 
-=== "app.py" +=== "binary_responses.py" ```python hl_lines="14 20" --8<-- "examples/event_handler_rest/src/binary_responses.py" ``` -=== "logo.svg" +=== "binary_responses_logo.svg" ```xml --8<-- "examples/event_handler_rest/src/binary_responses_logo.svg" ``` -=== "Request" +=== "binary_responses.json" ```json --8<-- "examples/event_handler_rest/src/binary_responses.json" ``` -=== "Response" +=== "binary_responses_output.json" ```json --8<-- "examples/event_handler_rest/src/binary_responses_output.json" @@ -387,9 +387,9 @@ You can instruct API Gateway handler to use a custom serializer to best suit you As you grow the number of routes a given Lambda function should handle, it is natural to split routes into separate files to ease maintenance - That's where the `Router` feature is useful. -Let's assume you have `app.py` as your Lambda function entrypoint and routes in `todos.py`, this is how you'd use the `Router` feature. +Let's assume you have `app.py` as your Lambda function entrypoint and routes in `split_route_module.py`, this is how you'd use the `Router` feature. -=== "todos.py" +=== "split_route_module.py" We import **Router** instead of **APIGatewayRestResolver**; syntax wise is exactly the same. @@ -397,7 +397,7 @@ Let's assume you have `app.py` as your Lambda function entrypoint and routes in --8<-- "examples/event_handler_rest/src/split_route_module.py" ``` -=== "app.py" +=== "split_route.py" We use `include_router` method and include all user routers registered in the `router` global object. @@ -407,17 +407,17 @@ Let's assume you have `app.py` as your Lambda function entrypoint and routes in #### Route prefix -In the previous example, `todos.py` routes had a `/todos` prefix. This might grow over time and become repetitive. +In the previous example, `split_route_module.py` routes had a `/todos` prefix. This might grow over time and become repetitive. -When necessary, you can set a prefix when including a router object. This means you could remove `/todos` prefix in `todos.py` altogether. +When necessary, you can set a prefix when including a router object. This means you could remove `/todos` prefix altogether. -=== "app.py" +=== "split_route_prefix.py" ```python hl_lines="12" --8<-- "examples/event_handler_rest/src/split_route_prefix.py" ``` -=== "todos.py" +=== "split_route_prefix_module.py" ```python hl_lines="13 25" --8<-- "examples/event_handler_rest/src/split_route_prefix_module.py" @@ -509,13 +509,13 @@ your development, building, deployment tooling need to accommodate the distinct You can test your routes by passing a proxy event request where `path` and `httpMethod`. -=== "test_app.py" +=== "assert_http_response.py" ```python hl_lines="21-24" --8<-- "examples/event_handler_rest/src/assert_http_response.py" ``` -=== "app.py" +=== "assert_http_response_module.py" ```python --8<-- "examples/event_handler_rest/src/assert_http_response_module.py" diff --git a/docs/core/logger.md b/docs/core/logger.md index b09cc6c85d3..c699568b349 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -48,13 +48,13 @@ Your Logger will include the following keys to your structured logging: You can enrich your structured logs with key Lambda context information via `inject_lambda_context`. 
-=== "collect.py" +=== "inject_lambda_context.py" ```python hl_lines="7" --8<-- "examples/logger/src/inject_lambda_context.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "inject_lambda_context_output.json" ```json hl_lines="8-12 17-20" --8<-- "examples/logger/src/inject_lambda_context_output.json" @@ -88,19 +88,19 @@ You can set a Correlation ID using `correlation_id_path` param by passing a [JME ???+ tip You can retrieve correlation IDs via `get_correlation_id` method -=== "collect.py" +=== "set_correlation_id.py" ```python hl_lines="7" --8<-- "examples/logger/src/set_correlation_id.py" ``` -=== "Example Event" +=== "set_correlation_id_event.json" ```json hl_lines="3" --8<-- "examples/logger/src/set_correlation_id_event.json" ``` -=== "Example CloudWatch Logs excerpt" +=== "set_correlation_id_output.json" ```json hl_lines="12" --8<-- "examples/logger/src/set_correlation_id_output.json" @@ -110,18 +110,19 @@ You can set a Correlation ID using `correlation_id_path` param by passing a [JME You can also use `set_correlation_id` method to inject it anywhere else in your code. Example below uses [Event Source Data Classes utility](../utilities/data_classes.md) to easily access events properties. -=== "collect.py" +=== "set_correlation_id_method.py" ```python hl_lines="11" --8<-- "examples/logger/src/set_correlation_id_method.py" ``` -=== "Example Event" + +=== "set_correlation_id_method.json" ```json hl_lines="3" - --8<-- "examples/logger/src/set_correlation_id_method_event.json" + --8<-- "examples/logger/src/set_correlation_id_method.json" ``` -=== "Example CloudWatch Logs excerpt" +=== "set_correlation_id_method_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/set_correlation_id_method_output.json" @@ -131,19 +132,19 @@ You can also use `set_correlation_id` method to inject it anywhere else in your To ease routine tasks like extracting correlation ID from popular event sources, we provide [built-in JMESPath expressions](#built-in-correlation-id-expressions). -=== "collect.py" +=== "set_correlation_id_jmespath.py" ```python hl_lines="2 8" --8<-- "examples/logger/src/set_correlation_id_jmespath.py" ``` -=== "Example Event" +=== "set_correlation_id_jmespath.json" ```json hl_lines="3" - --8<-- "examples/logger/src/set_correlation_id_jmespath_event.json" + --8<-- "examples/logger/src/set_correlation_id_jmespath.json" ``` -=== "Example CloudWatch Logs excerpt" +=== "set_correlation_id_jmespath_output.json" ```json hl_lines="12" --8<-- "examples/logger/src/set_correlation_id_jmespath_output.json" @@ -166,12 +167,13 @@ You can append additional keys using either mechanism: You can append your own keys to your existing Logger via `append_keys(**additional_key_values)` method. -=== "collect.py" +=== "append_keys.py" ```python hl_lines="12" --8<-- "examples/logger/src/append_keys.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "append_keys_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/append_keys_output.json" @@ -191,12 +193,13 @@ It accepts any dictionary, and all keyword arguments will be added as part of th ???+ info Any keyword argument added using `extra` will not be persisted for subsequent messages. 
-=== "extra_parameter.py" +=== "append_keys_extra.py" ```python hl_lines="9" --8<-- "examples/logger/src/append_keys_extra.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "append_keys_extra_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/append_keys_extra_output.json" @@ -206,13 +209,13 @@ It accepts any dictionary, and all keyword arguments will be added as part of th You can remove any additional key from Logger state using `remove_keys`. -=== "collect.py" +=== "remove_keys.py" ```python hl_lines="11" --8<-- "examples/logger/src/remove_keys.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "remove_keys_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/remove_keys_output.json" @@ -232,19 +235,19 @@ Logger is commonly initialized in the global scope. Due to [Lambda Execution Con You can either avoid running any code as part of Lambda Layers global scope, or override keys with their latest value as part of handler's execution. -=== "collect.py" +=== "clear_state.py" ```python hl_lines="7 10" --8<-- "examples/logger/src/clear_state.py" ``` -=== "#1 request" +=== "clear_state_event_one.json" ```json hl_lines="7" --8<-- "examples/logger/src/clear_state_event_one.json" ``` -=== "#2 request" +=== "clear_state_event_two.json" ```json hl_lines="7" --8<-- "examples/logger/src/clear_state_event_two.json" @@ -257,13 +260,13 @@ Use `logger.exception` method to log contextual information about exceptions. Lo ???+ tip You can use your preferred Log Analytics tool to enumerate and visualize exceptions across all your services using `exception_name` key. -=== "collect.py" +=== "logging_exceptions.py" ```python hl_lines="15" --8<-- "examples/logger/src/logging_exceptions.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "logging_exceptions_output.json" ```json hl_lines="7-8" --8<-- "examples/logger/src/logging_exceptions_output.json" @@ -292,19 +295,19 @@ Similar to [Tracer](./tracer.md#reusing-tracer-across-your-code), a new instance Notice in the CloudWatch Logs output how `payment_id` appeared as expected when logging in `collect.py`. -=== "collect.py" +=== "logger_reuse.py" ```python hl_lines="1 9 11 12" --8<-- "examples/logger/src/logger_reuse.py" ``` -=== "payment.py" +=== "logger_reuse_payment.py" ```python hl_lines="3 7" --8<-- "examples/logger/src/logger_reuse_payment.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "logger_reuse_output.json" ```json hl_lines="12" --8<-- "examples/logger/src/logger_reuse_output.json" @@ -313,7 +316,7 @@ Notice in the CloudWatch Logs output how `payment_id` appeared as expected when ???+ note "Note: About Child Loggers" Coming from standard library, you might be used to use `logging.getLogger(__name__)`. This will create a new instance of a Logger with a different name. - In Powertools, you can have the same effect by using `child=True` parameter: `Logger(child=True)`. This creates a new Logger instance named after `service.`. All state changes will be propagated bi-directonally between Child and Parent. + In Powertools, you can have the same effect by using `child=True` parameter: `Logger(child=True)`. This creates a new Logger instance named after `service.`. All state changes will be propagated bi-directionally between Child and Parent. For that reason, there could be side effects depending on the order the Child Logger is instantiated, because Child Loggers don't have a handler. @@ -337,15 +340,15 @@ Sampling decision happens at the Logger initialization. 
This means sampling may ???+ note Open a [feature request](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=) if you want Logger to calculate sampling for every invocation -=== "collect.py" +=== "sampling_debug_logs.py" ```python hl_lines="6 10" - --8<-- "examples/logger/src/logger_reuse.py" + --8<-- "examples/logger/src/sampling_debug_logs.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "sampling_debug_logs_output.json" - ```json hl_lines="3 5 13 16 25" + ```json hl_lines="3 5 13 16 26" --8<-- "examples/logger/src/sampling_debug_logs_output.json" ``` @@ -393,13 +396,13 @@ For child Loggers, we introspect the name of your module where `Logger(child=Tru ???+ danger A common issue when migrating from other Loggers is that `service` might be defined in the parent Logger (no child param), and not defined in the child Logger: -=== "incorrect_logger_inheritance.py" +=== "logging_inheritance_bad.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_bad.py" ``` -=== "my_other_module.py" +=== "logging_inheritance_module.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_module.py" @@ -412,13 +415,13 @@ In this case, Logger will register a Logger named `payment`, and a Logger named Do this instead: -=== "correct_logger_inheritance.py" +=== "logging_inheritance_good.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_good.py" ``` -=== "my_other_module.py" +=== "logging_inheritance_module.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_module.py" @@ -435,13 +438,13 @@ You might want to continue to use the same date formatting style, or override `l Logger allows you to either change the format or suppress the following keys altogether at the initialization: `location`, `timestamp`, `level`, `xray_trace_id`. -=== "lambda_handler.py" +=== "overriding_log_records.py" ```python hl_lines="7 10" --8<-- "examples/logger/src/overriding_log_records.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "overriding_log_records_output.json" ```json hl_lines="3 5" --8<-- "examples/logger/src/overriding_log_records_output.json" @@ -451,12 +454,13 @@ Logger allows you to either change the format or suppress the following keys alt You can change the order of [standard Logger keys](#standard-structured-keys) or any keys that will be appended later at runtime via the `log_record_order` parameter. -=== "app.py" +=== "reordering_log_keys.py" ```python hl_lines="5 8" --8<-- "examples/logger/src/reordering_log_keys.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "reordering_log_keys_output.json" ```json hl_lines="3 10" --8<-- "examples/logger/src/reordering_log_keys_output.json" @@ -466,13 +470,13 @@ You can change the order of [standard Logger keys](#standard-structured-keys) or By default, this Logger and standard logging library emits records using local time timestamp. You can override this behavior via `utc` parameter: -=== "app.py" +=== "setting_utc_timestamp.py" ```python hl_lines="6" --8<-- "examples/logger/src/setting_utc_timestamp.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "setting_utc_timestamp_output.json" ```json hl_lines="6 13" --8<-- "examples/logger/src/setting_utc_timestamp_output.json" @@ -482,13 +486,13 @@ By default, this Logger and standard logging library emits records using local t By default, Logger uses `str` to handle values non-serializable by JSON. 
You can override this behavior via `json_default` parameter by passing a Callable: -=== "app.py" +=== "unserializable_values.py" ```python hl_lines="6 17" --8<-- "examples/logger/src/unserializable_values.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "unserializable_values_output.json" ```json hl_lines="4-6" --8<-- "examples/logger/src/unserializable_values_output.json" @@ -511,13 +515,13 @@ By default, Logger uses [LambdaPowertoolsFormatter](#lambdapowertoolsformatter) For these, you can override the `serialize` method from [LambdaPowertoolsFormatter](#lambdapowertoolsformatter). -=== "custom_formatter.py" +=== "bring_your_own_formatter.py" ```python hl_lines="2 5-6 12" --8<-- "examples/logger/src/bring_your_own_formatter.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "bring_your_own_formatter_output.json" ```json hl_lines="6" --8<-- "examples/logger/src/bring_your_own_formatter_output.json" ``` @@ -529,13 +533,13 @@ For exceptional cases where you want to completely replace our formatter logic, ???+ warning You will need to implement `append_keys`, `clear_state`, override `format`, and optionally `remove_keys` to keep the same feature set Powertools Logger provides. This also means keeping state of logging keys added. -=== "collect.py" +=== "bring_your_own_formatter_from_scratch.py" ```python hl_lines="6 9 11-12 15 19 23 26 38" --8<-- "examples/logger/src/bring_your_own_formatter_from_scratch.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "bring_your_own_formatter_from_scratch_output.json" ```json hl_lines="2-4" --8<-- "examples/logger/src/bring_your_own_formatter_from_scratch_output.json" @@ -615,15 +619,16 @@ You can include any of these logging attributes as key value arguments (`kwargs` You can also add them later anywhere in your code with `append_keys`, or remove them with `remove_keys` methods. -=== "collect.py" +=== "append_and_remove_keys.py" ```python hl_lines="3 8 10" ---8<-- "examples/logger/src/append_and_remove_keys.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "append_and_remove_keys_output.json" ```json hl_lines="6 15-16" - ---8<-- "examples/logger/src/append_and_remove_keys.json" + ---8<-- "examples/logger/src/append_and_remove_keys_output.json" ``` For log records originating from Powertools Logger, the `name` attribute will be the same as `service`, for log records coming from standard library logger, it will be the name of the logger (i.e. what was used as name argument to `logging.getLogger`). @@ -634,13 +639,13 @@ Keys added with `append_keys` will persist across multiple log messages while ke Here's an example where we persist `payment_id` not `request_id`. Note that `payment_id` remains in both log messages while `booking_id` is only available in the first message. -=== "collect.py" +=== "append_keys_vs_extra.py" ```python hl_lines="16 23" ---8<-- "examples/logger/src/append_keys_vs_extra.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "append_keys_vs_extra_output.json" ```json hl_lines="9-10 19" ---8<-- "examples/logger/src/append_keys_vs_extra_output.json" diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 713a53b193c..843e35b7eb8 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -57,13 +57,13 @@ You can create metrics using `add_metric`, and you can create dimensions for all ???+ tip You can initialize Metrics in any other module too. It'll keep track of your aggregate metrics in memory to optimize costs (one blob instead of multiples). 
-=== "Metrics" +=== "add_metrics.py" ```python hl_lines="10" --8<-- "examples/metrics/src/add_metrics.py" ``` -=== "Metrics with custom dimensions" +=== "add_dimension.py" ```python hl_lines="13" --8<-- "examples/metrics/src/add_dimension.py" @@ -82,13 +82,13 @@ You can create metrics using `add_metric`, and you can create dimensions for all You can call `add_metric()` with the same metric name multiple times. The values will be grouped together in a list. -=== "Metrics" +=== "add_multi_value_metrics.py" ```python hl_lines="14-15" --8<-- "examples/metrics/src/add_multi_value_metrics.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "add_multi_value_metrics_output.json" ```python hl_lines="15 24-26" --8<-- "examples/metrics/src/add_multi_value_metrics_output.json" @@ -100,13 +100,13 @@ You can use `set_default_dimensions` method, or `default_dimensions` parameter i If you'd like to remove them at some point, you can use `clear_default_dimensions` method. -=== "set_default_dimensions method" +=== "set_default_dimensions.py" ```python hl_lines="9" --8<-- "examples/metrics/src/set_default_dimensions.py" ``` -=== "with log_metrics decorator" +=== "set_default_dimensions_log_metrics.py" ```python hl_lines="9 13" --8<-- "examples/metrics/src/set_default_dimensions_log_metrics.py" @@ -118,13 +118,13 @@ As you finish adding all your metrics, you need to serialize and flush them to s This decorator also **validates**, **serializes**, and **flushes** all your metrics. During metrics validation, if no metrics are provided then a warning will be logged, but no exception will be raised. -=== "app.py" +=== "add_metrics.py" ```python hl_lines="8" --8<-- "examples/metrics/src/add_metrics.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "log_metrics_output.json" ```json hl_lines="6 9 14 21-23" --8<-- "examples/metrics/src/log_metrics_output.json" @@ -152,13 +152,13 @@ If you want to ensure at least one metric is always emitted, you can pass `raise You can optionally capture cold start metrics with `log_metrics` decorator via `capture_cold_start_metric` param. -=== "app.py" +=== "capture_cold_start_metric.py" ```python hl_lines="7" --8<-- "examples/metrics/src/capture_cold_start_metric.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "capture_cold_start_metric_output.json" ```json hl_lines="9 15 22 24-25" --8<-- "examples/metrics/src/capture_cold_start_metric_output.json" @@ -183,13 +183,13 @@ You can add high-cardinality data as part of your Metrics log with `add_metadata ???+ info **This will not be available during metrics visualization** - Use **dimensions** for this purpose -=== "app.py" +=== "add_metadata.py" ```python hl_lines="14" --8<-- "examples/metrics/src/add_metadata.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "add_metadata_output.json" ```json hl_lines="22" --8<-- "examples/metrics/src/add_metadata_output.json" @@ -204,13 +204,13 @@ CloudWatch EMF uses the same dimensions across all your metrics. Use `single_met **unique metric = (metric_name + dimension_name + dimension_value)** -=== "app.py" +=== "single_metric.py" ```python hl_lines="11" --8<-- "examples/metrics/src/single_metric.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "single_metric_output.json" ```json hl_lines="15" --8<-- "examples/metrics/src/single_metric_output.json" @@ -257,7 +257,7 @@ Make sure to set `POWERTOOLS_METRICS_NAMESPACE` and `POWERTOOLS_SERVICE_NAME` be You can read standard output and assert whether metrics have been flushed. 
Here's an example using `pytest` with `capsys` built-in fixture: -=== "Asserting single EMF blob" +=== "assert_single_emf_blob.py" ```python hl_lines="6 9-10 23-34" --8<-- "examples/metrics/src/assert_single_emf_blob.py" @@ -269,7 +269,7 @@ You can read standard output and assert whether metrics have been flushed. Here' --8<-- "examples/metrics/src/add_metrics.py" ``` -=== "Asserting multiple EMF blobs" +=== "assert_multiple_emf_blobs.py" This will be needed when using `capture_cold_start_metric=True`, or when both `Metrics` and `single_metric` are used. @@ -277,7 +277,7 @@ You can read standard output and assert whether metrics have been flushed. Here' --8<-- "examples/metrics/src/assert_multiple_emf_blobs.py" ``` -=== "my_other_module.py" +=== "assert_multiple_emf_blobs_module.py" ```python --8<-- "examples/metrics/src/assert_multiple_emf_blobs_module.py" diff --git a/docs/core/tracer.md b/docs/core/tracer.md index 7664231cc31..8fbfc0e29f7 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -77,19 +77,19 @@ You can trace synchronous functions using the `capture_method` decorator. You can trace asynchronous functions and generator functions (including context managers) using `capture_method`. -=== "Async" +=== "capture_method_async.py" ```python hl_lines="9" --8<-- "examples/tracer/src/capture_method_async.py" ``` -=== "Context manager" +=== "capture_method_context_manager.py" ```python hl_lines="12-13" --8<-- "examples/tracer/src/capture_method_context_manager.py" ``` -=== "Generators" +=== "capture_method_generators.py" ```python hl_lines="9" --8<-- "examples/tracer/src/capture_method_generators.py" @@ -116,13 +116,13 @@ Use **`capture_response=False`** parameter in both `capture_lambda_handler` and 2. You might manipulate **streaming objects that can be read only once**; this prevents subsequent calls from being empty 3. You might return **more than 64K** of data _e.g., `message too long` error_ -=== "sensitive_data_scenario.py" +=== "disable_capture_response.py" ```python hl_lines="8 15" --8<-- "examples/tracer/src/disable_capture_response.py" ``` -=== "streaming_object_scenario.py" +=== "disable_capture_response_streaming_body.py" ```python hl_lines="19" --8<-- "examples/tracer/src/disable_capture_response_streaming_body.py" @@ -192,17 +192,17 @@ Tracer keeps a copy of its configuration after the first initialization. This is Tracer will automatically ignore imported modules that have been patched. -=== "handler.py" +=== "tracer_reuse.py" ```python hl_lines="1 6" --8<-- "examples/tracer/src/tracer_reuse.py" ``` -=== "tracer_reuse_payment.py" +=== "tracer_reuse_module.py" A new instance of Tracer will be created but will reuse the previous Tracer instance configuration, similar to a Singleton. 
```python hl_lines="3" - --8<-- "examples/tracer/src/tracer_reuse_payment.py" + --8<-- "examples/tracer/src/tracer_reuse_module.py" ``` ## Testing your code diff --git a/examples/logger/src/append_and_remove_keys.json b/examples/logger/src/append_and_remove_keys_output.json similarity index 100% rename from examples/logger/src/append_and_remove_keys.json rename to examples/logger/src/append_and_remove_keys_output.json diff --git a/examples/logger/src/set_correlation_id_jmespath_event.json b/examples/logger/src/set_correlation_id_jmespath.json similarity index 100% rename from examples/logger/src/set_correlation_id_jmespath_event.json rename to examples/logger/src/set_correlation_id_jmespath.json diff --git a/examples/logger/src/set_correlation_id_method_event.json b/examples/logger/src/set_correlation_id_method.json similarity index 100% rename from examples/logger/src/set_correlation_id_method_event.json rename to examples/logger/src/set_correlation_id_method.json diff --git a/examples/tracer/src/tracer_reuse.py b/examples/tracer/src/tracer_reuse.py index 5f12f82b714..bdfe7bc9d91 100644 --- a/examples/tracer/src/tracer_reuse.py +++ b/examples/tracer/src/tracer_reuse.py @@ -1,4 +1,4 @@ -from tracer_reuse_payment import collect_payment +from tracer_reuse_module import collect_payment from aws_lambda_powertools import Tracer from aws_lambda_powertools.utilities.typing import LambdaContext diff --git a/examples/tracer/src/tracer_reuse_payment.py b/examples/tracer/src/tracer_reuse_module.py similarity index 100% rename from examples/tracer/src/tracer_reuse_payment.py rename to examples/tracer/src/tracer_reuse_module.py From d91838743cb2d7e3ae8b161ae8521194487585bf Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 8 Jul 2022 16:03:31 +0200 Subject: [PATCH 47/72] fix(ci): accept core arg in label related issue workflow --- .github/scripts/label_related_issue.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js index fb47a7bc4b0..a66a63fd005 100644 --- a/.github/scripts/label_related_issue.js +++ b/.github/scripts/label_related_issue.js @@ -1,4 +1,4 @@ -module.exports = async ({github, context}) => { +module.exports = async ({github, context, core}) => { const prBody = context.payload.body; const prNumber = context.payload.number; const releaseLabel = process.env.RELEASE_LABEL; From 92b475b9ba789cdebb1c07cf4ad06d1b596bef42 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 11 Jul 2022 09:12:28 +0200 Subject: [PATCH 48/72] docs(graphql): snippets split, improved, and lint (#1287) --- .pre-commit-config.yaml | 1 - docs/core/event_handler/api_gateway.md | 2 +- docs/core/event_handler/appsync.md | 802 +++--------------- .../event_handler_graphql/sam/template.yaml | 124 +++ ...amplify_graphql_transformer_schema.graphql | 23 + .../src/assert_async_graphql_response.json | 43 + .../src/assert_async_graphql_response.py | 34 + .../assert_async_graphql_response_module.py | 37 + .../src/assert_graphql_response.json | 45 + .../src/assert_graphql_response.py | 29 + .../src/assert_graphql_response_module.py | 30 + .../src/async_resolvers.py | 36 + .../src/custom_models.py | 43 + .../src/getting_started_get_todo.json | 46 + .../getting_started_graphql_api_resolver.py | 48 ++ .../src/getting_started_list_todos.json | 44 + .../src}/getting_started_schema.graphql | 4 +- .../src/graphql_transformer_common_field.json | 17 + .../graphql_transformer_find_merchant.json | 19 + 
.../graphql_transformer_list_locations.json | 22 + .../src/graphql_transformer_merchant_info.py | 36 + .../graphql_transformer_search_merchant.py | 42 + .../src/nested_mappings.py | 30 + .../src/nested_mappings_schema.graphql | 21 + .../src/scalar_functions.py | 15 + .../src/split_operation.py | 17 + .../src/split_operation_module.py | 22 + examples/event_handler_rest/sam/template.yaml | 6 +- 28 files changed, 935 insertions(+), 703 deletions(-) create mode 100644 examples/event_handler_graphql/sam/template.yaml create mode 100644 examples/event_handler_graphql/src/amplify_graphql_transformer_schema.graphql create mode 100644 examples/event_handler_graphql/src/assert_async_graphql_response.json create mode 100644 examples/event_handler_graphql/src/assert_async_graphql_response.py create mode 100644 examples/event_handler_graphql/src/assert_async_graphql_response_module.py create mode 100644 examples/event_handler_graphql/src/assert_graphql_response.json create mode 100644 examples/event_handler_graphql/src/assert_graphql_response.py create mode 100644 examples/event_handler_graphql/src/assert_graphql_response_module.py create mode 100644 examples/event_handler_graphql/src/async_resolvers.py create mode 100644 examples/event_handler_graphql/src/custom_models.py create mode 100644 examples/event_handler_graphql/src/getting_started_get_todo.json create mode 100644 examples/event_handler_graphql/src/getting_started_graphql_api_resolver.py create mode 100644 examples/event_handler_graphql/src/getting_started_list_todos.json rename {docs/shared => examples/event_handler_graphql/src}/getting_started_schema.graphql (76%) create mode 100644 examples/event_handler_graphql/src/graphql_transformer_common_field.json create mode 100644 examples/event_handler_graphql/src/graphql_transformer_find_merchant.json create mode 100644 examples/event_handler_graphql/src/graphql_transformer_list_locations.json create mode 100644 examples/event_handler_graphql/src/graphql_transformer_merchant_info.py create mode 100644 examples/event_handler_graphql/src/graphql_transformer_search_merchant.py create mode 100644 examples/event_handler_graphql/src/nested_mappings.py create mode 100644 examples/event_handler_graphql/src/nested_mappings_schema.graphql create mode 100644 examples/event_handler_graphql/src/scalar_functions.py create mode 100644 examples/event_handler_graphql/src/split_operation.py create mode 100644 examples/event_handler_graphql/src/split_operation_module.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8a614f78968..6a41e0d945c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,6 @@ repos: hooks: - id: check-merge-conflict - id: trailing-whitespace - - id: end-of-file-fixer - id: check-toml - repo: local hooks: diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 6d8f441d661..6943d6ed9bb 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -387,7 +387,7 @@ You can instruct API Gateway handler to use a custom serializer to best suit you As you grow the number of routes a given Lambda function should handle, it is natural to split routes into separate files to ease maintenance - That's where the `Router` feature is useful. -Let's assume you have `app.py` as your Lambda function entrypoint and routes in `split_route_module.py`, this is how you'd use the `Router` feature. 
+Let's assume you have `split_route.py` as your Lambda function entrypoint and routes in `split_route_module.py`. This is how you'd use the `Router` feature. === "split_route_module.py" diff --git a/docs/core/event_handler/appsync.md b/docs/core/event_handler/appsync.md index f3203e37834..4d28b41a81f 100644 --- a/docs/core/event_handler/appsync.md +++ b/docs/core/event_handler/appsync.md @@ -5,7 +5,7 @@ description: Core utility Event handler for AWS AppSync Direct Lambda Resolver and Amplify GraphQL Transformer. -### Key Features +## Key Features * Automatically parse API arguments to function arguments * Choose between strictly match a GraphQL field name or all of them to a function @@ -30,144 +30,16 @@ This is the sample infrastructure we are using for the initial examples with a A ???+ tip "Tip: Designing GraphQL Schemas for the first time?" Visit [AWS AppSync schema documentation](https://docs.aws.amazon.com/appsync/latest/devguide/designing-your-schema.html){target="_blank"} for understanding how to define types, nesting, and pagination. -=== "schema.graphql" +=== "getting_started_schema.graphql" ```typescript - --8<-- "docs/shared/getting_started_schema.graphql" + --8<-- "examples/event_handler_graphql/src/getting_started_schema.graphql" ``` === "template.yml" - ```yaml hl_lines="37-42 50-55 61-62 78-91 96-120" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: Hello world Direct Lambda Resolver - - Globals: - Function: - Timeout: 5 - Runtime: python3.8 - Tracing: Active - Environment: - Variables: - # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/latest/#environment-variables - LOG_LEVEL: INFO - POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 - POWERTOOLS_LOGGER_LOG_EVENT: true - POWERTOOLS_SERVICE_NAME: sample_resolver - - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: hello_world - Description: Sample Lambda Powertools Direct Lambda Resolver - Tags: - SOLUTION: LambdaPowertoolsPython - - # IAM Permissions and Roles - - AppSyncServiceRole: - Type: "AWS::IAM::Role" - Properties: - AssumeRolePolicyDocument: - Version: "2012-10-17" - Statement: - - - Effect: "Allow" - Principal: - Service: - - "appsync.amazonaws.com" - Action: - - "sts:AssumeRole" - - InvokeLambdaResolverPolicy: - Type: "AWS::IAM::Policy" - Properties: - PolicyName: "DirectAppSyncLambda" - PolicyDocument: - Version: "2012-10-17" - Statement: - - - Effect: "Allow" - Action: "lambda:invokeFunction" - Resource: - - !GetAtt HelloWorldFunction.Arn - Roles: - - !Ref AppSyncServiceRole - - # GraphQL API - - HelloWorldApi: - Type: "AWS::AppSync::GraphQLApi" - Properties: - Name: HelloWorldApi - AuthenticationType: "API_KEY" - XrayEnabled: true - - HelloWorldApiKey: - Type: AWS::AppSync::ApiKey - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - - HelloWorldApiSchema: - Type: "AWS::AppSync::GraphQLSchema" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - Definition: | - schema { - query:Query - } - - type Query { - getTodo(id: ID!): Todo - listTodos: [Todo] - } - - type Todo { - id: ID! 
- title: String - description: String - done: Boolean - } - - # Lambda Direct Data Source and Resolver - - HelloWorldFunctionDataSource: - Type: "AWS::AppSync::DataSource" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - Name: "HelloWorldLambdaDirectResolver" - Type: "AWS_LAMBDA" - ServiceRoleArn: !GetAtt AppSyncServiceRole.Arn - LambdaConfig: - LambdaFunctionArn: !GetAtt HelloWorldFunction.Arn - - ListTodosResolver: - Type: "AWS::AppSync::Resolver" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - TypeName: "Query" - FieldName: "listTodos" - DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name - - GetTodoResolver: - Type: "AWS::AppSync::Resolver" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - TypeName: "Query" - FieldName: "getTodo" - DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name - - - Outputs: - HelloWorldFunction: - Description: "Hello World Lambda Function ARN" - Value: !GetAtt HelloWorldFunction.Arn - - HelloWorldAPI: - Value: !GetAtt HelloWorldApi.Arn + ```yaml hl_lines="59-60 71-72 94-95 104-105 112-113" + --8<-- "examples/event_handler_graphql/sam/template.yaml" ``` ### Resolver decorator @@ -176,248 +48,86 @@ You can define your functions to match GraphQL types and fields with the `app.re Here's an example where we have two separate functions to resolve `getTodo` and `listTodos` fields within the `Query` type. For completion, we use Scalar type utilities to generate the right output based on our schema definition. -???+ info - GraphQL arguments are passed as function arguments. +???+ important + GraphQL arguments are passed as function keyword arguments. -=== "app.py" + **Example** - ```python hl_lines="3-5 9 31-32 39-40 47" - from aws_lambda_powertools import Logger, Tracer + The GraphQL Query `getTodo(id: "todo_id_value")` will + call `get_todo` as `get_todo(id="todo_id_value")`. 
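A stripped-down sketch of that keyword-argument mapping; the hardcoded payload stands in for the real data source used by the snippet tabs below:

```python
from aws_lambda_powertools.event_handler import AppSyncResolver
from aws_lambda_powertools.utilities.typing import LambdaContext

app = AppSyncResolver()


@app.resolver(type_name="Query", field_name="getTodo")
def get_todo(id: str = ""):  # noqa: A002 - name must match the GraphQL argument
    # `id` receives the query's `id` argument as a keyword argument
    return {"id": id, "title": "placeholder todo"}


def lambda_handler(event: dict, context: LambdaContext) -> dict:
    return app.resolve(event, context)
```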
- from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils +=== "getting_started_graphql_api_resolver.py" - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() + ```python hl_lines="7 13 23 25-26 35 37 48" + --8<-- "examples/event_handler_graphql/src/getting_started_graphql_api_resolver.py" + ``` - # Note that `creation_time` isn't available in the schema - # This utility also takes into account what info you make available at API level vs what's stored - TODOS = [ - { - "id": scalar_types_utils.make_id(), # type ID or String - "title": "First task", - "description": "String", - "done": False, - "creation_time": scalar_types_utils.aws_datetime(), # type AWSDateTime - }, - { - "id": scalar_types_utils.make_id(), - "title": "Second task", - "description": "String", - "done": True, - "creation_time": scalar_types_utils.aws_datetime(), - }, - ] +=== "getting_started_schema.graphql" + ```typescript hl_lines="6-7" + --8<-- "examples/event_handler_graphql/src/getting_started_schema.graphql" + ``` - @app.resolver(type_name="Query", field_name="getTodo") - def get_todo(id: str = ""): - logger.info(f"Fetching Todo {id}") - todo = [todo for todo in TODOS if todo["id"] == id] +=== "getting_started_get_todo.json" - return todo + ```json hl_lines="2-3" + --8<-- "examples/event_handler_graphql/src/getting_started_get_todo.json" + ``` +=== "getting_started_list_todos.json" - @app.resolver(type_name="Query", field_name="listTodos") - def list_todos(): - return TODOS + ```json hl_lines="2 40 42" + --8<-- "examples/event_handler_graphql/src/getting_started_list_todos.json" + ``` +### Scalar functions - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) - ``` +When working with [AWS AppSync Scalar types](https://docs.aws.amazon.com/appsync/latest/devguide/scalars.html){target="_blank"}, you might want to generate the same values for data validation purposes. -=== "schema.graphql" +For convenience, the most commonly used values are available as functions within `scalar_types_utils` module. 
- ```typescript - --8<-- "docs/shared/getting_started_schema.graphql" - ``` +```python hl_lines="1-6" title="Creating key scalar values with scalar_types_utils" +--8<-- "examples/event_handler_graphql/src/scalar_functions.py" +``` -=== "getTodo_event.json" - - ```json - { - "arguments": { - "id": "7e362732-c8cd-4405-b090-144ac9b38960" - }, - "identity": null, - "source": null, - "request": { - "headers": { - "x-forwarded-for": "1.2.3.4, 5.6.7.8", - "accept-encoding": "gzip, deflate, br", - "cloudfront-viewer-country": "NL", - "cloudfront-is-tablet-viewer": "false", - "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", - "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", - "cloudfront-forwarded-proto": "https", - "origin": "https://eu-west-1.console.aws.amazon.com", - "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", - "content-type": "application/json", - "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", - "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", - "content-length": "114", - "x-amz-user-agent": "AWS-Console-AppSync/", - "x-forwarded-proto": "https", - "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", - "accept-language": "en-US,en;q=0.5", - "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", - "cloudfront-is-desktop-viewer": "true", - "cloudfront-is-mobile-viewer": "false", - "accept": "*/*", - "x-forwarded-port": "443", - "cloudfront-is-smarttv-viewer": "false" - } - }, - "prev": null, - "info": { - "parentTypeName": "Query", - "selectionSetList": [ - "title", - "id" - ], - "selectionSetGraphQL": "{\n title\n id\n}", - "fieldName": "getTodo", - "variables": {} - }, - "stash": {} - } - ``` +Here's a table with their related scalar as a quick reference: -=== "listTodos_event.json" - - ```json - { - "arguments": {}, - "identity": null, - "source": null, - "request": { - "headers": { - "x-forwarded-for": "1.2.3.4, 5.6.7.8", - "accept-encoding": "gzip, deflate, br", - "cloudfront-viewer-country": "NL", - "cloudfront-is-tablet-viewer": "false", - "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", - "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", - "cloudfront-forwarded-proto": "https", - "origin": "https://eu-west-1.console.aws.amazon.com", - "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", - "content-type": "application/json", - "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", - "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", - "content-length": "114", - "x-amz-user-agent": "AWS-Console-AppSync/", - "x-forwarded-proto": "https", - "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", - "accept-language": "en-US,en;q=0.5", - "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", - "cloudfront-is-desktop-viewer": "true", - "cloudfront-is-mobile-viewer": "false", - "accept": "*/*", - "x-forwarded-port": "443", - "cloudfront-is-smarttv-viewer": "false" - } - }, - "prev": null, - "info": { - "parentTypeName": "Query", - "selectionSetList": [ - "id", - "title" - ], - "selectionSetGraphQL": "{\n id\n title\n}", - "fieldName": "listTodos", - "variables": {} - }, - "stash": {} - } - ``` +| Scalar type | Scalar function | Sample value | +| ---------------- | ---------------------------------- | -------------------------------------- | +| **ID** | 
`scalar_types_utils.make_id` | `e916c84d-48b6-484c-bef3-cee3e4d86ebf` | +| **AWSDate** | `scalar_types_utils.aws_date` | `2022-07-08Z` | +| **AWSTime** | `scalar_types_utils.aws_time` | `15:11:00.189Z` | +| **AWSDateTime** | `scalar_types_utils.aws_datetime` | `2022-07-08T15:11:00.189Z` | +| **AWSTimestamp** | `scalar_types_utils.aws_timestamp` | `1657293060` | ## Advanced ### Nested mappings -You can nest `app.resolver()` decorator multiple times when resolving fields with the same return. - -=== "nested_mappings.py" - - ```python hl_lines="4 8 10-12 18" - from aws_lambda_powertools import Logger, Tracer +???+ note - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver + The following examples use a more advanced schema. These schemas differ from [initial sample infrastructure we used earlier](#required-resources). - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() +You can nest `app.resolver()` decorator multiple times when resolving fields with the same return value. - @app.resolver(field_name="listLocations") - @app.resolver(field_name="locations") - def get_locations(name: str, description: str = ""): - return name + description +=== "nested_mappings.py" - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="4 10 20-21 23 30" + --8<-- "examples/event_handler_graphql/src/nested_mappings.py" ``` -=== "schema.graphql" +=== "nested_mappings_schema.graphql" ```typescript hl_lines="6 20" - schema { - query: Query - } - - type Query { - listLocations: [Location] - } - - type Location { - id: ID! - name: String! - description: String - address: String - } - - type Merchant { - id: String! - name: String! - description: String - locations: [Location] - } + --8<-- "examples/event_handler_graphql/src/nested_mappings_schema.graphql" ``` ### Async functions For Lambda Python3.8+ runtime, this utility supports async functions when you use in conjunction with `asyncio.run`. -```python hl_lines="5 9 11-13 21" title="Resolving GraphQL resolvers async" -import asyncio -from aws_lambda_powertools import Logger, Tracer - -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import AppSyncResolver - -tracer = Tracer(service="sample_resolver") -logger = Logger(service="sample_resolver") -app = AppSyncResolver() - -@app.resolver(type_name="Query", field_name="listTodos") -async def list_todos(): - todos = await some_async_io_call() - return todos - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - result = app.resolve(event, context) - - return asyncio.run(result) +```python hl_lines="7 14 24-25 34 36" title="Resolving GraphQL resolvers async" +--8<-- "examples/event_handler_graphql/src/async_resolvers.py" ``` ### Amplify GraphQL Transformer @@ -427,29 +137,7 @@ Assuming you have [Amplify CLI installed](https://docs.amplify.aws/cli/start/ins ```typescript hl_lines="7 15 20 22" title="Example GraphQL Schema" -@model -type Merchant { - id: String! - name: String! - description: String - # Resolves to `common_field` - commonField: String @function(name: "merchantInfo-${env}") -} - -type Location { - id: ID! - name: String! 
- address: String - # Resolves to `common_field` - commonField: String @function(name: "merchantInfo-${env}") -} - -type Query { - # List of locations resolves to `list_locations` - listLocations(page: Int, size: Int): [Location] @function(name: "merchantInfo-${env}") - # List of locations resolves to `list_locations` - findMerchant(search: str): [Merchant] @function(name: "searchMerchant-${env}") -} +--8<-- "examples/event_handler_graphql/src/amplify_graphql_transformer_schema.graphql" ``` [Create two new basic Python functions](https://docs.amplify.aws/cli/function#set-up-a-function){target="_blank"} via `amplify add function`. @@ -457,257 +145,60 @@ type Query { ???+ note Amplify CLI generated functions use `Pipenv` as a dependency manager. Your function source code is located at **`amplify/backend/function/your-function-name`**. -Within your function's folder, add Lambda Powertools as a dependency with `pipenv install aws-lambda-powertools`. +Within your function's folder, add Powertools as a dependency with `pipenv install aws-lambda-powertools`. Use the following code for `merchantInfo` and `searchMerchant` functions respectively. -=== "merchantInfo/src/app.py" - - ```python hl_lines="4-5 9 11-12 15-16 23" - from aws_lambda_powertools import Logger, Tracer - - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils - - tracer = Tracer(service="sample_graphql_transformer_resolver") - logger = Logger(service="sample_graphql_transformer_resolver") - app = AppSyncResolver() +=== "graphql_transformer_merchant_info.py" - @app.resolver(type_name="Query", field_name="listLocations") - def list_locations(page: int = 0, size: int = 10): - return [{"id": 100, "name": "Smooth Grooves"}] + ```python hl_lines="4 6 22-23 27-28 36" + --8<-- "examples/event_handler_graphql/src/graphql_transformer_merchant_info.py" + ``` - @app.resolver(field_name="commonField") - def common_field(): - # Would match all fieldNames matching 'commonField' - return scalar_types_utils.make_id() +=== "graphql_transformer_search_merchant.py" - @tracer.capture_lambda_handler - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - def lambda_handler(event, context): - app.resolve(event, context) - ``` -=== "searchMerchant/src/app.py" - - ```python hl_lines="1 4 6-7" - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils - - app = AppSyncResolver() - - @app.resolver(type_name="Query", field_name="findMerchant") - def find_merchant(search: str): - return [ - { - "id": scalar_types_utils.make_id(), - "name": "Brewer Brewing", - "description": "Mike Brewer's IPA brewing place" - }, - { - "id": scalar_types_utils.make_id(), - "name": "Serverlessa's Bakery", - "description": "Lessa's sourdough place" - }, - ] + ```python hl_lines="4 6 21-22 36 42" + --8<-- "examples/event_handler_graphql/src/graphql_transformer_search_merchant.py" ``` -**Example AppSync GraphQL Transformer Function resolver events** - -=== "Query.listLocations event" +=== "graphql_transformer_list_locations.json" ```json hl_lines="2-7" - { - "typeName": "Query", - "fieldName": "listLocations", - "arguments": { - "page": 2, - "size": 1 - }, - "identity": { - "claims": { - "iat": 1615366261 - ... - }, - "username": "mike", - ... 
- }, - "request": { - "headers": { - "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0", - "x-forwarded-for": "127.0.0.1" - ... - } - }, - ... - } + --8<-- "examples/event_handler_graphql/src/graphql_transformer_list_locations.json" ``` -=== "*.commonField event" +=== "graphql_transformer_common_field.json" ```json hl_lines="2 3" - { - "typeName": "Merchant", - "fieldName": "commonField", - "arguments": { - }, - "identity": { - "claims": { - "iat": 1615366261 - ... - }, - "username": "mike", - ... - }, - "request": { - "headers": { - "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0", - "x-forwarded-for": "127.0.0.1" - ... - } - }, - ... - } + --8<-- "examples/event_handler_graphql/src/graphql_transformer_common_field.json" ``` -=== "Query.findMerchant event" +=== "graphql_transformer_find_merchant.json" ```json hl_lines="2-6" - { - "typeName": "Query", - "fieldName": "findMerchant", - "arguments": { - "search": "Brewers Coffee" - }, - "identity": { - "claims": { - "iat": 1615366261 - ... - }, - "username": "mike", - ... - }, - "request": { - "headers": { - "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0", - "x-forwarded-for": "127.0.0.1" - ... - } - }, - ... - } + --8<-- "examples/event_handler_graphql/src/graphql_transformer_find_merchant.json" ``` ### Custom data models -You can subclass `AppSyncResolverEvent` to bring your own set of methods to handle incoming events, by using `data_model` param in the `resolve` method. - -=== "custom_model.py" - - ```python hl_lines="12-15 20 27" - from aws_lambda_powertools import Logger, Tracer - - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import AppSyncResolverEvent - - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() +You can subclass [AppSyncResolverEvent](../../utilities/data_classes.md#appsync-resolver){target="_blank"} to bring your own set of methods to handle incoming events, by using `data_model` param in the `resolve` method. +=== "custom_models.py.py" - class MyCustomModel(AppSyncResolverEvent): - @property - def country_viewer(self) -> str: - return self.request_headers.get("cloudfront-viewer-country") - - @app.resolver(field_name="listLocations") - @app.resolver(field_name="locations") - def get_locations(name: str, description: str = ""): - if app.current_event.country_viewer == "US": - ... - return name + description - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context, data_model=MyCustomModel) + ```python hl_lines="4 7 23-25 28-29 36 43" + --8<-- "examples/event_handler_graphql/src/custom_models.py" ``` -=== "schema.graphql" +=== "nested_mappings_schema.graphql" ```typescript hl_lines="6 20" - schema { - query: Query - } - - type Query { - listLocations: [Location] - } - - type Location { - id: ID! - name: String! - description: String - address: String - } - - type Merchant { - id: String! - name: String! 
- description: String - locations: [Location] - } + --8<-- "examples/event_handler_graphql/src/nested_mappings_schema.graphql" ``` -=== "listLocations_event.json" - - ```json - { - "arguments": {}, - "identity": null, - "source": null, - "request": { - "headers": { - "x-forwarded-for": "1.2.3.4, 5.6.7.8", - "accept-encoding": "gzip, deflate, br", - "cloudfront-viewer-country": "NL", - "cloudfront-is-tablet-viewer": "false", - "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", - "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", - "cloudfront-forwarded-proto": "https", - "origin": "https://eu-west-1.console.aws.amazon.com", - "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", - "content-type": "application/json", - "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", - "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", - "content-length": "114", - "x-amz-user-agent": "AWS-Console-AppSync/", - "x-forwarded-proto": "https", - "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", - "accept-language": "en-US,en;q=0.5", - "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", - "cloudfront-is-desktop-viewer": "true", - "cloudfront-is-mobile-viewer": "false", - "accept": "*/*", - "x-forwarded-port": "443", - "cloudfront-is-smarttv-viewer": "false" - } - }, - "prev": null, - "info": { - "parentTypeName": "Query", - "selectionSetList": [ - "id", - "name", - "description" - ], - "selectionSetGraphQL": "{\n id\n name\n description\n}", - "fieldName": "listLocations", - "variables": {} - }, - "stash": {} - } +=== "graphql_transformer_list_locations.json" + + ```json hl_lines="18-19" + --8<-- "examples/event_handler_graphql/src/graphql_transformer_list_locations.json" ``` ### Split operations with Router @@ -715,59 +206,24 @@ You can subclass `AppSyncResolverEvent` to bring your own set of methods to hand ???+ tip Read the **[considerations section for trade-offs between monolithic and micro functions](./api_gateway.md#considerations){target="_blank"}**, as it's also applicable here. -As you grow the number of related GraphQL operations a given Lambda function should handle, it is natural to split them into separate files to ease maintenance - That's where the `Router` feature is useful. +As you grow the number of related GraphQL operations a given Lambda function should handle, it is natural to split them into separate files to ease maintenance - That's when the `Router` feature comes handy. -Let's assume you have `app.py` as your Lambda function entrypoint and routes in `location.py`, this is how you'd use the `Router` feature. +Let's assume you have `split_operation.py` as your Lambda function entrypoint and routes in `split_operation_module.py`. This is how you'd use the `Router` feature. -=== "resolvers/location.py" +=== "split_operation_module.py" We import **Router** instead of **AppSyncResolver**; syntax wise is exactly the same. 
- ```python hl_lines="4 7 10 15" - from typing import Any, Dict, List - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.appsync import Router - - logger = Logger(child=True) - router = Router() - - - @router.resolver(type_name="Query", field_name="listLocations") - def list_locations(merchant_id: str) -> List[Dict[str, Any]]: - return [{"name": "Location name", "merchant_id": merchant_id}] - + ```python hl_lines="4 8 18-19" + --8<-- "examples/event_handler_graphql/src/split_operation_module.py" + ``` - @router.resolver(type_name="Location", field_name="status") - def resolve_status(merchant_id: str) -> str: - logger.debug(f"Resolve status for merchant_id: {merchant_id}") - return "FOO" - ``` +=== "split_operation.py" -=== "app.py" + We use `include_router` method and include all `location` operations registered in the `router` global object. - We use `include_router` method and include all `location` operations registered in the `router` global object. - - ```python hl_lines="8 13" - from typing import Dict - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.logging.correlation_paths import APPSYNC_RESOLVER - from aws_lambda_powertools.utilities.typing import LambdaContext - - from resolvers import location - - tracer = Tracer() - logger = Logger() - app = AppSyncResolver() - app.include_router(location.router) - - - @tracer.capture_lambda_handler - @logger.inject_lambda_context(correlation_id_path=APPSYNC_RESOLVER) - def lambda_handler(event: Dict, context: LambdaContext): - app.resolve(event, context) + ```python hl_lines="1 11" + --8<-- "examples/event_handler_graphql/src/split_operation.py" ``` ## Testing your code @@ -778,89 +234,43 @@ You can use either `app.resolve(event, context)` or simply `app(event, context)` Here's an example of how you can test your synchronous resolvers: -=== "test_resolver.py" - - ```python - import json - import pytest - from pathlib import Path +=== "assert_graphql_response.py" - from src.index import app # import the instance of AppSyncResolver from your code - - def test_direct_resolver(): - # Load mock event from a file - json_file_path = Path("appSyncDirectResolver.json") - with open(json_file_path) as json_file: - mock_event = json.load(json_file) - - # Call the implicit handler - result = app(mock_event, {}) - - assert result == "created this value" + ```python hl_lines="6 26 29" + --8<-- "examples/event_handler_graphql/src/assert_graphql_response.py" ``` -=== "src/index.py" - - ```python - - from aws_lambda_powertools.event_handler import AppSyncResolver - - app = AppSyncResolver() - - @app.resolver(field_name="createSomething") - def create_something(): - return "created this value" +=== "assert_graphql_response_module.py" + ```python hl_lines="10" + --8<-- "examples/event_handler_graphql/src/assert_graphql_response_module.py" ``` -=== "appSyncDirectResolver.json" +=== "assert_graphql_response.json" - ```json - --8<-- "tests/events/appSyncDirectResolver.json" + ```json hl_lines="5" + --8<-- "examples/event_handler_graphql/src/assert_graphql_response.json" ``` -And an example for testing asynchronous resolvers. Note that this requires the `pytest-asyncio` package: - -=== "test_async_resolver.py" +And an example for testing asynchronous resolvers. Note that this requires the `pytest-asyncio` package. This tests a specific async GraphQL operation. 
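In rough strokes, the test awaits the resolver app so the coroutine's result is returned directly. Module and event fixture names below are placeholders; the real ones are in the tabs that follow:

```python
import json
from pathlib import Path

import pytest
from async_resolvers import app  # hypothetical module with an async resolver


@pytest.mark.asyncio
async def test_async_direct_resolver():
    fake_event = json.loads(Path("appsync_event.json").read_text())  # assumed fixture file

    # awaiting the app yields the async resolver's result
    result = await app(fake_event, {})

    assert result is not None
```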
- ```python - import json - import pytest - from pathlib import Path - - from src.index import app # import the instance of AppSyncResolver from your code - - @pytest.mark.asyncio - async def test_direct_resolver(): - # Load mock event from a file - json_file_path = Path("appSyncDirectResolver.json") - with open(json_file_path) as json_file: - mock_event = json.load(json_file) +???+ note + Alternatively, you can continue call `lambda_handler` function synchronously as it'd run `asyncio.run` to await for the coroutine to complete. - # Call the implicit handler - result = await app(mock_event, {}) +=== "assert_async_graphql_response.py" - assert result == "created this value" + ```python hl_lines="27" + --8<-- "examples/event_handler_graphql/src/assert_async_graphql_response.py" ``` -=== "src/index.py" - - ```python - import asyncio - - from aws_lambda_powertools.event_handler import AppSyncResolver - - app = AppSyncResolver() - - @app.resolver(field_name="createSomething") - async def create_something_async(): - await asyncio.sleep(1) # Do async stuff - return "created this value" +=== "assert_async_graphql_response_module.py" + ```python hl_lines="14" + --8<-- "examples/event_handler_graphql/src/assert_async_graphql_response_module.py" ``` -=== "appSyncDirectResolver.json" +=== "assert_async_graphql_response.json" - ```json - --8<-- "tests/events/appSyncDirectResolver.json" + ```json hl_lines="3 4" + --8<-- "examples/event_handler_graphql/src/assert_async_graphql_response.json" ``` diff --git a/examples/event_handler_graphql/sam/template.yaml b/examples/event_handler_graphql/sam/template.yaml new file mode 100644 index 00000000000..3e2ab60ab10 --- /dev/null +++ b/examples/event_handler_graphql/sam/template.yaml @@ -0,0 +1,124 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Hello world Direct Lambda Resolver + +Globals: + Function: + Timeout: 5 + Runtime: python3.9 + Tracing: Active + Environment: + Variables: + # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/latest/#environment-variables + LOG_LEVEL: INFO + POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 + POWERTOOLS_LOGGER_LOG_EVENT: true + POWERTOOLS_SERVICE_NAME: example + +Resources: + TodosFunction: + Type: AWS::Serverless::Function + Properties: + Handler: getting_started_graphql_api_resolver.lambda_handler + CodeUri: ../src + Description: Sample Direct Lambda Resolver + + # IAM Permissions and Roles + + AppSyncServiceRole: + Type: "AWS::IAM::Role" + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "appsync.amazonaws.com" + Action: + - "sts:AssumeRole" + + InvokeLambdaResolverPolicy: + Type: "AWS::IAM::Policy" + Properties: + PolicyName: "DirectAppSyncLambda" + PolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Action: "lambda:invokeFunction" + Resource: + - !GetAtt TodosFunction.Arn + Roles: + - !Ref AppSyncServiceRole + + # GraphQL API + + TodosApi: + Type: "AWS::AppSync::GraphQLApi" + Properties: + Name: TodosApi + AuthenticationType: "API_KEY" + XrayEnabled: true + + TodosApiKey: + Type: AWS::AppSync::ApiKey + Properties: + ApiId: !GetAtt TodosApi.ApiId + + TodosApiSchema: + Type: "AWS::AppSync::GraphQLSchema" + Properties: + ApiId: !GetAtt TodosApi.ApiId + Definition: | + schema { + query:Query + } + + type Query { + getTodo(id: ID!): Todo + listTodos: [Todo] + } + + type Todo { + id: ID! 
+ userId: String + title: String + completed: Boolean + } + + # Lambda Direct Data Source and Resolver + + TodosFunctionDataSource: + Type: "AWS::AppSync::DataSource" + Properties: + ApiId: !GetAtt TodosApi.ApiId + Name: "HelloWorldLambdaDirectResolver" + Type: "AWS_LAMBDA" + ServiceRoleArn: !GetAtt AppSyncServiceRole.Arn + LambdaConfig: + LambdaFunctionArn: !GetAtt TodosFunction.Arn + + ListTodosResolver: + Type: "AWS::AppSync::Resolver" + Properties: + ApiId: !GetAtt TodosApi.ApiId + TypeName: "Query" + FieldName: "listTodos" + DataSourceName: !GetAtt TodosFunctionDataSource.Name + + GetTodoResolver: + Type: "AWS::AppSync::Resolver" + Properties: + ApiId: !GetAtt TodosApi.ApiId + TypeName: "Query" + FieldName: "getTodo" + DataSourceName: !GetAtt TodosFunctionDataSource.Name + +Outputs: + TodosFunction: + Description: "Hello World Lambda Function ARN" + Value: !GetAtt TodosFunction.Arn + + TodosApi: + Value: !GetAtt TodosApi.Arn diff --git a/examples/event_handler_graphql/src/amplify_graphql_transformer_schema.graphql b/examples/event_handler_graphql/src/amplify_graphql_transformer_schema.graphql new file mode 100644 index 00000000000..0bd6949cb91 --- /dev/null +++ b/examples/event_handler_graphql/src/amplify_graphql_transformer_schema.graphql @@ -0,0 +1,23 @@ +@model +type Merchant { + id: String! + name: String! + description: String + # Resolves to `common_field` + commonField: String @function(name: "merchantInfo-${env}") +} + +type Location { + id: ID! + name: String! + address: String + # Resolves to `common_field` + commonField: String @function(name: "merchantInfo-${env}") +} + +type Query { + # List of locations resolves to `list_locations` + listLocations(page: Int, size: Int): [Location] @function(name: "merchantInfo-${env}") + # List of locations resolves to `list_locations` + findMerchant(search: str): [Merchant] @function(name: "searchMerchant-${env}") +} diff --git a/examples/event_handler_graphql/src/assert_async_graphql_response.json b/examples/event_handler_graphql/src/assert_async_graphql_response.json new file mode 100644 index 00000000000..e22d4e741cd --- /dev/null +++ b/examples/event_handler_graphql/src/assert_async_graphql_response.json @@ -0,0 +1,43 @@ +{ + "typeName": "Query", + "fieldName": "listTodos", + "arguments": {}, + "selectionSetList": [ + "id", + "userId", + "completed" + ], + "identity": { + "claims": { + "sub": "192879fc-a240-4bf1-ab5a-d6a00f3063f9", + "email_verified": true, + "iss": "https://cognito-idp.us-west-2.amazonaws.com/us-west-xxxxxxxxxxx", + "phone_number_verified": false, + "cognito:username": "jdoe", + "aud": "7471s60os7h0uu77i1tk27sp9n", + "event_id": "bc334ed8-a938-4474-b644-9547e304e606", + "token_use": "id", + "auth_time": 1599154213, + "phone_number": "+19999999999", + "exp": 1599157813, + "iat": 1599154213, + "email": "jdoe@email.com" + }, + "defaultAuthStrategy": "ALLOW", + "groups": null, + "issuer": "https://cognito-idp.us-west-2.amazonaws.com/us-west-xxxxxxxxxxx", + "sourceIp": [ + "1.1.1.1" + ], + "sub": "192879fc-a240-4bf1-ab5a-d6a00f3063f9", + "username": "jdoe" + }, + "request": { + "headers": { + "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0", + "x-forwarded-for": "127.0.0.1", + "cloudfront-viewer-country": "NL", + "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq" + } + } +} \ No newline at end of file diff --git a/examples/event_handler_graphql/src/assert_async_graphql_response.py b/examples/event_handler_graphql/src/assert_async_graphql_response.py new file mode 100644 index 00000000000..22eceb1c5d0 --- 
/dev/null +++ b/examples/event_handler_graphql/src/assert_async_graphql_response.py @@ -0,0 +1,34 @@ +import json +from dataclasses import dataclass +from pathlib import Path + +import pytest +from assert_async_graphql_response_module import Location, app # instance of AppSyncResolver + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:123456789012:function:test" + aws_request_id: str = "da658bd3-2d6f-4e7b-8ec2-937234644fdc" + + return LambdaContext() + + +@pytest.mark.asyncio +async def test_async_direct_resolver(lambda_context): + # GIVEN + fake_event = json.loads(Path("assert_async_graphql_response.json").read_text()) + + # WHEN + result: list[Location] = await app(fake_event, lambda_context) + # alternatively, you can also run a sync test against `lambda_handler` + # since `lambda_handler` awaits the coroutine to complete + + # THEN + assert result[0]["userId"] == 1 + assert result[0]["id"] == 1 + assert result[0]["completed"] is False diff --git a/examples/event_handler_graphql/src/assert_async_graphql_response_module.py b/examples/event_handler_graphql/src/assert_async_graphql_response_module.py new file mode 100644 index 00000000000..892da71fb0f --- /dev/null +++ b/examples/event_handler_graphql/src/assert_async_graphql_response_module.py @@ -0,0 +1,37 @@ +import asyncio +from typing import TypedDict + +import aiohttp + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.tracing import aiohttp_trace_config +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() + + +class Todo(TypedDict, total=False): + id: str # noqa AA03 VNE003, required due to GraphQL Schema + userId: str + title: str + completed: bool + + +@app.resolver(type_name="Query", field_name="listTodos") +async def list_todos() -> list[Todo]: + async with aiohttp.ClientSession(trace_configs=[aiohttp_trace_config()]) as session: + async with session.get("https://jsonplaceholder.typicode.com/todos") as resp: + # first two results to demo assertion + return await resp.json()[:2] + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + result = app.resolve(event, context) + + return asyncio.run(result) diff --git a/examples/event_handler_graphql/src/assert_graphql_response.json b/examples/event_handler_graphql/src/assert_graphql_response.json new file mode 100644 index 00000000000..7d5fe1be12e --- /dev/null +++ b/examples/event_handler_graphql/src/assert_graphql_response.json @@ -0,0 +1,45 @@ +{ + "typeName": "Query", + "fieldName": "listLocations", + "arguments": { + "name": "Perkins-Reed", + "description": "Nulla sed amet. Earum libero qui sunt perspiciatis. Non aliquid accusamus." 
+ }, + "selectionSetList": [ + "id", + "name" + ], + "identity": { + "claims": { + "sub": "192879fc-a240-4bf1-ab5a-d6a00f3063f9", + "email_verified": true, + "iss": "https://cognito-idp.us-west-2.amazonaws.com/us-west-xxxxxxxxxxx", + "phone_number_verified": false, + "cognito:username": "jdoe", + "aud": "7471s60os7h0uu77i1tk27sp9n", + "event_id": "bc334ed8-a938-4474-b644-9547e304e606", + "token_use": "id", + "auth_time": 1599154213, + "phone_number": "+19999999999", + "exp": 1599157813, + "iat": 1599154213, + "email": "jdoe@email.com" + }, + "defaultAuthStrategy": "ALLOW", + "groups": null, + "issuer": "https://cognito-idp.us-west-2.amazonaws.com/us-west-xxxxxxxxxxx", + "sourceIp": [ + "1.1.1.1" + ], + "sub": "192879fc-a240-4bf1-ab5a-d6a00f3063f9", + "username": "jdoe" + }, + "request": { + "headers": { + "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0", + "x-forwarded-for": "127.0.0.1", + "cloudfront-viewer-country": "NL", + "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq" + } + } +} \ No newline at end of file diff --git a/examples/event_handler_graphql/src/assert_graphql_response.py b/examples/event_handler_graphql/src/assert_graphql_response.py new file mode 100644 index 00000000000..548aece15e0 --- /dev/null +++ b/examples/event_handler_graphql/src/assert_graphql_response.py @@ -0,0 +1,29 @@ +import json +from dataclasses import dataclass +from pathlib import Path + +import pytest +from assert_graphql_response_module import Location, app # instance of AppSyncResolver + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:123456789012:function:test" + aws_request_id: str = "da658bd3-2d6f-4e7b-8ec2-937234644fdc" + + return LambdaContext() + + +def test_direct_resolver(lambda_context): + # GIVEN + fake_event = json.loads(Path("assert_graphql_response.json").read_text()) + + # WHEN + result: list[Location] = app(fake_event, lambda_context) + + # THEN + assert result[0]["name"] == "Perkins-Reed" diff --git a/examples/event_handler_graphql/src/assert_graphql_response_module.py b/examples/event_handler_graphql/src/assert_graphql_response_module.py new file mode 100644 index 00000000000..2f9c8ac3c41 --- /dev/null +++ b/examples/event_handler_graphql/src/assert_graphql_response_module.py @@ -0,0 +1,30 @@ +from typing import TypedDict + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() + + +class Location(TypedDict, total=False): + id: str # noqa AA03 VNE003, required due to GraphQL Schema + name: str + description: str + address: str + + +@app.resolver(field_name="listLocations") +@app.resolver(field_name="locations") +@tracer.capture_method +def get_locations(name: str, description: str = "") -> list[Location]: # match GraphQL Query arguments + return [{"name": name, "description": description}] + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_graphql/src/async_resolvers.py b/examples/event_handler_graphql/src/async_resolvers.py new file mode 100644 index 00000000000..229e015c886 
--- /dev/null +++ b/examples/event_handler_graphql/src/async_resolvers.py @@ -0,0 +1,36 @@ +import asyncio +from typing import TypedDict + +import aiohttp + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.tracing import aiohttp_trace_config +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() + + +class Todo(TypedDict, total=False): + id: str # noqa AA03 VNE003, required due to GraphQL Schema + userId: str + title: str + completed: bool + + +@app.resolver(type_name="Query", field_name="listTodos") +async def list_todos() -> list[Todo]: + async with aiohttp.ClientSession(trace_configs=[aiohttp_trace_config()]) as session: + async with session.get("https://jsonplaceholder.typicode.com/todos") as resp: + return await resp.json() + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + result = app.resolve(event, context) + + return asyncio.run(result) diff --git a/examples/event_handler_graphql/src/custom_models.py b/examples/event_handler_graphql/src/custom_models.py new file mode 100644 index 00000000000..92763ca3401 --- /dev/null +++ b/examples/event_handler_graphql/src/custom_models.py @@ -0,0 +1,43 @@ +from typing import TypedDict + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils +from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import AppSyncResolverEvent +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() + + +class Location(TypedDict, total=False): + id: str # noqa AA03 VNE003, required due to GraphQL Schema + name: str + description: str + address: str + commonField: str + + +class MyCustomModel(AppSyncResolverEvent): + @property + def country_viewer(self) -> str: + return self.get_header_value(name="cloudfront-viewer-country", default_value="", case_sensitive=False) + + @property + def api_key(self) -> str: + return self.get_header_value(name="x-api-key", default_value="", case_sensitive=False) + + +@app.resolver(type_name="Query", field_name="listLocations") +def list_locations(page: int = 0, size: int = 10) -> list[Location]: + # additional properties/methods will now be available under current_event + logger.debug(f"Request country origin: {app.current_event.country_viewer}") + return [{"id": scalar_types_utils.make_id(), "name": "Perry, James and Carroll"}] + + +@tracer.capture_lambda_handler +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +def lambda_handler(event: dict, context: LambdaContext) -> dict: + app.resolve(event, context, data_model=MyCustomModel) diff --git a/examples/event_handler_graphql/src/getting_started_get_todo.json b/examples/event_handler_graphql/src/getting_started_get_todo.json new file mode 100644 index 00000000000..6cbf15ba36c --- /dev/null +++ b/examples/event_handler_graphql/src/getting_started_get_todo.json @@ -0,0 +1,46 @@ +{ + "arguments": { + "id": "7e362732-c8cd-4405-b090-144ac9b38960" + }, + "identity": null, + "source": null, + 
"request": { + "headers": { + "x-forwarded-for": "1.2.3.4, 5.6.7.8", + "accept-encoding": "gzip, deflate, br", + "cloudfront-viewer-country": "NL", + "cloudfront-is-tablet-viewer": "false", + "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", + "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", + "cloudfront-forwarded-proto": "https", + "origin": "https://eu-west-1.console.aws.amazon.com", + "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", + "content-type": "application/json", + "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", + "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", + "content-length": "114", + "x-amz-user-agent": "AWS-Console-AppSync/", + "x-forwarded-proto": "https", + "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", + "accept-language": "en-US,en;q=0.5", + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", + "cloudfront-is-desktop-viewer": "true", + "cloudfront-is-mobile-viewer": "false", + "accept": "*/*", + "x-forwarded-port": "443", + "cloudfront-is-smarttv-viewer": "false" + } + }, + "prev": null, + "info": { + "parentTypeName": "Query", + "selectionSetList": [ + "title", + "id" + ], + "selectionSetGraphQL": "{\n title\n id\n}", + "fieldName": "getTodo", + "variables": {} + }, + "stash": {} +} \ No newline at end of file diff --git a/examples/event_handler_graphql/src/getting_started_graphql_api_resolver.py b/examples/event_handler_graphql/src/getting_started_graphql_api_resolver.py new file mode 100644 index 00000000000..4e42bd42f58 --- /dev/null +++ b/examples/event_handler_graphql/src/getting_started_graphql_api_resolver.py @@ -0,0 +1,48 @@ +from typing import TypedDict + +import requests +from requests import Response + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() + + +class Todo(TypedDict, total=False): + id: str # noqa AA03 VNE003, required due to GraphQL Schema + userId: str + title: str + completed: bool + + +@app.resolver(type_name="Query", field_name="getTodo") +@tracer.capture_method +def get_todo( + id: str = "", # noqa AA03 VNE003 shadows built-in id to match query argument, e.g., getTodo(id: "some_id") +) -> Todo: + logger.info(f"Fetching Todo {id}") + todos: Response = requests.get(f"https://jsonplaceholder.typicode.com/todos/{id}") + todos.raise_for_status() + + return todos.json() + + +@app.resolver(type_name="Query", field_name="listTodos") +@tracer.capture_method +def list_todos() -> list[Todo]: + todos: Response = requests.get("https://jsonplaceholder.typicode.com/todos") + todos.raise_for_status() + + # for brevity, we'll limit to the first 10 only + return todos.json()[:10] + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_graphql/src/getting_started_list_todos.json b/examples/event_handler_graphql/src/getting_started_list_todos.json new file mode 100644 index 00000000000..5be5094cf94 --- /dev/null +++ b/examples/event_handler_graphql/src/getting_started_list_todos.json @@ -0,0 +1,44 @@ +{ + "arguments": {}, + 
"identity": null, + "source": null, + "request": { + "headers": { + "x-forwarded-for": "1.2.3.4, 5.6.7.8", + "accept-encoding": "gzip, deflate, br", + "cloudfront-viewer-country": "NL", + "cloudfront-is-tablet-viewer": "false", + "referer": "https://eu-west-1.console.aws.amazon.com/appsync/home?region=eu-west-1", + "via": "2.0 9fce949f3749407c8e6a75087e168b47.cloudfront.net (CloudFront)", + "cloudfront-forwarded-proto": "https", + "origin": "https://eu-west-1.console.aws.amazon.com", + "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq", + "content-type": "application/json", + "x-amzn-trace-id": "Root=1-606eb2f2-1babc433453a332c43fb4494", + "x-amz-cf-id": "SJw16ZOPuMZMINx5Xcxa9pB84oMPSGCzNOfrbJLvd80sPa0waCXzYQ==", + "content-length": "114", + "x-amz-user-agent": "AWS-Console-AppSync/", + "x-forwarded-proto": "https", + "host": "ldcvmkdnd5az3lm3gnf5ixvcyy.appsync-api.eu-west-1.amazonaws.com", + "accept-language": "en-US,en;q=0.5", + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:78.0) Gecko/20100101 Firefox/78.0", + "cloudfront-is-desktop-viewer": "true", + "cloudfront-is-mobile-viewer": "false", + "accept": "*/*", + "x-forwarded-port": "443", + "cloudfront-is-smarttv-viewer": "false" + } + }, + "prev": null, + "info": { + "parentTypeName": "Query", + "selectionSetList": [ + "id", + "title" + ], + "selectionSetGraphQL": "{\n id\n title\n}", + "fieldName": "listTodos", + "variables": {} + }, + "stash": {} +} \ No newline at end of file diff --git a/docs/shared/getting_started_schema.graphql b/examples/event_handler_graphql/src/getting_started_schema.graphql similarity index 76% rename from docs/shared/getting_started_schema.graphql rename to examples/event_handler_graphql/src/getting_started_schema.graphql index c738156bd73..b8ef8f995d0 100644 --- a/docs/shared/getting_started_schema.graphql +++ b/examples/event_handler_graphql/src/getting_started_schema.graphql @@ -9,7 +9,7 @@ type Query { type Todo { id: ID! 
+  userId: String
   title: String
-  description: String
-  done: Boolean
+  completed: Boolean
 }
diff --git a/examples/event_handler_graphql/src/graphql_transformer_common_field.json b/examples/event_handler_graphql/src/graphql_transformer_common_field.json
new file mode 100644
index 00000000000..6b8b47b8172
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_common_field.json
@@ -0,0 +1,17 @@
+{
+  "typeName": "Merchant",
+  "fieldName": "commonField",
+  "arguments": {},
+  "identity": {
+    "claims": {
+      "iat": 1615366261
+    },
+    "username": "marieellis"
+  },
+  "request": {
+    "headers": {
+      "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+      "x-forwarded-for": "127.0.0.1"
+    }
+  }
+}
\ No newline at end of file
diff --git a/examples/event_handler_graphql/src/graphql_transformer_find_merchant.json b/examples/event_handler_graphql/src/graphql_transformer_find_merchant.json
new file mode 100644
index 00000000000..8186ebc110e
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_find_merchant.json
@@ -0,0 +1,19 @@
+{
+  "typeName": "Query",
+  "fieldName": "findMerchant",
+  "arguments": {
+    "search": "Parry-Wood"
+  },
+  "identity": {
+    "claims": {
+      "iat": 1615366261
+    },
+    "username": "wwilliams"
+  },
+  "request": {
+    "headers": {
+      "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+      "x-forwarded-for": "127.0.0.1"
+    }
+  }
+}
\ No newline at end of file
diff --git a/examples/event_handler_graphql/src/graphql_transformer_list_locations.json b/examples/event_handler_graphql/src/graphql_transformer_list_locations.json
new file mode 100644
index 00000000000..b8f24aa70b6
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_list_locations.json
@@ -0,0 +1,22 @@
+{
+  "typeName": "Query",
+  "fieldName": "listLocations",
+  "arguments": {
+    "page": 2,
+    "size": 1
+  },
+  "identity": {
+    "claims": {
+      "iat": 1615366261
+    },
+    "username": "treid"
+  },
+  "request": {
+    "headers": {
+      "x-amzn-trace-id": "Root=1-60488877-0b0c4e6727ab2a1c545babd0",
+      "x-forwarded-for": "127.0.0.1",
+      "cloudfront-viewer-country": "NL",
+      "x-api-key": "da1-c33ullkbkze3jg5hf5ddgcs4fq"
+    }
+  }
+}
\ No newline at end of file
diff --git a/examples/event_handler_graphql/src/graphql_transformer_merchant_info.py b/examples/event_handler_graphql/src/graphql_transformer_merchant_info.py
new file mode 100644
index 00000000000..272f119f3b8
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_merchant_info.py
@@ -0,0 +1,36 @@
+from typing import TypedDict
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = AppSyncResolver()
+
+
+class Location(TypedDict, total=False):
+    id: str  # noqa AA03 VNE003, required due to GraphQL Schema
+    name: str
+    description: str
+    address: str
+    commonField: str
+
+
+@app.resolver(type_name="Query", field_name="listLocations")
+def list_locations(page: int = 0, size: int = 10) -> list[Location]:
+    return [{"id": scalar_types_utils.make_id(), "name": "Smooth Grooves"}]
+
+
+@app.resolver(field_name="commonField")
+def common_field() -> str:
+    # Would match all fieldNames matching 'commonField'
+    return scalar_types_utils.make_id()
+
+
+@tracer.capture_lambda_handler
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
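The module above pairs naturally with the bundled sample events. As a rough local-testing sketch (not part of this patch set, written in the same style as assert_graphql_response.py earlier), the listLocations resolver can be exercised directly through the AppSyncResolver instance; the import name, file path, and context values below are illustrative assumptions:

import json
from dataclasses import dataclass
from pathlib import Path

import graphql_transformer_merchant_info as merchant_module  # hypothetical import name


@dataclass
class LambdaContext:
    function_name: str = "test"
    memory_limit_in_mb: int = 128
    invoked_function_arn: str = "arn:aws:lambda:eu-west-1:123456789012:function:test"
    aws_request_id: str = "da658bd3-2d6f-4e7b-8ec2-937234644fdc"


def test_list_locations():
    # GIVEN the bundled sample event for the listLocations field
    event = json.loads(Path("graphql_transformer_list_locations.json").read_text())

    # WHEN the resolver instance routes the event (bypassing the tracer-decorated handler)
    result = merchant_module.app(event, LambdaContext())

    # THEN the resolver returns the single stubbed location
    assert result[0]["name"] == "Smooth Grooves"

Calling the resolver instance directly, rather than lambda_handler, mirrors the earlier test module and sidesteps the tracing decorators during local runs.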
diff --git a/examples/event_handler_graphql/src/graphql_transformer_search_merchant.py b/examples/event_handler_graphql/src/graphql_transformer_search_merchant.py
new file mode 100644
index 00000000000..e2adb566f93
--- /dev/null
+++ b/examples/event_handler_graphql/src/graphql_transformer_search_merchant.py
@@ -0,0 +1,42 @@
+from typing import TypedDict
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+app = AppSyncResolver()
+tracer = Tracer()
+logger = Logger()
+
+
+class Merchant(TypedDict, total=False):
+    id: str  # noqa AA03 VNE003, required due to GraphQL Schema
+    name: str
+    description: str
+    commonField: str
+
+
+@app.resolver(type_name="Query", field_name="findMerchant")
+def find_merchant(search: str) -> list[Merchant]:
+    merchants: list[Merchant] = [
+        {
+            "id": scalar_types_utils.make_id(),
+            "name": "Parry-Wood",
+            "description": "Possimus doloremque tempora harum deleniti eum.",
+        },
+        {
+            "id": scalar_types_utils.make_id(),
+            "name": "Shaw, Owen and Jones",
+            "description": "Aliquam iste architecto suscipit in.",
+        },
+    ]
+
+    return [merchant for merchant in merchants if search == merchant["name"]]
+
+
+@tracer.capture_lambda_handler
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_graphql/src/nested_mappings.py b/examples/event_handler_graphql/src/nested_mappings.py
new file mode 100644
index 00000000000..2f9c8ac3c41
--- /dev/null
+++ b/examples/event_handler_graphql/src/nested_mappings.py
@@ -0,0 +1,30 @@
+from typing import TypedDict
+
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler import AppSyncResolver
+from aws_lambda_powertools.logging import correlation_paths
+from aws_lambda_powertools.utilities.typing import LambdaContext
+
+tracer = Tracer()
+logger = Logger()
+app = AppSyncResolver()
+
+
+class Location(TypedDict, total=False):
+    id: str  # noqa AA03 VNE003, required due to GraphQL Schema
+    name: str
+    description: str
+    address: str
+
+
+@app.resolver(field_name="listLocations")
+@app.resolver(field_name="locations")
+@tracer.capture_method
+def get_locations(name: str, description: str = "") -> list[Location]:  # match GraphQL Query arguments
+    return [{"name": name, "description": description}]
+
+
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER)
+@tracer.capture_lambda_handler
+def lambda_handler(event: dict, context: LambdaContext) -> dict:
+    return app.resolve(event, context)
diff --git a/examples/event_handler_graphql/src/nested_mappings_schema.graphql b/examples/event_handler_graphql/src/nested_mappings_schema.graphql
new file mode 100644
index 00000000000..23a9ae468b1
--- /dev/null
+++ b/examples/event_handler_graphql/src/nested_mappings_schema.graphql
@@ -0,0 +1,21 @@
+schema {
+  query: Query
+}
+
+type Query {
+  listLocations: [Location]
+}
+
+type Location {
+  id: ID!
+  name: String!
+ description: String + address: String +} + +type Merchant { + id: String! + name: String! + description: String + locations: [Location] +} diff --git a/examples/event_handler_graphql/src/scalar_functions.py b/examples/event_handler_graphql/src/scalar_functions.py new file mode 100644 index 00000000000..0d8fa98b7b3 --- /dev/null +++ b/examples/event_handler_graphql/src/scalar_functions.py @@ -0,0 +1,15 @@ +from aws_lambda_powertools.utilities.data_classes.appsync.scalar_types_utils import ( + aws_date, + aws_datetime, + aws_time, + aws_timestamp, + make_id, +) + +# Scalars: https://docs.aws.amazon.com/appsync/latest/devguide/scalars.html + +_: str = make_id() # Scalar: ID! +_: str = aws_date() # Scalar: AWSDate +_: str = aws_time() # Scalar: AWSTime +_: str = aws_datetime() # Scalar: AWSDateTime +_: int = aws_timestamp() # Scalar: AWSTimestamp diff --git a/examples/event_handler_graphql/src/split_operation.py b/examples/event_handler_graphql/src/split_operation.py new file mode 100644 index 00000000000..5704181d78c --- /dev/null +++ b/examples/event_handler_graphql/src/split_operation.py @@ -0,0 +1,17 @@ +import split_operation_module + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() +app.include_router(split_operation_module.router) + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/examples/event_handler_graphql/src/split_operation_module.py b/examples/event_handler_graphql/src/split_operation_module.py new file mode 100644 index 00000000000..43c413672b6 --- /dev/null +++ b/examples/event_handler_graphql/src/split_operation_module.py @@ -0,0 +1,22 @@ +from typing import TypedDict + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler.appsync import Router + +tracer = Tracer() +logger = Logger() +router = Router() + + +class Location(TypedDict, total=False): + id: str # noqa AA03 VNE003, required due to GraphQL Schema + name: str + description: str + address: str + + +@router.resolver(field_name="listLocations") +@router.resolver(field_name="locations") +@tracer.capture_method +def get_locations(name: str, description: str = "") -> list[Location]: # match GraphQL Query arguments + return [{"name": name, "description": description}] diff --git a/examples/event_handler_rest/sam/template.yaml b/examples/event_handler_rest/sam/template.yaml index f9837e729a5..513e6196f13 100644 --- a/examples/event_handler_rest/sam/template.yaml +++ b/examples/event_handler_rest/sam/template.yaml @@ -13,7 +13,7 @@ Globals: - "*~1*" # converts to */* for any binary type Function: Timeout: 5 - Runtime: python3.8 + Runtime: python3.9 Tracing: Active Environment: Variables: @@ -26,8 +26,8 @@ Resources: ApiFunction: Type: AWS::Serverless::Function Properties: - Handler: app.lambda_handler - CodeUri: api_handler/ + Handler: getting_started_rest_api_resolver.lambda_handler + CodeUri: ../src Description: API handler function Events: AnyApiEvent: From 2d4df8a0703043a9076fc4c19c7ec128f250ce04 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 11 Jul 2022 15:01:45 +0200 Subject: [PATCH 49/72] chore(ci): convert inline gh-script to file 
Signed-off-by: heitorlessa --- .github/scripts/save_pr_details.js | 15 +++++++++++++++ .github/workflows/record_pr.yml | 18 ++++++++---------- 2 files changed, 23 insertions(+), 10 deletions(-) create mode 100644 .github/scripts/save_pr_details.js diff --git a/.github/scripts/save_pr_details.js b/.github/scripts/save_pr_details.js new file mode 100644 index 00000000000..2a4d7149af0 --- /dev/null +++ b/.github/scripts/save_pr_details.js @@ -0,0 +1,15 @@ +module.exports = async ({github, context, core}) => { + const fs = require('fs'); + const filename = "pr.txt"; + + try { + core.debug("Payload as it comes.."); + core.debug(context.payload); + fs.writeFileSync(`./${filename}`, JSON.stringify(context.payload)); + + return `PR successfully saved ${filename}` + } catch (err) { + core.setFailed("Failed to save PR details"); + console.error(err); + } +} \ No newline at end of file diff --git a/.github/workflows/record_pr.yml b/.github/workflows/record_pr.yml index fcee8876286..8d56dd29358 100644 --- a/.github/workflows/record_pr.yml +++ b/.github/workflows/record_pr.yml @@ -2,7 +2,7 @@ name: Record PR number on: pull_request: - types: [opened, edited] + types: [opened, edited, closed] jobs: build: @@ -10,15 +10,13 @@ jobs: steps: - uses: actions/checkout@v3 - - name: Save PR number - run: | - mkdir -p ./pr - echo ${{ github.event.number }} > ./pr/number - echo "${{ github.event.pull_request.title }}" > ./pr/title - echo "${{ github.event.pull_request.body }}" > ./pr/body - echo "${{ github.event.pull_request.user.login }}" > ./pr/author - echo "${{ github.event.action }}" > ./pr/action + - name: "Extract PR details" + uses: actions/github-script@v6 + with: + script: | + const script = require('.github/scripts/save_pr_details.js') + await script({github, context, core}) - uses: actions/upload-artifact@v3 with: name: pr - path: pr/ + path: pr.txt From 06965bbd42c607ea7ef7d2147e56344a588137e6 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 11 Jul 2022 15:04:46 +0200 Subject: [PATCH 50/72] chore(ci): make export PR reusable Signed-off-by: heitorlessa --- .github/scripts/download_pr_artifact.js | 26 ++++++ .../scripts/label_missing_related_issue.js | 28 ++++++ .github/scripts/label_pr_based_on_title.js | 39 ++++++++ .github/scripts/label_related_issue.js | 8 +- .github/workflows/export_pr_details.yml | 75 ---------------- .github/workflows/label_pr_on_title.yml | 88 ++++--------------- .github/workflows/on_merged_pr.yml | 27 ++++-- .github/workflows/on_opened_pr.yml | 51 ++++------- .github/workflows/record_pr.yml | 4 +- .../workflows/reusable_export_pr_details.yml | 81 +++++++++++++++++ 10 files changed, 234 insertions(+), 193 deletions(-) create mode 100644 .github/scripts/download_pr_artifact.js create mode 100644 .github/scripts/label_missing_related_issue.js create mode 100644 .github/scripts/label_pr_based_on_title.js delete mode 100644 .github/workflows/export_pr_details.yml create mode 100644 .github/workflows/reusable_export_pr_details.yml diff --git a/.github/scripts/download_pr_artifact.js b/.github/scripts/download_pr_artifact.js new file mode 100644 index 00000000000..274467c1f1c --- /dev/null +++ b/.github/scripts/download_pr_artifact.js @@ -0,0 +1,26 @@ +module.exports = async ({github, context, core}) => { + const fs = require('fs'); + + const workflowRunId = process.env.WORKFLOW_ID; + core.info(`Listing artifacts for workflow run ${workflowRunId}`); + + const artifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, 
+        run_id: workflowRunId,
+    });
+
+    const matchArtifact = artifacts.data.artifacts.filter(artifact => artifact.name == "pr")[0];
+
+    core.info(`Downloading artifacts for workflow run ${workflowRunId}`);
+    const artifact = await github.rest.actions.downloadArtifact({
+        owner: context.repo.owner,
+        repo: context.repo.repo,
+        artifact_id: matchArtifact.id,
+        archive_format: 'zip',
+    });
+
+    core.info("Saving artifact found", artifact);
+
+    fs.writeFileSync('pr.zip', Buffer.from(artifact.data));
+}
diff --git a/.github/scripts/label_missing_related_issue.js b/.github/scripts/label_missing_related_issue.js
new file mode 100644
index 00000000000..2fbd2674a4b
--- /dev/null
+++ b/.github/scripts/label_missing_related_issue.js
@@ -0,0 +1,28 @@
+module.exports = async ({github, context, core}) => {
+    const prBody = process.env.PR_BODY;
+    const prNumber = process.env.PR_NUMBER;
+    const blockLabel = process.env.BLOCK_LABEL;
+    const blockReasonLabel = process.env.BLOCK_REASON_LABEL;
+
+    const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<issue>\d+)/;
+
+    const isMatch = RELATED_ISSUE_REGEX.exec(prBody);
+    if (isMatch == null) {
+        core.info(`No related issue found, maybe the author didn't use the template but there is one.`)
+
+        let msg = "No related issues found. Please ensure there is an open issue related to this change to avoid significant delays or closure.";
+        await github.rest.issues.createComment({
+            owner: context.repo.owner,
+            repo: context.repo.repo,
+            body: msg,
+            issue_number: prNumber,
+        });
+
+        return await github.rest.issues.addLabels({
+            issue_number: prNumber,
+            owner: context.repo.owner,
+            repo: context.repo.repo,
+            labels: [blockLabel, blockReasonLabel]
+        })
+    }
+}
diff --git a/.github/scripts/label_pr_based_on_title.js b/.github/scripts/label_pr_based_on_title.js
new file mode 100644
index 00000000000..f13d48f32bc
--- /dev/null
+++ b/.github/scripts/label_pr_based_on_title.js
@@ -0,0 +1,39 @@
+module.exports = async ({github, context, core}) => {
+    const pr_number = process.env.PR_NUMBER
+    const pr_title = process.env.PR_TITLE
+
+    console.log(pr_title)
+
+    const FEAT_REGEX = /feat(\((.+)\))?(\:.+)/
+    const BUG_REGEX = /(fix|bug)(\((.+)\))?(\:.+)/
+    const DOCS_REGEX = /(docs|doc)(\((.+)\))?(\:.+)/
+    const CHORE_REGEX = /(chore)(\((.+)\))?(\:.+)/
+    const DEPRECATED_REGEX = /(deprecated)(\((.+)\))?(\:.+)/
+    const REFACTOR_REGEX = /(refactor)(\((.+)\))?(\:.+)/
+
+    const labels = {
+        "feature": FEAT_REGEX,
+        "bug": BUG_REGEX,
+        "documentation": DOCS_REGEX,
+        "internal": CHORE_REGEX,
+        "enhancement": REFACTOR_REGEX,
+        "deprecated": DEPRECATED_REGEX,
+    }
+
+    for (const label in labels) {
+        const matcher = new RegExp(labels[label])
+        const isMatch = matcher.exec(pr_title)
+        if (isMatch != null) {
+            console.info(`Auto-labeling PR ${pr_number} with ${label}`)
+
+            await github.rest.issues.addLabels({
+                issue_number: pr_number,
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                labels: [label]
+            })
+
+            break
+        }
+    }
+}
\ No newline at end of file
diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js
index a66a63fd005..3fe5727163d 100644
--- a/.github/scripts/label_related_issue.js
+++ b/.github/scripts/label_related_issue.js
@@ -1,11 +1,11 @@
 module.exports = async ({github, context, core}) => {
-    const prBody = context.payload.body;
-    const prNumber = context.payload.number;
+    const prBody = process.env.PR_BODY;
+    const prNumber = process.env.PR_NUMBER;
     const releaseLabel = process.env.RELEASE_LABEL;
-    const maintainersTeam = process.env.MAINTAINERS_TEAM
-
+    const maintainersTeam = process.env.MAINTAINERS_TEAM;
     const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<issue>\d+)/;
+    core.info(prBody);
     const isMatch = RELATED_ISSUE_REGEX.exec(prBody);
     if (!isMatch) {
         core.setFailed(`Unable to find related issue for PR number ${prNumber}.\n\n Body details: ${prBody}`);
         return await github.rest.issues.createComment({
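The related-issue pattern these scripts share is easy to sanity-check offline. A rough Python equivalent of RELATED_ISSUE_REGEX (Python named groups use (?P<issue>...) where the JS pattern uses (?<issue>...); the sample bodies below are made up for illustration):

import re

# Python translation of the JS RELATED_ISSUE_REGEX, for quick local checks.
RELATED_ISSUE_REGEX = re.compile(r"Issue number:[^\d\r\n]+(?P<issue>\d+)")

samples = {
    "Issue number: #1234": "1234",       # plain template usage
    "Issue number: closes #42": "42",    # prose between the label and the digits
    "No issue reference here": None,     # should not match at all
}

for body, expected in samples.items():
    match = RELATED_ISSUE_REGEX.search(body)
    found = match.group("issue") if match else None
    assert found == expected, (body, found)

The [^\d\r\n]+ run keeps the match on a single line, so an issue number mentioned further down the PR body does not leak into the wrong template field.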
diff --git a/.github/workflows/export_pr_details.yml b/.github/workflows/export_pr_details.yml
deleted file mode 100644
index af03150c3d5..00000000000
--- a/.github/workflows/export_pr_details.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-on:
-  workflow_call:
-    inputs:
-      record_pr_workflow_id:
-        required: true
-        type: number
-    secrets:
-      token:
-        required: true
-    # Map the workflow outputs to job outputs
-    outputs:
-      prNumber:
-        description: "The first output string"
-        value: ${{ jobs.export_pr_details.outputs.prNumber }}
-      prTitle:
-        description: "The second output string"
-        value: ${{ jobs.export_pr_details.outputs.prTitle }}
-      prBody:
-        description: "The second output string"
-        value: ${{ jobs.export_pr_details.outputs.prBody }}
-      prAuthor:
-        description: "The second output string"
-        value: ${{ jobs.export_pr_details.outputs.prAuthor }}
-      prAction:
-        description: "The second output string"
-        value: ${{ jobs.export_pr_details.outputs.prAction }}
-
-name: Export Pull Request details from fork
-jobs:
-  export_pr_details:
-    runs-on: ubuntu-latest
-    # Map the job outputs to step outputs
-    outputs:
-      prNumber: ${{ steps.prNumber.outputs.prNumber }}
-      prTitle: ${{ steps.prTitle.outputs.prTitle }}
-      prBody: ${{ steps.prBody.outputs.prBody }}
-      prAuthor: ${{ steps.prAuthor.outputs.prAuthor }}
-      prAction: ${{ steps.prAction.outputs.prAction }}
-    steps:
-      - name: "Download artifact"
-        uses: actions/github-script@v6
-        # For security, we only download artifacts tied to the successful PR recording workflow
-        with:
-          github-token: ${{ secrets.token }}
-          script: |
-            const fs = require('fs');
-
-            const artifacts = await github.rest.actions.listWorkflowRunArtifacts({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              run_id: ${{inputs.record_pr_workflow_id}},
-            });
-
-            const matchArtifact = artifacts.data.artifacts.filter(artifact => artifact.name == "pr")[0];
-
-            const artifact = await github.rest.actions.downloadArtifact({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              artifact_id: matchArtifact.id,
-              archive_format: 'zip',
-            });
-
-            fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(artifact.data));
-      # NodeJS standard library doesn't provide ZIP capabilities; use system `unzip` command instead
-      - run: unzip pr.zip
-      - id: prNumber
-        run: echo ::set-output name=prNumber::$(cat ./number)
-      - id: prTitle
-        run: echo ::set-output name=prTitle::$(cat ./title)
-      - id: prBody
-        run: echo ::set-output name=prBody::$(cat ./body)
-      - id: prAuthor
-        run: echo ::set-output name=prAuthor::$(cat ./author)
-      - id: prAction
-        run: echo ::set-output name=prAction::$(cat ./action)
diff --git a/.github/workflows/label_pr_on_title.yml b/.github/workflows/label_pr_on_title.yml
index 06e3f552cc2..562959bb516 100644
--- a/.github/workflows/label_pr_on_title.yml
+++ b/.github/workflows/label_pr_on_title.yml
@@ -2,86 +2,36 @@ name: Label PR based on title
 
 on:
   workflow_run:
-    workflows: ["Record PR number"]
+    workflows: ["Record PR details"]
    types:
      - completed
 
 jobs:
-  upload:
-    runs-on: ubuntu-latest
+  get_pr_details:
    # Guardrails to only ever run if PR recording workflow was indeed
    # run in a PR event and ran successfully
-    if: >
-      ${{ github.event.workflow_run.event ==
'pull_request' && - github.event.workflow_run.conclusion == 'success' }} + if: ${{ github.event.workflow_run.conclusion == 'success' }} + uses: ./.github/workflows/reusable_export_pr_details.yml + with: + record_pr_workflow_id: ${{ github.event.workflow_run.id }} + secrets: + token: ${{ secrets.GITHUB_TOKEN }} + label_pr: + needs: get_pr_details + runs-on: ubuntu-latest steps: - - name: 'Download artifact' - uses: actions/github-script@v6 - # For security, we only download artifacts tied to the successful PR recording workflow - with: - script: | - const fs = require('fs'); - - const artifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: ${{github.event.workflow_run.id }}, - }); - - const matchArtifact = artifacts.data.artifacts.filter(artifact => artifact.name == "pr")[0]; - - const artifact = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', - }); - - fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(artifact.data)); - # NodeJS standard library doesn't provide ZIP capabilities; use system `unzip` command instead - - run: unzip pr.zip - - - name: 'Label PR based on title' + - name: Checkout repository + uses: actions/checkout@v3 + - name: "Label PR based on title" uses: actions/github-script@v6 + env: + PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} + PR_TITLE: ${{ needs.get_pr_details.outputs.prTitle }} with: github-token: ${{ secrets.GITHUB_TOKEN }} # This safely runs in our base repo, not on fork # thus allowing us to provide a write access token to label based on PR title # and label PR based on semantic title accordingly script: | - const fs = require('fs'); - const pr_number = Number(fs.readFileSync('./number')); - const pr_title = fs.readFileSync('./title', 'utf-8').trim(); - - const FEAT_REGEX = /feat(\((.+)\))?(\:.+)/ - const BUG_REGEX = /(fix|bug)(\((.+)\))?(\:.+)/ - const DOCS_REGEX = /(docs|doc)(\((.+)\))?(\:.+)/ - const CHORE_REGEX = /(chore)(\((.+)\))?(\:.+)/ - const DEPRECATED_REGEX = /(deprecated)(\((.+)\))?(\:.+)/ - const REFACTOR_REGEX = /(refactor)(\((.+)\))?(\:.+)/ - - const labels = { - "feature": FEAT_REGEX, - "bug": BUG_REGEX, - "documentation": DOCS_REGEX, - "internal": CHORE_REGEX, - "enhancement": REFACTOR_REGEX, - "deprecated": DEPRECATED_REGEX, - } - - for (const label in labels) { - const matcher = new RegExp(labels[label]) - const isMatch = matcher.exec(pr_title) - if (isMatch != null) { - console.info(`Auto-labeling PR ${pr_number} with ${label}`) - - await github.rest.issues.addLabels({ - issue_number: pr_number, - owner: context.repo.owner, - repo: context.repo.repo, - labels: [label] - }) - - break - } - } + const script = require('.github/scripts/label_pr_based_on_title.js') + await script({github, context, core}) diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml index 70f43daab3b..689ec9f65d2 100644 --- a/.github/workflows/on_merged_pr.yml +++ b/.github/workflows/on_merged_pr.yml @@ -1,26 +1,37 @@ -# Maintenance: Verify why we're having permissions issues even with write scope, then re-enable it. 
-# logs: https://github.com/awslabs/aws-lambda-powertools-python/runs/7030238348?check_suite_focus=true +name: On PR merge on: - pull_request: + workflow_run: + workflows: ["Record PR details"] types: - - closed + - completed env: RELEASE_LABEL: "pending-release" MAINTAINERS_TEAM: "@awslabs/aws-lambda-powertools-python" jobs: + get_pr_details: + if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' + uses: ./.github/workflows/reusable_export_pr_details.yml + with: + record_pr_workflow_id: ${{ github.event.workflow_run.id }} + secrets: + token: ${{ secrets.GITHUB_TOKEN }} release_label_on_merge: - if: github.event.pull_request.merged == true && github.event.pull_request.user.login != 'dependabot[bot]' + needs: get_pr_details + # Maintenance: Conditional isn't working as expected + if: | + needs.get_pr_details.outputs.prAuthor != 'dependabot[bot]' + && needs.get_pr_details.outputs.prIsMerged == true runs-on: ubuntu-latest - permissions: - issues: write # required for new scoped token - pull-requests: write # required for new scoped token steps: - uses: actions/checkout@v3 - name: "Label PR related issue for release" uses: actions/github-script@v6 + env: + PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} + PR_BODY: ${{ needs.get_pr_details.outputs.prBody }} with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml index 9a539976467..831fa89563a 100644 --- a/.github/workflows/on_opened_pr.yml +++ b/.github/workflows/on_opened_pr.yml @@ -1,59 +1,40 @@ +name: On PR open + on: workflow_run: - workflows: ["Record PR number"] + workflows: ["Record PR details"] types: - completed env: BLOCK_LABEL: "do-not-merge" BLOCK_REASON_LABEL: "need-issue" + IGNORE_AUTHORS: '["dependabot[bot]", "markdownify[bot]"]' jobs: get_pr_details: - if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' - uses: ./.github/workflows/export_pr_details.yml + if: ${{ github.event.workflow_run.conclusion == 'success' }} + uses: ./.github/workflows/reusable_export_pr_details.yml with: record_pr_workflow_id: ${{ github.event.workflow_run.id }} secrets: token: ${{ secrets.GITHUB_TOKEN }} check_related_issue: needs: get_pr_details - if: > - ${{ needs.get_pr_details.outputs.prAuthor != 'dependabot[bot]' && - needs.get_pr_details.outputs.prAction == 'opened' - }} + # Maintenance: Refactor condition to the correct env syntax later + if: | + needs.get_pr_details.outputs.prAction == 'opened' + && contains(fromJson('["dependabot[bot]", "markdownify[bot]"]'), needs.get_pr_details.outputs.prAuthor) != true runs-on: ubuntu-latest steps: + - uses: actions/checkout@v3 - name: "Ensure related issue is present" uses: actions/github-script@v6 + env: + PR_BODY: ${{ needs.get_pr_details.outputs.prBody }} + PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} with: github-token: ${{ secrets.GITHUB_TOKEN }} - # Maintenance: convert into a standalone JS like post_release.js script: | - const prBody = "${{ needs.get_pr_details.outputs.prBody }}"; - const prNumber = ${{ needs.get_pr_details.outputs.prNumber }}; - const blockLabel = process.env.BLOCK_LABEL; - const blockReasonLabel = process.env.BLOCK_REASON_LABEL; - - const RELATED_ISSUE_REGEX = /Issue number:.+(\d)/ - - const matcher = new RegExp(RELATED_ISSUE_REGEX) - const isMatch = matcher.exec(prBody) - if (isMatch == null) { - console.info(`No related issue found, maybe the author didn't use the 
template but there is one.`) - - let msg = "No related issues found. Please ensure there is an open issue related to this change to avoid significant delays or closure."; - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - body: msg, - issue_number: prNumber, - }); - - await github.rest.issues.addLabels({ - issue_number: prNumber, - owner: context.repo.owner, - repo: context.repo.repo, - labels: [blockLabel, blockReasonLabel] - }) - } + const script = require('.github/scripts/label_missing_related_issue.js') + await script({github, context, core}) diff --git a/.github/workflows/record_pr.yml b/.github/workflows/record_pr.yml index 8d56dd29358..44f445a70ac 100644 --- a/.github/workflows/record_pr.yml +++ b/.github/workflows/record_pr.yml @@ -1,11 +1,11 @@ -name: Record PR number +name: Record PR details on: pull_request: types: [opened, edited, closed] jobs: - build: + record_pr: runs-on: ubuntu-latest steps: diff --git a/.github/workflows/reusable_export_pr_details.yml b/.github/workflows/reusable_export_pr_details.yml new file mode 100644 index 00000000000..dcbb959a4ea --- /dev/null +++ b/.github/workflows/reusable_export_pr_details.yml @@ -0,0 +1,81 @@ +name: Export previously recorded PR + +on: + workflow_call: + inputs: + record_pr_workflow_id: + required: true + type: number + secrets: + token: + required: true + # Map the workflow outputs to job outputs + outputs: + prNumber: + description: "PR Number" + value: ${{ jobs.export_pr_details.outputs.prNumber }} + prTitle: + description: "PR Title" + value: ${{ jobs.export_pr_details.outputs.prTitle }} + prBody: + description: "PR Body as string" + value: ${{ jobs.export_pr_details.outputs.prBody }} + prAuthor: + description: "PR author username" + value: ${{ jobs.export_pr_details.outputs.prAuthor }} + prAction: + description: "PR event action" + value: ${{ jobs.export_pr_details.outputs.prAction }} + prIsMerged: + description: "Whether PR is merged" + value: ${{ jobs.export_pr_details.outputs.prIsMerged }} + +jobs: + export_pr_details: + runs-on: ubuntu-latest + env: + FILENAME: pr.txt + # Map the job outputs to step outputs + outputs: + prNumber: ${{ steps.prNumber.outputs.prNumber }} + prTitle: ${{ steps.prTitle.outputs.prTitle }} + prBody: ${{ steps.prBody.outputs.prBody }} + prAuthor: ${{ steps.prAuthor.outputs.prAuthor }} + prAction: ${{ steps.prAction.outputs.prAction }} + prIsMerged: ${{ steps.prIsMerged.outputs.prIsMerged }} + steps: + - name: Checkout repository # in case caller workflow doesn't checkout thus failing with file not found + uses: actions/checkout@v3 + - name: "Download previously saved PR" + uses: actions/github-script@v6 + env: + WORKFLOW_ID: ${{ inputs.record_pr_workflow_id }} + # For security, we only download artifacts tied to the successful PR recording workflow + with: + github-token: ${{ secrets.token }} + script: | + const script = require('.github/scripts/download_pr_artifact.js') + await script({github, context, core}) + # NodeJS standard library doesn't provide ZIP capabilities; use system `unzip` command instead + - name: "Unzip PR artifact" + run: unzip pr.zip + # NOTE: We need separate steps for each mapped output and respective IDs + # otherwise the parent caller won't see them regardless on how outputs are set. 
+ - name: "Export Pull Request Number" + id: prNumber + run: echo ::set-output name=prNumber::$(jq -c '.number' ${FILENAME}) + - name: "Export Pull Request Title" + id: prTitle + run: echo ::set-output name=prTitle::$(jq -c '.pull_request.title' ${FILENAME}) + - name: "Export Pull Request Body" + id: prBody + run: echo ::set-output name=prBody::$(jq -c '.pull_request.body' ${FILENAME}) + - name: "Export Pull Request Author" + id: prAuthor + run: echo ::set-output name=prAuthor::$(jq -c '.pull_request.user.login' ${FILENAME}) + - name: "Export Pull Request Action" + id: prAction + run: echo ::set-output name=prAction::$(jq -c '.action' ${FILENAME}) + - name: "Export Pull Request Merged status" + id: prIsMerged + run: echo ::set-output name=prIsMerged::$(jq -c '.pull_request.merged' ${FILENAME}) From a2c9e346164668b18e8526673a3002cf90bee75f Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Mon, 11 Jul 2022 20:50:27 +0200 Subject: [PATCH 51/72] Refactor asset uploading to use asset manifest --- Makefile | 4 +- tests/e2e/utils/helpers.py | 2 +- tests/e2e/utils/infrastructure.py | 68 +++++++++++++++++-------------- 3 files changed, 41 insertions(+), 33 deletions(-) diff --git a/Makefile b/Makefile index d11a567ea58..afe0518fd43 100644 --- a/Makefile +++ b/Makefile @@ -17,14 +17,14 @@ lint: format poetry run flake8 aws_lambda_powertools/* tests/* test: - poetry run pytest -m "not (perf or e2e)" --cov=aws_lambda_powertools --cov-report=xml + poetry run pytest -m "not perf" --ignore tests/e2e --cov=aws_lambda_powertools --cov-report=xml poetry run pytest --cache-clear tests/performance unit-test: poetry run pytest tests/unit e2e-test: - poetry run pytest -n 3 --dist loadscope --durations=0 --durations-min=1 tests/e2e + poetry run pytest -rP -n 3 --dist loadscope --durations=0 --durations-min=1 tests/e2e coverage-html: poetry run pytest -m "not (perf or e2e)" --cov=aws_lambda_powertools --cov-report=html diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py index f38deb40853..a51109d6b31 100644 --- a/tests/e2e/utils/helpers.py +++ b/tests/e2e/utils/helpers.py @@ -38,7 +38,7 @@ def trigger_lambda(lambda_arn: str, client: LambdaClient): @lru_cache(maxsize=10, typed=False) -@retry(ValueError, delay=1, jitter=1, tries=10) +@retry(ValueError, delay=1, jitter=1, tries=20) def get_logs(lambda_function_name: str, log_client: CloudWatchClient, start_time: int, **kwargs: dict) -> List[Log]: response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time) if not response["events"]: diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index df54b561114..b0052f0671c 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -1,4 +1,5 @@ import io +import json import os import sys import zipfile @@ -57,8 +58,7 @@ def _create_layer(self, stack: Stack): "-c", rf"poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\ pip install -r /tmp/requirements.txt -t {output_dir} &&\ - cp -R {input_dir} {output_dir} &&\ - find {output_dir}/ -type d -name __pycache__ -prune -exec rm -rf {{}} \;", + cp -R {input_dir} {output_dir}", ], ), ), @@ -69,7 +69,7 @@ def _find_handlers(self, directory: str) -> List: for root, _, files in os.walk(directory): return [os.path.join(root, filename) for filename in files if filename.endswith(".py")] - def synthesize(self, handlers: List[str]) -> Tuple[dict, str]: + def synthesize(self, handlers: List[str]) -> Tuple[dict, str, 
str]: integration_test_app = App() stack = Stack(integration_test_app, self.stack_name) powertools_layer = self._create_layer(stack) @@ -98,10 +98,12 @@ def synthesize(self, handlers: List[str]) -> Tuple[dict, str]: removal_policy=RemovalPolicy.DESTROY, ) CfnOutput(stack, f"{filename}_arn", value=function_python.function_arn) - return ( - integration_test_app.synth().get_stack_by_name(self.stack_name).template, - integration_test_app.synth().directory, - ) + cloud_assembly = integration_test_app.synth() + cf_template = cloud_assembly.get_stack_by_name(self.stack_name).template + cloud_assembly_directory = cloud_assembly.directory + cloud_assembly_assets_manifest_path = cloud_assembly.get_stack_by_name(self.stack_name).dependencies[0].file + + return (cf_template, cloud_assembly_directory, cloud_assembly_assets_manifest_path) def __call__(self) -> Tuple[dict, str]: handlers = self._find_handlers(directory=self.handlers_dir) @@ -124,8 +126,8 @@ def __init__(self, stack_name: str, handlers_dir: str, config: dict) -> None: def deploy(self, Stack: Type[InfrastructureStackInterface]) -> Dict[str, str]: stack = Stack(handlers_dir=self.handlers_dir, stack_name=self.stack_name, config=self.config) - template, asset_root_dir = stack() - self._upload_assets(template, asset_root_dir) + template, asset_root_dir, asset_manifest_file = stack() + self._upload_assets(asset_root_dir, asset_manifest_file) response = self._deploy_stack(self.stack_name, template) @@ -134,25 +136,31 @@ def deploy(self, Stack: Type[InfrastructureStackInterface]) -> Dict[str, str]: def delete(self): self.cf_client.delete_stack(StackName=self.stack_name) - def _upload_assets(self, template: dict, asset_root_dir: str): + def _upload_assets(self, asset_root_dir: str, asset_manifest_file: str): + + assets = self._find_assets(asset_manifest_file, self.account_id, self.region) - assets = self._find_assets(template, self.account_id, self.region) + for s3_key, config in assets.items(): + print(config) + s3_bucket = self.s3_resource.Bucket(config["bucket_name"]) + + if config["asset_packaging"] != "zip": + print("Asset is not a zip file. 
Skipping upload") + continue - for s3_key, bucket in assets.items(): - s3_bucket = self.s3_resource.Bucket(bucket) if bool(list(s3_bucket.objects.filter(Prefix=s3_key))): print("object exists, skipping") continue buf = io.BytesIO() - asset_dir = f"{asset_root_dir}/asset.{Path(s3_key).with_suffix('')}" + asset_dir = f"{asset_root_dir}/{config['asset_path']}" os.chdir(asset_dir) asset_files = self._find_files(directory=".") with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as zf: for asset_file in asset_files: zf.write(os.path.join(asset_file)) buf.seek(0) - self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key) + self.s3_client.upload_fileobj(Fileobj=buf, Bucket=config["bucket_name"], Key=s3_key) def _find_files(self, directory: str) -> List: file_paths = [] @@ -174,22 +182,22 @@ def _deploy_stack(self, stack_name: str, template: dict): response = self.cf_client.describe_stacks(StackName=stack_name) return response - def _find_assets(self, template: dict, account_id: str, region: str): + def _find_assets(self, asset_template: str, account_id: str, region: str): assets = {} - for _, resource in template["Resources"].items(): - bucket = None - S3Key = None - - if resource["Properties"].get("Code"): - bucket = resource["Properties"]["Code"]["S3Bucket"] - S3Key = resource["Properties"]["Code"]["S3Key"] - elif resource["Properties"].get("Content"): - bucket = resource["Properties"]["Content"]["S3Bucket"] - S3Key = resource["Properties"]["Content"]["S3Key"] - if S3Key and bucket: - assets[S3Key] = ( - bucket["Fn::Sub"].replace("${AWS::AccountId}", account_id).replace("${AWS::Region}", region) - ) + with open(asset_template, mode="r") as template: + for _, config in json.loads(template.read())["files"].items(): + asset_path = config["source"]["path"] + asset_packaging = config["source"]["packaging"] + bucket_name = config["destinations"]["current_account-current_region"]["bucketName"] + object_key = config["destinations"]["current_account-current_region"]["objectKey"] + + assets[object_key] = { + "bucket_name": bucket_name.replace("${AWS::AccountId}", account_id).replace( + "${AWS::Region}", region + ), + "asset_path": asset_path, + "asset_packaging": asset_packaging, + } return assets From db1b756127c3957c2b7e11d9738fb6df4dd6ae59 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 12 Jul 2022 14:35:41 +0200 Subject: [PATCH 52/72] fix(ci): move conditionals from yaml to code --- .github/scripts/constants.js | 20 +++++++++++ .../scripts/label_missing_related_issue.js | 35 ++++++++++++++----- .github/workflows/on_opened_pr.yml | 14 ++++---- 3 files changed, 52 insertions(+), 17 deletions(-) create mode 100644 .github/scripts/constants.js diff --git a/.github/scripts/constants.js b/.github/scripts/constants.js new file mode 100644 index 00000000000..3efe833df7b --- /dev/null +++ b/.github/scripts/constants.js @@ -0,0 +1,20 @@ +module.exports = Object.freeze({ + /** @type {string} */ + "PR_ACTION": process.env.PR_ACTION || "", + /** @type {string} */ + "PR_AUTHOR": process.env.PR_AUTHOR || "", + /** @type {string} */ + "PR_BODY": process.env.PR_BODY || "", + /** @type {string} */ + "PR_TITLE": process.env.PR_TITLE || "", + /** @type {number} */ + "PR_NUMBER": process.env.PR_NUMBER || 0, + /** @type {boolean} */ + "PR_IS_MERGED": process.env.PR_IS_MERGED || false, + /** @type {string[]} */ + "IGNORE_AUTHORS": ["dependabot[bot]", "markdownify[bot]"], + /** @type {string} */ + "BLOCK_LABEL": "do-not-merge", + /** @type {string} */ + "BLOCK_REASON_LABEL": "need-issue", +}); 
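One caveat worth noting with a constants module like the one above: workflow env values always cross the process boundary as strings, so process.env.PR_IS_MERGED || false yields the truthy string "false" rather than a boolean when the upstream workflow passes false through. A short Python sketch of the same pitfall (the variable name mirrors the constant above; values are illustrative):

import os

# Simulate what `${{ ... }}` interpolation hands a script: always a string.
os.environ["PR_IS_MERGED"] = "false"

raw = os.environ.get("PR_IS_MERGED", "false")

assert bool(raw) is True   # any non-empty string is truthy, even "false"
assert raw != "true"       # explicit string comparison is the safe check

# Explicit parsing avoids treating the string "false" as merged.
pr_is_merged = raw.strip().lower() == "true"
assert pr_is_merged is False

The same reasoning applies to PR_NUMBER, which arrives as a string and only defaults to the number 0 when the env var is entirely unset.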
diff --git a/.github/scripts/label_missing_related_issue.js b/.github/scripts/label_missing_related_issue.js
index 2fbd2674a4b..bb9a2bf4b9c 100644
--- a/.github/scripts/label_missing_related_issue.js
+++ b/.github/scripts/label_missing_related_issue.js
@@ -1,12 +1,29 @@
+const {
+    PR_ACTION,
+    PR_AUTHOR,
+    PR_BODY,
+    PR_NUMBER,
+    IGNORE_AUTHORS,
+    BLOCK_LABEL,
+    BLOCK_REASON_LABEL
+} = require("./constants")
+
 module.exports = async ({github, context, core}) => {
-    const prBody = process.env.PR_BODY;
-    const prNumber = process.env.PR_NUMBER;
-    const blockLabel = process.env.BLOCK_LABEL;
-    const blockReasonLabel = process.env.BLOCK_REASON_LABEL;
+    core.debug(`Number: ${PR_NUMBER}`);
+    core.debug(`Action: ${PR_ACTION}`);
+    core.debug(`Author: ${PR_AUTHOR}`);
+    core.debug(`Body: ${PR_BODY}`);
 
-    const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<issue>\d+)/;
+    if (IGNORE_AUTHORS.includes(PR_AUTHOR)) {
+        return core.notice("Author in IGNORE_AUTHORS list; skipping...")
+    }
 
-    const isMatch = RELATED_ISSUE_REGEX.exec(prBody);
+    if (PR_ACTION != "opened") {
+        return core.notice("Only newly open PRs are labelled to avoid spam; skipping")
+    }
+
+    const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<issue>\d+)/;
+    const isMatch = RELATED_ISSUE_REGEX.exec(PR_BODY);
     if (isMatch == null) {
         core.info(`No related issue found, maybe the author didn't use the template but there is one.`)
 
@@ -15,14 +32,14 @@ module.exports = async ({github, context, core}) => {
             owner: context.repo.owner,
             repo: context.repo.repo,
             body: msg,
-            issue_number: prNumber,
+            issue_number: PR_NUMBER,
         });
 
         return await github.rest.issues.addLabels({
-            issue_number: prNumber,
+            issue_number: PR_NUMBER,
             owner: context.repo.owner,
             repo: context.repo.repo,
-            labels: [blockLabel, blockReasonLabel]
+            labels: [BLOCK_LABEL, BLOCK_REASON_LABEL]
         })
     }
 }
diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml
index 831fa89563a..bf4aa556783 100644
--- a/.github/workflows/on_opened_pr.yml
+++ b/.github/workflows/on_opened_pr.yml
@@ -6,10 +6,6 @@ on:
     types:
       - completed
 
-env:
-  BLOCK_LABEL: "do-not-merge"
-  BLOCK_REASON_LABEL: "need-issue"
-  IGNORE_AUTHORS: '["dependabot[bot]", "markdownify[bot]"]'
 
 jobs:
   get_pr_details:
@@ -21,18 +17,20 @@ jobs:
       token: ${{ secrets.GITHUB_TOKEN }}
   check_related_issue:
     needs: get_pr_details
-    # Maintenance: Refactor condition to the correct env syntax later
-    if: |
-      needs.get_pr_details.outputs.prAction == 'opened'
-      && contains(fromJson('["dependabot[bot]", "markdownify[bot]"]'), needs.get_pr_details.outputs.prAuthor) != true
     runs-on: ubuntu-latest
     steps:
+      - name: Debug outputs
+        run: echo "Outputs ${{ toJSON(needs.get_pr_details.outputs) }}"
      - uses: actions/checkout@v3
      - name: "Ensure related issue is present"
+        if: contains('["opened", "edited"]', needs.get_pr_details.outputs.prAction)
+        if: needs.get_pr_details.outputs.prAction == 'opened' || needs.get_pr_details.outputs.prAction == 'edited'
         uses: actions/github-script@v6
         env:
           PR_BODY: ${{ needs.get_pr_details.outputs.prBody }}
           PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }}
+          PR_ACTION: ${{ needs.get_pr_details.outputs.prAction }}
+          PR_AUTHOR: ${{ needs.get_pr_details.outputs.prAuthor }}
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
From 95a8f77ca445dcf126f075ec4df06c3cfd19193d Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Tue, 12 Jul 2022 15:02:59 +0200
Subject: [PATCH 53/72] chore(ci): move error prone env to code as constants

---
 .github/scripts/constants.js                | 23 +++++++++--
 .../scripts/label_missing_related_issue.js  |
6 +-- .github/scripts/label_pr_based_on_title.js | 17 +++++---- .github/scripts/label_related_issue.js | 38 +++++++++++++------ .github/workflows/on_merged_pr.yml | 8 ++-- .github/workflows/on_opened_pr.yml | 2 +- 6 files changed, 64 insertions(+), 30 deletions(-) diff --git a/.github/scripts/constants.js b/.github/scripts/constants.js index 3efe833df7b..014baaa751e 100644 --- a/.github/scripts/constants.js +++ b/.github/scripts/constants.js @@ -1,20 +1,35 @@ module.exports = Object.freeze({ /** @type {string} */ + // Values: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request "PR_ACTION": process.env.PR_ACTION || "", + /** @type {string} */ "PR_AUTHOR": process.env.PR_AUTHOR || "", + /** @type {string} */ "PR_BODY": process.env.PR_BODY || "", + /** @type {string} */ "PR_TITLE": process.env.PR_TITLE || "", + /** @type {number} */ "PR_NUMBER": process.env.PR_NUMBER || 0, + /** @type {boolean} */ "PR_IS_MERGED": process.env.PR_IS_MERGED || false, - /** @type {string[]} */ - "IGNORE_AUTHORS": ["dependabot[bot]", "markdownify[bot]"], + + /** @type {string} */ + "LABEL_BLOCK": "do-not-merge", + /** @type {string} */ - "BLOCK_LABEL": "do-not-merge", + "LABEL_BLOCK_REASON": "need-issue", + /** @type {string} */ - "BLOCK_REASON_LABEL": "need-issue", + "LABEL_PENDING_RELEASE": "pending-release", + + /** @type {string} */ + "HANDLE_MAINTAINERS_TEAM": "@awslabs/aws-lambda-powertools-python", + + /** @type {string[]} */ + "IGNORE_AUTHORS": ["dependabot[bot]", "markdownify[bot]"], }); diff --git a/.github/scripts/label_missing_related_issue.js b/.github/scripts/label_missing_related_issue.js index bb9a2bf4b9c..aaf1eba3076 100644 --- a/.github/scripts/label_missing_related_issue.js +++ b/.github/scripts/label_missing_related_issue.js @@ -4,8 +4,8 @@ const { PR_BODY, PR_NUMBER, IGNORE_AUTHORS, - BLOCK_LABEL, - BLOCK_REASON_LABEL + LABEL_BLOCK, + LABEL_BLOCK_REASON } = require("./constants") module.exports = async ({github, context, core}) => { @@ -39,7 +39,7 @@ module.exports = async ({github, context, core}) => { issue_number: PR_NUMBER, owner: context.repo.owner, repo: context.repo.repo, - labels: [BLOCK_LABEL, BLOCK_REASON_LABEL] + labels: [LABEL_BLOCK, LABEL_BLOCK_REASON] }) } } diff --git a/.github/scripts/label_pr_based_on_title.js b/.github/scripts/label_pr_based_on_title.js index f13d48f32bc..3d448c0e545 100644 --- a/.github/scripts/label_pr_based_on_title.js +++ b/.github/scripts/label_pr_based_on_title.js @@ -1,8 +1,8 @@ -module.exports = async ({github, context, core}) => { - const pr_number = process.env.PR_NUMBER - const pr_title = process.env.PR_TITLE +const { PR_NUMBER, PR_TITLE } = require("./constants") - console.log(pr_title) +module.exports = async ({github, context, core}) => { + core.debug(PR_NUMBER); + core.debug(PR_TITLE); const FEAT_REGEX = /feat(\((.+)\))?(\:.+)/ const BUG_REGEX = /(fix|bug)(\((.+)\))?(\:.+)/ @@ -20,14 +20,15 @@ module.exports = async ({github, context, core}) => { "deprecated": DEPRECATED_REGEX, } + // Maintenance: We should keep track of modified PRs in case their titles change for (const label in labels) { const matcher = new RegExp(labels[label]) - const isMatch = matcher.exec(pr_title) + const isMatch = matcher.exec(PR_TITLE) if (isMatch != null) { - console.info(`Auto-labeling PR ${pr_number} with ${label}`) + core.info(`Auto-labeling PR ${PR_NUMBER} with ${label}`) await github.rest.issues.addLabels({ - issue_number: pr_number, + issue_number: PR_NUMBER, owner: context.repo.owner, repo: context.repo.repo, labels: 
[label]
@@ -36,4 +37,6 @@ module.exports = async ({github, context, core}) => {
             break
         }
     }
+
+    return core.notice(`PR ${PR_NUMBER} title '${PR_TITLE}' doesn't follow semantic titles; skipping...`)
 }
\ No newline at end of file
diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js
index 3fe5727163d..5965c518421 100644
--- a/.github/scripts/label_related_issue.js
+++ b/.github/scripts/label_related_issue.js
@@ -1,19 +1,35 @@
+const {
+    PR_AUTHOR,
+    PR_BODY,
+    PR_NUMBER,
+    IGNORE_AUTHORS,
+    LABEL_PENDING_RELEASE,
+    HANDLE_MAINTAINERS_TEAM,
+    PR_IS_MERGED,
+} = require("./constants")
+
 module.exports = async ({github, context, core}) => {
-    const prBody = process.env.PR_BODY;
-    const prNumber = process.env.PR_NUMBER;
-    const releaseLabel = process.env.RELEASE_LABEL;
-    const maintainersTeam = process.env.MAINTAINERS_TEAM;
-    const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<issue>\d+)/;
+    core.debug(PR_BODY);
+    core.debug(PR_IS_MERGED);
+
+    if (IGNORE_AUTHORS.includes(PR_AUTHOR)) {
+        return core.notice("Author in IGNORE_AUTHORS list; skipping...")
+    }
 
-    core.info(prBody);
-    const isMatch = RELATED_ISSUE_REGEX.exec(prBody);
+    if (!PR_IS_MERGED) {
+        return core.notice("Only merged PRs to avoid spam; skipping")
+    }
+
+
+    const RELATED_ISSUE_REGEX = /Issue number:[^\d\r\n]+(?<issue>\d+)/;
+    const isMatch = RELATED_ISSUE_REGEX.exec(PR_BODY);
     if (!isMatch) {
-        core.setFailed(`Unable to find related issue for PR number ${prNumber}.\n\n Body details: ${prBody}`);
+        core.setFailed(`Unable to find related issue for PR number ${PR_NUMBER}.\n\n Body details: ${PR_BODY}`);
         return await github.rest.issues.createComment({
             owner: context.repo.owner,
             repo: context.repo.repo,
-            body: `${maintainersTeam} No related issues found. Please ensure '${releaseLabel}' label is applied before releasing.`,
+            body: `${HANDLE_MAINTAINERS_TEAM} No related issues found. 
Please ensure '${LABEL_PENDING_RELEASE}' label is applied before releasing.`, + issue_number: PR_NUMBER, }); } @@ -24,6 +40,6 @@ module.exports = async ({github, context, core}) => { issue_number: relatedIssueNumber, owner: context.repo.owner, repo: context.repo.repo, - labels: [releaseLabel] + labels: [LABEL_PENDING_RELEASE] }) } diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml index 689ec9f65d2..95887779085 100644 --- a/.github/workflows/on_merged_pr.yml +++ b/.github/workflows/on_merged_pr.yml @@ -6,10 +6,6 @@ on: types: - completed -env: - RELEASE_LABEL: "pending-release" - MAINTAINERS_TEAM: "@awslabs/aws-lambda-powertools-python" - jobs: get_pr_details: if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' @@ -26,12 +22,16 @@ jobs: && needs.get_pr_details.outputs.prIsMerged == true runs-on: ubuntu-latest steps: + - name: Debug outputs + run: echo "Outputs ${{ toJSON(needs.get_pr_details.outputs) }}" - uses: actions/checkout@v3 - name: "Label PR related issue for release" uses: actions/github-script@v6 env: PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} PR_BODY: ${{ needs.get_pr_details.outputs.prBody }} + PR_IS_MERGED: ${{ needs.get_pr_details.outputs.prIsMerged }} + PR_AUTHOR: ${{ needs.get_pr_details.outputs.prAuthor }} with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml index bf4aa556783..25b7f8acbde 100644 --- a/.github/workflows/on_opened_pr.yml +++ b/.github/workflows/on_opened_pr.yml @@ -1,4 +1,4 @@ -name: On PR open +name: On new PR on: workflow_run: From 15f0c9eb26641b6f3b6b557ab7e1761eeff48007 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 12 Jul 2022 15:08:21 +0200 Subject: [PATCH 54/72] chore(ci): move all scripts under .github/scripts --- .../{workflows => scripts}/post_release.js | 0 .github/workflows/publish.yml | 19 +++---------------- 2 files changed, 3 insertions(+), 16 deletions(-) rename .github/{workflows => scripts}/post_release.js (100%) diff --git a/.github/workflows/post_release.js b/.github/scripts/post_release.js similarity index 100% rename from .github/workflows/post_release.js rename to .github/scripts/post_release.js diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 218df1aef35..c3b6537f314 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -20,9 +20,8 @@ name: Publish to PyPi # 6. Kick off Lambda Layer pipeline to publish latest version with minimal dependencies as a SAR App # 7. Kick off Lambda Layer pipeline to publish latest version with extra dependencies as a SAR App # 8. Builds a fresh version of docs including Changelog updates -# 9. Push latest release source code to master using release title as the commit message -# 10. Builds latest documentation for new release, and update latest alias pointing to the new release tag -# 11. Close and notify all issues labeled "pending-release" about the release details +# 9. Builds latest documentation for new release, and update latest alias pointing to the new release tag +# 10. 
Close and notify all issues labeled "pending-release" about the release details # # === Fallback mechanism due to external failures === @@ -111,17 +110,5 @@ jobs: uses: actions/github-script@v6 with: script: | - const post_release = require('.github/workflows/post_release.js') + const post_release = require('.github/scripts/post_release.js') await post_release({github, context, core}) - - sync_master: - needs: release - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Sync master from detached head - # If version matches CHANGELOG and pyproject.toml - # If it passes all checks, successfully releases to test and prod - # Then sync up master with latest source code release - # where commit message will be Release notes title - run: git push origin HEAD:refs/heads/master --force From 6e5492c8987c004585fc650fe94b25301f651604 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 12 Jul 2022 15:09:29 +0200 Subject: [PATCH 55/72] chore(governance): remove any step relying on master branch --- .github/mergify.yml | 1 - .github/workflows/python_build.yml | 2 -- MAINTAINERS.md | 2 +- 3 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/mergify.yml b/.github/mergify.yml index a623796a514..dc3f1953586 100644 --- a/.github/mergify.yml +++ b/.github/mergify.yml @@ -22,7 +22,6 @@ # - name: Automatic merge ⬇️ on approval ✔ # conditions: -# - base!=master # - "#approved-reviews-by>=2" # actions: # queue: diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index c0cdcc85cff..4d4f1ed135b 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -10,7 +10,6 @@ on: - "mypy.ini" branches: - develop - - master push: paths: - "aws_lambda_powertools/**" @@ -20,7 +19,6 @@ on: - "mypy.ini" branches: - develop - - master jobs: build: diff --git a/MAINTAINERS.md b/MAINTAINERS.md index a706b94c6ab..63b61cf1b91 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -217,7 +217,7 @@ This workflow will update both user guide and API documentation. ### Maintain Overall Health of the Repo -> TODO: Coordinate removing `master` and renaming `develop` to `main` +> TODO: Coordinate renaming `develop` to `main` Keep the `develop` branch at production quality at all times. Backport features as needed. Cut release branches and tags to enable future patches. 
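
Aside: the related-issue automation in the patch above hinges on a named capture group in `RELATED_ISSUE_REGEX`, and PATCH 65 below exists precisely because the destructured name drifted from the group name in the regex. As an illustration only — with a made-up sample PR body, and written in Python's `(?P<name>...)` named-group spelling rather than the script's actual JavaScript `(?<name>...)` — the extraction works like this:

```python
# Illustration only: the related-issue extraction performed by
# RELATED_ISSUE_REGEX in label_related_issue.js, transposed to Python.
import re

RELATED_ISSUE_REGEX = re.compile(r"Issue number:[^\d\r\n]+(?P<issue>\d+)")

pr_body = "Changes: refactor CI scripts\n\nIssue number: #1280"  # made-up sample PR body
match = RELATED_ISSUE_REGEX.search(pr_body)
if match:
    print(match.group("issue"))  # -> "1280"
```

Requesting a group name the pattern does not define fails the lookup — `undefined` in JavaScript destructuring, an `IndexError` in Python — which is exactly the mismatch PATCH 65 corrects.
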
From da118024538856467f3856d46cb73dfd92f7e241 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 12 Jul 2022 15:25:55 +0200 Subject: [PATCH 56/72] fix(ci): move conditionals from yaml to code; leftover --- .github/workflows/on_merged_pr.yml | 4 ---- .github/workflows/on_opened_pr.yml | 3 --- 2 files changed, 7 deletions(-) diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml index 95887779085..b749017f319 100644 --- a/.github/workflows/on_merged_pr.yml +++ b/.github/workflows/on_merged_pr.yml @@ -16,10 +16,6 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} release_label_on_merge: needs: get_pr_details - # Maintenance: Conditional isn't working as expected - if: | - needs.get_pr_details.outputs.prAuthor != 'dependabot[bot]' - && needs.get_pr_details.outputs.prIsMerged == true runs-on: ubuntu-latest steps: - name: Debug outputs diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml index 25b7f8acbde..f6465b94c02 100644 --- a/.github/workflows/on_opened_pr.yml +++ b/.github/workflows/on_opened_pr.yml @@ -6,7 +6,6 @@ on: types: - completed - jobs: get_pr_details: if: ${{ github.event.workflow_run.conclusion == 'success' }} @@ -23,8 +22,6 @@ jobs: run: echo "Outputs ${{ toJSON(needs.get_pr_details.outputs) }}" - uses: actions/checkout@v3 - name: "Ensure related issue is present" - if: contains('["opened", "edited"]', needs.get_pr_details.outputs.prAction) - if: needs.get_pr_details.outputs.prAction == 'opened' || needs.get_pr_details.outputs.prAction == 'edited' uses: actions/github-script@v6 env: PR_BODY: ${{ needs.get_pr_details.outputs.prBody }} From 10aaf71dba2b07b1c499a680c0606a398af8a0d4 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 12 Jul 2022 15:26:36 +0200 Subject: [PATCH 57/72] chore(ci): auto-merge cdk lib and lambda layer construct --- .github/workflows/auto-merge.yml | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml index 7ce6ff8ba29..b63d0331edd 100644 --- a/.github/workflows/auto-merge.yml +++ b/.github/workflows/auto-merge.yml @@ -18,7 +18,21 @@ jobs: with: github-token: "${{ secrets.GITHUB_TOKEN }}" - name: Enable auto-merge for mypy-boto3 stubs Dependabot PRs - if: ${{contains(steps.metadata.outputs.dependency-names, 'mypy-boto3')}} # && steps.metadata.outputs.update-type == 'version-update:semver-patch' + if: ${{ contains(steps.metadata.outputs.dependency-names, 'mypy-boto3') && steps.metadata.outputs.update-type != 'version-update:semver-major' }} + run: gh pr merge --auto --squash "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + # Maintenance: Experiment with literal array (toJSON('["dep1", "dep2"]')) to ease extending it + - name: Enable auto-merge for CDK Construct Lambda Layer Dependabot PRs + if: ${{ contains(steps.metadata.outputs.dependency-names, 'cdk-lambda-powertools-python-layer') && steps.metadata.outputs.update-type != 'version-update:semver-major' }} + run: gh pr merge --auto --squash "$PR_URL" + env: + PR_URL: ${{github.event.pull_request.html_url}} + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + # Maintenance: Revisit if CDK Constructs make breaking changes like CDK v1 + - name: Enable auto-merge for CDK Lib Construct + if: ${{ contains(steps.metadata.outputs.dependency-names, 'aws-cdk-lib') && steps.metadata.outputs.update-type != 'version-update:semver-major' }} run: gh pr merge --auto --squash "$PR_URL" env: PR_URL: 
${{github.event.pull_request.html_url}} From b8a0238715089fadb74db161e55d0b802423eb75 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Jul 2022 15:27:47 +0200 Subject: [PATCH 58/72] chore(deps): bump actions/setup-node from 2 to 3 (#1281) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/publish_layer.yml | 2 +- .github/workflows/reusable_deploy_layer_stack.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish_layer.yml b/.github/workflows/publish_layer.yml index b4814fbb98a..7d1371115f5 100644 --- a/.github/workflows/publish_layer.yml +++ b/.github/workflows/publish_layer.yml @@ -27,7 +27,7 @@ jobs: - name: checkout uses: actions/checkout@v2 - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: '16.12' - name: Setup python diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml index f03cb117d6f..73805d1407b 100644 --- a/.github/workflows/reusable_deploy_layer_stack.yml +++ b/.github/workflows/reusable_deploy_layer_stack.yml @@ -59,7 +59,7 @@ jobs: aws-region: ${{ matrix.region }} role-to-assume: ${{ secrets.target-account-role }} - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: '16.12' - name: Setup python From 839e252a2056cf7a0f4abd7f53b114afc3221a18 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 12 Jul 2022 15:34:13 +0200 Subject: [PATCH 59/72] fix(ci): escape outputs as certain PRs can break GH Actions expressions --- .github/workflows/on_merged_pr.yml | 2 +- .github/workflows/on_opened_pr.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml index b749017f319..a3e3d64b875 100644 --- a/.github/workflows/on_merged_pr.yml +++ b/.github/workflows/on_merged_pr.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Debug outputs - run: echo "Outputs ${{ toJSON(needs.get_pr_details.outputs) }}" + run: echo 'Outputs ${{ toJSON(needs.get_pr_details.outputs) }}' - uses: actions/checkout@v3 - name: "Label PR related issue for release" uses: actions/github-script@v6 diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml index f6465b94c02..f4e413092f2 100644 --- a/.github/workflows/on_opened_pr.yml +++ b/.github/workflows/on_opened_pr.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Debug outputs - run: echo "Outputs ${{ toJSON(needs.get_pr_details.outputs) }}" + run: echo 'Outputs ${{ toJSON(needs.get_pr_details.outputs) }}' - uses: actions/checkout@v3 - name: "Ensure related issue is present" uses: actions/github-script@v6 From ac07fe27a6c4e315259ba224f0149eaf8f57e3b3 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 12 Jul 2022 15:42:14 +0200 Subject: [PATCH 60/72] chore(governance): update emeritus affiliation --- MAINTAINERS.md | 79 ++++++++++++++++++++++++++------------------------ 1 file changed, 41 insertions(+), 38 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 63b61cf1b91..fa8b3287238 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -1,4 +1,4 @@ - + ## Table of contents - [Overview](#overview) @@ -6,29 +6,29 @@ - [Emeritus](#emeritus) - [Labels](#labels) - [Maintainer Responsibilities](#maintainer-responsibilities) - - [Uphold Code of Conduct](#uphold-code-of-conduct) - - [Prioritize 
Security](#prioritize-security) - - [Review Pull Requests](#review-pull-requests) - - [Triage New Issues](#triage-new-issues) - - [Triage Bug Reports](#triage-bug-reports) - - [Triage RFCs](#triage-rfcs) - - [Releasing a new version](#releasing-a-new-version) - - [Changelog generation](#changelog-generation) - - [Bumping the version](#bumping-the-version) - - [Drafting release notes](#drafting-release-notes) - - [Releasing a documentation hotfix](#releasing-a-documentation-hotfix) - - [Maintain Overall Health of the Repo](#maintain-overall-health-of-the-repo) - - [Manage Roadmap](#manage-roadmap) - - [Add Continuous Integration Checks](#add-continuous-integration-checks) - - [Negative Impact on the Project](#negative-impact-on-the-project) - - [Becoming a maintainer](#becoming-a-maintainer) + - [Uphold Code of Conduct](#uphold-code-of-conduct) + - [Prioritize Security](#prioritize-security) + - [Review Pull Requests](#review-pull-requests) + - [Triage New Issues](#triage-new-issues) + - [Triage Bug Reports](#triage-bug-reports) + - [Triage RFCs](#triage-rfcs) + - [Releasing a new version](#releasing-a-new-version) + - [Changelog generation](#changelog-generation) + - [Bumping the version](#bumping-the-version) + - [Drafting release notes](#drafting-release-notes) + - [Releasing a documentation hotfix](#releasing-a-documentation-hotfix) + - [Maintain Overall Health of the Repo](#maintain-overall-health-of-the-repo) + - [Manage Roadmap](#manage-roadmap) + - [Add Continuous Integration Checks](#add-continuous-integration-checks) + - [Negative Impact on the Project](#negative-impact-on-the-project) + - [Becoming a maintainer](#becoming-a-maintainer) - [Common scenarios](#common-scenarios) - - [Contribution is stuck](#contribution-is-stuck) - - [Insufficient feedback or information](#insufficient-feedback-or-information) - - [Crediting contributions](#crediting-contributions) - - [Is that a bug?](#is-that-a-bug) - - [Mentoring contributions](#mentoring-contributions) - - [Long running issues or PRs](#long-running-issues-or-prs) + - [Contribution is stuck](#contribution-is-stuck) + - [Insufficient feedback or information](#insufficient-feedback-or-information) + - [Crediting contributions](#crediting-contributions) + - [Is that a bug?](#is-that-a-bug) + - [Mentoring contributions](#mentoring-contributions) + - [Long running issues or PRs](#long-running-issues-or-prs) ## Overview @@ -52,7 +52,7 @@ Previous active maintainers who contributed to this project. | Maintainer | GitHub ID | Affiliation | | ----------------- | ----------------------------------------------- | ----------- | | Tom McCarthy | [cakepietoast](https://github.com/cakepietoast) | MongoDB | -| Nicolas Moutschen | [nmoutschen](https://github.com/nmoutschen) | Amazon | +| Nicolas Moutschen | [nmoutschen](https://github.com/nmoutschen) | Apollo | ## Labels @@ -100,7 +100,9 @@ Be aware of recurring ambiguous situations and [document them](#common-scenarios ### Uphold Code of Conduct -Model the behavior set forward by the [Code of Conduct](CODE_OF_CONDUCT.md) and raise any violations to other maintainers and admins. There could be unusual circumstances where inappropriate behavior does not immediately fall within the [Code of Conduct](CODE_OF_CONDUCT.md). These might be nuanced and should be handled with extra care - when in doubt, do not engage and reach out to other maintainers and admins. +Model the behavior set forward by the [Code of Conduct](CODE_OF_CONDUCT.md) and raise any violations to other maintainers and admins. 
There could be unusual circumstances where inappropriate behavior does not immediately fall within the [Code of Conduct](CODE_OF_CONDUCT.md).
+
+These might be nuanced and should be handled with extra care - when in doubt, do not engage and reach out to other maintainers and admins.
 
 ### Prioritize Security
 
@@ -152,14 +154,14 @@ RFC is a collaborative process to help us get to the most optimal solution given
 
 Make sure you keep these questions in mind when reviewing:
 
-* Does it use our [RFC template](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=RFC%2Ctriage&template=rfc.yml&title=RFC%3A+TITLE)?
-* Does it match our [Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets)?
-* Does the proposal address the use case? If so, is the recommended usage explicit?
-* Does it focus on the mechanics to solve the use case over fine-grained implementation details?
-* Can anyone familiar with the code base implement it?
-* If approved, are they interested in contributing? Do they need any guidance?
-* Does this significantly increase the overall project maintenance? Do we have the skills to maintain it?
-* If we can't take this use case, are there alternative projects we could recommend? Or does it call for a new project altogether?
+- Does it use our [RFC template](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=RFC%2Ctriage&template=rfc.yml&title=RFC%3A+TITLE)?
+- Does it match our [Tenets](https://awslabs.github.io/aws-lambda-powertools-python/latest/#tenets)?
+- Does the proposal address the use case? If so, is the recommended usage explicit?
+- Does it focus on the mechanics to solve the use case over fine-grained implementation details?
+- Can anyone familiar with the code base implement it?
+- If approved, are they interested in contributing? Do they need any guidance?
+- Does this significantly increase the overall project maintenance? Do we have the skills to maintain it?
+- If we can't take this use case, are there alternative projects we could recommend? Or does it call for a new project altogether?
 
 When necessary, be upfront that the time to review, approve, and implement an RFC can vary - see [Contribution is stuck](#contribution-is-stuck). Some RFCs may be further updated after implementation, as certain areas become clearer.
 
@@ -199,11 +201,11 @@ Edit the respective PR title and update their [labels](#labels). Then run the [R
 
 The best part comes now. Replace the placeholder `[Human readable summary of changes]` with what you'd like to communicate to customers about this release. Always put yourself in the customers' shoes. For that, these are some questions to keep in mind when drafting your first or future release notes:
 
-* Can customers understand at a high level what changed in this release?
-* Is there a link to the documentation where they can read more about each main change?
-* Are there any graphics or code snippets that can enhance readability?
-* Are we calling out any key contributor(s) to this release?
-  - All contributors are automatically credited, use this as an exceptional case to feature them
+- Can customers understand at a high level what changed in this release?
+- Is there a link to the documentation where they can read more about each main change?
+- Are there any graphics or code snippets that can enhance readability?
+- Are we calling out any key contributor(s) to this release? 
+ - All contributors are automatically credited, use this as an exceptional case to feature them Once you're happy, hit `Publish release`. This will kick off the [Publishing workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/publish.yml) and within a few minutes you should see the latest version in PyPi, and all issues labeled as `pending-release` will be notified. @@ -240,6 +242,7 @@ Actions that negatively impact the project will be handled by the admins, in coo In 2023, we will revisit this. We need to improve our understanding of how other projects are doing, their mechanisms to promote key contributors, and how they interact daily. We suspect this process might look similar to the [OpenSearch project](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md#becoming-a-maintainer). + ## Common scenarios These are recurring ambiguous situations that new and existing maintainers may encounter. They serve as guidance. It is up to each maintainer to follow, adjust, or handle in a different manner as long as [our conduct is consistent](#uphold-code-of-conduct) From 69073b46bd9a672a82247d848dadec87dc69c21d Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 12 Jul 2022 15:43:33 +0200 Subject: [PATCH 61/72] chore(ci): disable output debugging as pr body isnt accepted --- .github/workflows/on_merged_pr.yml | 2 -- .github/workflows/on_opened_pr.yml | 2 -- 2 files changed, 4 deletions(-) diff --git a/.github/workflows/on_merged_pr.yml b/.github/workflows/on_merged_pr.yml index a3e3d64b875..e1fcb16526f 100644 --- a/.github/workflows/on_merged_pr.yml +++ b/.github/workflows/on_merged_pr.yml @@ -18,8 +18,6 @@ jobs: needs: get_pr_details runs-on: ubuntu-latest steps: - - name: Debug outputs - run: echo 'Outputs ${{ toJSON(needs.get_pr_details.outputs) }}' - uses: actions/checkout@v3 - name: "Label PR related issue for release" uses: actions/github-script@v6 diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml index f4e413092f2..3d5aab45b5d 100644 --- a/.github/workflows/on_opened_pr.yml +++ b/.github/workflows/on_opened_pr.yml @@ -18,8 +18,6 @@ jobs: needs: get_pr_details runs-on: ubuntu-latest steps: - - name: Debug outputs - run: echo 'Outputs ${{ toJSON(needs.get_pr_details.outputs) }}' - uses: actions/checkout@v3 - name: "Ensure related issue is present" uses: actions/github-script@v6 From 0e72f8cb93215b678f4237e82e2c3ff4e8f0ec71 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Jul 2022 15:45:27 +0200 Subject: [PATCH 62/72] chore(deps): bump aws-cdk-lib from 2.29.0 to 2.31.1 (#1290) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- layer/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/layer/requirements.txt b/layer/requirements.txt index 0484892d321..56bf68168f0 100644 --- a/layer/requirements.txt +++ b/layer/requirements.txt @@ -11,9 +11,9 @@ attrs==21.4.0 \ # -r requirements.txt # cattrs # jsii -aws-cdk-lib==2.29.0 \ - --hash=sha256:4f852105cafd28a2bbd9bd2c6d24a2e1ab503bba923fd49a1782390b235af999 \ - --hash=sha256:53a78788219d9bf3a998211223225b34a10f066124e2812adcd40fd0a2058572 +aws-cdk-lib==2.31.1 \ + --hash=sha256:a07f6a247be110e874af374fa683d6c7eba86dfc9781cb555428b534c75bd4c0 \ + --hash=sha256:a3868c367cab3cf09e6bb68405e31f4342fc4a4905ccc3e3fdde133d520206c0 # via # -r requirements.txt # cdk-lambda-powertools-python-layer From e76a26aac7a881dd8f6d85c15db5ed85cefc943f 
Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Jul 2022 14:46:17 +0100 Subject: [PATCH 63/72] chore(deps): bump cdk-lambda-powertools-python-layer (#1284) Bumps [cdk-lambda-powertools-python-layer](https://github.com/aws-samples/cdk-lambda-powertools-python-layer) from 2.0.48 to 2.0.49. - [Release notes](https://github.com/aws-samples/cdk-lambda-powertools-python-layer/releases) - [Commits](https://github.com/aws-samples/cdk-lambda-powertools-python-layer/compare/v2.0.48...v2.0.49) --- updated-dependencies: - dependency-name: cdk-lambda-powertools-python-layer dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- layer/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/layer/requirements.txt b/layer/requirements.txt index 56bf68168f0..dbf74ff8701 100644 --- a/layer/requirements.txt +++ b/layer/requirements.txt @@ -23,9 +23,9 @@ cattrs==22.1.0 \ # via # -r requirements.txt # jsii -cdk-lambda-powertools-python-layer==2.0.48 \ - --hash=sha256:7bdd5a196e74b48d403223722f2838d1d10064d02e960a5565482cc0b7aad18d \ - --hash=sha256:9afeacea31eba14d67360db71af385c654c9e0af9b29a0d4e0922b52f862ae03 +cdk-lambda-powertools-python-layer==2.0.49 \ + --hash=sha256:8055fc691539f16e22a40e3d3df9c3f59fb28012437b08c47c639aefb001f1b2 \ + --hash=sha256:9b0a7b7344f9ccb486564af728cefeac743687bfb131631e6d9171a55800dbac # via -r requirements.txt constructs==10.1.43 \ --hash=sha256:69fd6da574c9506f44ca61e112af7d5db08ebb29b4bedc67b6d200b616f4abce \ From 69baefe59b38c8de1d90e405347b43a37cb86d2b Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 12 Jul 2022 16:16:40 +0200 Subject: [PATCH 64/72] docs(homepage): emphasize additional powertools languages (#1292) --- .github/scripts/label_related_issue.js | 1 + docs/index.md | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js index 5965c518421..fe216d922cf 100644 --- a/.github/scripts/label_related_issue.js +++ b/.github/scripts/label_related_issue.js @@ -11,6 +11,7 @@ const { module.exports = async ({github, context, core}) => { core.debug(PR_BODY); core.debug(PR_IS_MERGED); + core.debug(PR_AUTHOR); if (IGNORE_AUTHORS.includes(PR_AUTHOR)) { return core.notice("Author in IGNORE_AUTHORS list; skipping...") diff --git a/docs/index.md b/docs/index.md index 5396ccb3f2e..6467dcdeb35 100644 --- a/docs/index.md +++ b/docs/index.md @@ -7,9 +7,8 @@ description: AWS Lambda Powertools Python A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, idempotency, batching, and more. -???+ tip "Tip: Looking for a quick read through how the core features are used?" - - Check out [this detailed blog post](https://aws.amazon.com/blogs/opensource/simplifying-serverless-best-practices-with-lambda-powertools/) with a practical example. +???+ note + Lambda Powertools is also available for [Java](https://awslabs.github.io/aws-lambda-powertools-java/){target="_blank"} and [TypeScript](https://awslabs.github.io/aws-lambda-powertools-typescript/latest/){target="_blank"}. 
## Install From cfc15b8dd9fb685fa63758566a98825e5b49f4dc Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 12 Jul 2022 16:23:18 +0200 Subject: [PATCH 65/72] fix(ci): regex group name for on_merge workflow --- .github/scripts/label_related_issue.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js index fe216d922cf..885c7c420c3 100644 --- a/.github/scripts/label_related_issue.js +++ b/.github/scripts/label_related_issue.js @@ -34,11 +34,11 @@ module.exports = async ({github, context, core}) => { }); } - const { groups: {relatedIssueNumber} } = isMatch + const { groups: {issue} } = isMatch - core.info(`Auto-labeling related issue ${relatedIssueNumber} for release`) + core.info(`Auto-labeling related issue ${issue} for release`) return await github.rest.issues.addLabels({ - issue_number: relatedIssueNumber, + issue_number: issue, owner: context.repo.owner, repo: context.repo.repo, labels: [LABEL_PENDING_RELEASE] From 83e60911c8784a315b6a58c78c5183ed09081e2b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Jul 2022 16:24:32 +0200 Subject: [PATCH 66/72] chore(deps): bump attrs from 21.2.0 to 21.4.0 (#1282) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 77300c37213..3068cf58295 100644 --- a/poetry.lock +++ b/poetry.lock @@ -8,17 +8,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "21.2.0" +version = "21.4.0" description = "Classes Without Boilerplate" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "aws-xray-sdk" @@ -1133,8 +1133,8 @@ atomicwrites = [ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = 
"sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] aws-xray-sdk = [ {file = "aws-xray-sdk-2.10.0.tar.gz", hash = "sha256:9b14924fd0628cf92936055864655354003f0b1acc3e1c3ffde6403d0799dd7a"}, From c0519454d0de3106090841e3b12854add9d47924 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Tue, 12 Jul 2022 17:46:14 +0200 Subject: [PATCH 67/72] Fix documentation and relative imports --- .github/workflows/run-e2e-tests.yml | 3 --- MAINTAINERS.md | 19 +++++++------------ tests/e2e/logger/test_logger.py | 5 ++--- tests/e2e/metrics/test_metrics.py | 6 +++--- tests/e2e/tracer/test_tracer.py | 5 ++--- tests/e2e/utils/helpers.py | 3 ++- 6 files changed, 16 insertions(+), 25 deletions(-) diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index a79e5248299..2e186037853 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -16,9 +16,6 @@ jobs: steps: - name: "Checkout" uses: actions/checkout@v3 - ######################### - # Release new version - ######################### - name: "Use Python" uses: actions/setup-python@v3 with: diff --git a/MAINTAINERS.md b/MAINTAINERS.md index b84675f02f0..47c1771acde 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -13,10 +13,10 @@ - [Triage Bug Reports](#triage-bug-reports) - [Triage RFCs](#triage-rfcs) - [Releasing a new version](#releasing-a-new-version) - - [Run end to end tests](#run-end-to-end-tests) - [Changelog generation](#changelog-generation) - [Bumping the version](#bumping-the-version) - [Drafting release notes](#drafting-release-notes) + - [Run end to end tests](#run-end-to-end-tests) - [Releasing a documentation hotfix](#releasing-a-documentation-hotfix) - [Maintain Overall Health of the Repo](#maintain-overall-health-of-the-repo) - [Manage Roadmap](#manage-roadmap) @@ -166,17 +166,6 @@ When necessary, be upfront that the time to review, approve, and implement a RFC Some examples using our initial and new RFC templates: #92, #94, #95, #991, #1226 -### Run end to end tests - -In order to run end to end tests you need to install CDK CLI first and bootstrap your account with `cdk bootstrap` command. For additional details follow [documentation](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html). - -To run locally, export `AWS_PROFILE` environment variable and run `make e2e tests`. To run from GitHub Actions, use [run-e2e-tests workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/run-e2e-tests.yml) and pick the branch you want to run tests against. - -**NOTE**: E2E tests are run as part of each merge to `develop` branch. - -> **Q: What if there's an error?** -To be agreed - ### Releasing a new version > TODO: This is an area we want to increase automation while keeping communication at human level. @@ -220,7 +209,13 @@ The best part comes now. Replace the placeholder `[Human readable summary of cha Once you're happy, hit `Publish release`. This will kick off the [Publishing workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/publish.yml) and within a few minutes you should see the latest version in PyPi, and all issues labeled as `pending-release` will be notified. > TODO: Wait for @am29d new Lambda Layers pipeline work to complete, then add how Lambda Layers are published +### Run end to end tests + +In order to run end to end tests you need to install CDK CLI first and bootstrap your account with `cdk bootstrap` command. 
For additional details follow [documentation](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html).
+
+To run locally, export the `AWS_PROFILE` environment variable and run `make e2e-test`. To run from GitHub Actions, use [run-e2e-tests workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/run-e2e-tests.yml) and pick the branch you want to run tests against.
+
+**NOTE**: E2E tests are run as part of each merge to `develop` branch.
 
 ### Releasing a documentation hotfix
 
 You can rebuild the latest documentation without a full release via this [GitHub Actions Workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/rebuild_latest_docs.yml). Choose `Run workflow`, keep `develop` as the branch, and input the latest Powertools version available.
diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py
index 156877b8f35..f381274201a 100644
--- a/tests/e2e/logger/test_logger.py
+++ b/tests/e2e/logger/test_logger.py
@@ -1,8 +1,7 @@
 import boto3
 import pytest
-
-from .. import conftest
-from ..utils import helpers
+from e2e import conftest
+from e2e.utils import helpers
 
 
 @pytest.fixture(scope="module")
diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py
index 76c1b25285f..ed16eb9eec7 100644
--- a/tests/e2e/metrics/test_metrics.py
+++ b/tests/e2e/metrics/test_metrics.py
@@ -36,6 +36,6 @@ def test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutp
     )
 
     # THEN
-    assert metrics["Timestamps"] and len(metrics["Timestamps"]) == 1
-    assert metrics["Values"] and len(metrics["Values"]) == 1
-    assert metrics["Values"][0] == 1
+    assert metrics.get("Timestamps") and len(metrics.get("Timestamps")) == 1
+    assert metrics.get("Values") and len(metrics.get("Values")) == 1
+    assert metrics.get("Values") and metrics.get("Values")[0] == 1
diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py
index 57e954121e2..a22dbd9a321 100644
--- a/tests/e2e/tracer/test_tracer.py
+++ b/tests/e2e/tracer/test_tracer.py
@@ -3,9 +3,8 @@
 
 import boto3
 import pytest
-
-from .. 
import conftest -from ..utils import helpers +from e2e import conftest +from e2e.utils import helpers @pytest.fixture(scope="module") diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py index a51109d6b31..2fb80611f5f 100644 --- a/tests/e2e/utils/helpers.py +++ b/tests/e2e/utils/helpers.py @@ -3,6 +3,7 @@ from functools import lru_cache from typing import Dict, List, Optional, Union +from mypy_boto3_cloudwatch import type_defs from mypy_boto3_cloudwatch.client import CloudWatchClient from mypy_boto3_lambda.client import LambdaClient from mypy_boto3_xray.client import XRayClient @@ -63,7 +64,7 @@ def get_metrics( metric_name: str, service_name: str, end_date: Optional[datetime] = None, -): +) -> type_defs.MetricDataResultTypeDef: response = cw_client.get_metric_data( MetricDataQueries=[ { From 9db0dcdff1debe2f9494888b5838dbc8ac507e05 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Tue, 12 Jul 2022 18:19:30 +0200 Subject: [PATCH 68/72] PR suggestions applied --- tests/e2e/conftest.py | 8 +++-- tests/e2e/logger/test_logger.py | 42 ++++++++-------------- tests/e2e/metrics/test_metrics.py | 7 ++-- tests/e2e/tracer/handlers/basic_handler.py | 2 ++ tests/e2e/tracer/test_tracer.py | 8 ++--- tests/e2e/utils/helpers.py | 1 + 6 files changed, 30 insertions(+), 38 deletions(-) diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 2e0adfe4f13..4be6a26c6a6 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -30,8 +30,12 @@ class InfrastructureOutput: def get_lambda_arns(self) -> Dict[str, str]: return self.arns - def get_lambda_arn(self, name: str) -> Optional[str]: - return self.arns.get(name) + def get_lambda_function_arn(self, cf_output_name: str) -> Optional[str]: + return self.arns.get(cf_output_name) + + def get_lambda_function_name(self, cf_output_name: str) -> Optional[str]: + lambda_arn = self.get_lambda_function_arn(cf_output_name=cf_output_name) + return lambda_arn.split(":")[-1] if lambda_arn else None def get_lambda_execution_time(self) -> datetime.datetime: return self.execution_time diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index f381274201a..ea27b93740b 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -18,14 +18,12 @@ def config() -> conftest.LambdaConfig: def test_basic_lambda_logs_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): # GIVEN - lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN - filtered_logs = helpers.get_logs( - lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client - ) + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) # THEN assert any( @@ -39,14 +37,12 @@ def test_basic_lambda_no_debug_logs_visible( execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig ): # GIVEN - lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN - filtered_logs = helpers.get_logs( - lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client - ) + filtered_logs = 
helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) # THEN assert not any( @@ -65,14 +61,12 @@ def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.Infrastruc "cold_start", ) - lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN - filtered_logs = helpers.get_logs( - lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client - ) + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) # THEN assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) @@ -82,14 +76,12 @@ def test_basic_lambda_additional_key_persistence_basic_lambda( execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig ): # GIVEN - lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN - filtered_logs = helpers.get_logs( - lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client - ) + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) # THEN assert any( @@ -103,14 +95,12 @@ def test_basic_lambda_additional_key_persistence_basic_lambda( def test_basic_lambda_empty_event_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN - filtered_logs = helpers.get_logs( - lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client - ) + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) # THEN assert any(log.message == {} for log in filtered_logs) @@ -127,14 +117,12 @@ def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.I "cold_start", ) - lambda_arn = execute_lambda.get_lambda_arn(name="nocontexthandlerarn") + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn") timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN - filtered_logs = helpers.get_logs( - lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client - ) + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) # THEN assert not any(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_missing_keys) @@ -143,14 +131,12 @@ def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.I def test_no_context_lambda_event_not_logged(execute_lambda: conftest.InfrastructureOutput): # GIVEN - lambda_arn = execute_lambda.get_lambda_arn(name="nocontexthandlerarn") + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn") timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") # WHEN - 
filtered_logs = helpers.get_logs( - lambda_function_name=lambda_arn.split(":")[-1], start_time=timestamp, log_client=cw_client - ) + filtered_logs = helpers.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) # THEN assert not any(log.message == {} for log in filtered_logs) diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index ed16eb9eec7..7d3aa7efa61 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -3,9 +3,8 @@ import boto3 import pytest - -from .. import conftest -from ..utils import helpers +from e2e import conftest +from e2e.utils import helpers @pytest.fixture(scope="module") @@ -15,7 +14,7 @@ def config() -> conftest.LambdaConfig: "environment_variables": { "POWERTOOLS_METRICS_NAMESPACE": "powertools-e2e-metric", "POWERTOOLS_SERVICE_NAME": "test-powertools-service", - "METRIC_NAME": f"business-metric-{uuid.uuid4()}", + "METRIC_NAME": f"business-metric-{str(uuid.uuid4()).replace('-','_')}", }, } diff --git a/tests/e2e/tracer/handlers/basic_handler.py b/tests/e2e/tracer/handlers/basic_handler.py index 557397ea62a..d074b30796f 100644 --- a/tests/e2e/tracer/handlers/basic_handler.py +++ b/tests/e2e/tracer/handlers/basic_handler.py @@ -13,11 +13,13 @@ @tracer.capture_lambda_handler def lambda_handler(event: dict, context: LambdaContext): + tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_VALUE) return asyncio.run(collect_payment()) @tracer.capture_method async def collect_payment() -> str: + tracer.put_annotation(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE) tracer.put_metadata(key=ANNOTATION_KEY, value=ANNOTATION_ASYNC_VALUE) return "success" diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index a22dbd9a321..c2af4386749 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -8,11 +8,11 @@ @pytest.fixture(scope="module") -def config(): +def config() -> conftest.LambdaConfig: return { "parameters": {"tracing": "ACTIVE"}, "environment_variables": { - "ANNOTATION_KEY": f"e2e-tracer-{uuid.uuid4()}", + "ANNOTATION_KEY": f"e2e-tracer-{str(uuid.uuid4()).replace('-','_')}", "ANNOTATION_VALUE": "stored", "ANNOTATION_ASYNC_VALUE": "payments", }, @@ -21,10 +21,10 @@ def config(): def test_basic_lambda_async_trace_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): # GIVEN - lambda_arn = execute_lambda.get_lambda_arn(name="basichandlerarn") + lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") start_date = execute_lambda.get_lambda_execution_time() end_date = start_date + datetime.timedelta(minutes=5) - trace_filter_exporession = f'service("{lambda_arn.split(":")[-1]}")' + trace_filter_exporession = f'service("{lambda_name}")' # WHEN trace = helpers.get_traces( diff --git a/tests/e2e/utils/helpers.py b/tests/e2e/utils/helpers.py index 2fb80611f5f..3f88f44f933 100644 --- a/tests/e2e/utils/helpers.py +++ b/tests/e2e/utils/helpers.py @@ -107,6 +107,7 @@ def get_traces(filter_expression: str, xray_client: XRayClient, start_date: date def find_trace_additional_info(trace: Dict) -> List[TraceSegment]: + """Find all trace annotations and metadata and return them to the caller""" info = [] for segment in trace["Traces"][0]["Segments"]: document = json.loads(segment["Document"]) From 616c830f884c4e5e474410ac7b3bd5abed63dacc Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Tue, 12 Jul 2022 
18:34:37 +0200 Subject: [PATCH 69/72] Fix makefile coverage html to not include e2e tests --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index ff10e12705a..fb7457d9908 100644 --- a/Makefile +++ b/Makefile @@ -33,7 +33,7 @@ e2e-test: poetry run pytest -rP -n 3 --dist loadscope --durations=0 --durations-min=1 tests/e2e coverage-html: - poetry run pytest -m "not (perf or e2e)" --cov=aws_lambda_powertools --cov-report=html + poetry run pytest -m "not perf" --ignore tests/e2e --cov=aws_lambda_powertools --cov-report=html pre-commit: pre-commit run --show-diff-on-failure From 6e37fcefdf239ea15d955c42d29209dcfb25eac7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Jul 2022 21:25:45 +0100 Subject: [PATCH 70/72] chore(deps-dev): bump mypy-boto3-dynamodb from 1.24.12 to 1.24.27 (#1293) Bumps [mypy-boto3-dynamodb](https://github.com/youtype/mypy_boto3_builder) from 1.24.12 to 1.24.27. - [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) --- updated-dependencies: - dependency-name: mypy-boto3-dynamodb dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3068cf58295..81b70e885fe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -626,8 +626,8 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-boto3-dynamodb" -version = "1.24.12" -description = "Type annotations for boto3.DynamoDB 1.24.12 service generated with mypy-boto3-builder 7.7.1" +version = "1.24.27" +description = "Type annotations for boto3.DynamoDB 1.24.27 service generated with mypy-boto3-builder 7.6.0" category = "dev" optional = false python-versions = ">=3.6" @@ -1125,7 +1125,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "3f3f95ab1a8cf1351639687362e20bd26b784330d309f49a2f5a307682fe5879" +content-hash = "6af7b54d63191efc8278be9ea2c5faf3a539b7e8ed94eb57e595e5a271e7da62" [metadata.files] atomicwrites = [ @@ -1458,8 +1458,8 @@ mypy-boto3-appconfig = [ {file = "mypy_boto3_appconfig-1.24.0-py3-none-any.whl", hash = "sha256:ca53b0b9606f13257dd0feb800d36531f2eba54f46bd9db7765f69baf9583485"}, ] mypy-boto3-dynamodb = [ - {file = "mypy-boto3-dynamodb-1.24.12.tar.gz", hash = "sha256:4fc6f0f84988ae3d307a43ec31930483828b199f1179cb801238c16cd8be5901"}, - {file = "mypy_boto3_dynamodb-1.24.12-py3-none-any.whl", hash = "sha256:7ad9aa9c23a9f90d0aa0018df3a975e6c1da32f76c11aef60bf1a49cfca840cc"}, + {file = "mypy-boto3-dynamodb-1.24.27.tar.gz", hash = "sha256:c982d24f9b2525a70f408ad40eff69660d56928217597d88860b60436b25efbf"}, + {file = "mypy_boto3_dynamodb-1.24.27-py3-none-any.whl", hash = "sha256:63f7d9755fc5cf2e637edf8d33024050152a53013d1a102716ae0d534563ef07"}, ] mypy-boto3-secretsmanager = [ {file = "mypy-boto3-secretsmanager-1.24.11.post3.tar.gz", hash = "sha256:f153b3f5ff2c65664a906fb2c97a6598a57da9f1da77679dbaf541051dcff36e"}, diff --git a/pyproject.toml b/pyproject.toml index b665a760889..c5dbb3306d6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ mkdocs-material = "^8.2.7" mypy-boto3-secretsmanager = "^1.24.11" mypy-boto3-ssm = "^1.24.0" mypy-boto3-appconfig = 
"^1.24.0" -mypy-boto3-dynamodb = "^1.24.12" +mypy-boto3-dynamodb = "^1.24.27" pytest-benchmark = "^3.4.1" From 0dffaf3f87e21a00bb09601e101a0e028d9aaaf3 Mon Sep 17 00:00:00 2001 From: Michal Ploski Date: Thu, 14 Jul 2022 10:56:04 +0200 Subject: [PATCH 71/72] Add docstring about using custom s3 asset uploader instead of cdk-assets --- tests/e2e/utils/infrastructure.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index eccb58fa56d..967dcbf0760 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -137,6 +137,13 @@ def delete(self): self.cf_client.delete_stack(StackName=self.stack_name) def _upload_assets(self, asset_root_dir: str, asset_manifest_file: str): + """ + This method is drop-in replacement for cdk-assets package s3 upload part. + https://www.npmjs.com/package/cdk-assets. + We use custom solution to avoid dependencies from nodejs ecosystem. + We follow the same design cdk-assets follows: + https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md. + """ assets = self._find_assets(asset_manifest_file, self.account_id, self.region) From 3610d08e5a12a8c26ce1d6fb48770d86093b3198 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Thu, 14 Jul 2022 11:27:11 +0200 Subject: [PATCH 72/72] chore: typo --- tests/e2e/utils/infrastructure.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 967dcbf0760..001ae0e6346 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -141,7 +141,7 @@ def _upload_assets(self, asset_root_dir: str, asset_manifest_file: str): This method is drop-in replacement for cdk-assets package s3 upload part. https://www.npmjs.com/package/cdk-assets. We use custom solution to avoid dependencies from nodejs ecosystem. - We follow the same design cdk-assets follows: + We follow the same design cdk-assets: https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md. """