diff --git a/.github/workflows/test-integration-asyncpg.yml b/.github/workflows/test-integration-asyncpg.yml
new file mode 100644
index 0000000000..5340d40cef
--- /dev/null
+++ b/.github/workflows/test-integration-asyncpg.yml
@@ -0,0 +1,102 @@
+name: Test asyncpg
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: asyncpg, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 30
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+
+      - name: Test asyncpg
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 15
+          max_attempts: 2
+          retry_wait_seconds: 5
+          shell: bash
+          command: |
+            set -x # print commands that are executed
+            coverage erase
+
+            # Run tests
+            ./scripts/runtox.sh "py${{ matrix.python-version }}-asyncpg" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch &&
+            coverage combine .coverage* &&
+            coverage xml -i
+
+      - uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+
+
+  check_required_tests:
+    name: All asyncpg tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/scripts/split-tox-gh-actions/split-tox-gh-actions.py b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
index c216534d31..87759462bb 100755
--- a/scripts/split-tox-gh-actions/split-tox-gh-actions.py
+++ b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
@@ -30,7 +30,10 @@
 TEMPLATE_SNIPPET_TEST = TEMPLATE_DIR / "ci-yaml-test-snippet.txt"
 TEMPLATE_SNIPPET_TEST_PY27 = TEMPLATE_DIR / "ci-yaml-test-py27-snippet.txt"
 
-FRAMEWORKS_NEEDING_POSTGRES = ["django"]
+FRAMEWORKS_NEEDING_POSTGRES = [
+    "django",
+    "asyncpg",
+]
 
 MATRIX_DEFINITION = """
     strategy:
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 8be1be3da7..4cd1916439 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -74,6 +74,13 @@ class SPANDATA:
     Example: myDatabase
     """
 
+    DB_USER = "db.user"
+    """
+    The name of the database user used for connecting to the database.
+    See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md
+    Example: my_user
+    """
+
     DB_OPERATION = "db.operation"
     """
     The name of the operation being executed, e.g. the MongoDB command name such as findAndModify, or the SQL keyword.
diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py
new file mode 100644
index 0000000000..8262b2efab
--- /dev/null
+++ b/sentry_sdk/integrations/asyncpg.py
@@ -0,0 +1,202 @@
+from __future__ import annotations
+import contextlib
+from typing import Any, TypeVar, Callable, Awaitable, Iterator
+
+from asyncpg.cursor import BaseCursor  # type: ignore
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.tracing import Span
+from sentry_sdk.tracing_utils import record_sql_queries
+from sentry_sdk.utils import parse_version, capture_internal_exceptions
+
+try:
+    import asyncpg  # type: ignore[import]
+
+except ImportError:
+    raise DidNotEnable("asyncpg not installed.")
+
+# asyncpg.__version__ is a string containing the semantic version in the form of "<major>.<minor>.<patch>"
+asyncpg_version = parse_version(asyncpg.__version__)
+
+if asyncpg_version is not None and asyncpg_version < (0, 23, 0):
+    raise DidNotEnable("asyncpg >= 0.23.0 required")
+
+
+class AsyncPGIntegration(Integration):
+    identifier = "asyncpg"
+    _record_params = False
+
+    def __init__(self, *, record_params: bool = False):
+        AsyncPGIntegration._record_params = record_params
+
+    @staticmethod
+    def setup_once() -> None:
+        asyncpg.Connection.execute = _wrap_execute(
+            asyncpg.Connection.execute,
+        )
+
+        asyncpg.Connection._execute = _wrap_connection_method(
+            asyncpg.Connection._execute
+        )
+        asyncpg.Connection._executemany = _wrap_connection_method(
+            asyncpg.Connection._executemany, executemany=True
+        )
+        asyncpg.Connection.cursor = _wrap_cursor_creation(asyncpg.Connection.cursor)
+        asyncpg.Connection.prepare = _wrap_connection_method(asyncpg.Connection.prepare)
+        asyncpg.connect_utils._connect_addr = _wrap_connect_addr(
+            asyncpg.connect_utils._connect_addr
+        )
+
+
+T = TypeVar("T")
+
+
+def _wrap_execute(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
+    async def _inner(*args: Any, **kwargs: Any) -> T:
+        hub = Hub.current
+        integration = hub.get_integration(AsyncPGIntegration)
+
+        # Avoid recording calls to _execute twice.
+        # Calls to Connection.execute with args also call
+        # Connection._execute, which is recorded separately
+        # args[0] = the connection object, args[1] is the query
+        if integration is None or len(args) > 2:
+            return await f(*args, **kwargs)
+
+        query = args[1]
+        with record_sql_queries(hub, None, query, None, None, executemany=False):
+            res = await f(*args, **kwargs)
+        return res
+
+    return _inner
+
+
+SubCursor = TypeVar("SubCursor", bound=BaseCursor)
+
+
+@contextlib.contextmanager
+def _record(
+    hub: Hub,
+    cursor: SubCursor | None,
+    query: str,
+    params_list: tuple[Any, ...] | None,
+    *,
+    executemany: bool = False,
+) -> Iterator[Span]:
+    integration = hub.get_integration(AsyncPGIntegration)
+    if not integration._record_params:
+        params_list = None
+
+    param_style = "pyformat" if params_list else None
+
+    with record_sql_queries(
+        hub,
+        cursor,
+        query,
+        params_list,
+        param_style,
+        executemany=executemany,
+        record_cursor_repr=cursor is not None,
+    ) as span:
+        yield span
+
+
+def _wrap_connection_method(
+    f: Callable[..., Awaitable[T]], *, executemany: bool = False
+) -> Callable[..., Awaitable[T]]:
+    async def _inner(*args: Any, **kwargs: Any) -> T:
+        hub = Hub.current
+        integration = hub.get_integration(AsyncPGIntegration)
+
+        if integration is None:
+            return await f(*args, **kwargs)
+
+        query = args[1]
+        params_list = args[2] if len(args) > 2 else None
+        with _record(hub, None, query, params_list, executemany=executemany) as span:
+            _set_db_data(span, args[0])
+            res = await f(*args, **kwargs)
+        return res
+
+    return _inner
+
+
+def _wrap_cursor_creation(f: Callable[..., T]) -> Callable[..., T]:
+    def _inner(*args: Any, **kwargs: Any) -> T:  # noqa: N807
+        hub = Hub.current
+        integration = hub.get_integration(AsyncPGIntegration)
+
+        if integration is None:
+            return f(*args, **kwargs)
+
+        query = args[1]
+        params_list = args[2] if len(args) > 2 else None
+
+        with _record(
+            hub,
+            None,
+            query,
+            params_list,
+            executemany=False,
+        ) as span:
+            _set_db_data(span, args[0])
+            res = f(*args, **kwargs)
+            span.set_data("db.cursor", res)
+
+        return res
+
+    return _inner
+
+
+def _wrap_connect_addr(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
+    async def _inner(*args: Any, **kwargs: Any) -> T:
+        hub = Hub.current
+        integration = hub.get_integration(AsyncPGIntegration)
+
+        if integration is None:
+            return await f(*args, **kwargs)
+
+        user = kwargs["params"].user
+        database = kwargs["params"].database
+
+        with hub.start_span(op=OP.DB, description="connect") as span:
+            span.set_data(SPANDATA.DB_SYSTEM, "postgresql")
+            addr = kwargs.get("addr")
+            if addr:
+                try:
+                    span.set_data(SPANDATA.SERVER_ADDRESS, addr[0])
+                    span.set_data(SPANDATA.SERVER_PORT, addr[1])
+                except IndexError:
+                    pass
+            span.set_data(SPANDATA.DB_NAME, database)
+            span.set_data(SPANDATA.DB_USER, user)
+
+            with capture_internal_exceptions():
+                hub.add_breadcrumb(message="connect", category="query", data=span._data)
+            res = await f(*args, **kwargs)
+
+        return res
+
+    return _inner
+
+
+def _set_db_data(span: Span, conn: Any) -> None:
+    span.set_data(SPANDATA.DB_SYSTEM, "postgresql")
+
+    addr = conn._addr
+    if addr:
+        try:
+            span.set_data(SPANDATA.SERVER_ADDRESS, addr[0])
+            span.set_data(SPANDATA.SERVER_PORT, addr[1])
+        except IndexError:
+            pass
+
+    database = conn._params.database
+    if database:
+        span.set_data(SPANDATA.DB_NAME, database)
+
+    user = conn._params.user
+    if user:
+        span.set_data(SPANDATA.DB_USER, user)
diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py
index fca416028b..40ae525bbe 100644
--- a/sentry_sdk/tracing_utils.py
+++ b/sentry_sdk/tracing_utils.py
@@ -107,6 +107,7 @@ def record_sql_queries(
     params_list,  # type: Any
     paramstyle,  # type: Optional[str]
     executemany,  # type: bool
+    record_cursor_repr=False,  # type: bool
 ):
     # type: (...) -> Generator[sentry_sdk.tracing.Span, None, None]
 
@@ -132,6 +133,8 @@
         data["db.paramstyle"] = paramstyle
     if executemany:
         data["db.executemany"] = True
+    if record_cursor_repr and cursor is not None:
+        data["db.cursor"] = cursor
 
     with capture_internal_exceptions():
         hub.add_breadcrumb(message=query, category="query", data=data)
diff --git a/setup.py b/setup.py
index b886dab6f2..f7ed4f4026 100644
--- a/setup.py
+++ b/setup.py
@@ -46,6 +46,7 @@ def get_file_text(file_name):
     extras_require={
         "aiohttp": ["aiohttp>=3.5"],
        "arq": ["arq>=0.23"],
+        "asyncpg": ["asyncpg>=0.23"],
        "beam": ["apache-beam>=2.12"],
        "bottle": ["bottle>=0.12.13"],
        "celery": ["celery>=3"],
diff --git a/tests/integrations/asyncpg/__init__.py b/tests/integrations/asyncpg/__init__.py
new file mode 100644
index 0000000000..b0e360057e
--- /dev/null
+++ b/tests/integrations/asyncpg/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("asyncpg")
diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py
new file mode 100644
index 0000000000..89dcb2595b
--- /dev/null
+++ b/tests/integrations/asyncpg/test_asyncpg.py
@@ -0,0 +1,458 @@
+"""
+Tests need pytest-asyncio installed.
+
+Tests need a local PostgreSQL instance running; this can best be done using
+```sh
+docker run --rm --name some-postgres -e POSTGRES_USER=foo -e POSTGRES_PASSWORD=bar -d -p 5432:5432 postgres
+```
+
+The tests use the following credentials to establish a database connection.
+""" +import os + + +PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres") +PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "foo") +PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "bar") +PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost") +PG_PORT = 5432 + + +import datetime + +import asyncpg +import pytest +from asyncpg import connect, Connection + +from sentry_sdk import capture_message +from sentry_sdk.integrations.asyncpg import AsyncPGIntegration +from tests.integrations.asgi import pytest_asyncio + + +PG_CONNECTION_URI = f"postgresql://{PG_USER}:{PG_PASSWORD}@{PG_HOST}/{PG_NAME}" +CRUMBS_CONNECT = { + "category": "query", + "data": { + "db.name": "postgres", + "db.system": "postgresql", + "db.user": "foo", + "server.address": "localhost", + "server.port": 5432, + }, + "message": "connect", + "type": "default", +} + + +@pytest_asyncio.fixture(autouse=True) +async def _clean_pg(): + conn = await connect(PG_CONNECTION_URI) + await conn.execute("DROP TABLE IF EXISTS users") + await conn.execute( + """ + CREATE TABLE users( + id serial PRIMARY KEY, + name text, + password text, + dob date + ) + """ + ) + await conn.close() + + +@pytest.mark.asyncio +async def test_connect(sentry_init, capture_events) -> None: + sentry_init( + integrations=[AsyncPGIntegration()], + _experiments={"record_sql_params": True}, + ) + events = capture_events() + + conn: Connection = await connect(PG_CONNECTION_URI) + + await conn.close() + + capture_message("hi") + + (event,) = events + + for crumb in event["breadcrumbs"]["values"]: + del crumb["timestamp"] + + assert event["breadcrumbs"]["values"] == [CRUMBS_CONNECT] + + +@pytest.mark.asyncio +async def test_execute(sentry_init, capture_events) -> None: + sentry_init( + integrations=[AsyncPGIntegration()], + _experiments={"record_sql_params": True}, + ) + events = capture_events() + + conn: Connection = await connect(PG_CONNECTION_URI) + + await conn.execute( + "INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')", + ) + + await conn.execute( + "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", + "Bob", + "secret_pw", + datetime.date(1984, 3, 1), + ) + + row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob") + assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1)) + + row = await conn.fetchrow("SELECT * FROM users WHERE name = 'Bob'") + assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1)) + + await conn.close() + + capture_message("hi") + + (event,) = events + + for crumb in event["breadcrumbs"]["values"]: + del crumb["timestamp"] + + assert event["breadcrumbs"]["values"] == [ + CRUMBS_CONNECT, + { + "category": "query", + "data": {}, + "message": "INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')", + "type": "default", + }, + { + "category": "query", + "data": {}, + "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", + "type": "default", + }, + { + "category": "query", + "data": {}, + "message": "SELECT * FROM users WHERE name = $1", + "type": "default", + }, + { + "category": "query", + "data": {}, + "message": "SELECT * FROM users WHERE name = 'Bob'", + "type": "default", + }, + ] + + +@pytest.mark.asyncio +async def test_execute_many(sentry_init, capture_events) -> None: + sentry_init( + integrations=[AsyncPGIntegration()], + _experiments={"record_sql_params": True}, + ) + events = capture_events() + + conn: Connection = await connect(PG_CONNECTION_URI) + + await conn.executemany( + 
"INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", + [ + ("Bob", "secret_pw", datetime.date(1984, 3, 1)), + ("Alice", "pw", datetime.date(1990, 12, 25)), + ], + ) + + await conn.close() + + capture_message("hi") + + (event,) = events + + for crumb in event["breadcrumbs"]["values"]: + del crumb["timestamp"] + + assert event["breadcrumbs"]["values"] == [ + CRUMBS_CONNECT, + { + "category": "query", + "data": {"db.executemany": True}, + "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", + "type": "default", + }, + ] + + +@pytest.mark.asyncio +async def test_record_params(sentry_init, capture_events) -> None: + sentry_init( + integrations=[AsyncPGIntegration(record_params=True)], + _experiments={"record_sql_params": True}, + ) + events = capture_events() + + conn: Connection = await connect(PG_CONNECTION_URI) + + await conn.execute( + "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", + "Bob", + "secret_pw", + datetime.date(1984, 3, 1), + ) + + await conn.close() + + capture_message("hi") + + (event,) = events + + for crumb in event["breadcrumbs"]["values"]: + del crumb["timestamp"] + + assert event["breadcrumbs"]["values"] == [ + CRUMBS_CONNECT, + { + "category": "query", + "data": { + "db.params": ["Bob", "secret_pw", "datetime.date(1984, 3, 1)"], + "db.paramstyle": "format", + }, + "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", + "type": "default", + }, + ] + + +@pytest.mark.asyncio +async def test_cursor(sentry_init, capture_events) -> None: + sentry_init( + integrations=[AsyncPGIntegration()], + _experiments={"record_sql_params": True}, + ) + events = capture_events() + + conn: Connection = await connect(PG_CONNECTION_URI) + + await conn.executemany( + "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", + [ + ("Bob", "secret_pw", datetime.date(1984, 3, 1)), + ("Alice", "pw", datetime.date(1990, 12, 25)), + ], + ) + + async with conn.transaction(): + # Postgres requires non-scrollable cursors to be created + # and used in a transaction. + async for record in conn.cursor( + "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1) + ): + print(record) + + await conn.close() + + capture_message("hi") + + (event,) = events + + for crumb in event["breadcrumbs"]["values"]: + del crumb["timestamp"] + + assert event["breadcrumbs"]["values"] == [ + CRUMBS_CONNECT, + { + "category": "query", + "data": {"db.executemany": True}, + "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", + "type": "default", + }, + {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "SELECT * FROM users WHERE dob > $1", + "type": "default", + }, + {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"}, + ] + + +@pytest.mark.asyncio +async def test_cursor_manual(sentry_init, capture_events) -> None: + sentry_init( + integrations=[AsyncPGIntegration()], + _experiments={"record_sql_params": True}, + ) + events = capture_events() + + conn: Connection = await connect(PG_CONNECTION_URI) + + await conn.executemany( + "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", + [ + ("Bob", "secret_pw", datetime.date(1984, 3, 1)), + ("Alice", "pw", datetime.date(1990, 12, 25)), + ], + ) + # + async with conn.transaction(): + # Postgres requires non-scrollable cursors to be created + # and used in a transaction. 
+        cur = await conn.cursor(
+            "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1)
+        )
+        record = await cur.fetchrow()
+        print(record)
+        while await cur.forward(1):
+            record = await cur.fetchrow()
+            print(record)
+
+    await conn.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        CRUMBS_CONNECT,
+        {
+            "category": "query",
+            "data": {"db.executemany": True},
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+        {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"},
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT * FROM users WHERE dob > $1",
+            "type": "default",
+        },
+        {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"},
+    ]
+
+
+@pytest.mark.asyncio
+async def test_prepared_stmt(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    conn: Connection = await connect(PG_CONNECTION_URI)
+
+    await conn.executemany(
+        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+        [
+            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
+            ("Alice", "pw", datetime.date(1990, 12, 25)),
+        ],
+    )
+
+    stmt = await conn.prepare("SELECT * FROM users WHERE name = $1")
+
+    print(await stmt.fetchval("Bob"))
+    print(await stmt.fetchval("Alice"))
+
+    await conn.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        CRUMBS_CONNECT,
+        {
+            "category": "query",
+            "data": {"db.executemany": True},
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT * FROM users WHERE name = $1",
+            "type": "default",
+        },
+    ]
+
+
+@pytest.mark.asyncio
+async def test_connection_pool(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    pool_size = 2
+
+    pool = await asyncpg.create_pool(
+        PG_CONNECTION_URI, min_size=pool_size, max_size=pool_size
+    )
+
+    async with pool.acquire() as conn:
+        await conn.execute(
+            "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "Bob",
+            "secret_pw",
+            datetime.date(1984, 3, 1),
+        )
+
+    async with pool.acquire() as conn:
+        row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob")
+        assert row == (1, "Bob", "secret_pw", datetime.date(1984, 3, 1))
+
+    await pool.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        # The connection pool opens pool_size connections so we have the crumbs pool_size times
+        *[CRUMBS_CONNECT] * pool_size,
+        {
+            "category": "query",
+            "data": {},
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT pg_advisory_unlock_all();\n"
+            "CLOSE ALL;\n"
+            "UNLISTEN *;\n"
+            "RESET ALL;",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT * FROM users WHERE name = $1",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT pg_advisory_unlock_all();\n"
+            "CLOSE ALL;\n"
+ "UNLISTEN *;\n" + "RESET ALL;", + "type": "default", + }, + ] diff --git a/tox.ini b/tox.ini index 09dae82849..e6f636a177 100644 --- a/tox.ini +++ b/tox.ini @@ -28,6 +28,9 @@ envlist = # Asgi {py3.7,py3.8,py3.9,py3.10,py3.11}-asgi + # asyncpg + {py3.7,py3.8,py3.9,py3.10,py3.11}-asyncpg + # AWS Lambda # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions. {py3.7}-aws_lambda @@ -188,6 +191,10 @@ deps = asgi: pytest-asyncio asgi: async-asgi-testclient + # Asyncpg + asyncpg: pytest-asyncio + asyncpg: asyncpg + # AWS Lambda aws_lambda: boto3 @@ -455,6 +462,7 @@ setenv = aiohttp: TESTPATH=tests/integrations/aiohttp arq: TESTPATH=tests/integrations/arq asgi: TESTPATH=tests/integrations/asgi + asyncpg: TESTPATH=tests/integrations/asyncpg aws_lambda: TESTPATH=tests/integrations/aws_lambda beam: TESTPATH=tests/integrations/beam boto3: TESTPATH=tests/integrations/boto3