From 95c4231ee5c354cc6ddb1dbfb218a893c30ccb41 Mon Sep 17 00:00:00 2001 From: Anton Agestam Date: Wed, 31 May 2023 12:49:03 +0200 Subject: [PATCH] fix: Run mypy with dependencies Due to not having dependencies installed, we had hidden type errors. Maintaining dependencies separately for pre-commit becomes unwieldy very fast, and is not a good option in my experience. This commit changes so that we instead run mypy in a GitHub Actions job where we can install runtime dependencies along with typing-specific dependencies. Another rejected alternative is to install from requirements files within pre-commit. This also isn't a good alternative and is strongly discouraged by the pre-commit author, because pre-commit has no notion of external dependency files, and couldn't be expected to maintain reproducible environments for mypy. This commit also fixes the type errors revealed by running with dependencies. --- .github/workflows/lint.yml | 14 +++- .pre-commit-config.yaml | 6 -- GNUmakefile | 9 ++- karapace/avro_dataclasses/models.py | 4 +- karapace/compatibility/__init__.py | 10 ++- karapace/compatibility/jsonschema/checks.py | 90 +++++++++++++++------ karapace/compatibility/protobuf/checks.py | 3 +- karapace/karapace.py | 3 +- karapace/schema_models.py | 18 ++--- mypy.ini | 15 ++++ requirements/requirements-dev.txt | 8 +- requirements/requirements-typing.in | 6 ++ requirements/requirements-typing.txt | 30 +++++++ requirements/requirements.txt | 4 +- 14 files changed, 167 insertions(+), 53 deletions(-) create mode 100644 requirements/requirements-typing.in create mode 100644 requirements/requirements-typing.txt diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 5a1c55743..66c1d507e 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -25,10 +25,22 @@ jobs: cache: pip python-version: '3.11' # required for pylint - - run: make version + - run: make karapace/version.py - run: pip install pre-commit - uses: actions/cache@v3 with: 
path: ~/.cache/pre-commit key: pre-commit-3|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }} - run: pre-commit run --all-files --show-diff-on-failure + + type-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + cache: pip + python-version: '3.11' + - run: pip install -r requirements/requirements.txt -r requirements/requirements-typing.txt + - run: make karapace/version.py + - run: mypy diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0427dfd77..8af01f93a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -60,12 +60,6 @@ repos: hooks: - id: flake8 -- repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.2.0 - hooks: - - id: mypy - pass_filenames: false - - repo: https://github.com/hadolint/hadolint rev: v2.12.0 hooks: diff --git a/GNUmakefile b/GNUmakefile index ffb04e279..0eb6d33d4 100644 --- a/GNUmakefile +++ b/GNUmakefile @@ -46,10 +46,12 @@ venv/.deps: requirements/requirements-dev.txt requirements/requirements.txt | ve $(PIP) check touch '$(@)' + +karapace/version.py: + $(PYTHON) version.py + .PHONY: version -version: karapace/version.py -karapace/version.py: version.py | venv/.make - $(PYTHON) '$(<)' '$(@)' +version: venv/.make | karapace/version.py .PHONY: test tests: unit-tests integration-tests @@ -86,6 +88,7 @@ requirements: pip install --upgrade pip setuptools pip-tools cd requirements && pip-compile --upgrade --resolver=backtracking requirements.in cd requirements && pip-compile --upgrade --resolver=backtracking requirements-dev.in + cd requirements && pip-compile --upgrade --resolver=backtracking requirements-typing.in .PHONY: schema schema: against := origin/main diff --git a/karapace/avro_dataclasses/models.py b/karapace/avro_dataclasses/models.py index 4d96fea68..dc9d09af8 100644 --- a/karapace/avro_dataclasses/models.py +++ b/karapace/avro_dataclasses/models.py @@ -126,7 +126,9 @@ def parser_transformations(cls: 
type[DataclassInstance]) -> Mapping[str, Parser] T = TypeVar("T", bound=DataclassInstance) -def from_avro_dict(cls: type[T], data: Mapping[str, object]) -> T: +def from_avro_dict(cls: type[T], data: object) -> T: + if not isinstance(data, Mapping): + raise TypeError("Expected mapping") cls_transformations = parser_transformations(cls) return cls(**{key: cls_transformations.get(key, noop)(value) for key, value in data.items()}) diff --git a/karapace/compatibility/__init__.py b/karapace/compatibility/__init__.py index a8115ea63..e5f61e710 100644 --- a/karapace/compatibility/__init__.py +++ b/karapace/compatibility/__init__.py @@ -69,7 +69,9 @@ def check_protobuf_compatibility(reader: ProtobufSchema, writer: ProtobufSchema) def check_compatibility( - old_schema: ParsedTypedSchema, new_schema: ValidatedTypedSchema, compatibility_mode: CompatibilityModes + old_schema: ParsedTypedSchema, + new_schema: ValidatedTypedSchema, + compatibility_mode: CompatibilityModes, ) -> SchemaCompatibilityResult: """Check that `old_schema` and `new_schema` are compatible under `compatibility_mode`.""" if compatibility_mode is CompatibilityModes.NONE: @@ -84,6 +86,8 @@ def check_compatibility( ) if old_schema.schema_type is SchemaType.AVRO: + assert isinstance(old_schema.schema, AvroSchema) + assert isinstance(new_schema.schema, AvroSchema) if compatibility_mode in {CompatibilityModes.BACKWARD, CompatibilityModes.BACKWARD_TRANSITIVE}: result = check_avro_compatibility( reader_schema=new_schema.schema, @@ -110,6 +114,8 @@ def check_compatibility( ) elif old_schema.schema_type is SchemaType.JSONSCHEMA: + assert isinstance(old_schema.schema, Draft7Validator) + assert isinstance(new_schema.schema, Draft7Validator) if compatibility_mode in {CompatibilityModes.BACKWARD, CompatibilityModes.BACKWARD_TRANSITIVE}: result = check_jsonschema_compatibility( reader=new_schema.schema, @@ -136,6 +142,8 @@ def check_compatibility( ) elif old_schema.schema_type is SchemaType.PROTOBUF: + assert 
isinstance(old_schema.schema, ProtobufSchema) + assert isinstance(new_schema.schema, ProtobufSchema) if compatibility_mode in {CompatibilityModes.BACKWARD, CompatibilityModes.BACKWARD_TRANSITIVE}: result = check_protobuf_compatibility( reader=new_schema.schema, diff --git a/karapace/compatibility/jsonschema/checks.py b/karapace/compatibility/jsonschema/checks.py index a94a8507b..483da7f85 100644 --- a/karapace/compatibility/jsonschema/checks.py +++ b/karapace/compatibility/jsonschema/checks.py @@ -2,6 +2,8 @@ Copyright (c) 2023 Aiven Ltd See LICENSE for details """ +from __future__ import annotations + from avro.compatibility import merge, SchemaCompatibilityResult, SchemaCompatibilityType, SchemaIncompatibilityType from dataclasses import dataclass from itertools import product @@ -32,7 +34,7 @@ normalize_schema, schema_from_partially_open_content_model, ) -from typing import Any, List, Optional +from typing import Any import networkx as nx @@ -133,20 +135,26 @@ def type_mismatch( reader_type: JSONSCHEMA_TYPES, writer_type: JSONSCHEMA_TYPES, - location: List[str], + location: list[str], ) -> SchemaCompatibilityResult: locations = "/".join(location) if len(location) > 1: # Remove ROOT_REFERENCE_TOKEN locations = locations[1:] return SchemaCompatibilityResult( compatibility=SchemaCompatibilityType.incompatible, - incompatibilities=[Incompatibility.type_changed], + # TODO: https://github.com/aiven/karapace/issues/633 + incompatibilities=[Incompatibility.type_changed], # type: ignore[list-item] locations={locations}, messages={f"type {reader_type} is not compatible with type {writer_type}"}, ) -def count_uniquely_compatible_schemas(reader_type: Instance, reader_schema, writer_schema, location: List[str]) -> int: +def count_uniquely_compatible_schemas( + reader_type: Instance, + reader_schema, + writer_schema, + location: list[str], +) -> int: # allOf/anyOf/oneOf subschemas do not enforce order, as a consequence the # new schema may change the order of the entries 
without breaking # compatibility. @@ -185,24 +193,27 @@ class Node: def incompatible_schema( - incompat_type: SchemaIncompatibilityType, message: str, location: List[str] + incompat_type: Incompatibility | SchemaIncompatibilityType, + message: str, + location: list[str], ) -> SchemaCompatibilityResult: locations = "/".join(location) if len(location) > 1: # Remove ROOT_REFERENCE_TOKEN locations = locations[1:] return SchemaCompatibilityResult( compatibility=SchemaCompatibilityType.incompatible, - incompatibilities=[incompat_type], + # TODO: https://github.com/aiven/karapace/issues/633 + incompatibilities=[incompat_type], # type: ignore[list-item] locations={locations}, messages={message}, ) -def is_incompatible(result: "SchemaCompatibilityResult") -> bool: +def is_incompatible(result: SchemaCompatibilityResult) -> bool: return result.compatibility is SchemaCompatibilityType.incompatible -def is_compatible(result: "SchemaCompatibilityResult") -> bool: +def is_compatible(result: SchemaCompatibilityResult) -> bool: return result.compatibility is SchemaCompatibilityType.compatible @@ -224,7 +235,7 @@ def check_simple_subschema( simplified_writer_schema: Any, original_reader_type: JSONSCHEMA_TYPES, original_writer_type: JSONSCHEMA_TYPES, - location: List[str], + location: list[str], ) -> SchemaCompatibilityResult: rec_result = compatibility_rec(simplified_reader_schema, simplified_writer_schema, location) if is_compatible(rec_result): @@ -233,7 +244,9 @@ def check_simple_subschema( def compatibility_rec( - reader_schema: Optional[Any], writer_schema: Optional[Any], location: List[str] + reader_schema: Any | None, + writer_schema: Any | None, + location: list[str], ) -> SchemaCompatibilityResult: if introduced_constraint(reader_schema, writer_schema): return incompatible_schema( @@ -324,7 +337,10 @@ def compatibility_rec( def check_assertion_compatibility( - reader_schema, writer_schema, assertion_check: AssertionCheck, location: List[str] + reader_schema, + 
writer_schema, + assertion_check: AssertionCheck, + location: list[str], ) -> SchemaCompatibilityResult: result = SchemaCompatibilityResult(SchemaCompatibilityType.compatible) @@ -355,7 +371,11 @@ def check_assertion_compatibility( return result -def compatibility_enum(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult: +def compatibility_enum( + reader_schema, + writer_schema, + location: list[str], +) -> SchemaCompatibilityResult: # https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.1.2 assert Keyword.ENUM.value in reader_schema, "types should have been previously checked" assert Keyword.ENUM.value in writer_schema, "types should have been previously checked" @@ -372,7 +392,11 @@ def compatibility_enum(reader_schema, writer_schema, location: List[str]) -> Sch return SchemaCompatibilityResult(SchemaCompatibilityType.compatible) -def compatibility_numerical(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult: +def compatibility_numerical( + reader_schema, + writer_schema, + location: list[str], +) -> SchemaCompatibilityResult: # https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2 result = SchemaCompatibilityResult(SchemaCompatibilityType.compatible) @@ -384,7 +408,7 @@ def compatibility_numerical(reader_schema, writer_schema, location: List[str]) - assert reader_is_number, "types should have been previously checked" assert writer_is_number, "types should have been previously checked" - checks: List[AssertionCheck] = [MAXIMUM_CHECK, MINIMUM_CHECK, EXCLUSIVE_MAXIMUM_CHECK, EXCLUSIVE_MINIMUM_CHECK] + checks: list[AssertionCheck] = [MAXIMUM_CHECK, MINIMUM_CHECK, EXCLUSIVE_MAXIMUM_CHECK, EXCLUSIVE_MINIMUM_CHECK] for assertion_check in checks: check_result = check_assertion_compatibility( reader_schema, @@ -430,14 +454,14 @@ def compatibility_numerical(reader_schema, writer_schema, location: List[str]) - return result -def 
compatibility_string(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult: +def compatibility_string(reader_schema, writer_schema, location: list[str]) -> SchemaCompatibilityResult: # https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.3 result = SchemaCompatibilityResult(SchemaCompatibilityType.compatible) assert get_type_of(reader_schema) == Instance.STRING, "types should have been previously checked" assert get_type_of(writer_schema) == Instance.STRING, "types should have been previously checked" - checks: List[AssertionCheck] = [MAX_LENGTH_CHECK, MIN_LENGTH_CHECK, PATTERN_CHECK] + checks: list[AssertionCheck] = [MAX_LENGTH_CHECK, MIN_LENGTH_CHECK, PATTERN_CHECK] for assertion_check in checks: check_result = check_assertion_compatibility( reader_schema, @@ -449,7 +473,11 @@ def compatibility_string(reader_schema, writer_schema, location: List[str]) -> S return result -def compatibility_array(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult: +def compatibility_array( + reader_schema, + writer_schema, + location: list[str], +) -> SchemaCompatibilityResult: # https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.4 reader_type = get_type_of(reader_schema) writer_type = get_type_of(writer_schema) @@ -554,7 +582,7 @@ def compatibility_array(reader_schema, writer_schema, location: List[str]) -> Sc rec_result = compatibility_rec(reader_additional_items, writer_additional_items, location_additional_items) result = merge(result, rec_result) - checks: List[AssertionCheck] = [MAX_ITEMS_CHECK, MIN_ITEMS_CHECK] + checks: list[AssertionCheck] = [MAX_ITEMS_CHECK, MIN_ITEMS_CHECK] for assertion_check in checks: check_result = check_assertion_compatibility( reader_schema, @@ -582,18 +610,26 @@ def compatibility_array(reader_schema, writer_schema, location: List[str]) -> Sc def add_incompatibility( - result: SchemaCompatibilityResult, incompat_type: 
SchemaIncompatibilityType, message: str, location: List[str] + result: SchemaCompatibilityResult, + incompat_type: Incompatibility, + message: str, + location: list[str], ) -> None: """Add an incompatibility, this will modify the object in-place.""" formatted_location = "/".join(location[1:] if len(location) > 1 else location) result.compatibility = SchemaCompatibilityType.incompatible - result.incompatibilities.append(incompat_type) + # TODO: https://github.com/aiven/karapace/issues/633 + result.incompatibilities.append(incompat_type) # type: ignore[arg-type] result.messages.add(message) result.locations.add(formatted_location) -def compatibility_object(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult: +def compatibility_object( + reader_schema, + writer_schema, + location: list[str], +) -> SchemaCompatibilityResult: # https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.5 result = SchemaCompatibilityResult(SchemaCompatibilityType.compatible) @@ -760,7 +796,7 @@ def compatibility_object(reader_schema, writer_schema, location: List[str]) -> S rec_result = compatibility_rec(reader_dependent_schema, writer_dependent_schema, location) result = merge(result, rec_result) - checks: List[AssertionCheck] = [MAX_PROPERTIES_CHECK, MIN_PROPERTIES_CHECK] + checks: list[AssertionCheck] = [MAX_PROPERTIES_CHECK, MIN_PROPERTIES_CHECK] for assertion_check in checks: check_result = check_assertion_compatibility( reader_schema, @@ -791,13 +827,17 @@ def compatibility_object(reader_schema, writer_schema, location: List[str]) -> S return result -def compatibility_subschemas(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult: +def compatibility_subschemas( + reader_schema, + writer_schema, + location: list[str], +) -> SchemaCompatibilityResult: # https://json-schema.org/draft/2020-12/json-schema-core.html#rfc.section.10 # pylint: disable=too-many-return-statements reader_subschemas_and_type = 
maybe_get_subschemas_and_type(reader_schema) writer_subschemas_and_type = maybe_get_subschemas_and_type(writer_schema) - reader_subschemas: Optional[List[Any]] + reader_subschemas: list[Any] | None reader_type: JSONSCHEMA_TYPES if reader_subschemas_and_type is not None: reader_subschemas = reader_subschemas_and_type[0] @@ -808,7 +848,7 @@ def compatibility_subschemas(reader_schema, writer_schema, location: List[str]) reader_type = get_type_of(reader_schema) reader_has_subschema = False - writer_subschemas: Optional[List[Any]] + writer_subschemas: list[Any] | None writer_type: JSONSCHEMA_TYPES if writer_subschemas_and_type is not None: writer_subschemas = writer_subschemas_and_type[0] diff --git a/karapace/compatibility/protobuf/checks.py b/karapace/compatibility/protobuf/checks.py index ca4068ad8..b463b72ce 100644 --- a/karapace/compatibility/protobuf/checks.py +++ b/karapace/compatibility/protobuf/checks.py @@ -24,7 +24,8 @@ def check_protobuf_schema_compatibility(reader: ProtobufSchema, writer: Protobuf return SchemaCompatibilityResult( compatibility=SchemaCompatibilityType.incompatible, - incompatibilities=list(incompatibilities), + # TODO: https://github.com/aiven/karapace/issues/633 + incompatibilities=incompatibilities, # type: ignore[arg-type] locations=set(locations), messages=set(messages), ) diff --git a/karapace/karapace.py b/karapace/karapace.py index 4457d9608..c06b6bcb5 100644 --- a/karapace/karapace.py +++ b/karapace/karapace.py @@ -7,6 +7,7 @@ from __future__ import annotations +from aiohttp.web_request import Request from functools import partial from http import HTTPStatus from karapace.config import Config @@ -77,7 +78,7 @@ def not_found(message: str, sub_code: int, content_type: str) -> NoReturn: async def root_get(self) -> NoReturn: self.r({}, "application/json") - async def health(self, _request: HTTPRequest) -> aiohttp.web.Response: + async def health(self, _request: Request) -> aiohttp.web.Response: resp: JsonObject = {"process_uptime_sec": 
int(time.monotonic() - self._process_start_time)} for hook in self.health_hooks: resp.update(await hook()) diff --git a/karapace/schema_models.py b/karapace/schema_models.py index 9aaa22da3..3858d6393 100644 --- a/karapace/schema_models.py +++ b/karapace/schema_models.py @@ -21,8 +21,8 @@ from karapace.schema_references import Reference from karapace.schema_type import SchemaType from karapace.typing import ResolvedVersion, SchemaId, Subject -from karapace.utils import json_decode, json_encode, JSONDecodeError -from typing import Any, cast, Dict, List, NoReturn, Optional, Union +from karapace.utils import assert_never, json_decode, json_encode, JSONDecodeError +from typing import Any, cast, Dict, List, Optional, Union import hashlib import logging @@ -49,8 +49,10 @@ def parse_jsonschema_definition(schema_definition: str) -> Draft7Validator: SchemaError: If `schema_definition` is not a valid Draft7 schema. """ schema = json_decode(schema_definition) - Draft7Validator.check_schema(schema) - return Draft7Validator(schema) + # TODO: Annotations dictate Mapping[str, Any] here, but we have unit tests that + # use bool values and fail if we assert isinstance(_, dict). 
+ Draft7Validator.check_schema(schema) # type: ignore[arg-type] + return Draft7Validator(schema) # type: ignore[arg-type] def parse_protobuf_schema_definition( @@ -73,10 +75,6 @@ def parse_protobuf_schema_definition( return protobuf_schema -def _assert_never(no_return: NoReturn) -> NoReturn: - raise AssertionError(f"Expected to be unreachable {no_return}") - - class TypedSchema: def __init__( self, @@ -86,7 +84,7 @@ def __init__( schema: Optional[Union[Draft7Validator, AvroSchema, ProtobufSchema]] = None, references: Optional[List[Reference]] = None, dependencies: Optional[Dict[str, Dependency]] = None, - ): + ) -> None: """Schema with type information Args: @@ -135,7 +133,7 @@ def normalize_schema_str( raise e else: - _assert_never(schema_type) + assert_never(schema_type) return schema_str def __str__(self) -> str: diff --git a/mypy.ini b/mypy.ini index aa31f951c..f5d48f9f1 100644 --- a/mypy.ini +++ b/mypy.ini @@ -108,3 +108,18 @@ ignore_errors = True [mypy-karapace.kafka_rest_apis.admin] ignore_errors = True + +# Third-party libraries with no stubs available. Before adding libraries here, +# first consider: +# - Look for already existing stubs for the library, and add those as typing +# dependencies. +# - Write your own stubs. You don't need to write stubs for the whole library, +# only the parts that Karapace is interacting with. 
+[mypy-kafka.*] +ignore_missing_imports = True + +[mypy-isodate.*] +ignore_missing_imports = True + +[mypy-networkx.*] +ignore_missing_imports = True diff --git a/requirements/requirements-dev.txt b/requirements/requirements-dev.txt index e17c85f80..c0bdf48a9 100644 --- a/requirements/requirements-dev.txt +++ b/requirements/requirements-dev.txt @@ -14,7 +14,7 @@ aiosignal==1.3.1 # via # -r requirements.txt # aiohttp -anyio==3.6.2 +anyio==3.7.0 # via # -r requirements.txt # watchfiles @@ -55,6 +55,8 @@ configargparse==1.5.3 # via locust exceptiongroup==1.1.1 # via + # -r requirements.txt + # anyio # hypothesis # pytest execnet==1.9.0 @@ -85,7 +87,7 @@ geventhttpclient==2.0.9 # via locust greenlet==2.0.2 # via gevent -hypothesis==6.75.4 +hypothesis==6.75.7 # via -r requirements-dev.in idna==3.4 # via @@ -162,7 +164,7 @@ pytest-xdist[psutil]==3.3.1 # via -r requirements-dev.in python-dateutil==2.8.2 # via -r requirements.txt -pyzmq==25.0.2 +pyzmq==25.1.0 # via locust requests==2.31.0 # via diff --git a/requirements/requirements-typing.in b/requirements/requirements-typing.in new file mode 100644 index 000000000..77b6ff0b2 --- /dev/null +++ b/requirements/requirements-typing.in @@ -0,0 +1,6 @@ +-c requirements.txt +-c requirements-dev.txt + +mypy +types-jsonschema +sentry-sdk diff --git a/requirements/requirements-typing.txt b/requirements/requirements-typing.txt new file mode 100644 index 000000000..ea4657ecb --- /dev/null +++ b/requirements/requirements-typing.txt @@ -0,0 +1,30 @@ +# +# This file is autogenerated by pip-compile with Python 3.8 +# by the following command: +# +# 'make requirements' +# +certifi==2023.5.7 + # via + # -c requirements-dev.txt + # sentry-sdk +mypy==1.3.0 + # via -r requirements-typing.in +mypy-extensions==1.0.0 + # via mypy +sentry-sdk==1.24.0 + # via -r requirements-typing.in +tomli==2.0.1 + # via + # -c requirements-dev.txt + # mypy +types-jsonschema==4.17.0.8 + # via -r requirements-typing.in +typing-extensions==4.6.2 + # via + # -c 
requirements-dev.txt + # mypy +urllib3==1.26.16 + # via + # -c requirements-dev.txt + # sentry-sdk diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 1f83054b7..9bc49e18d 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -12,7 +12,7 @@ aiokafka==0.8.0 # via -r requirements.in aiosignal==1.3.1 # via aiohttp -anyio==3.6.2 +anyio==3.7.0 # via watchfiles async-timeout==4.0.2 # via @@ -28,6 +28,8 @@ charset-normalizer==3.1.0 # via aiohttp commonmark==0.9.1 # via rich +exceptiongroup==1.1.1 + # via anyio frozenlist==1.3.3 # via # aiohttp