Skip to content

Commit

Permalink
fix: Run mypy with dependencies
Browse files Browse the repository at this point in the history
Due to not having dependencies installed, we had hidden type errors.
Maintaining dependencies separately for pre-commit becomes unwieldy very
fast, and is not a good option in my experience. This commit changes so
that we instead run mypy in a GitHub Actions job where we can install
runtime dependencies along with typing-specific dependencies.

Another rejected alternative is to install from requirements files
within pre-commit. This also isn't a good alternative and is strongly
discouraged by the pre-commit author, because pre-commit has no notion
of external dependency files, and couldn't be expected to maintain
reproducible environments for mypy.

This commit also fixes the type errors revealed by running with
dependencies.
  • Loading branch information
aiven-anton committed May 31, 2023
1 parent d68db8a commit 95c4231
Show file tree
Hide file tree
Showing 14 changed files with 167 additions and 53 deletions.
14 changes: 13 additions & 1 deletion .github/workflows/lint.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,22 @@ jobs:
cache: pip
python-version: '3.11'
# required for pylint
- run: make version
- run: make karapace/version.py
- run: pip install pre-commit
- uses: actions/cache@v3
with:
path: ~/.cache/pre-commit
key: pre-commit-3|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }}
- run: pre-commit run --all-files --show-diff-on-failure

type-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
cache: pip
python-version: '3.11'
- run: pip install -r requirements/requirements.txt -r requirements/requirements-typing.txt
- run: make karapace/version.py
- run: mypy
6 changes: 0 additions & 6 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -60,12 +60,6 @@ repos:
hooks:
- id: flake8

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.2.0
hooks:
- id: mypy
pass_filenames: false

- repo: https://github.com/hadolint/hadolint
rev: v2.12.0
hooks:
Expand Down
9 changes: 6 additions & 3 deletions GNUmakefile
Original file line number Diff line number Diff line change
Expand Up @@ -46,10 +46,12 @@ venv/.deps: requirements/requirements-dev.txt requirements/requirements.txt | ve
$(PIP) check
touch '$(@)'


karapace/version.py:
$(PYTHON) version.py

.PHONY: version
version: karapace/version.py
karapace/version.py: version.py | venv/.make
$(PYTHON) '$(<)' '$(@)'
version: venv/.make | karapace/version.py

.PHONY: test
tests: unit-tests integration-tests
Expand Down Expand Up @@ -86,6 +88,7 @@ requirements:
pip install --upgrade pip setuptools pip-tools
cd requirements && pip-compile --upgrade --resolver=backtracking requirements.in
cd requirements && pip-compile --upgrade --resolver=backtracking requirements-dev.in
cd requirements && pip-compile --upgrade --resolver=backtracking requirements-typing.in

.PHONY: schema
schema: against := origin/main
Expand Down
4 changes: 3 additions & 1 deletion karapace/avro_dataclasses/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,9 @@ def parser_transformations(cls: type[DataclassInstance]) -> Mapping[str, Parser]
T = TypeVar("T", bound=DataclassInstance)


def from_avro_dict(cls: type[T], data: Mapping[str, object]) -> T:
def from_avro_dict(cls: type[T], data: object) -> T:
if not isinstance(data, Mapping):
raise TypeError("Expected mapping")
cls_transformations = parser_transformations(cls)
return cls(**{key: cls_transformations.get(key, noop)(value) for key, value in data.items()})

Expand Down
10 changes: 9 additions & 1 deletion karapace/compatibility/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,9 @@ def check_protobuf_compatibility(reader: ProtobufSchema, writer: ProtobufSchema)


def check_compatibility(
old_schema: ParsedTypedSchema, new_schema: ValidatedTypedSchema, compatibility_mode: CompatibilityModes
old_schema: ParsedTypedSchema,
new_schema: ValidatedTypedSchema,
compatibility_mode: CompatibilityModes,
) -> SchemaCompatibilityResult:
"""Check that `old_schema` and `new_schema` are compatible under `compatibility_mode`."""
if compatibility_mode is CompatibilityModes.NONE:
Expand All @@ -84,6 +86,8 @@ def check_compatibility(
)

if old_schema.schema_type is SchemaType.AVRO:
assert isinstance(old_schema.schema, AvroSchema)
assert isinstance(new_schema.schema, AvroSchema)
if compatibility_mode in {CompatibilityModes.BACKWARD, CompatibilityModes.BACKWARD_TRANSITIVE}:
result = check_avro_compatibility(
reader_schema=new_schema.schema,
Expand All @@ -110,6 +114,8 @@ def check_compatibility(
)

elif old_schema.schema_type is SchemaType.JSONSCHEMA:
assert isinstance(old_schema.schema, Draft7Validator)
assert isinstance(new_schema.schema, Draft7Validator)
if compatibility_mode in {CompatibilityModes.BACKWARD, CompatibilityModes.BACKWARD_TRANSITIVE}:
result = check_jsonschema_compatibility(
reader=new_schema.schema,
Expand All @@ -136,6 +142,8 @@ def check_compatibility(
)

elif old_schema.schema_type is SchemaType.PROTOBUF:
assert isinstance(old_schema.schema, ProtobufSchema)
assert isinstance(new_schema.schema, ProtobufSchema)
if compatibility_mode in {CompatibilityModes.BACKWARD, CompatibilityModes.BACKWARD_TRANSITIVE}:
result = check_protobuf_compatibility(
reader=new_schema.schema,
Expand Down
90 changes: 65 additions & 25 deletions karapace/compatibility/jsonschema/checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
Copyright (c) 2023 Aiven Ltd
See LICENSE for details
"""
from __future__ import annotations

from avro.compatibility import merge, SchemaCompatibilityResult, SchemaCompatibilityType, SchemaIncompatibilityType
from dataclasses import dataclass
from itertools import product
Expand Down Expand Up @@ -32,7 +34,7 @@
normalize_schema,
schema_from_partially_open_content_model,
)
from typing import Any, List, Optional
from typing import Any

import networkx as nx

Expand Down Expand Up @@ -133,20 +135,26 @@
def type_mismatch(
reader_type: JSONSCHEMA_TYPES,
writer_type: JSONSCHEMA_TYPES,
location: List[str],
location: list[str],
) -> SchemaCompatibilityResult:
locations = "/".join(location)
if len(location) > 1: # Remove ROOT_REFERENCE_TOKEN
locations = locations[1:]
return SchemaCompatibilityResult(
compatibility=SchemaCompatibilityType.incompatible,
incompatibilities=[Incompatibility.type_changed],
# TODO: https://github.com/aiven/karapace/issues/633
incompatibilities=[Incompatibility.type_changed], # type: ignore[list-item]
locations={locations},
messages={f"type {reader_type} is not compatible with type {writer_type}"},
)


def count_uniquely_compatible_schemas(reader_type: Instance, reader_schema, writer_schema, location: List[str]) -> int:
def count_uniquely_compatible_schemas(
reader_type: Instance,
reader_schema,
writer_schema,
location: list[str],
) -> int:
# allOf/anyOf/oneOf subschemas do not enforce order, as a consequence the
# new schema may change the order of the entries without breaking
# compatibility.
Expand Down Expand Up @@ -185,24 +193,27 @@ class Node:


def incompatible_schema(
incompat_type: SchemaIncompatibilityType, message: str, location: List[str]
incompat_type: Incompatibility | SchemaIncompatibilityType,
message: str,
location: list[str],
) -> SchemaCompatibilityResult:
locations = "/".join(location)
if len(location) > 1: # Remove ROOT_REFERENCE_TOKEN
locations = locations[1:]
return SchemaCompatibilityResult(
compatibility=SchemaCompatibilityType.incompatible,
incompatibilities=[incompat_type],
# TODO: https://github.com/aiven/karapace/issues/633
incompatibilities=[incompat_type], # type: ignore[list-item]
locations={locations},
messages={message},
)


def is_incompatible(result: "SchemaCompatibilityResult") -> bool:
def is_incompatible(result: SchemaCompatibilityResult) -> bool:
return result.compatibility is SchemaCompatibilityType.incompatible


def is_compatible(result: "SchemaCompatibilityResult") -> bool:
def is_compatible(result: SchemaCompatibilityResult) -> bool:
return result.compatibility is SchemaCompatibilityType.compatible


Expand All @@ -224,7 +235,7 @@ def check_simple_subschema(
simplified_writer_schema: Any,
original_reader_type: JSONSCHEMA_TYPES,
original_writer_type: JSONSCHEMA_TYPES,
location: List[str],
location: list[str],
) -> SchemaCompatibilityResult:
rec_result = compatibility_rec(simplified_reader_schema, simplified_writer_schema, location)
if is_compatible(rec_result):
Expand All @@ -233,7 +244,9 @@ def check_simple_subschema(


def compatibility_rec(
reader_schema: Optional[Any], writer_schema: Optional[Any], location: List[str]
reader_schema: Any | None,
writer_schema: Any | None,
location: list[str],
) -> SchemaCompatibilityResult:
if introduced_constraint(reader_schema, writer_schema):
return incompatible_schema(
Expand Down Expand Up @@ -324,7 +337,10 @@ def compatibility_rec(


def check_assertion_compatibility(
reader_schema, writer_schema, assertion_check: AssertionCheck, location: List[str]
reader_schema,
writer_schema,
assertion_check: AssertionCheck,
location: list[str],
) -> SchemaCompatibilityResult:
result = SchemaCompatibilityResult(SchemaCompatibilityType.compatible)

Expand Down Expand Up @@ -355,7 +371,11 @@ def check_assertion_compatibility(
return result


def compatibility_enum(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult:
def compatibility_enum(
reader_schema,
writer_schema,
location: list[str],
) -> SchemaCompatibilityResult:
# https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.1.2
assert Keyword.ENUM.value in reader_schema, "types should have been previously checked"
assert Keyword.ENUM.value in writer_schema, "types should have been previously checked"
Expand All @@ -372,7 +392,11 @@ def compatibility_enum(reader_schema, writer_schema, location: List[str]) -> Sch
return SchemaCompatibilityResult(SchemaCompatibilityType.compatible)


def compatibility_numerical(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult:
def compatibility_numerical(
reader_schema,
writer_schema,
location: list[str],
) -> SchemaCompatibilityResult:
# https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2
result = SchemaCompatibilityResult(SchemaCompatibilityType.compatible)

Expand All @@ -384,7 +408,7 @@ def compatibility_numerical(reader_schema, writer_schema, location: List[str]) -
assert reader_is_number, "types should have been previously checked"
assert writer_is_number, "types should have been previously checked"

checks: List[AssertionCheck] = [MAXIMUM_CHECK, MINIMUM_CHECK, EXCLUSIVE_MAXIMUM_CHECK, EXCLUSIVE_MINIMUM_CHECK]
checks: list[AssertionCheck] = [MAXIMUM_CHECK, MINIMUM_CHECK, EXCLUSIVE_MAXIMUM_CHECK, EXCLUSIVE_MINIMUM_CHECK]
for assertion_check in checks:
check_result = check_assertion_compatibility(
reader_schema,
Expand Down Expand Up @@ -430,14 +454,14 @@ def compatibility_numerical(reader_schema, writer_schema, location: List[str]) -
return result


def compatibility_string(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult:
def compatibility_string(reader_schema, writer_schema, location: list[str]) -> SchemaCompatibilityResult:
# https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.3
result = SchemaCompatibilityResult(SchemaCompatibilityType.compatible)

assert get_type_of(reader_schema) == Instance.STRING, "types should have been previously checked"
assert get_type_of(writer_schema) == Instance.STRING, "types should have been previously checked"

checks: List[AssertionCheck] = [MAX_LENGTH_CHECK, MIN_LENGTH_CHECK, PATTERN_CHECK]
checks: list[AssertionCheck] = [MAX_LENGTH_CHECK, MIN_LENGTH_CHECK, PATTERN_CHECK]
for assertion_check in checks:
check_result = check_assertion_compatibility(
reader_schema,
Expand All @@ -449,7 +473,11 @@ def compatibility_string(reader_schema, writer_schema, location: List[str]) -> S
return result


def compatibility_array(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult:
def compatibility_array(
reader_schema,
writer_schema,
location: list[str],
) -> SchemaCompatibilityResult:
# https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.4
reader_type = get_type_of(reader_schema)
writer_type = get_type_of(writer_schema)
Expand Down Expand Up @@ -554,7 +582,7 @@ def compatibility_array(reader_schema, writer_schema, location: List[str]) -> Sc
rec_result = compatibility_rec(reader_additional_items, writer_additional_items, location_additional_items)
result = merge(result, rec_result)

checks: List[AssertionCheck] = [MAX_ITEMS_CHECK, MIN_ITEMS_CHECK]
checks: list[AssertionCheck] = [MAX_ITEMS_CHECK, MIN_ITEMS_CHECK]
for assertion_check in checks:
check_result = check_assertion_compatibility(
reader_schema,
Expand Down Expand Up @@ -582,18 +610,26 @@ def compatibility_array(reader_schema, writer_schema, location: List[str]) -> Sc


def add_incompatibility(
result: SchemaCompatibilityResult, incompat_type: SchemaIncompatibilityType, message: str, location: List[str]
result: SchemaCompatibilityResult,
incompat_type: Incompatibility,
message: str,
location: list[str],
) -> None:
"""Add an incompatibility, this will modify the object in-place."""
formatted_location = "/".join(location[1:] if len(location) > 1 else location)

result.compatibility = SchemaCompatibilityType.incompatible
result.incompatibilities.append(incompat_type)
# TODO: https://github.com/aiven/karapace/issues/633
result.incompatibilities.append(incompat_type) # type: ignore[arg-type]
result.messages.add(message)
result.locations.add(formatted_location)


def compatibility_object(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult:
def compatibility_object(
reader_schema,
writer_schema,
location: list[str],
) -> SchemaCompatibilityResult:
# https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.5
result = SchemaCompatibilityResult(SchemaCompatibilityType.compatible)

Expand Down Expand Up @@ -760,7 +796,7 @@ def compatibility_object(reader_schema, writer_schema, location: List[str]) -> S
rec_result = compatibility_rec(reader_dependent_schema, writer_dependent_schema, location)
result = merge(result, rec_result)

checks: List[AssertionCheck] = [MAX_PROPERTIES_CHECK, MIN_PROPERTIES_CHECK]
checks: list[AssertionCheck] = [MAX_PROPERTIES_CHECK, MIN_PROPERTIES_CHECK]
for assertion_check in checks:
check_result = check_assertion_compatibility(
reader_schema,
Expand Down Expand Up @@ -791,13 +827,17 @@ def compatibility_object(reader_schema, writer_schema, location: List[str]) -> S
return result


def compatibility_subschemas(reader_schema, writer_schema, location: List[str]) -> SchemaCompatibilityResult:
def compatibility_subschemas(
reader_schema,
writer_schema,
location: list[str],
) -> SchemaCompatibilityResult:
# https://json-schema.org/draft/2020-12/json-schema-core.html#rfc.section.10
# pylint: disable=too-many-return-statements
reader_subschemas_and_type = maybe_get_subschemas_and_type(reader_schema)
writer_subschemas_and_type = maybe_get_subschemas_and_type(writer_schema)

reader_subschemas: Optional[List[Any]]
reader_subschemas: list[Any] | None
reader_type: JSONSCHEMA_TYPES
if reader_subschemas_and_type is not None:
reader_subschemas = reader_subschemas_and_type[0]
Expand All @@ -808,7 +848,7 @@ def compatibility_subschemas(reader_schema, writer_schema, location: List[str])
reader_type = get_type_of(reader_schema)
reader_has_subschema = False

writer_subschemas: Optional[List[Any]]
writer_subschemas: list[Any] | None
writer_type: JSONSCHEMA_TYPES
if writer_subschemas_and_type is not None:
writer_subschemas = writer_subschemas_and_type[0]
Expand Down
3 changes: 2 additions & 1 deletion karapace/compatibility/protobuf/checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@ def check_protobuf_schema_compatibility(reader: ProtobufSchema, writer: Protobuf

return SchemaCompatibilityResult(
compatibility=SchemaCompatibilityType.incompatible,
incompatibilities=list(incompatibilities),
# TODO: https://github.com/aiven/karapace/issues/633
incompatibilities=incompatibilities, # type: ignore[arg-type]
locations=set(locations),
messages=set(messages),
)
3 changes: 2 additions & 1 deletion karapace/karapace.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

from __future__ import annotations

from aiohttp.web_request import Request
from functools import partial
from http import HTTPStatus
from karapace.config import Config
Expand Down Expand Up @@ -77,7 +78,7 @@ def not_found(message: str, sub_code: int, content_type: str) -> NoReturn:
async def root_get(self) -> NoReturn:
self.r({}, "application/json")

async def health(self, _request: HTTPRequest) -> aiohttp.web.Response:
async def health(self, _request: Request) -> aiohttp.web.Response:
resp: JsonObject = {"process_uptime_sec": int(time.monotonic() - self._process_start_time)}
for hook in self.health_hooks:
resp.update(await hook())
Expand Down
Loading

0 comments on commit 95c4231

Please sign in to comment.