Merge pull request #1005 from Aiven-Open/jjaakola-aiven-fastapi-drop-python39-support

Drop python39 support
nosahama authored Dec 10, 2024
2 parents 8024753 + 3941499 commit d9b5130
Showing 59 changed files with 210 additions and 230 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [ '3.9', '3.10', '3.11', '3.12' ]
python-version: [ '3.10', '3.11', '3.12' ]
env:
PYTEST_ADDOPTS: >-
--log-dir=/tmp/ci-logs
1 change: 1 addition & 0 deletions .gitignore
@@ -21,3 +21,4 @@ src/karapace/version.py
.python-version
.hypothesis/
.DS_Store
*.coverage.*
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -34,7 +34,7 @@ repos:
rev: v3.4.0
hooks:
- id: pyupgrade
args: [ --py39-plus ]
args: [ --py310-plus ]

- repo: https://github.com/pycqa/autoflake
rev: v2.1.1
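Raising the pyupgrade target to --py310-plus lets the hook rewrite 3.9-era typing spellings into the PEP 604 forms used throughout this PR. A minimal before/after sketch (the function and parameter names are illustrative, not from the repository):

    # Before: 3.9-compatible spelling
    from typing import Optional, Union

    def load(path: str, timeout: Optional[float] = None) -> Union[bytes, str]: ...

    # After: 3.10+ spelling once --py310-plus is in effect
    def load(path: str, timeout: float | None = None) -> bytes | str: ...
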
6 changes: 3 additions & 3 deletions GNUmakefile
@@ -3,7 +3,7 @@ SHELL := /usr/bin/env bash
VENV_DIR ?= $(CURDIR)/venv
PIP ?= pip3 --disable-pip-version-check --no-input --require-virtualenv
PYTHON ?= python3
PYTHON_VERSION ?= 3.9
PYTHON_VERSION ?= 3.10
DOCKER_COMPOSE ?= docker compose
KARAPACE-CLI ?= $(DOCKER_COMPOSE) -f container/compose.yml run --rm karapace-cli

@@ -108,8 +108,8 @@ pin-requirements:
.PHONY: start-karapace-docker-resources
start-karapace-docker-resources: export KARAPACE_VERSION ?= 4.1.1.dev44+gac20eeed.d20241205
start-karapace-docker-resources:
sudo touch .coverage.3.9 .coverage.3.10 .coverage.3.11 .coverage.3.12
sudo chown ${RUNNER_UID}:${RUNNER_GID} .coverage.3.9 .coverage.3.10 .coverage.3.11 .coverage.3.12
sudo touch .coverage.3.10 .coverage.3.11 .coverage.3.12
sudo chown ${RUNNER_UID}:${RUNNER_GID} .coverage.3.10 .coverage.3.11 .coverage.3.12
$(DOCKER_COMPOSE) -f container/compose.yml up -d --build --wait --detach

.PHONY: unit-tests-in-docker
2 changes: 1 addition & 1 deletion container/Dockerfile.dev
@@ -7,7 +7,7 @@ ARG RUNNER_GID

# Setup files and directories.
RUN mkdir /opt/karapace /opt/karapace/runtime /var/log/karapace /opt/karapace/coverage \
&& touch /opt/karapace/coverage/.coverage.3.9 /opt/karapace/coverage/.coverage.3.10 /opt/karapace/coverage/.coverage.3.11 /opt/karapace/coverage/.coverage.3.12 \
&& touch /opt/karapace/coverage/.coverage.3.10 /opt/karapace/coverage/.coverage.3.11 /opt/karapace/coverage/.coverage.3.12 \
&& chown --recursive "$RUNNER_UID:$RUNNER_GID" /opt/karapace /opt/karapace/coverage /var/log/karapace

# Create, activate, and enforce usage of virtualenv.
1 change: 0 additions & 1 deletion container/compose.yml
@@ -140,7 +140,6 @@ services:
- ../.pre-commit-config.yaml:/opt/karapace/.pre-commit-config.yaml
- ../.pylintrc:/opt/karapace/.pylintrc
- ../.coveragerc:/opt/karapace/.coveragerc
- ../.coverage.3.9:/opt/karapace/coverage/.coverage.3.9
- ../.coverage.3.10:/opt/karapace/coverage/.coverage.3.10
- ../.coverage.3.11:/opt/karapace/coverage/.coverage.3.11
- ../.coverage.3.12:/opt/karapace/coverage/.coverage.3.12
2 changes: 1 addition & 1 deletion mypy.ini
@@ -1,6 +1,6 @@
[mypy]
mypy_path = $MYPY_CONFIG_FILE_DIR/stubs
python_version = 3.9
python_version = 3.10
packages = karapace
show_error_codes = True
pretty = True
3 changes: 1 addition & 2 deletions pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "karapace"
requires-python = ">= 3.9"
requires-python = ">= 3.10"
dynamic = ["version"]
readme = "README.rst"
license = {file = "LICENSE"}
@@ -56,7 +56,6 @@ classifiers=[
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
13 changes: 5 additions & 8 deletions requirements/requirements-dev.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.9
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# make pin-requirements
@@ -119,9 +119,9 @@ httpcore==1.0.7
# via httpx
httptools==0.6.4
# via uvicorn
httpx==0.28.0
httpx==0.28.1
# via fastapi
hypothesis==6.122.1
hypothesis==6.122.3
# via karapace (/karapace/pyproject.toml)
idna==3.10
# via
@@ -131,9 +131,7 @@ idna==3.10
# requests
# yarl
importlib-metadata==8.5.0
# via
# flask
# opentelemetry-api
# via opentelemetry-api
iniconfig==2.0.0
# via pytest
isodate==0.7.2
@@ -166,7 +164,7 @@ multidict==6.1.0
# via
# aiohttp
# yarl
networkx==3.2.1
networkx==3.4.2
# via karapace (/karapace/pyproject.toml)
opentelemetry-api==1.28.2
# via
@@ -314,7 +312,6 @@ typing-extensions==4.12.2
# pydantic
# pydantic-core
# rich-toolkit
# starlette
# typer
# uvicorn
ujson==5.10.0
9 changes: 4 additions & 5 deletions requirements/requirements-typing.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.9
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# make pin-requirements
@@ -78,7 +78,7 @@ httpcore==1.0.7
# via httpx
httptools==0.6.4
# via uvicorn
httpx==0.28.0
httpx==0.28.1
# via fastapi
idna==3.10
# via
@@ -112,7 +112,7 @@ mypy==1.13.0
# via karapace (/karapace/pyproject.toml)
mypy-extensions==1.0.0
# via mypy
networkx==3.2.1
networkx==3.4.2
# via karapace (/karapace/pyproject.toml)
opentelemetry-api==1.28.2
# via
@@ -211,7 +211,7 @@ typer==0.15.1
# via fastapi-cli
types-cachetools==5.5.0.20240820
# via karapace (/karapace/pyproject.toml)
types-jsonschema==4.23.0.20240813
types-jsonschema==4.23.0.20241208
# via karapace (/karapace/pyproject.toml)
types-protobuf==3.20.4.6
# via karapace (/karapace/pyproject.toml)
@@ -227,7 +227,6 @@ typing-extensions==4.12.2
# pydantic
# pydantic-core
# rich-toolkit
# starlette
# typer
# uvicorn
ujson==5.10.0
7 changes: 3 additions & 4 deletions requirements/requirements.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.9
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# make pin-requirements
@@ -77,7 +77,7 @@ httpcore==1.0.7
# via httpx
httptools==0.6.4
# via uvicorn
httpx==0.28.0
httpx==0.28.1
# via fastapi
idna==3.10
# via
@@ -107,7 +107,7 @@ multidict==6.1.0
# via
# aiohttp
# yarl
networkx==3.2.1
networkx==3.4.2
# via karapace (/karapace/pyproject.toml)
opentelemetry-api==1.28.2
# via
@@ -210,7 +210,6 @@ typing-extensions==4.12.2
# pydantic
# pydantic-core
# rich-toolkit
# starlette
# typer
# uvicorn
ujson==5.10.0
2 changes: 1 addition & 1 deletion runtime.txt
@@ -1 +1 @@
python-3.9.20
python-3.10.16
3 changes: 1 addition & 2 deletions src/karapace/anonymize_schemas/anonymize_avro.py
@@ -4,8 +4,7 @@
Copyright (c) 2023 Aiven Ltd
See LICENSE for details
"""
from typing import Any, Union
from typing_extensions import TypeAlias
from typing import Any, TypeAlias, Union

import hashlib
import re
5 changes: 3 additions & 2 deletions src/karapace/avro_dataclasses/introspect.py
@@ -10,7 +10,8 @@
from dataclasses import Field, fields, is_dataclass, MISSING
from enum import Enum
from functools import lru_cache
from typing import Final, get_args, get_origin, TYPE_CHECKING, TypeVar, Union
from types import UnionType
from typing import Final, get_args, get_origin, TYPE_CHECKING, TypeVar

import datetime
import uuid
@@ -90,7 +91,7 @@ def _field_type(field: Field, type_: object) -> AvroType: # pylint: disable=too
origin = get_origin(type_)

# Handle union types.
if origin is Union:
if origin is UnionType:
return [_field_type(field, unit) for unit in get_args(type_)] # type: ignore[misc]

# Handle array types.
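The origin check above changes because PEP 604 unions report types.UnionType, not typing.Union, as their origin; annotations still spelled with typing.Optional or typing.Union keep the old origin, so this relies on the field annotations also being migrated to the | syntax (as other files in this PR do). A standalone illustration, assuming Python 3.10+:

    from types import UnionType
    from typing import Optional, Union, get_args, get_origin

    assert get_origin(bytes | None) is UnionType     # new-style union (PEP 604)
    assert get_origin(Optional[bytes]) is Union      # old spelling still reports typing.Union
    assert get_args(bytes | None) == (bytes, type(None))
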
9 changes: 5 additions & 4 deletions src/karapace/avro_dataclasses/models.py
@@ -5,12 +5,13 @@
from __future__ import annotations

from .introspect import record_schema
from collections.abc import Iterable, Mapping
from collections.abc import Callable, Iterable, Mapping
from dataclasses import asdict, fields, is_dataclass
from enum import Enum
from functools import lru_cache, partial
from typing import Callable, cast, IO, TYPE_CHECKING, TypeVar, Union
from typing_extensions import get_args, get_origin, Self
from types import UnionType
from typing import cast, get_args, get_origin, IO, TYPE_CHECKING, TypeVar
from typing_extensions import Self

import avro
import avro.io
@@ -100,7 +101,7 @@ def from_avro_value(type_: object) -> Parser | None:

# With the avro library we need to manually handle union types. We only support the
# special case of nullable types for now.
if origin is Union:
if origin is UnionType:
try:
a, b = get_args(type_)
except ValueError:
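As the comment notes, only nullable unions are handled: unpacking such a type yields exactly two members, one of which is NoneType. A rough sketch of that special case (the variable names are illustrative, not from the repository):

    from types import UnionType
    from typing import get_args, get_origin

    type_ = bytes | None
    if get_origin(type_) is UnionType:
        a, b = get_args(type_)               # (bytes, NoneType)
        inner = a if b is type(None) else b  # the non-None member to parse
        assert inner is bytes
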
4 changes: 2 additions & 2 deletions src/karapace/avro_dataclasses/schema.py
@@ -5,8 +5,8 @@
from __future__ import annotations

from collections.abc import Mapping
from typing import Literal
from typing_extensions import NotRequired, TypeAlias, TypedDict
from typing import Literal, TypeAlias
from typing_extensions import NotRequired, TypedDict

Primitive: TypeAlias = Literal["int", "long", "string", "null", "bytes", "boolean"]
LogicalType: TypeAlias = Literal["timestamp-millis", "uuid"]
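typing.TypeAlias has been part of the standard library since Python 3.10, so with 3.9 gone only NotRequired and TypedDict are still imported from typing_extensions in this file. The alias declarations themselves are unchanged, for example:

    from typing import Literal, TypeAlias

    Primitive: TypeAlias = Literal["int", "long", "string", "null", "bytes", "boolean"]
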
4 changes: 2 additions & 2 deletions src/karapace/backup/api.py
@@ -22,7 +22,7 @@
from .poll_timeout import PollTimeout
from .topic_configurations import ConfigSource, get_topic_configurations
from aiokafka.errors import KafkaError, TopicAlreadyExistsError
from collections.abc import Iterator, Mapping, Sized
from collections.abc import Callable, Iterator, Mapping, Sized
from concurrent.futures import Future
from confluent_kafka import Message, TopicPartition
from enum import Enum
@@ -42,7 +42,7 @@
from pathlib import Path
from rich.console import Console
from tenacity import retry, retry_if_exception_type, RetryCallState, stop_after_delay, wait_fixed
from typing import Callable, Literal, NewType, TypeVar
from typing import Literal, NewType, TypeVar

import contextlib
import datetime
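Callable moves from typing to collections.abc, whose generic form has been usable in annotations since Python 3.9 (PEP 585), so the typing import shrinks to the names that have no collections.abc equivalent. A small usage sketch with illustrative names:

    from collections.abc import Callable

    def apply(fn: Callable[[int], int], value: int) -> int:
        return fn(value)
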
5 changes: 2 additions & 3 deletions src/karapace/backup/backends/reader.py
@@ -4,12 +4,11 @@
"""
from __future__ import annotations

from collections.abc import Generator, Iterator, Mapping, Sequence
from collections.abc import Callable, Generator, Iterator, Mapping, Sequence
from karapace.dataclasses import default_dataclass
from karapace.typing import JsonData, JsonObject
from pathlib import Path
from typing import Callable, ClassVar, Final, IO, Optional, TypeVar, Union
from typing_extensions import TypeAlias
from typing import ClassVar, Final, IO, Optional, TypeAlias, TypeVar, Union

import abc

5 changes: 2 additions & 3 deletions src/karapace/backup/backends/v3/backend.py
@@ -9,7 +9,7 @@
from .readers import read_metadata, read_records
from .schema import ChecksumAlgorithm, DataFile, Header, Metadata, Record
from .writers import write_metadata, write_record
from collections.abc import Generator, Iterator, Mapping, Sequence
from collections.abc import Callable, Generator, Iterator, Mapping, Sequence
from confluent_kafka import Message
from dataclasses import dataclass
from karapace.backup.backends.reader import BaseBackupReader, Instruction, ProducerSend, RestoreTopic
@@ -19,8 +19,7 @@
from karapace.utils import assert_never
from karapace.version import __version__
from pathlib import Path
from typing import Callable, ContextManager, Final, IO, TypeVar
from typing_extensions import TypeAlias
from typing import ContextManager, Final, IO, TypeAlias, TypeVar

import datetime
import io
9 changes: 4 additions & 5 deletions src/karapace/backup/backends/v3/schema.py
@@ -8,7 +8,6 @@
from dataclasses import field
from karapace.avro_dataclasses.models import AvroModel
from karapace.dataclasses import default_dataclass
from typing import Optional

import datetime
import enum
@@ -53,7 +52,7 @@ class Metadata(AvroModel):
finished_at: datetime.datetime
record_count: int = field(metadata={"type": "int"})
topic_name: str
topic_id: Optional[uuid.UUID]
topic_id: uuid.UUID | None
partition_count: int = field(metadata={"type": "int"})
replication_factor: int = field(metadata={"type": "int"})
topic_configurations: Mapping[str, str]
@@ -77,8 +76,8 @@ class Header(AvroModel):

@default_dataclass
class Record(AvroModel):
key: Optional[bytes]
value: Optional[bytes]
key: bytes | None
value: bytes | None
headers: tuple[Header, ...]
offset: int = field(metadata={"type": "long"})
timestamp: int = field(metadata={"type": "long"})
@@ -87,7 +86,7 @@ class Record(AvroModel):
# of records. When restoring, we accumulate parsed records until
# encountering a checkpoint, verify the running checksum against it, and
# only then produce the verified records to Kafka.
checksum_checkpoint: Optional[bytes]
checksum_checkpoint: bytes | None

def __post_init__(self) -> None:
assert self.offset >= 0
3 changes: 1 addition & 2 deletions src/karapace/backup/backends/writer.py
@@ -8,8 +8,7 @@
from confluent_kafka import Message
from karapace.backup.safe_writer import bytes_writer, str_writer
from pathlib import Path
from typing import ContextManager, Generic, IO, Literal, TypeVar
from typing_extensions import TypeAlias
from typing import ContextManager, Generic, IO, Literal, TypeAlias, TypeVar

import abc
import contextlib
3 changes: 1 addition & 2 deletions src/karapace/backup/safe_writer.py
@@ -7,8 +7,7 @@
from collections.abc import Generator
from pathlib import Path
from tempfile import mkstemp, TemporaryDirectory
from typing import Final, IO, Literal
from typing_extensions import TypeAlias
from typing import Final, IO, Literal, TypeAlias

import contextlib
import os