From a5fdc4f68cb277bcdbf0f85d4be1b4869159c74c Mon Sep 17 00:00:00 2001 From: Joey Grable Date: Mon, 9 Oct 2023 13:07:01 -0700 Subject: [PATCH] removed all HTTPException try/catch blocks and replaced with FastAPI exception handler, moved auth module into core/security module, expanded initial API endpoints/models/schemas with no security layers implemented yet, added custom CSRF module, updated project dependencies --- .github/workflows/ci.yml | 2 +- app/api/deps/get_db_items.py | 184 +++---- app/api/deps/get_query.py | 3 +- app/api/deps/permissions.py | 4 +- app/api/exceptions.py | 117 ----- app/api/exceptions/__init__.py | 353 ++++++++++++++ app/api/{ => exceptions}/errors.py | 12 +- app/api/exceptions/exceptions.py | 113 +++++ app/api/middleware/__init__.py | 58 +++ app/api/middleware/rate_limiter.py | 16 + app/api/middleware/utilities.py | 13 + app/api/openapi.py | 2 +- app/api/v1/endpoints/clients.py | 65 ++- app/api/v1/endpoints/notes.py | 61 +-- app/api/v1/endpoints/public.py | 44 +- app/api/v1/endpoints/tasks.py | 2 +- app/api/v1/endpoints/users.py | 43 +- app/api/v1/endpoints/web_keywordcorpus.py | 76 ++- app/api/v1/endpoints/web_pages.py | 105 ++-- app/api/v1/endpoints/web_pagespeedinsights.py | 76 ++- app/api/v1/endpoints/web_sitemaps.py | 52 +- app/api/v1/endpoints/websites.py | 84 ++-- app/core/__init__.py | 1 + app/core/auth/permissions.py | 0 app/core/celery.py | 1 + app/core/config.py | 20 +- app/core/logger/__init__.py | 4 + app/core/logger/console.py | 11 +- app/core/security/__init__.py | 28 ++ app/core/{ => security}/auth/__init__.py | 14 + app/core/{ => security}/auth/auth0.py | 0 app/core/security/csrf/__init__.py | 23 + app/core/security/csrf/core.py | 195 ++++++++ app/core/security/csrf/csrf_config.py | 56 +++ app/core/security/csrf/exceptions.py | 29 ++ app/core/security/csrf/load_config.py | 58 +++ app/core/security/csrf/middleware.py | 81 ++++ app/core/{auth => security}/encryption.py | 0 app/db/commands.py | 2 +- app/main.py | 40 +- 
app/models/ipaddress.py | 8 +- app/schemas/__init__.py | 2 + app/schemas/ipaddress.py | 4 +- app/schemas/security.py | 10 + app/schemas/website_pagespeedinsights.py | 8 +- app/worker.py | 3 +- poetry.lock | 449 +++++++++--------- pyproject.toml | 8 +- requirements.txt | 2 +- tests/api/deps/test_deps.py | 7 - tests/api/deps/test_get_clients.py | 13 +- tests/api/deps/test_get_current_user.py | 4 +- .../test_get_website_page_speed_insights.py | 6 +- tests/api/deps/test_get_website_pages.py | 10 +- tests/api/deps/test_get_website_sitemap.py | 10 +- tests/api/deps/test_get_websites.py | 10 +- tests/api/v1/clients/test_clients_create.py | 2 +- tests/api/v1/clients/test_clients_delete.py | 2 +- tests/api/v1/clients/test_clients_read.py | 2 +- tests/api/v1/clients/test_clients_update.py | 2 +- tests/api/v1/websites/test_websites_create.py | 2 +- tests/api/v1/websites/test_websites_delete.py | 2 +- tests/api/v1/websites/test_websites_read.py | 2 +- tests/api/v1/websites/test_websites_update.py | 2 +- .../test_websites_pages_create.py | 6 +- .../test_websites_pages_delete.py | 2 +- .../test_websites_pages_read.py | 2 +- .../test_websites_pagespeedinsights_create.py | 2 +- .../test_websites_pagespeedinsights_delete.py | 2 +- .../test_websites_pagespeedinsights_read.py | 2 +- .../test_websites_sitemaps_create.py | 6 +- .../test_websites_sitemaps_delete.py | 2 +- .../test_websites_sitemaps_read.py | 2 +- 73 files changed, 1726 insertions(+), 918 deletions(-) delete mode 100644 app/api/exceptions.py create mode 100644 app/api/exceptions/__init__.py rename app/api/{ => exceptions}/errors.py (78%) create mode 100644 app/api/exceptions/exceptions.py create mode 100644 app/api/middleware/__init__.py create mode 100644 app/api/middleware/rate_limiter.py create mode 100644 app/api/middleware/utilities.py delete mode 100644 app/core/auth/permissions.py create mode 100644 app/core/security/__init__.py rename app/core/{ => security}/auth/__init__.py (61%) rename app/core/{ => 
security}/auth/auth0.py (100%) create mode 100644 app/core/security/csrf/__init__.py create mode 100644 app/core/security/csrf/core.py create mode 100644 app/core/security/csrf/csrf_config.py create mode 100644 app/core/security/csrf/exceptions.py create mode 100644 app/core/security/csrf/load_config.py create mode 100644 app/core/security/csrf/middleware.py rename app/core/{auth => security}/encryption.py (100%) create mode 100644 app/schemas/security.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7b2bc7b6..362cf681 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,7 +13,7 @@ jobs: fail-fast: true matrix: os: ["ubuntu-latest"] - python-version: ["3.11", "3.10"] + python-version: ["3.11"] redis-version: ["7"] env: AUTH0_DOMAIN: ${{ secrets.AUTH0_DOMAIN }} diff --git a/app/api/deps/get_db_items.py b/app/api/deps/get_db_items.py index b2878e4d..e73d114e 100644 --- a/app/api/deps/get_db_items.py +++ b/app/api/deps/get_db_items.py @@ -1,14 +1,12 @@ from typing import Annotated, Any from uuid import UUID -from fastapi import Depends, HTTPException, status +from fastapi import Depends from app.api.deps.get_db import AsyncDatabaseSession -from app.api.errors import ErrorCode from app.api.exceptions import ( ClientNotExists, EntityIdNotProvided, - InvalidID, NoteNotExists, UserNotExists, WebsiteMapNotExists, @@ -45,22 +43,14 @@ async def get_user_or_404( user_id: Any | None = None, ) -> User | None: """Parses uuid/int and fetches user by id.""" - try: - if user_id is None: - raise EntityIdNotProvided() - parsed_id: UUID = parse_id(user_id) - user_repo: UserRepository = UserRepository(session=db) - user: User | None = await user_repo.read(entry_id=parsed_id) - if user is None: - raise UserNotExists() - return user - except (UserNotExists, InvalidID): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.USER_NOT_FOUND, - ) - except EntityIdNotProvided: - return None + if user_id is None: + 
raise EntityIdNotProvided() + parsed_id: UUID = parse_id(user_id) + user_repo: UserRepository = UserRepository(session=db) + user: User | None = await user_repo.read(entry_id=parsed_id) + if user is None: + raise UserNotExists() + return user FetchUserOr404 = Annotated[User, Depends(get_user_or_404)] @@ -71,22 +61,14 @@ async def get_client_or_404( client_id: Any | None = None, ) -> Client | None: """Parses uuid/int and fetches client by id.""" - try: - if client_id is None: - raise EntityIdNotProvided() - parsed_id: UUID = parse_id(client_id) - client_repo: ClientRepository = ClientRepository(session=db) - client: Client | None = await client_repo.read(entry_id=parsed_id) - if client is None: - raise ClientNotExists() - return client - except (ClientNotExists, InvalidID): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.CLIENT_NOT_FOUND, - ) - except EntityIdNotProvided: - return None + if client_id is None: + raise EntityIdNotProvided() + parsed_id: UUID = parse_id(client_id) + client_repo: ClientRepository = ClientRepository(session=db) + client: Client | None = await client_repo.read(entry_id=parsed_id) + if client is None: + raise ClientNotExists() + return client FetchClientOr404 = Annotated[Client, Depends(get_client_or_404)] @@ -97,22 +79,14 @@ async def get_note_or_404( note_id: Any | None = None, ) -> Note | None: """Parses uuid/int and fetches note by id.""" - try: - if note_id is None: - raise EntityIdNotProvided() - parsed_id: UUID = parse_id(note_id) - note_repo: NoteRepository = NoteRepository(session=db) - note: Note | None = await note_repo.read(entry_id=parsed_id) - if note is None: - raise NoteNotExists() - return note - except (NoteNotExists, InvalidID): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.NOTE_NOT_FOUND, - ) - except EntityIdNotProvided: - return None + if note_id is None: + raise EntityIdNotProvided() + parsed_id: UUID = parse_id(note_id) + note_repo: 
NoteRepository = NoteRepository(session=db) + note: Note | None = await note_repo.read(entry_id=parsed_id) + if note is None: + raise NoteNotExists() + return note FetchNoteOr404 = Annotated[Note, Depends(get_note_or_404)] @@ -123,18 +97,12 @@ async def get_website_or_404( website_id: Any, ) -> Website | None: """Parses uuid/int and fetches website by id.""" - try: - parsed_id: UUID = parse_id(website_id) - website_repo: WebsiteRepository = WebsiteRepository(session=db) - website: Website | None = await website_repo.read(entry_id=parsed_id) - if website is None: - raise WebsiteNotExists() - return website - except (WebsiteNotExists, InvalidID): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.WEBSITE_NOT_FOUND, - ) + parsed_id: UUID = parse_id(website_id) + website_repo: WebsiteRepository = WebsiteRepository(session=db) + website: Website | None = await website_repo.read(entry_id=parsed_id) + if website is None: + raise WebsiteNotExists() + return website FetchWebsiteOr404 = Annotated[Website, Depends(get_website_or_404)] @@ -145,18 +113,12 @@ async def get_website_map_or_404( sitemap_id: Any, ) -> WebsiteMap: """Parses uuid/int and fetches website map by id.""" - try: - parsed_id: UUID = parse_id(sitemap_id) - sitemap_repo: WebsiteMapRepository = WebsiteMapRepository(session=db) - sitemap: WebsiteMap | None = await sitemap_repo.read(entry_id=parsed_id) - if sitemap is None: - raise WebsiteMapNotExists() - return sitemap - except (WebsiteMapNotExists, InvalidID): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.WEBSITE_MAP_NOT_FOUND, - ) + parsed_id: UUID = parse_id(sitemap_id) + sitemap_repo: WebsiteMapRepository = WebsiteMapRepository(session=db) + sitemap: WebsiteMap | None = await sitemap_repo.read(entry_id=parsed_id) + if sitemap is None: + raise WebsiteMapNotExists() + return sitemap FetchSitemapOr404 = Annotated[WebsiteMap, Depends(get_website_map_or_404)] @@ -167,20 +129,12 @@ async def 
get_website_page_or_404( page_id: Any, ) -> WebsitePage | None: """Parses uuid/int and fetches website page by id.""" - try: - parsed_id: UUID = parse_id(page_id) - website_page_repo: WebsitePageRepository = WebsitePageRepository(session=db) - website_page: WebsitePage | None = await website_page_repo.read( - entry_id=parsed_id - ) - if website_page is None: - raise WebsitePageNotExists() - return website_page - except (WebsitePageNotExists, InvalidID): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.WEBSITE_PAGE_NOT_FOUND, - ) + parsed_id: UUID = parse_id(page_id) + website_page_repo: WebsitePageRepository = WebsitePageRepository(session=db) + website_page: WebsitePage | None = await website_page_repo.read(entry_id=parsed_id) + if website_page is None: + raise WebsitePageNotExists() + return website_page FetchWebPageOr404 = Annotated[WebsitePage, Depends(get_website_page_or_404)] @@ -191,22 +145,16 @@ async def get_website_page_psi_or_404( psi_id: Any, ) -> WebsitePageSpeedInsights | None: """Parses uuid/int and fetches website page speed insights by id.""" - try: - parsed_id: UUID = parse_id(psi_id) - website_page_psi_repo: WebsitePageSpeedInsightsRepository = ( - WebsitePageSpeedInsightsRepository(session=db) - ) - website_page_speed_insights: WebsitePageSpeedInsights | None = ( - await website_page_psi_repo.read(parsed_id) - ) - if website_page_speed_insights is None: - raise WebsitePageSpeedInsightsNotExists() - return website_page_speed_insights - except (WebsitePageSpeedInsightsNotExists, InvalidID): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.WEBSITE_PAGE_SPEED_INSIGHTS_NOT_FOUND, - ) + parsed_id: UUID = parse_id(psi_id) + website_page_psi_repo: WebsitePageSpeedInsightsRepository = ( + WebsitePageSpeedInsightsRepository(session=db) + ) + website_page_speed_insights: WebsitePageSpeedInsights | None = ( + await website_page_psi_repo.read(parsed_id) + ) + if website_page_speed_insights 
is None: + raise WebsitePageSpeedInsightsNotExists() + return website_page_speed_insights FetchWebPageSpeedInsightOr404 = Annotated[ @@ -219,22 +167,16 @@ async def get_website_page_kwc_or_404( kwc_id: Any, ) -> WebsiteKeywordCorpus | None: """Parses uuid/int and fetches website keyword corpus by id.""" - try: - parsed_id: UUID = parse_id(kwc_id) - website_page_kwc_repo: WebsiteKeywordCorpusRepository = ( - WebsiteKeywordCorpusRepository(session=db) - ) - website_keyword_corpus: WebsiteKeywordCorpus | None = ( - await website_page_kwc_repo.read(parsed_id) - ) - if website_keyword_corpus is None: - raise WebsitePageKeywordCorpusNotExists() - return website_keyword_corpus - except (WebsitePageKeywordCorpusNotExists, InvalidID): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.WEBSITE_PAGE_SPEED_INSIGHTS_NOT_FOUND, - ) + parsed_id: UUID = parse_id(kwc_id) + website_page_kwc_repo: WebsiteKeywordCorpusRepository = ( + WebsiteKeywordCorpusRepository(session=db) + ) + website_keyword_corpus: WebsiteKeywordCorpus | None = ( + await website_page_kwc_repo.read(parsed_id) + ) + if website_keyword_corpus is None: + raise WebsitePageKeywordCorpusNotExists() + return website_keyword_corpus FetchWebsiteKeywordCorpusOr404 = Annotated[ diff --git a/app/api/deps/get_query.py b/app/api/deps/get_query.py index fd8e31bb..fbc0cef0 100644 --- a/app/api/deps/get_query.py +++ b/app/api/deps/get_query.py @@ -106,8 +106,7 @@ def __init__(self, strategy: List[str] | None = None): class CommonQueryParams(PageQueryParams): - def __init__(self, page: int = Query(1), speak: str | None = Query(None)): - PageQueryParams.__init__(self, page) + def __init__(self, speak: str | None = Query(None)): self.speak = speak diff --git a/app/api/deps/permissions.py b/app/api/deps/permissions.py index e796f059..34c5171d 100644 --- a/app/api/deps/permissions.py +++ b/app/api/deps/permissions.py @@ -4,8 +4,8 @@ from fastapi_permissions import Authenticated # type: ignore # noqa: 
E501 from fastapi_permissions import Everyone, configure_permissions, has_permission -from app.api.errors import ErrorCode -from app.core.auth import Auth0User, auth +from app.api.exceptions import ErrorCode +from app.core.security import Auth0User, auth def get_current_user(user: Auth0User | None = Security(auth.get_user)) -> Auth0User: diff --git a/app/api/exceptions.py b/app/api/exceptions.py deleted file mode 100644 index 77e1a1db..00000000 --- a/app/api/exceptions.py +++ /dev/null @@ -1,117 +0,0 @@ -# Generics - - -class EntityException(Exception): - pass - - -class EntityAlreadyExists(EntityException): - pass - - -class EntityNotExists(EntityException): - pass - - -class InvalidID(EntityException): - pass - - -class EntityIdNotProvided(EntityException): - pass - - -class EntityValueRequired(EntityException): - pass - - -# Users -class UserException(EntityException): - pass - - -class UserAlreadyExists(UserException): - pass - - -class UserNotExists(UserException): - pass - - -# Clients -class ClientsException(EntityException): - pass - - -class ClientAlreadyExists(ClientsException): - pass - - -class ClientNotExists(ClientsException): - pass - - -# Notes -class NoteException(EntityException): - pass - - -class NoteAlreadyExists(NoteException): - pass - - -class NoteNotExists(NoteException): - pass - - -# Websites -class WebsitesException(EntityException): - pass - - -class WebsiteAlreadyExists(WebsitesException): - pass - - -class WebsiteNotExists(WebsitesException): - pass - - -class WebsiteDomainInvalid(WebsitesException): - pass - - -# Website Sitemaps -class WebsiteMapAlreadyExists(WebsitesException): - pass - - -class WebsiteMapNotExists(WebsitesException): - pass - - -# Website Pages -class WebsitePageAlreadyExists(WebsitesException): - pass - - -class WebsitePageNotExists(WebsitesException): - pass - - -# Website Page Speed Insights -class WebsitePageSpeedInsightsAlreadyExists(WebsitesException): - pass - - -class 
WebsitePageSpeedInsightsNotExists(WebsitesException): - pass - - -# Website Keyword Corpus -class WebsiteKeywordCorpusAlreadyExists(WebsitesException): - pass - - -class WebsitePageKeywordCorpusNotExists(WebsitesException): - pass diff --git a/app/api/exceptions/__init__.py b/app/api/exceptions/__init__.py new file mode 100644 index 00000000..ff8f5817 --- /dev/null +++ b/app/api/exceptions/__init__.py @@ -0,0 +1,353 @@ +from functools import lru_cache + +from asgi_correlation_id.context import correlation_id +from fastapi import FastAPI, HTTPException, Request, Response, status +from fastapi.exception_handlers import http_exception_handler + +from app.core.security import CsrfProtectError + +from .errors import ErrorCode, ErrorCodeReasonModel, ErrorModel +from .exceptions import ( + ApiException, + ClientAlreadyExists, + ClientNotExists, + EntityIdNotProvided, + InvalidID, + NoteAlreadyExists, + NoteNotExists, + UserAlreadyExists, + UserNotExists, + WebsiteAlreadyExists, + WebsiteDomainInvalid, + WebsiteMapAlreadyExists, + WebsiteMapNotExists, + WebsiteNotExists, + WebsitePageAlreadyExists, + WebsitePageKeywordCorpusAlreadyExists, + WebsitePageKeywordCorpusNotExists, + WebsitePageNotExists, + WebsitePageSpeedInsightsAlreadyExists, + WebsitePageSpeedInsightsNotExists, +) + + +@lru_cache() +def get_global_headers() -> dict[str, str]: + return { + "x-request-id": correlation_id.get() or "", + "Access-Control-Expose-Headers": "x-request-id", + } + + +def configure_exceptions(app: FastAPI) -> None: + @app.exception_handler(Exception) + async def unhandled_exception_handler(request: Request, exc: Exception) -> Response: + return await http_exception_handler( + request, + HTTPException( + status.HTTP_500_INTERNAL_SERVER_ERROR, + "Internal server error", + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(CsrfProtectError) + async def csrf_protect_exception_handler( + request: Request, exc: CsrfProtectError + ) -> Response: # noqa: E501 + return await 
http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(ApiException) + async def api_exception_exception_handler( + request: Request, exc: ApiException + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(InvalidID) + async def invalid_id_exception_handler( + request: Request, exc: InvalidID + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(EntityIdNotProvided) + async def entity_id_not_provided_exception_handler( + request: Request, exc: EntityIdNotProvided + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(UserAlreadyExists) + async def user_already_exists_exception_handler( + request: Request, exc: UserAlreadyExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(UserNotExists) + async def user_not_exists_exception_handler( + request: Request, exc: UserNotExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(ClientAlreadyExists) + async def client_already_exists_exception_handler( + request: Request, exc: ClientAlreadyExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + 
headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(ClientNotExists) + async def client_not_exists_exception_handler( + request: Request, exc: ClientNotExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(NoteAlreadyExists) + async def note_already_exists_exception_handler( + request: Request, exc: NoteAlreadyExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(NoteNotExists) + async def note_not_exists_exception_handler( + request: Request, exc: NoteNotExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsiteAlreadyExists) + async def website_already_exists_exception_handler( + request: Request, exc: WebsiteAlreadyExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsiteNotExists) + async def website_not_exists_exception_handler( + request: Request, exc: WebsiteNotExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsiteDomainInvalid) + async def website_domain_invalid_exception_handler( + request: Request, exc: WebsiteDomainInvalid + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + 
@app.exception_handler(WebsiteMapAlreadyExists) + async def website_map_already_exists_exception_handler( + request: Request, exc: WebsiteMapAlreadyExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsiteMapNotExists) + async def website_map_not_exists_exception_handler( + request: Request, exc: WebsiteMapNotExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsitePageAlreadyExists) + async def website_page_already_exists_exception_handler( + request: Request, exc: WebsitePageAlreadyExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsitePageNotExists) + async def website_page_not_exists_exception_handler( + request: Request, exc: WebsitePageNotExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsitePageSpeedInsightsAlreadyExists) + async def website_page_speed_insights_already_exists_exception_handler( + request: Request, exc: WebsitePageSpeedInsightsAlreadyExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsitePageSpeedInsightsNotExists) + async def website_page_speed_insights_not_exists_exception_handler( + request: Request, exc: WebsitePageSpeedInsightsNotExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + 
request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsitePageKeywordCorpusAlreadyExists) + async def website_page_keyword_corpus_already_exists_exception_handler( + request: Request, exc: WebsitePageKeywordCorpusAlreadyExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + @app.exception_handler(WebsitePageKeywordCorpusNotExists) + async def website_page_keyword_corpus_not_exists_exception_handler( + request: Request, exc: WebsitePageKeywordCorpusNotExists + ) -> Response: # noqa: E501 + return await http_exception_handler( + request, + HTTPException( + exc.status_code, + detail=exc.message, + headers={**get_global_headers()}, + ), + ) + + +__all__ = [ + "ApiException", + "ClientAlreadyExists", + "ClientNotExists", + "configure_exceptions", + "EntityIdNotProvided", + "ErrorModel", + "ErrorCodeReasonModel", + "ErrorCode", + "InvalidID", + "NoteAlreadyExists", + "NoteNotExists", + "UserAlreadyExists", + "UserNotExists", + "WebsiteAlreadyExists", + "WebsiteNotExists", + "WebsiteDomainInvalid", + "WebsiteMapAlreadyExists", + "WebsiteMapNotExists", + "WebsitePageAlreadyExists", + "WebsitePageNotExists", + "WebsitePageSpeedInsightsAlreadyExists", + "WebsitePageSpeedInsightsNotExists", + "WebsitePageKeywordCorpusAlreadyExists", + "WebsitePageKeywordCorpusNotExists", +] diff --git a/app/api/errors.py b/app/api/exceptions/errors.py similarity index 78% rename from app/api/errors.py rename to app/api/exceptions/errors.py index 3da8a599..97729d55 100644 --- a/app/api/errors.py +++ b/app/api/exceptions/errors.py @@ -14,11 +14,14 @@ class ErrorCodeReasonModel(BaseModel): class ErrorCode(str, Enum): + # generics + ID_INVALID = "ID_INVALID" + ID_NOT_PROVIDED = "ID_NOT_PROVIDED" # authorization UNAUTHORIZED = "UNAUTHORIZED" INSUFFICIENT_PERMISSIONS = 
"INSUFFICIENT_PERMISSIONS" - # generics - INVALID_ID = "INVALID_ID" + # security + IP_RESTRICTED_TOO_MANY_REQUESTS = "call limit reached" # users USER_NOT_FOUND = "USER_NOT_FOUND" USERNAME_EXISTS = "USERNAME_EXISTS" @@ -35,11 +38,12 @@ class ErrorCode(str, Enum): # sitemaps WEBSITE_MAP_NOT_FOUND = "WEBSITE_MAP_NOT_FOUND" WEBSITE_MAP_EXISTS = "WEBSITE_MAP_EXISTS" - WEBSITE_MAP_UNASSIGNED_WEBSITE_ID = "WEBSITE_MAP_UNASSIGNED_WEBSITE_ID" # webpages WEBSITE_PAGE_NOT_FOUND = "WEBSITE_PAGE_NOT_FOUND" WEBSITE_PAGE_URL_EXISTS = "WEBSITE_PAGE_URL_EXISTS" - WEBSITE_PAGE_UNASSIGNED_WEBSITE_ID = "WEBSITE_PAGE_UNASSIGNED_WEBSITE_ID" # web page speed insights WEBSITE_PAGE_SPEED_INSIGHTS_EXISTS = "WEBSITE_PAGE_SPEED_INSIGHTS_EXISTS" WEBSITE_PAGE_SPEED_INSIGHTS_NOT_FOUND = "WEBSITE_PAGE_SPEED_INSIGHTS_NOT_FOUND" + # web page keyword corpus + WEBSITE_PAGE_KEYWORD_CORPUS_EXISTS = "WEBSITE_PAGE_KEYWORD_CORPUS_EXISTS" + WEBSITE_PAGE_KEYWORD_CORPUS_NOT_FOUND = "WEBSITE_PAGE_KEYWORD_CORPUS_NOT_FOUND" diff --git a/app/api/exceptions/exceptions.py b/app/api/exceptions/exceptions.py new file mode 100644 index 00000000..cbaee615 --- /dev/null +++ b/app/api/exceptions/exceptions.py @@ -0,0 +1,113 @@ +from fastapi import status + +from .errors import ErrorCode + + +# Generics +class ApiException(Exception): + def __init__(self, status_code: int, message: str): + self.status_code = status_code + self.message = message + + +class InvalidID(ApiException): + def __init__(self, message: str = ErrorCode.ID_INVALID): + super().__init__(status.HTTP_422_UNPROCESSABLE_ENTITY, message) + + +class EntityIdNotProvided(ApiException): + def __init__(self, message: str = ErrorCode.ID_NOT_PROVIDED): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +# Users +class UserAlreadyExists(ApiException): + def __init__(self, message: str = ErrorCode.USERNAME_EXISTS): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +class UserNotExists(ApiException): + def __init__(self, message: str = 
ErrorCode.USER_NOT_FOUND): + super().__init__(status.HTTP_404_NOT_FOUND, message) + + +# Clients +class ClientAlreadyExists(ApiException): + def __init__(self, message: str = ErrorCode.CLIENT_EXISTS): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +class ClientNotExists(ApiException): + def __init__(self, message: str = ErrorCode.CLIENT_NOT_FOUND): + super().__init__(status.HTTP_404_NOT_FOUND, message) + + +# Notes +class NoteAlreadyExists(ApiException): + def __init__(self, message: str = ErrorCode.NOTE_EXISTS): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +class NoteNotExists(ApiException): + def __init__(self, message: str = ErrorCode.NOTE_NOT_FOUND): + super().__init__(status.HTTP_404_NOT_FOUND, message) + + +# Websites +class WebsiteAlreadyExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_DOMAIN_EXISTS): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +class WebsiteNotExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_NOT_FOUND): + super().__init__(status.HTTP_404_NOT_FOUND, message) + + +class WebsiteDomainInvalid(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_DOMAIN_INVALID): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +# Website Sitemaps +class WebsiteMapAlreadyExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_MAP_EXISTS): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +class WebsiteMapNotExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_MAP_NOT_FOUND): + super().__init__(status.HTTP_404_NOT_FOUND, message) + + +# Website Pages +class WebsitePageAlreadyExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_PAGE_URL_EXISTS): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +class WebsitePageNotExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_PAGE_NOT_FOUND): + super().__init__(status.HTTP_404_NOT_FOUND, 
message) + + +# Website Page Speed Insights +class WebsitePageSpeedInsightsAlreadyExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_PAGE_SPEED_INSIGHTS_EXISTS): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +class WebsitePageSpeedInsightsNotExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_PAGE_SPEED_INSIGHTS_NOT_FOUND): + super().__init__(status.HTTP_404_NOT_FOUND, message) + + +# Website Keyword Corpus +class WebsitePageKeywordCorpusAlreadyExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_PAGE_KEYWORD_CORPUS_EXISTS): + super().__init__(status.HTTP_400_BAD_REQUEST, message) + + +class WebsitePageKeywordCorpusNotExists(ApiException): + def __init__(self, message: str = ErrorCode.WEBSITE_PAGE_KEYWORD_CORPUS_NOT_FOUND): + super().__init__(status.HTTP_404_NOT_FOUND, message) diff --git a/app/api/middleware/__init__.py b/app/api/middleware/__init__.py new file mode 100644 index 00000000..8e3cbc06 --- /dev/null +++ b/app/api/middleware/__init__.py @@ -0,0 +1,58 @@ +import time +from typing import Any + +from asgi_correlation_id import CorrelationIdMiddleware +from fastapi import FastAPI, HTTPException, Request, status +from fastapi.exception_handlers import http_exception_handler +from starlette.middleware.sessions import SessionMiddleware + +from app.api.exceptions import ErrorCode +from app.core.config import settings +from app.schemas import RateLimitedToken + +from .rate_limiter import limiter +from .utilities import get_request_client_ip + + +def configure_middleware(app: FastAPI) -> None: + app.add_middleware(CorrelationIdMiddleware) + app.add_middleware(SessionMiddleware, secret_key=settings.SECRET_KEY) + + @app.middleware("http") + async def add_process_time_header(request: Request, call_next: Any) -> Any: + """Adds a header to each response with the time it took to process.""" + start_time: Any = time.perf_counter() + response_result: Any = await call_next(request) + 
process_time: Any = time.perf_counter() - start_time + response_result.headers["X-PROCESS-TIME"] = str(process_time) + return response_result + + @app.middleware("http") + async def add_global_request_rate_limit(request: Request, call_next: Any) -> Any: + """ + Manages a request token for each ip address and limits the number + of requests per second. + """ + client_ip = get_request_client_ip(request) + ip_limit: RateLimitedToken = await limiter(client_ip, 100, 60) + + if not ip_limit.call: + return await http_exception_handler( + request, + HTTPException( + status_code=status.HTTP_429_TOO_MANY_REQUESTS, + detail={ + "message": ErrorCode.IP_RESTRICTED_TOO_MANY_REQUESTS, + "ttl": ip_limit.ttl, + }, + ), + ) + + return await call_next(request) + + +__all__ = [ + "configure_middleware", + "limiter", + "get_request_client_ip", +] diff --git a/app/api/middleware/rate_limiter.py b/app/api/middleware/rate_limiter.py new file mode 100644 index 00000000..23674698 --- /dev/null +++ b/app/api/middleware/rate_limiter.py @@ -0,0 +1,16 @@ +from app.core.redis import redis_conn +from app.schemas import RateLimitedToken + + +async def limiter(key: str, limit: int, expires: int = 60) -> RateLimitedToken: + redis_key = "iplimit:" + key + req = await redis_conn.incr(redis_key) + if req == 1: + await redis_conn.expire(redis_key, expires) + ttl = expires + else: + ttl = await redis_conn.ttl(redis_key) + if req > limit: + return RateLimitedToken(call=False, ttl=ttl) + else: + return RateLimitedToken(call=True, ttl=ttl) diff --git a/app/api/middleware/utilities.py b/app/api/middleware/utilities.py new file mode 100644 index 00000000..97f0554c --- /dev/null +++ b/app/api/middleware/utilities.py @@ -0,0 +1,13 @@ +from fastapi import Request + + +def get_request_client_ip(request: Request) -> str: + client_ip: str + forwarded_ip = request.headers.get("X-Forwarded-For") + if forwarded_ip: + client_ip = forwarded_ip.split(",")[0] + elif request.client: + client_ip = request.client.host + 
else: + client_ip = "::0" + return client_ip diff --git a/app/api/openapi.py b/app/api/openapi.py index 43ddb627..2e80d804 100644 --- a/app/api/openapi.py +++ b/app/api/openapi.py @@ -2,7 +2,7 @@ from fastapi import status -from app.api.errors import ErrorCode, ErrorModel +from app.api.exceptions import ErrorCode, ErrorModel OpenAPIResponseType: Any = Dict[Union[int, str], Dict[str, Any]] diff --git a/app/api/v1/endpoints/clients.py b/app/api/v1/endpoints/clients.py index dd19f6e7..e1feac45 100644 --- a/app/api/v1/endpoints/clients.py +++ b/app/api/v1/endpoints/clients.py @@ -1,6 +1,6 @@ from typing import Dict, List -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends from app.api.deps import ( AsyncDatabaseSession, @@ -10,10 +10,9 @@ get_async_db, get_client_or_404, ) -from app.api.errors import ErrorCode from app.api.exceptions import ClientAlreadyExists from app.api.openapi import clients_read_responses -from app.core.auth import auth +from app.core.security import auth from app.crud import ClientRepository from app.models import Client from app.schemas import ClientCreate, ClientRead, ClientReadRelations, ClientUpdate @@ -80,23 +79,18 @@ async def clients_create( `ClientRead` : the newly created client """ - try: - clients_repo: ClientRepository = ClientRepository(session=db) - data: Dict = client_in.model_dump() - check_title: str | None = data.get("title") - if check_title: - a_client: Client | None = await clients_repo.read_by( - field_name="title", - field_value=check_title, - ) - if a_client: - raise ClientAlreadyExists() - new_client: Client = await clients_repo.create(client_in) - return ClientRead.model_validate(new_client) - except ClientAlreadyExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, detail=ErrorCode.CLIENT_EXISTS + clients_repo: ClientRepository = ClientRepository(session=db) + data: Dict = client_in.model_dump() + check_title: str | None = data.get("title") + if 
check_title: + a_client: Client | None = await clients_repo.read_by( + field_name="title", + field_value=check_title, ) + if a_client: + raise ClientAlreadyExists() + new_client: Client = await clients_repo.create(client_in) + return ClientRead.model_validate(new_client) @router.get( @@ -158,26 +152,21 @@ async def clients_update( `ClientRead` : the updated client """ - try: - clients_repo: ClientRepository = ClientRepository(session=db) - if client_in.title is not None: - a_client: Client | None = await clients_repo.read_by( - field_name="title", field_value=client_in.title - ) - if a_client: - raise ClientAlreadyExists() - updated_client: Client | None = await clients_repo.update( - entry=client, schema=client_in - ) - return ( - ClientRead.model_validate(updated_client) - if updated_client - else ClientRead.model_validate(client) - ) - except ClientAlreadyExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, detail=ErrorCode.CLIENT_EXISTS + clients_repo: ClientRepository = ClientRepository(session=db) + if client_in.title is not None: + a_client: Client | None = await clients_repo.read_by( + field_name="title", field_value=client_in.title ) + if a_client: + raise ClientAlreadyExists() + updated_client: Client | None = await clients_repo.update( + entry=client, schema=client_in + ) + return ( + ClientRead.model_validate(updated_client) + if updated_client + else ClientRead.model_validate(client) + ) @router.delete( diff --git a/app/api/v1/endpoints/notes.py b/app/api/v1/endpoints/notes.py index 47dc472f..0c451ca3 100644 --- a/app/api/v1/endpoints/notes.py +++ b/app/api/v1/endpoints/notes.py @@ -1,6 +1,6 @@ from typing import Dict, List -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends from app.api.deps import ( AsyncDatabaseSession, @@ -10,9 +10,8 @@ get_async_db, get_note_or_404, ) -from app.api.errors import ErrorCode from app.api.exceptions import NoteAlreadyExists -from app.core.auth 
import auth +from app.core.security import auth from app.crud import NoteRepository from app.models import Note from app.schemas import NoteCreate, NoteRead, NoteReadRelations, NoteUpdate @@ -81,23 +80,18 @@ async def notes_create( `NoteRead` : the newly created note """ - try: - notes_repo: NoteRepository = NoteRepository(session=db) - data: Dict = note_in.model_dump() - check_title: str | None = data.get("title") - if check_title: - a_note: Note | None = await notes_repo.read_by( - field_name="title", - field_value=check_title, - ) - if a_note: - raise NoteAlreadyExists() - new_note: Note = await notes_repo.create(note_in) - return NoteRead.model_validate(new_note) - except NoteAlreadyExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, detail=ErrorCode.NOTE_EXISTS + notes_repo: NoteRepository = NoteRepository(session=db) + data: Dict = note_in.model_dump() + check_title: str | None = data.get("title") + if check_title: + a_note: Note | None = await notes_repo.read_by( + field_name="title", + field_value=check_title, ) + if a_note: + raise NoteAlreadyExists() + new_note: Note = await notes_repo.create(note_in) + return NoteRead.model_validate(new_note) @router.get( @@ -164,24 +158,19 @@ async def notes_update( `NoteRead` : the updated note """ - try: - notes_repo: NoteRepository = NoteRepository(session=db) - if note_in.title is not None: - a_note: Note | None = await notes_repo.read_by( - field_name="title", field_value=note_in.title - ) - if a_note: - raise NoteAlreadyExists() - updated_note: Note | None = await notes_repo.update(entry=note, schema=note_in) - return ( - NoteRead.model_validate(updated_note) - if updated_note - else NoteRead.model_validate(note) - ) - except NoteAlreadyExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, detail=ErrorCode.NOTE_EXISTS + notes_repo: NoteRepository = NoteRepository(session=db) + if note_in.title is not None: + a_note: Note | None = await notes_repo.read_by( + 
field_name="title", field_value=note_in.title ) + if a_note: + raise NoteAlreadyExists() + updated_note: Note | None = await notes_repo.update(entry=note, schema=note_in) + return ( + NoteRead.model_validate(updated_note) + if updated_note + else NoteRead.model_validate(note) + ) @router.delete( diff --git a/app/api/v1/endpoints/public.py b/app/api/v1/endpoints/public.py index a0b893b7..27e14ed5 100644 --- a/app/api/v1/endpoints/public.py +++ b/app/api/v1/endpoints/public.py @@ -1,8 +1,11 @@ from typing import Any, Dict -from fastapi import APIRouter +from fastapi import APIRouter, Depends, Request, Response from app.api.deps import GetQueryParams +from app.core.config import Settings, get_settings +from app.core.security import CsrfProtect +from app.schemas import CsrfToken from app.worker import task_speak router: APIRouter = APIRouter() @@ -13,7 +16,10 @@ name="public:status", response_model=Dict[str, Any], ) -async def status(query: GetQueryParams) -> Dict[str, Any]: +async def status( + request: Request, + query: GetQueryParams, +) -> Dict[str, Any]: """Retrieve the status of the API. Permissions: @@ -29,3 +35,37 @@ async def status(query: GetQueryParams) -> Dict[str, Any]: speak_task = task_speak.delay(query.speak) return {"status": "ok", "speak_task_id": speak_task.id} return {"status": "ok"} + + +@router.get( + "/csrf", + name="public:csrf", + dependencies=[ + Depends(CsrfProtect), + Depends(get_settings), + ], + response_model=CsrfToken, +) +async def get_csrf( + response: Response, + csrf_protect: CsrfProtect = Depends(), + setting: Settings = Depends(get_settings), +) -> CsrfToken: + """Generates an secure CSRF token for the API. 
+ + Permissions: + ------------ + anyone can access this endpoint + + Returns: + -------- + `Dict[str, Any]` : a dictionary containing the CSRF token for the API + + """ + csrf_token, signed_token = csrf_protect.generate_csrf_tokens( + setting.CSRF_SECRET_KEY + ) + + csrf_protect.set_csrf_cookie(signed_token, response) + + return CsrfToken(csrf_token=csrf_token) diff --git a/app/api/v1/endpoints/tasks.py b/app/api/v1/endpoints/tasks.py index 88142ef2..4226f246 100644 --- a/app/api/v1/endpoints/tasks.py +++ b/app/api/v1/endpoints/tasks.py @@ -4,7 +4,7 @@ from fastapi import APIRouter, Depends from app.api.deps import CurrentUser -from app.core.auth import auth +from app.core.security import auth from app.schemas import TaskState router: APIRouter = APIRouter() diff --git a/app/api/v1/endpoints/users.py b/app/api/v1/endpoints/users.py index f61de3e1..05303fc1 100644 --- a/app/api/v1/endpoints/users.py +++ b/app/api/v1/endpoints/users.py @@ -1,6 +1,6 @@ from typing import List -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends, Request from app.api.deps import ( AsyncDatabaseSession, @@ -11,11 +11,11 @@ get_current_user, get_user_or_404, ) -from app.api.errors import ErrorCode from app.api.exceptions import UserAlreadyExists +from app.api.middleware import get_request_client_ip # from app.api.openapi import users_read_responses -from app.core.auth import auth +from app.core.security import auth from app.crud.user import UserRepository from app.models.user import User from app.schemas import UserCreate, UserRead, UserReadRelations, UserRole, UserUpdate @@ -34,8 +34,10 @@ response_model=UserRead | None, ) async def users_current( + request: Request, db: AsyncDatabaseSession, current_user: CurrentUser, + request_ip: str = Depends(get_request_client_ip), ) -> UserRead | None: """Retrieve the profile information about the currently active, verified user. 
@@ -67,6 +69,11 @@ async def users_current( roles=user_roles, ) ) + # set session vars + request.session["user_id"] = str(user.id) + req_sess_ip = request.session.get("ip_address", False) + if not req_sess_ip: + request.session["ip_address"] = str(request_ip) return UserRead.model_validate(user) @@ -182,25 +189,19 @@ async def users_update( - `role=user` : can only update non-sensitive profile information like: `username` """ - try: - users_repo: UserRepository = UserRepository(session=db) - if user_in.username is not None: - a_user: User | None = await users_repo.read_by( - field_name="username", field_value=user_in.username - ) - if a_user: - raise UserAlreadyExists() - updated_user: User | None = await users_repo.update(entry=user, schema=user_in) - return ( - UserRead.model_validate(updated_user) - if updated_user - else UserRead.model_validate(user) - ) - except UserAlreadyExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorCode.USERNAME_EXISTS, + users_repo: UserRepository = UserRepository(session=db) + if user_in.username is not None: + a_user: User | None = await users_repo.read_by( + field_name="username", field_value=user_in.username ) + if a_user: + raise UserAlreadyExists() + updated_user: User | None = await users_repo.update(entry=user, schema=user_in) + return ( + UserRead.model_validate(updated_user) + if updated_user + else UserRead.model_validate(user) + ) @router.delete( diff --git a/app/api/v1/endpoints/web_keywordcorpus.py b/app/api/v1/endpoints/web_keywordcorpus.py index 95d1276b..880e34d3 100644 --- a/app/api/v1/endpoints/web_keywordcorpus.py +++ b/app/api/v1/endpoints/web_keywordcorpus.py @@ -1,6 +1,6 @@ from typing import List -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends from app.api.deps import ( AsyncDatabaseSession, @@ -10,10 +10,9 @@ get_async_db, get_website_page_kwc_or_404, ) -from app.api.errors import ErrorCode from app.api.exceptions 
import WebsiteNotExists, WebsitePageNotExists -from app.core.auth import auth from app.core.logger import logger +from app.core.security import auth from app.crud import ( WebsiteKeywordCorpusRepository, WebsitePageRepository, @@ -108,48 +107,35 @@ async def website_page_keyword_corpus_create( `WebsiteKeywordCorpusRead` : the newly created website keyword corpus """ - try: - # check if website exists - if query.website_id is None: - raise WebsiteNotExists() - website_repo: WebsiteRepository = WebsiteRepository(db) - a_website: Website | None = await website_repo.read(entry_id=query.website_id) - if a_website is None: - raise WebsiteNotExists() - # check if page exists - if query.page_id is None: - raise WebsitePageNotExists() - web_page_repo: WebsitePageRepository = WebsitePageRepository(db) - a_web_page: WebsitePage | None = await web_page_repo.read( - entry_id=query.page_id - ) - if a_web_page is None: - raise WebsitePageNotExists() - # create website keyword corpus - web_kwc_repo: WebsiteKeywordCorpusRepository - web_kwc_repo = WebsiteKeywordCorpusRepository(db) - kwc_create: WebsiteKeywordCorpusCreate = WebsiteKeywordCorpusCreate( - **kwc_in.model_dump(), - page_id=query.page_id, - website_id=query.website_id, - ) - kwc_in_db: WebsiteKeywordCorpus = await web_kwc_repo.create(schema=kwc_create) - logger.info( - "Created Website Keyword Corpus:", - kwc_in_db.id, - kwc_in_db.created_on, - ) - return WebsiteKeywordCorpusRead.model_validate(kwc_in_db) - except WebsiteNotExists: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.WEBSITE_NOT_FOUND, - ) - except WebsitePageNotExists: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.WEBSITE_PAGE_NOT_FOUND, - ) + # check if website exists + if query.website_id is None: + raise WebsiteNotExists() + website_repo: WebsiteRepository = WebsiteRepository(db) + a_website: Website | None = await website_repo.read(entry_id=query.website_id) + if a_website is None: 
+ raise WebsiteNotExists() + # check if page exists + if query.page_id is None: + raise WebsitePageNotExists() + web_page_repo: WebsitePageRepository = WebsitePageRepository(db) + a_web_page: WebsitePage | None = await web_page_repo.read(entry_id=query.page_id) + if a_web_page is None: + raise WebsitePageNotExists() + # create website keyword corpus + web_kwc_repo: WebsiteKeywordCorpusRepository + web_kwc_repo = WebsiteKeywordCorpusRepository(db) + kwc_create: WebsiteKeywordCorpusCreate = WebsiteKeywordCorpusCreate( + **kwc_in.model_dump(), + page_id=query.page_id, + website_id=query.website_id, + ) + kwc_in_db: WebsiteKeywordCorpus = await web_kwc_repo.create(schema=kwc_create) + logger.info( + "Created Website Keyword Corpus:", + kwc_in_db.id, + kwc_in_db.created_on, + ) + return WebsiteKeywordCorpusRead.model_validate(kwc_in_db) @router.get( diff --git a/app/api/v1/endpoints/web_pages.py b/app/api/v1/endpoints/web_pages.py index 56e6cc0d..9cd8945e 100644 --- a/app/api/v1/endpoints/web_pages.py +++ b/app/api/v1/endpoints/web_pages.py @@ -1,6 +1,6 @@ from typing import Any, List -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends from app.api.deps import ( AsyncDatabaseSession, @@ -10,9 +10,8 @@ get_async_db, get_website_page_or_404, ) -from app.api.errors import ErrorCode from app.api.exceptions import WebsiteNotExists, WebsitePageAlreadyExists -from app.core.auth import auth +from app.core.security import auth from app.crud import WebsitePageRepository, WebsiteRepository from app.models import Website, WebsitePage from app.schemas import ( @@ -22,6 +21,7 @@ WebsitePageReadRelations, WebsitePageUpdate, ) +from app.schemas.website_pagespeedinsights import PSIDevice from app.worker import task_website_page_pagespeedinsights_fetch router: APIRouter = APIRouter() @@ -104,35 +104,24 @@ async def website_page_create( `WebsitePageRead` : the newly created website page """ - try: - website_repo: WebsiteRepository = 
WebsiteRepository(session=db) - web_pages_repo: WebsitePageRepository = WebsitePageRepository(session=db) - # check website page url is unique to website_id - a_page: WebsitePage | None = await web_pages_repo.exists_by_two( - field_name_a="url", - field_value_a=website_page_in.url, - field_name_b="website_id", - field_value_b=website_page_in.website_id, - ) - if a_page is not None: - raise WebsitePageAlreadyExists() - # check website page is assigned to a website - a_website: Website | None = await website_repo.read(website_page_in.website_id) - if a_website is None: - raise WebsiteNotExists() - # create the website page - website_page: WebsitePage = await web_pages_repo.create(website_page_in) - return WebsitePageRead.model_validate(website_page) - except WebsiteNotExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorCode.WEBSITE_PAGE_UNASSIGNED_WEBSITE_ID, - ) - except WebsitePageAlreadyExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorCode.WEBSITE_PAGE_URL_EXISTS, - ) + website_repo: WebsiteRepository = WebsiteRepository(session=db) + web_pages_repo: WebsitePageRepository = WebsitePageRepository(session=db) + # check website page url is unique to website_id + a_page: WebsitePage | None = await web_pages_repo.exists_by_two( + field_name_a="url", + field_value_a=website_page_in.url, + field_name_b="website_id", + field_value_b=website_page_in.website_id, + ) + if a_page is not None: + raise WebsitePageAlreadyExists() + # check website page is assigned to a website + a_website: Website | None = await website_repo.read(website_page_in.website_id) + if a_website is None: + raise WebsiteNotExists() + # create the website page + website_page: WebsitePage = await web_pages_repo.create(website_page_in) + return WebsitePageRead.model_validate(website_page) @router.get( @@ -283,32 +272,26 @@ async def website_page_process_website_page_speed_insights( task_id's for the mobile and desktop page speed 
insights tasks """ - try: - # check website page is assigned to a website - website_repo: WebsiteRepository = WebsiteRepository(session=db) - a_website: Website | None = await website_repo.read(website_page.website_id) - if a_website is None: - raise WebsiteNotExists() - fetch_page = a_website.get_link() + website_page.url - website_page_psi_mobile: Any = task_website_page_pagespeedinsights_fetch.delay( - website_id=a_website.id, - page_id=website_page.id, - fetch_url=fetch_page, - device="mobile", - ) - website_page_psi_desktop: Any = task_website_page_pagespeedinsights_fetch.delay( - website_id=a_website.id, - page_id=website_page.id, - fetch_url=fetch_page, - device="desktop", - ) - return WebsitePagePSIProcessing( - page=WebsitePageRead.model_validate(website_page), - psi_mobile_task_id=website_page_psi_mobile.id, - psi_desktop_task_id=website_page_psi_desktop.id, - ) - except WebsiteNotExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorCode.WEBSITE_PAGE_UNASSIGNED_WEBSITE_ID, - ) + # check website page is assigned to a website + website_repo: WebsiteRepository = WebsiteRepository(session=db) + a_website: Website | None = await website_repo.read(website_page.website_id) + if a_website is None: + raise WebsiteNotExists() + fetch_page = a_website.get_link() + website_page.url + website_page_psi_mobile: Any = task_website_page_pagespeedinsights_fetch.delay( + website_id=a_website.id, + page_id=website_page.id, + fetch_url=fetch_page, + device=PSIDevice.mobile, + ) + website_page_psi_desktop: Any = task_website_page_pagespeedinsights_fetch.delay( + website_id=a_website.id, + page_id=website_page.id, + fetch_url=fetch_page, + device=PSIDevice.desktop, + ) + return WebsitePagePSIProcessing( + page=WebsitePageRead.model_validate(website_page), + psi_mobile_task_id=website_page_psi_mobile.id, + psi_desktop_task_id=website_page_psi_desktop.id, + ) diff --git a/app/api/v1/endpoints/web_pagespeedinsights.py 
b/app/api/v1/endpoints/web_pagespeedinsights.py index 64f4fc56..c3c629de 100644 --- a/app/api/v1/endpoints/web_pagespeedinsights.py +++ b/app/api/v1/endpoints/web_pagespeedinsights.py @@ -1,6 +1,6 @@ from typing import List -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends from app.api.deps import ( AsyncDatabaseSession, @@ -10,10 +10,9 @@ get_async_db, get_website_page_psi_or_404, ) -from app.api.errors import ErrorCode from app.api.exceptions import WebsiteNotExists, WebsitePageNotExists -from app.core.auth import auth from app.core.logger import logger +from app.core.security import auth from app.crud import ( WebsitePageRepository, WebsitePageSpeedInsightsRepository, @@ -111,49 +110,34 @@ async def website_page_speed_insights_create( `WebsitePageSpeedInsightsRead` : the newly created website page speed insights """ - try: - # check if website exists - if query.website_id is None: - raise WebsiteNotExists() - website_repo: WebsiteRepository = WebsiteRepository(db) - a_website: Website | None = await website_repo.read(entry_id=query.website_id) - if a_website is None: - raise WebsiteNotExists() - # check if page exists - if query.page_id is None: - raise WebsitePageNotExists() - web_page_repo: WebsitePageRepository = WebsitePageRepository(db) - a_web_page: WebsitePage | None = await web_page_repo.read( - entry_id=query.page_id - ) - if a_web_page is None: - raise WebsitePageNotExists() - web_psi_repo: WebsitePageSpeedInsightsRepository - web_psi_repo = WebsitePageSpeedInsightsRepository(db) - psi_create: WebsitePageSpeedInsightsCreate = WebsitePageSpeedInsightsCreate( - **psi_in.model_dump(), - page_id=query.page_id, - website_id=query.website_id, - ) - psi_in_db: WebsitePageSpeedInsights = await web_psi_repo.create( - schema=psi_create - ) - logger.info( - "Created Website Page Speed Insights:", - psi_in_db.id, - psi_in_db.created_on, - ) - return WebsitePageSpeedInsightsRead.model_validate(psi_in_db) - except 
WebsiteNotExists: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.WEBSITE_NOT_FOUND, - ) - except WebsitePageNotExists: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=ErrorCode.WEBSITE_PAGE_NOT_FOUND, - ) + # check if website exists + if query.website_id is None: + raise WebsiteNotExists() + website_repo: WebsiteRepository = WebsiteRepository(db) + a_website: Website | None = await website_repo.read(entry_id=query.website_id) + if a_website is None: + raise WebsiteNotExists() + # check if page exists + if query.page_id is None: + raise WebsitePageNotExists() + web_page_repo: WebsitePageRepository = WebsitePageRepository(db) + a_web_page: WebsitePage | None = await web_page_repo.read(entry_id=query.page_id) + if a_web_page is None: + raise WebsitePageNotExists() + web_psi_repo: WebsitePageSpeedInsightsRepository + web_psi_repo = WebsitePageSpeedInsightsRepository(db) + psi_create: WebsitePageSpeedInsightsCreate = WebsitePageSpeedInsightsCreate( + **psi_in.model_dump(), + page_id=query.page_id, + website_id=query.website_id, + ) + psi_in_db: WebsitePageSpeedInsights = await web_psi_repo.create(schema=psi_create) + logger.info( + "Created Website Page Speed Insights:", + psi_in_db.id, + psi_in_db.created_on, + ) + return WebsitePageSpeedInsightsRead.model_validate(psi_in_db) @router.get( diff --git a/app/api/v1/endpoints/web_sitemaps.py b/app/api/v1/endpoints/web_sitemaps.py index 44f10fda..a3375346 100644 --- a/app/api/v1/endpoints/web_sitemaps.py +++ b/app/api/v1/endpoints/web_sitemaps.py @@ -1,6 +1,6 @@ from typing import Any, List -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends from app.api.deps import ( AsyncDatabaseSession, @@ -10,9 +10,8 @@ get_async_db, get_website_map_or_404, ) -from app.api.errors import ErrorCode from app.api.exceptions import WebsiteMapAlreadyExists, WebsiteNotExists -from app.core.auth import auth +from app.core.security 
import auth from app.crud import WebsiteMapRepository, WebsiteRepository from app.models import Website, WebsiteMap from app.schemas import ( @@ -98,35 +97,24 @@ async def sitemap_create( `WebsiteMapRead` : the newly created website map """ - try: - sitemap_repo: WebsiteMapRepository = WebsiteMapRepository(session=db) - # check website map url is unique to website_id - a_sitemap: WebsiteMap | None = await sitemap_repo.exists_by_two( - field_name_a="url", - field_value_a=sitemap_in.url, - field_name_b="website_id", - field_value_b=sitemap_in.website_id, - ) - if a_sitemap is not None: - raise WebsiteMapAlreadyExists() - # check website map is assigned to a website - website_repo: WebsiteRepository = WebsiteRepository(session=db) - a_website: Website | None = await website_repo.read(sitemap_in.website_id) - if a_website is None: - raise WebsiteNotExists() - # create website map - sitemap: WebsiteMap = await sitemap_repo.create(sitemap_in) - return WebsiteMapRead.model_validate(sitemap) - except WebsiteNotExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorCode.WEBSITE_MAP_UNASSIGNED_WEBSITE_ID, - ) - except WebsiteMapAlreadyExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorCode.WEBSITE_MAP_EXISTS, - ) + sitemap_repo: WebsiteMapRepository = WebsiteMapRepository(session=db) + # check website map url is unique to website_id + a_sitemap: WebsiteMap | None = await sitemap_repo.exists_by_two( + field_name_a="url", + field_value_a=sitemap_in.url, + field_name_b="website_id", + field_value_b=sitemap_in.website_id, + ) + if a_sitemap is not None: + raise WebsiteMapAlreadyExists() + # check website map is assigned to a website + website_repo: WebsiteRepository = WebsiteRepository(session=db) + a_website: Website | None = await website_repo.read(sitemap_in.website_id) + if a_website is None: + raise WebsiteNotExists() + # create website map + sitemap: WebsiteMap = await sitemap_repo.create(sitemap_in) + 
return WebsiteMapRead.model_validate(sitemap) @router.get( diff --git a/app/api/v1/endpoints/websites.py b/app/api/v1/endpoints/websites.py index 036b2112..e9712c0e 100644 --- a/app/api/v1/endpoints/websites.py +++ b/app/api/v1/endpoints/websites.py @@ -1,6 +1,6 @@ from typing import List -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends from app.api.deps import ( AsyncDatabaseSession, @@ -10,9 +10,8 @@ get_async_db, get_website_or_404, ) -from app.api.errors import ErrorCode from app.api.exceptions import WebsiteAlreadyExists, WebsiteDomainInvalid -from app.core.auth import auth +from app.core.security import auth from app.crud import WebsiteRepository from app.models import Website from app.schemas import ( @@ -96,34 +95,21 @@ async def website_create( background task that will fetch the sitemap pages """ - try: - websites_repo: WebsiteRepository = WebsiteRepository(session=db) - a_site: Website | None = await websites_repo.read_by( - field_name="domain", - field_value=website_in.domain, - ) - if a_site: - raise WebsiteAlreadyExists() - if not await websites_repo.validate(domain=website_in.domain): - raise WebsiteDomainInvalid() - new_site: Website = await websites_repo.create(website_in) - a_sitemap_url = new_site.get_link() - sitemap_task = task_website_sitemap_fetch_pages.delay( - new_site.id, a_sitemap_url - ) - return WebsiteCreateProcessing( - website=WebsiteRead.model_validate(new_site), task_id=sitemap_task.id - ) - except WebsiteDomainInvalid: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorCode.WEBSITE_DOMAIN_INVALID, - ) - except WebsiteAlreadyExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorCode.WEBSITE_DOMAIN_EXISTS, - ) + websites_repo: WebsiteRepository = WebsiteRepository(session=db) + a_site: Website | None = await websites_repo.read_by( + field_name="domain", + field_value=website_in.domain, + ) + if a_site: + raise 
WebsiteAlreadyExists() + if not await websites_repo.validate(domain=website_in.domain): + raise WebsiteDomainInvalid() + new_site: Website = await websites_repo.create(website_in) + a_sitemap_url = new_site.get_link() + sitemap_task = task_website_sitemap_fetch_pages.delay(new_site.id, a_sitemap_url) + return WebsiteCreateProcessing( + website=WebsiteRead.model_validate(new_site), task_id=sitemap_task.id + ) @router.get( @@ -191,28 +177,22 @@ async def website_update( `WebsiteRead` : the updated website """ - try: - websites_repo: WebsiteRepository = WebsiteRepository(session=db) - if website_in.domain is not None: - domain_found: Website | None = await websites_repo.read_by( - field_name="domain", - field_value=website_in.domain, - ) - if domain_found: - raise WebsiteAlreadyExists() - updated_website: Website | None = await websites_repo.update( - entry=website, schema=website_in - ) - return ( - WebsiteRead.model_validate(updated_website) - if updated_website - else WebsiteRead.model_validate(website) - ) - except WebsiteAlreadyExists: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ErrorCode.WEBSITE_DOMAIN_EXISTS, + websites_repo: WebsiteRepository = WebsiteRepository(session=db) + if website_in.domain is not None: + domain_found: Website | None = await websites_repo.read_by( + field_name="domain", + field_value=website_in.domain, ) + if domain_found: + raise WebsiteAlreadyExists() + updated_website: Website | None = await websites_repo.update( + entry=website, schema=website_in + ) + return ( + WebsiteRead.model_validate(updated_website) + if updated_website + else WebsiteRead.model_validate(website) + ) @router.delete( diff --git a/app/core/__init__.py b/app/core/__init__.py index e69de29b..a48c133d 100644 --- a/app/core/__init__.py +++ b/app/core/__init__.py @@ -0,0 +1 @@ +from .logger import logger diff --git a/app/core/auth/permissions.py b/app/core/auth/permissions.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/app/core/celery.py b/app/core/celery.py index 87cafc90..e9f459f7 100644 --- a/app/core/celery.py +++ b/app/core/celery.py @@ -32,6 +32,7 @@ def create_celery_worker(name: str = "worker") -> Celery: worker.conf.update(result_persistent=True) worker.conf.update(worker_send_task_events=False) worker.conf.update(worker_prefetch_multiplier=1) + worker.conf.update(broker_connection_retry_on_startup=True) worker.conf.update( task_queues=( Queue("tasks"), diff --git a/app/core/config.py b/app/core/config.py index 679d2236..d8e370ab 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -28,7 +28,12 @@ class Settings(BaseSettings): # Security ASGI_ID_HEADER_KEY: str = "x-request-id" SECRET_KEY: str = environ.get( - "SECRET_KEY", "54295fb3ad6577bf6ec55fc8a4e2ce86b4a490b5f1666f1e871e94855f6dc0a7" + "SECRET_KEY", + "54295fb3ad6577bf6ec55fc8a4e2ce86b4a490b5f1666f1e871e94855f6dc0a7", # noqa: E501 + ) + CSRF_SECRET_KEY: str = environ.get( + "CSRF_SECRET_KEY", + "fb4cd9547245b656a5a44441eebd5960432c95d9c45970be0d7442f91bf64366", # noqa: E501 ) BACKEND_CORS_ORIGINS: Union[str, List[str]] = [ f"http://{SERVER_NAME}", @@ -37,20 +42,7 @@ class Settings(BaseSettings): f"http://{SERVER_NAME}:8080", f"http://{SERVER_NAME}:4200", f"http://{SERVER_NAME}:3000", - f"https://{SERVER_NAME}", - f"https://{SERVER_NAME}:8888", - f"https://{SERVER_NAME}:80", - f"https://{SERVER_NAME}:8080", - f"https://{SERVER_NAME}:4200", - f"https://{SERVER_NAME}:3000", f"http://{SERVER_NAME_STAGING}", - f"https://{SERVER_NAME_STAGING}", - f"http://whoami.{SERVER_NAME_STAGING}", - f"https://whoami.{SERVER_NAME_STAGING}", - f"http://dbadmin.{SERVER_NAME_STAGING}", - f"https://dbadmin.{SERVER_NAME_STAGING}", - f"http://flower.{SERVER_NAME_STAGING}", - f"https://flower.{SERVER_NAME_STAGING}", ] # Auth0 diff --git a/app/core/logger/__init__.py b/app/core/logger/__init__.py index f87d678a..9b4e2d4d 100644 --- a/app/core/logger/__init__.py +++ b/app/core/logger/__init__.py @@ -3,3 +3,7 @@ from .console import 
Logger logger: Logger = Logger(name=settings.LOGGER_NAME, level=settings.LOGGING_LEVEL) + +__all__: list[str] = [ + "logger", +] diff --git a/app/core/logger/console.py b/app/core/logger/console.py index 2bd9b647..fbc15beb 100644 --- a/app/core/logger/console.py +++ b/app/core/logger/console.py @@ -147,18 +147,15 @@ def __init__(self, name: str = settings.PROJECT_NAME, level: str = "INFO") -> No name="celery_tracing", uuid_length=8 if not settings.DEBUG_MODE else 32, ) + # exception self.file_frmt: logging.Formatter = logging.Formatter( - "%(levelname)s:\t\b%(asctime)s [%(correlation_id)s] \ -%(name)s:%(lineno)d %(message)s" + "%(levelname)-11s\b%(asctime)s [%(correlation_id)s] %(name)s:%(lineno)d %(message)s" # noqa: E501 ) self.stream_frmt: logging.Formatter = logging.Formatter( - "%(levelname)s:\t\b%(asctime)s [%(correlation_id)s] \ -%(name)s:%(lineno)d %(message)s" + "%(levelname)-11s\b%(asctime)-6s [%(correlation_id)s] %(name)s:%(lineno)d %(message)s" # noqa: E501 ) self.worker_frmt: logging.Formatter = logging.Formatter( - "%(levelname)s:\t\b%(asctime)s [%(correlation_id)s] \ -[%(celery_parent_id)s-%(celery_current_id)s] \ -%(name)s:%(lineno)d %(name)s %(message)s" + "%(levelname)-11s\b%(asctime)-6s [%(correlation_id)s] [%(celery_parent_id)s-%(celery_current_id)s] %(name)s:%(lineno)d %(name)s %(message)s" # noqa: E501 ) self.logger: logging.Logger = logging.getLogger(name) self.logger.setLevel(logging.getLevelName(level)) diff --git a/app/core/security/__init__.py b/app/core/security/__init__.py new file mode 100644 index 00000000..a952d924 --- /dev/null +++ b/app/core/security/__init__.py @@ -0,0 +1,28 @@ +from .auth import ( + Auth0, + Auth0HTTPBearer, + Auth0UnauthenticatedException, + Auth0UnauthorizedException, + Auth0User, + HTTPAuth0Error, + JwksDict, + JwksKeyDict, + OAuth2ImplicitBearer, + auth, +) +from .csrf import CsrfProtect, CsrfProtectError + +__all__ = [ + "auth", + "Auth0", + "Auth0HTTPBearer", + "Auth0UnauthenticatedException", + 
"Auth0UnauthorizedException", + "Auth0User", + "CsrfProtect", + "CsrfProtectError", + "HTTPAuth0Error", + "JwksDict", + "JwksKeyDict", + "OAuth2ImplicitBearer", +] diff --git a/app/core/auth/__init__.py b/app/core/security/auth/__init__.py similarity index 61% rename from app/core/auth/__init__.py rename to app/core/security/auth/__init__.py index da8a0145..3b705cd5 100644 --- a/app/core/auth/__init__.py +++ b/app/core/security/auth/__init__.py @@ -17,3 +17,17 @@ api_audience=settings.AUTH0_API_AUDIENCE, scopes=settings.BASE_PRINCIPALS, ) + + +__all__ = [ + "Auth0", + "Auth0HTTPBearer", + "Auth0UnauthenticatedException", + "Auth0UnauthorizedException", + "Auth0User", + "HTTPAuth0Error", + "JwksDict", + "JwksKeyDict", + "OAuth2ImplicitBearer", + "auth", +] diff --git a/app/core/auth/auth0.py b/app/core/security/auth/auth0.py similarity index 100% rename from app/core/auth/auth0.py rename to app/core/security/auth/auth0.py diff --git a/app/core/security/csrf/__init__.py b/app/core/security/csrf/__init__.py new file mode 100644 index 00000000..4b553261 --- /dev/null +++ b/app/core/security/csrf/__init__.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# Copyright (C) 2021-2023 All rights reserved. +# FILENAME: __init__.py +# VERSION: 0.3.2 +# CREATED: 2020-11-25 14:35 +# AUTHOR: Sitt Guruvanich +# DESCRIPTION: +# +# HISTORY: +# ************************************************************* +""" +FastAPI extension that provides Csrf Protection Token support +""" + +__version__ = "0.3.2" + +from .core import CsrfProtect +from .exceptions import CsrfProtectError + +__all__ = [ + "CsrfProtect", + "CsrfProtectError", +] diff --git a/app/core/security/csrf/core.py b/app/core/security/csrf/core.py new file mode 100644 index 00000000..d48efa33 --- /dev/null +++ b/app/core/security/csrf/core.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python3 +# Copyright (C) 2021-2023 All rights reserved. 
+# FILENAME: core.py +# VERSION: 0.3.2 +# CREATED: 2020-11-25 14:35 +# AUTHOR: Sitt Guruvanich +# DESCRIPTION: +# +# HISTORY: +# ************************************************************* +import re +from hashlib import sha1 +from os import urandom +from typing import Any, Dict, Optional, Tuple +from warnings import warn + +from fastapi.requests import Request +from fastapi.responses import Response +from itsdangerous import BadData, SignatureExpired, URLSafeTimedSerializer +from pydantic import create_model +from starlette.datastructures import Headers + +from .csrf_config import CsrfConfig +from .exceptions import InvalidHeaderError, MissingTokenError, TokenValidationError + + +class CsrfProtect(CsrfConfig): + def generate_csrf(self, secret_key: Optional[str] = None) -> Tuple[str, str]: + """ + Deprecated. Please use `generate_csrf_tokens` method instead. + + --- + :param secret_key: (Optional) secret key used when generating tokens for users + :type secret_key: (str | None) Defaults to None. + """ + warn("This is deprecated; version=0.3.2", DeprecationWarning, stacklevel=2) + return self.generate_csrf_tokens(secret_key) + + def generate_csrf_tokens(self, secret_key: Optional[str] = None) -> Tuple[str, str]: + """ + Generate a CSRF token and a signed CSRF token using server's secret key to be + stored in cookie. + + --- + :param secret_key: (Optional) secret key used when generating tokens for users + :type secret_key: (str | None) Defaults to None. 
+ """ + secret_key = secret_key or self._secret_key + if secret_key is None: + raise RuntimeError("A secret key is required to use CsrfProtect extension.") + serializer = URLSafeTimedSerializer(secret_key, salt="fastapi-csrf-token") + token = sha1(urandom(64)).hexdigest() + signed = serializer.dumps(token) + return token, str(signed) + + def get_csrf_from_body(self, data: bytes) -> str: + """ + Get token from the request body + + --- + :param data: attached request body containing cookie data + with configured `token_key` + :type data: bytes + """ + fields: Dict[str, Any] = {self._token_key: (str, "csrf-token")} + Body = create_model("Body", **fields) + content: str = ( + '{"' + data.decode("utf-8").replace("&", '","').replace("=", '":"') + '"}' + ) + body: Any = Body.model_validate_json(content) + return body.dict()[self._token_key] + + def get_csrf_from_headers(self, headers: Headers) -> str: + """ + Get token from the request headers + + --- + :param headers: Headers containing header with configured `header_name` + :type headers: starlette.datastructures.Headers + """ + header_name, header_type = self._header_name, self._header_type + header_parts = None + try: + header_parts = headers[header_name].split() + except KeyError: + raise InvalidHeaderError( + f'Bad headers. Expected "{header_name}" in headers' + ) + token = None + # Make sure the header is in a valid format that we are expecting, ie + if not header_type: + # : + if len(header_parts) != 1: + raise InvalidHeaderError( + f'Bad {header_name} header. Expected value ""' + ) + token = header_parts[0] + else: + # : + if ( + not re.match(r"{}\s".format(header_type), headers[header_name]) + or len(header_parts) != 2 + ): + raise InvalidHeaderError( + f'Bad {header_name} header. 
Expected value "{header_type} "' + ) + token = header_parts[1] + return token + + def set_csrf_cookie(self, csrf_signed_token: str, response: Response) -> None: + """ + Sets Csrf Protection token to the response cookies + + --- + :param csrf_signed_token: signed CSRF token from `generate_csrf_token` method + :type csrf_signed_token: str + :param response: The FastAPI response object to sets the access cookies in. + :type response: fastapi.responses.Response + """ + if not isinstance(response, Response): + raise TypeError("The response must be an object response FastAPI") + response.set_cookie( + self._cookie_key, + csrf_signed_token, + max_age=self._max_age, + path=self._cookie_path, + domain=self._cookie_domain, + secure=self._cookie_secure, + httponly=self._httponly, + samesite=self._cookie_samesite, + ) + + def unset_csrf_cookie(self, response: Response) -> None: + """ + Remove Csrf Protection token from the response cookies + + --- + :param response: The FastAPI response object to delete the access cookies in. + :type response: fastapi.responses.Response + """ + if not isinstance(response, Response): + raise TypeError("The response must be an object response FastAPI") + response.delete_cookie( + self._cookie_key, path=self._cookie_path, domain=self._cookie_domain + ) + + async def validate_csrf( + self, + request: Request, + cookie_key: Optional[str] = None, + secret_key: Optional[str] = None, + time_limit: Optional[int] = None, + ) -> None: + """ + Check if the given data is a valid CSRF token. This compares the given + signed token to the one stored in the session. 
+ + --- + :param request: incoming Request instance + :type request: fastapi.requests.Request + :param cookie_key: (Optional) field name for CSRF token field stored in cookies + Default is set in CsrfConfig when `load_config` was called; + :type cookie_key: str + :param secret_key: (Optional) secret key used to decrypt the token + Default is set in CsrfConfig when `load_config` was called; + :type secret_key: str + :param time_limit: (Optional) Number of seconds that the token is valid. + Default is set in CsrfConfig when `load_config` was called; + :type time_limit: int + :raises TokenValidationError: Contains the reason that validation failed. + """ + secret_key = secret_key or self._secret_key + if secret_key is None: + raise RuntimeError("A secret key is required to use CsrfProtect extension.") + cookie_key = cookie_key or self._cookie_key + signed_token = request.cookies.get(cookie_key) + if signed_token is None: + raise MissingTokenError(f"Missing Cookie: `{cookie_key}`.") + time_limit = time_limit or self._max_age + token: str + if self._token_location == "header": + token = self.get_csrf_from_headers(request.headers) + else: + token = self.get_csrf_from_body(await request.body()) + serializer = URLSafeTimedSerializer(secret_key, salt="fastapi-csrf-token") + try: + signature: str = serializer.loads(signed_token, max_age=time_limit) + if token != signature: + raise TokenValidationError( + "The CSRF signatures submitted do not match." + ) + except SignatureExpired: + raise TokenValidationError("The CSRF token has expired.") + except BadData: + raise TokenValidationError("The CSRF token is invalid.") diff --git a/app/core/security/csrf/csrf_config.py b/app/core/security/csrf/csrf_config.py new file mode 100644 index 00000000..e9ba2c81 --- /dev/null +++ b/app/core/security/csrf/csrf_config.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 +# Copyright (C) 2021-2023 All rights reserved. 
+# FILENAME: fastapi_csrf_config.py +# VERSION: 0.3.2 +# CREATED: 2020-11-25 14:35 +# AUTHOR: Sitt Guruvanich +# DESCRIPTION: +# +# HISTORY: +# ************************************************************* +from typing import Callable, List, Optional, Set + +from pydantic import ValidationError + +from .load_config import CookieSamesite, LoadConfig + + +class CsrfConfig(object): # type: ignore + _cookie_key: str = "fastapi-csrf-token" + _cookie_path: str = "/" + _cookie_domain: Optional[str] = None + _cookie_samesite: Optional[CookieSamesite] = "lax" + _cookie_secure: bool = False + _header_name: str = "X-CSRF-Token" + _header_type: Optional[str] = None + _httponly: bool = True + _max_age: int = 3600 + _methods: Set[str] = {"POST", "PUT", "PATCH", "DELETE"} + _secret_key: str = "super-secret-key" + _token_location: str = "header" + _token_key: str = "csrf-token-key" + + @classmethod + def load_config(cls, settings: Callable[..., List[tuple]]) -> None: + try: + config = LoadConfig(**{key.lower(): value for key, value in settings()}) + cls._cookie_key = config.cookie_key or cls._cookie_key + cls._cookie_path = config.cookie_path or cls._cookie_path + cls._cookie_domain = config.cookie_domain or cls._cookie_domain + cls._cookie_samesite = config.cookie_samesite or cls._cookie_samesite + cls._cookie_secure = config.cookie_secure or cls._cookie_secure + cls._header_name = config.header_name or cls._header_name + cls._header_type = config.header_type or cls._header_type + cls._httponly = config.httponly or cls._httponly + cls._max_age = config.max_age or cls._max_age + cls._methods = config.methods or cls._methods + cls._secret_key = config.secret_key or cls._secret_key + cls._token_location = config.token_location or cls._token_location + cls._token_key = config.token_key or cls._token_key + except ValidationError: + raise + except Exception as err: + print(err) + raise TypeError( + 'CsrfConfig must be pydantic "BaseSettings" or list of tuple' + ) diff --git 
a/app/core/security/csrf/exceptions.py b/app/core/security/csrf/exceptions.py new file mode 100644 index 00000000..2e797580 --- /dev/null +++ b/app/core/security/csrf/exceptions.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +# Copyright (C) 2021-2023 All rights reserved. +# FILENAME: exceptions.py +# VERSION: 0.3.2 +# CREATED: 2020-11-25 14:35 +# AUTHOR: Sitt Guruvanich +# DESCRIPTION: +# +# HISTORY: +# ************************************************************* +class CsrfProtectError(Exception): + def __init__(self, status_code: int, message: str): + self.status_code = status_code + self.message = message + + +class InvalidHeaderError(CsrfProtectError): + def __init__(self, message: str): + super().__init__(422, message) + + +class MissingTokenError(CsrfProtectError): + def __init__(self, message: str): + super().__init__(400, message) + + +class TokenValidationError(CsrfProtectError): + def __init__(self, message: str): + super().__init__(401, message) diff --git a/app/core/security/csrf/load_config.py b/app/core/security/csrf/load_config.py new file mode 100644 index 00000000..bc8459e3 --- /dev/null +++ b/app/core/security/csrf/load_config.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python3 +# Copyright (C) 2021-2023 All rights reserved. 
+# FILENAME: load_config.py +# VERSION: 0.3.2 +# CREATED: 2020-11-25 14:35 +# AUTHOR: Sitt Guruvanich +# DESCRIPTION: +# +# HISTORY: +# ************************************************************* +from typing import Annotated, Any, Dict, Literal, Optional, Set + +from pydantic import BaseModel, StrictBool, StrictInt, StrictStr, field_validator + +CookieSamesite = Annotated[Literal["lax", "strict", "none"], "cookie_samesite"] + + +class LoadConfig(BaseModel): + cookie_key: Optional[StrictStr] = "fastapi-csrf-token" + cookie_path: Optional[StrictStr] = "/" + cookie_domain: Optional[StrictStr] = None + # NOTE: `cookie_secure` must be placed before `cookie_samesite` + cookie_secure: Optional[StrictBool] = False + cookie_samesite: Optional[CookieSamesite] = "lax" + header_name: Optional[StrictStr] = "X-CSRF-Token" + header_type: Optional[StrictStr] = None + httponly: Optional[StrictBool] = True + max_age: Optional[StrictInt] = 3600 + methods: Optional[Set[StrictStr]] = {"POST", "PUT", "PATCH", "DELETE"} + secret_key: Optional[StrictStr] = None + token_location: Optional[StrictStr] = "header" + token_key: Optional[StrictStr] = "csrf-token-key" + + @field_validator("methods") + def validate_csrf_methods(cls, value: str) -> str: + if value.upper() not in {"GET", "HEAD", "POST", "PUT", "DELETE", "PATCH"}: + raise ValueError('The "csrf_methods" must be between http request methods') + return value.upper() + + @field_validator("cookie_samesite") # type: ignore + def validate_cookie_samesite( + cls, value: CookieSamesite, values: Dict[str, Any] + ) -> CookieSamesite: # noqa: E501 + if value not in {"strict", "lax", "none"}: + raise ValueError( + 'The "cookie_samesite" must be between "strict", "lax", or "none".' + ) + elif value == "none" and values.get("cookie_secure", False) is not True: + raise ValueError( + 'The "cookie_secure" must be True if "cookie_samesite" set to "none".' 
+ ) + return value + + @field_validator("token_location") + def validate_token_location(cls, value: str) -> str: + if value not in {"body", "header"}: + raise ValueError('The "token_location" must be either "body" or "header".') + return value diff --git a/app/core/security/csrf/middleware.py b/app/core/security/csrf/middleware.py new file mode 100644 index 00000000..18ba6a9c --- /dev/null +++ b/app/core/security/csrf/middleware.py @@ -0,0 +1,81 @@ +import uuid +from typing import Any + +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.responses import PlainTextResponse + + +class CSRFMiddleware(BaseHTTPMiddleware): + """Middleware to add CSRF protection to requests. + + This middleware will add a CSRF cookie to all responses, and it will check + that cookie on all non-safe requests. If the cookie is missing or doesn't + match the header, the request will be rejected with a 403 Forbidden error. + + 1. Add this middleware using the middleware= parameter of your app. + 2. request.state.csrf_token will now be available. + 3. Use directly in an HTML
POST with + `` + 4. Use with javascript / ajax POST by sending a request header `csrf_token` + with request.state.csrf_token + + Notes + + - Users should start on a "safe page" (a typical GET request) to + generate the initial CSRF cookie. + - Token is stored in request.state.csrf_token for use in templates. + + """ + + async def dispatch(self, request: Any, call_next: Any) -> Any: + CSRF_TOKEN_NAME = "csrf_token" + + # Valid for 10 days before regeneration. + CSRF_TOKEN_EXPIRY = 10 * 24 * 60 * 60 + # Always available even if we don't get it from cookie. + request.state.csrf_token = "" + + token_new_cookie = False + token_from_cookie = request.cookies.get(CSRF_TOKEN_NAME, None) + token_from_header = request.headers.get(CSRF_TOKEN_NAME, None) + if hasattr(request.state, "post"): + token_from_post = request.state.post.get(CSRF_TOKEN_NAME, None) + + # 🍪 Fetch the cookie only if we're using an appropriate request method. + if request.method not in ("GET", "HEAD", "OPTIONS", "TRACE"): + # Sanity check. UUID always > 30. + if not token_from_cookie or len(token_from_cookie) < 30: + # 🔴 Fail check. + return PlainTextResponse("No CSRF cookie set!", status_code=403) + # 🔴 Fail check. + if (str(token_from_cookie) != str(token_from_post)) and ( + str(token_from_cookie) != str(token_from_header) + ): # noqa: E501 + return PlainTextResponse("CSRF cookie does not match!", status_code=403) + else: + # 🍪 Generates the cookie if one does not exist. + # Has to be the same token throughout session! NOT a nonce. + if not token_from_cookie: + token_from_cookie = str(uuid.uuid4()) + token_new_cookie = True + + # 🟢 All good. Pass csrf_token up to controllers, templates. + request.state.csrf_token = token_from_cookie + + # ⏰ Wait for response to happen. + response = await call_next(request) + + # 🍪 Set CSRF cookie on the response. 
+ if token_new_cookie and token_from_cookie: + response.set_cookie( + CSRF_TOKEN_NAME, + token_from_cookie, + CSRF_TOKEN_EXPIRY, + path="/", + domain=None, + secure=False, + httponly=False, + samesite="strict", + ) + + return response diff --git a/app/core/auth/encryption.py b/app/core/security/encryption.py similarity index 100% rename from app/core/auth/encryption.py rename to app/core/security/encryption.py diff --git a/app/db/commands.py b/app/db/commands.py index ad24de66..0a862d44 100644 --- a/app/db/commands.py +++ b/app/db/commands.py @@ -41,7 +41,7 @@ async def check_db_disconnected() -> None: # pragma: no cover with engine.connect() as connection: result: Any = connection.execute(stmt) if result is not None: - logger.info("+ ASYCN F(X) --> MYSQL CONNECTED!") + logger.info("+ ASYNC F(X) --> Disconnecting MYSQL...") logger.info("+ ASYNC F(X) --> MYSQL DISCONNECTED!") logger.info("+ ASYNC F(X) --> Database Disconnected. (-_-) Zzz") except Exception as e: diff --git a/app/main.py b/app/main.py index 4c1540ab..1ffdfca0 100644 --- a/app/main.py +++ b/app/main.py @@ -1,32 +1,12 @@ -import time -from typing import Any - -from asgi_correlation_id import CorrelationIdMiddleware -from asgi_correlation_id.context import correlation_id -from fastapi import FastAPI, HTTPException, Request, Response, status -from fastapi.exception_handlers import http_exception_handler +from fastapi import FastAPI from starlette.middleware.cors import CORSMiddleware +from app.api.exceptions import configure_exceptions +from app.api.middleware import configure_middleware from app.core.config import settings from app.core.templates import static_files -def configure_exceptions(app: FastAPI) -> None: - @app.exception_handler(Exception) - async def unhandled_exception_handler(request: Request, exc: Exception) -> Response: - return await http_exception_handler( # pragma: no cover - request, - HTTPException( - status.HTTP_500_INTERNAL_SERVER_ERROR, - "Internal server error", - headers={ - 
"x-request-id": correlation_id.get() or "", - "Access-Control-Expose-Headers": "x-request-id", - }, - ), - ) - - def configure_routers(app: FastAPI) -> None: from app.api.v1 import router_v1 @@ -45,15 +25,6 @@ def configure_events(app: FastAPI) -> None: async def on_startup() -> None: await check_db_connected() - # middlewares - @app.middleware("http") - async def add_process_time_header(request: Request, call_next: Any) -> None: - start_time: Any = time.perf_counter() - response: Any = await call_next(request) - process_time: Any = time.perf_counter() - start_time - response.headers["x-process-time"] = str(process_time) - return response - # shutdown actions @app.on_event("shutdown") async def on_shutdown() -> None: @@ -69,9 +40,7 @@ def create_app() -> FastAPI: docs_url="/api/v1/docs", redoc_url="/api/v1/redoc", ) - # MW: logger request process id correlation - app.add_middleware(CorrelationIdMiddleware) - # MW: Cross-Origin Resource Sharing protection + # Cross-Origin Resource Sharing protection if settings.BACKEND_CORS_ORIGINS: app.add_middleware( # pragma: no cover CORSMiddleware, @@ -81,6 +50,7 @@ def create_app() -> FastAPI: allow_headers=["*"], expose_headers=["*"], ) + configure_middleware(app) configure_exceptions(app) configure_routers(app) configure_static(app) diff --git a/app/models/ipaddress.py b/app/models/ipaddress.py index ae01e5e7..91fda636 100644 --- a/app/models/ipaddress.py +++ b/app/models/ipaddress.py @@ -35,14 +35,14 @@ class Ipaddress(Base): default=func.current_timestamp(), onupdate=func.current_timestamp(), ) - ip: Mapped[str] = mapped_column( + address: Mapped[str] = mapped_column( String(40), nullable=False, unique=True, primary_key=True, default="::1" ) isp: Mapped[str] = mapped_column( - String(255), nullable=False, index=True, default="unknown" + String(255), nullable=True, index=True, default="unknown" ) location: Mapped[str] = mapped_column( - String(255), nullable=False, index=True, default="unknown" + String(255), nullable=True, 
index=True, default="unknown" ) # relationships @@ -51,5 +51,5 @@ class Ipaddress(Base): ) def __repr__(self) -> str: # pragma: no cover - repr_str: str = f"Ipaddress({self.ip} by ISP: {self.isp})" + repr_str: str = f"Ipaddress({self.address} by ISP: {self.isp})" return repr_str diff --git a/app/schemas/__init__.py b/app/schemas/__init__.py index 0f1fb382..6b0f3cd1 100755 --- a/app/schemas/__init__.py +++ b/app/schemas/__init__.py @@ -91,6 +91,7 @@ ) from .ipaddress import IpaddressCreate, IpaddressRead, IpaddressUpdate from .note import NoteCreate, NoteRead, NoteReadRelations, NoteUpdate +from .security import CsrfToken, RateLimitedToken from .sharpspring import SharpspringCreate, SharpspringRead, SharpspringUpdate from .task import TaskState from .user import UserCreate, UserRead, UserReadRelations, UserUpdate @@ -128,6 +129,7 @@ ) from .website_pagespeedinsights import ( PageSpeedInsightsDevice, + PSIDevice, WebsitePageSpeedInsightsBase, WebsitePageSpeedInsightsCreate, WebsitePageSpeedInsightsProcessing, diff --git a/app/schemas/ipaddress.py b/app/schemas/ipaddress.py index 50389525..0ad67191 100644 --- a/app/schemas/ipaddress.py +++ b/app/schemas/ipaddress.py @@ -20,7 +20,7 @@ class IpaddressBase( ValidateSchemaIspRequired, ValidateSchemaIpLocationRequired, ): - ip: str + address: str isp: str location: str geocoord_id: Optional[UUID4] = None @@ -35,7 +35,7 @@ class IpaddressUpdate( ValidateSchemaIspOptional, ValidateSchemaIpLocationOptional, ): - ip: Optional[str] = None + address: Optional[str] = None isp: Optional[str] = None location: Optional[str] = None geocoord_id: Optional[UUID4] = None diff --git a/app/schemas/security.py b/app/schemas/security.py new file mode 100644 index 00000000..0bcb23c0 --- /dev/null +++ b/app/schemas/security.py @@ -0,0 +1,10 @@ +from pydantic import BaseModel + + +class CsrfToken(BaseModel): + csrf_token: str + + +class RateLimitedToken(BaseModel): + call: bool + ttl: int diff --git a/app/schemas/website_pagespeedinsights.py 
b/app/schemas/website_pagespeedinsights.py index bdf3409f..7d492f71 100755 --- a/app/schemas/website_pagespeedinsights.py +++ b/app/schemas/website_pagespeedinsights.py @@ -1,5 +1,6 @@ from __future__ import annotations +from enum import Enum from typing import Optional from pydantic import UUID4, BaseModel @@ -13,8 +14,13 @@ from app.schemas.base import BaseSchema, BaseSchemaRead +class PSIDevice(str, Enum): + desktop = "desktop" + mobile = "mobile" + + class PageSpeedInsightsDevice(ValidateSchemaDeviceRequired, BaseModel): - device: str + device: PSIDevice # schemas diff --git a/app/worker.py b/app/worker.py index eaac9f25..7dc4ca32 100644 --- a/app/worker.py +++ b/app/worker.py @@ -18,6 +18,7 @@ WebsitePageSpeedInsightsBase, WebsitePageSpeedInsightsProcessing, ) +from app.schemas.website_pagespeedinsights import PSIDevice celery_app: Celery = create_celery_worker() @@ -77,7 +78,7 @@ def task_website_page_pagespeedinsights_fetch( website_id: UUID4, page_id: UUID4, fetch_url: AnyHttpUrl, - device: str, + device: PSIDevice, ) -> WebsitePageSpeedInsightsProcessing: logger.info( f"Fetching PageSpeedInsights for website {website_id}, \ diff --git a/poetry.lock b/poetry.lock index 738e3f10..fdf9d266 100644 --- a/poetry.lock +++ b/poetry.lock @@ -230,13 +230,13 @@ vine = ">=5.0.0" [[package]] name = "annotated-types" -version = "0.5.0" +version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, - {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = 
"sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] [[package]] @@ -347,13 +347,13 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "autoflake" -version = "2.0.2" +version = "2.2.1" description = "Removes unused imports and unused variables" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "autoflake-2.0.2-py3-none-any.whl", hash = "sha256:a82d8efdcbbb7129a8a23238c529fb9d9919c562e26bb7963ea6890fbfff7d02"}, - {file = "autoflake-2.0.2.tar.gz", hash = "sha256:e0164421ff13f805f08a023e249d84200bd00463d213b490906bfefa67e83830"}, + {file = "autoflake-2.2.1-py3-none-any.whl", hash = "sha256:265cde0a43c1f44ecfb4f30d95b0437796759d07be7706a2f70e4719234c0f79"}, + {file = "autoflake-2.2.1.tar.gz", hash = "sha256:62b7b6449a692c3c9b0c916919bbc21648da7281e8506bcf8d3f8280e431ebc1"}, ] [package.dependencies] @@ -470,13 +470,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "blinker" -version = "1.6.2" +version = "1.6.3" description = "Fast, simple object-to-object and broadcast signaling" optional = false python-versions = ">=3.7" files = [ - {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"}, - {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"}, + {file = "blinker-1.6.3-py3-none-any.whl", hash = "sha256:296320d6c28b006eb5e32d4712202dbcdcbf5dc482da298c2f44881c43884aaa"}, + {file = "blinker-1.6.3.tar.gz", hash = "sha256:152090d27c1c5c722ee7e48504b02d76502811ce02e1523553b4cf8c8b3d3a8d"}, ] [[package]] @@ -611,86 +611,101 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = 
"charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = 
"charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = 
"charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = 
"charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, ] [[package]] @@ -769,63 +784,63 @@ files = [ [[package]] name = "coverage" -version = "7.3.1" +version = "7.3.2" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, - {file = 
"coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, - {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, - {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, - {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, - {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, - {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, - {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, - {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, - {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, - {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, - {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, - {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, - {file = 
"coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, - {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, - {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, - {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, - {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, - {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, - {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, - {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, - {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, - {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, - {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, - {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, - {file = 
"coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, - {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, - {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, - {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, - {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, - {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, - {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, - {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, - {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, - {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, - {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, - {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, - {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, - {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, - {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, - {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, - {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, - {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, - {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, - {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, - {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, - {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, - {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, - {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, - {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, - {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, - {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = 
"sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, ] [package.dependencies] @@ -1675,13 +1690,13 @@ attrs = ">=19.2.0" [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -1725,13 +1740,13 @@ files = [ [[package]] name = "platformdirs" -version = "3.10.0" +version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] [package.extras] @@ -2020,13 +2035,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymysql" -version = "1.0.2" +version = "1.1.0" description = "Pure Python MySQL Driver" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "PyMySQL-1.0.2-py3-none-any.whl", hash = "sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641"}, - {file = "PyMySQL-1.0.2.tar.gz", hash = "sha256:816927a350f38d56072aeca5dfb10221fe1dc653745853d30a216637f5d7ad36"}, + {file = "PyMySQL-1.1.0-py3-none-any.whl", hash = "sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7"}, + {file = "PyMySQL-1.1.0.tar.gz", hash = "sha256:4f13a7df8bf36a51e81dd9f3605fede45a4878fe02f9236349fd82a3f0612f96"}, ] [package.extras] @@ -2400,52 +2415,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.7" +version = "2.0.21" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7917632606fc5d4be661dcde45cc415df835e594e2c50cc999a44f24b6bf6d92"}, - {file = "SQLAlchemy-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32f508fef9c5a7d19411d94ef64cf5405e42c4689e51ddbb81ac9a7be045cce8"}, - {file = "SQLAlchemy-2.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b0995b92612979d208189245bf87349ad9243b97b49652347a28ddee0803225a"}, - {file = "SQLAlchemy-2.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cebd161f964af58290596523c65e41a5a161a99f7212b1ae675e288a4b5e0a7c"}, - {file = "SQLAlchemy-2.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c38641f5c3714505d65dbbd8fb1350408b9ad8461769ec8e440e1177f9c92d1d"}, - {file = "SQLAlchemy-2.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:921485d1f69ed016e1f756de67d02ad4f143eb6b92b9776bfff78786d8978ab5"}, - {file = "SQLAlchemy-2.0.7-cp310-cp310-win32.whl", hash = "sha256:a65a8fd09bdffd63fa23b39cd902e6a4ca23d86ecfe129513e43767a1f3e91fb"}, - {file = "SQLAlchemy-2.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:d2e7411d5ea164c6f4d003f5d4f5e72e202956aaa7496b95bb4a4c39669e001c"}, - {file = "SQLAlchemy-2.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:432cfd77642771ee7ea0dd0f3fb664f18506a3625eab6e6d5d1d771569171270"}, - {file = "SQLAlchemy-2.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce076e25f1170000b4ecdc57a1ff8a70dbe4a5648ec3da0563ef3064e8db4f15"}, - {file = "SQLAlchemy-2.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14854bdb2a35af536d14f77dfa8dbc20e1bb1972996d64c4147e0d3165c9aaf5"}, - {file = "SQLAlchemy-2.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9020125e3be677c64d4dda7048e247343f1663089cf268a4cc98c957adb7dbe0"}, - {file = "SQLAlchemy-2.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fb649c5473f79c9a7b6133f53a31f4d87de14755c79224007eb7ec76e628551e"}, - {file = "SQLAlchemy-2.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33f73cc45ffa050f5c3b60ff4490e0ae9e02701461c1600d5ede1b008076b1b9"}, - {file = "SQLAlchemy-2.0.7-cp311-cp311-win32.whl", hash = "sha256:0789e199fbce8cb1775337afc631ed12bcc5463dd77d7a06b8dafd758cde51f8"}, - {file = "SQLAlchemy-2.0.7-cp311-cp311-win_amd64.whl", hash = 
"sha256:013f4f330001e84a2b0ef1f2c9bd73169c79d582e54e1a144be1be1dbc911711"}, - {file = "SQLAlchemy-2.0.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4339110be209fea37a2bb4f35f1127c7562a0393e9e6df5d9a65cc4f5c167cb6"}, - {file = "SQLAlchemy-2.0.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7e61e2e4dfe175dc3510889e44eda1c32f55870d6950ef40519640cb266704d"}, - {file = "SQLAlchemy-2.0.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d44ff7573016fc26311b5a5c54d5656fb9e0c39e138bc8b81cb7c8667485203"}, - {file = "SQLAlchemy-2.0.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:57b80e877eb6ec63295835f8a3b86ca3a44829f80c4748e1b019e03adea550fc"}, - {file = "SQLAlchemy-2.0.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e90f0be674e0845c5c1ccfa5e31c9ee28fd406546a61afc734355cc7ea1f8f8b"}, - {file = "SQLAlchemy-2.0.7-cp37-cp37m-win32.whl", hash = "sha256:e735a635126b2338dfd3a0863b675437cb53d85885a7602b8cffb24345df33ed"}, - {file = "SQLAlchemy-2.0.7-cp37-cp37m-win_amd64.whl", hash = "sha256:ea1c63e61b5c13161c8468305f0a5837c80aae2070e33654c68dd12572b638eb"}, - {file = "SQLAlchemy-2.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cc337b96ec59ef29907eeadc2ac11188739281568f14c719e61550ca6d201a41"}, - {file = "SQLAlchemy-2.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0eac488be90dd3f7a655d2e34fa59e1305fccabc4abfbd002e3a72ae10bd2f89"}, - {file = "SQLAlchemy-2.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8ab8f90f4a13c979e6c41c9f011b655c1b9ae2df6cffa8fa2c7c4d740f3512e"}, - {file = "SQLAlchemy-2.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc370d53fee7408330099c4bcc2573a107757b203bc61f114467dfe586a0c7bd"}, - {file = "SQLAlchemy-2.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:494db0026918e3f707466a1200a5dedbf254a4bce01a3115fd95f04ba8258f09"}, - {file = "SQLAlchemy-2.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:486015a58c9a67f65a15b4f19468b35b97cee074ae55386a9c240f1da308fbfe"}, - {file = "SQLAlchemy-2.0.7-cp38-cp38-win32.whl", hash = "sha256:5f7c40ec2e3b31293184020daba95850832bea523a08496ac89b27a5276ec804"}, - {file = "SQLAlchemy-2.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:3da3dff8d9833a7d7f66a3c45a79a3955f775c79f47bb7eea266d0b4c267b17a"}, - {file = "SQLAlchemy-2.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:774965c41b71c8ebe3c5728bf5b9a948231fc3a0422d9fdace0686f5bb689ad6"}, - {file = "SQLAlchemy-2.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94556a2a7fc3de094ea056b62845e2e6e271e26d1e1b2540a1cd2d2506257a10"}, - {file = "SQLAlchemy-2.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f15c54713a8dd57a01c974c9f96476688f6f6374d348819ed7e459535844b614"}, - {file = "SQLAlchemy-2.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea9461f6955f3cf9eff6eeec271686caed7792c76f5b966886a36a42ea46e6b2"}, - {file = "SQLAlchemy-2.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18795e87601b4244fd08b542cd6bff9ef674b17bcd34e4a3c9935398e2cc762c"}, - {file = "SQLAlchemy-2.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0b698440c477c00bdedff87348b19a79630a235864a8f4378098d61079c16ce9"}, - {file = "SQLAlchemy-2.0.7-cp39-cp39-win32.whl", hash = "sha256:38e26cf6b9b4c6c37846f7e31b42e4d664b35f055691265f07e06aeb6167c494"}, - {file = "SQLAlchemy-2.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:a6f7d1debb233f1567d700ebcdde0781a0b63db0ef266246dfbf75ae41bfdf85"}, - {file = "SQLAlchemy-2.0.7-py3-none-any.whl", hash = "sha256:fc67667c8e8c04e5c3250ab2cd51df40bc7c28c7c253d0475b377eff86fe4bb0"}, - {file = "SQLAlchemy-2.0.7.tar.gz", hash = "sha256:a4c1e1582492c66dfacc9eab52738f3e64d9a2a380e412668f75aa06e540f649"}, + {file = "SQLAlchemy-2.0.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e7dc99b23e33c71d720c4ae37ebb095bebebbd31a24b7d99dfc4753d2803ede"}, + {file = 
"SQLAlchemy-2.0.21-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7f0c4ee579acfe6c994637527c386d1c22eb60bc1c1d36d940d8477e482095d4"}, + {file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f7d57a7e140efe69ce2d7b057c3f9a595f98d0bbdfc23fd055efdfbaa46e3a5"}, + {file = "SQLAlchemy-2.0.21-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca38746eac23dd7c20bec9278d2058c7ad662b2f1576e4c3dbfcd7c00cc48fa"}, + {file = "SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3cf229704074bce31f7f47d12883afee3b0a02bb233a0ba45ddbfe542939cca4"}, + {file = "SQLAlchemy-2.0.21-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb87f763b5d04a82ae84ccff25554ffd903baafba6698e18ebaf32561f2fe4aa"}, + {file = "SQLAlchemy-2.0.21-cp310-cp310-win32.whl", hash = "sha256:89e274604abb1a7fd5c14867a412c9d49c08ccf6ce3e1e04fffc068b5b6499d4"}, + {file = "SQLAlchemy-2.0.21-cp310-cp310-win_amd64.whl", hash = "sha256:e36339a68126ffb708dc6d1948161cea2a9e85d7d7b0c54f6999853d70d44430"}, + {file = "SQLAlchemy-2.0.21-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf8eebccc66829010f06fbd2b80095d7872991bfe8415098b9fe47deaaa58063"}, + {file = "SQLAlchemy-2.0.21-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b977bfce15afa53d9cf6a632482d7968477625f030d86a109f7bdfe8ce3c064a"}, + {file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ff3dc2f60dbf82c9e599c2915db1526d65415be323464f84de8db3e361ba5b9"}, + {file = "SQLAlchemy-2.0.21-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44ac5c89b6896f4740e7091f4a0ff2e62881da80c239dd9408f84f75a293dae9"}, + {file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:87bf91ebf15258c4701d71dcdd9c4ba39521fb6a37379ea68088ce8cd869b446"}, + {file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:b69f1f754d92eb1cc6b50938359dead36b96a1dcf11a8670bff65fd9b21a4b09"}, + {file = "SQLAlchemy-2.0.21-cp311-cp311-win32.whl", hash = "sha256:af520a730d523eab77d754f5cf44cc7dd7ad2d54907adeb3233177eeb22f271b"}, + {file = "SQLAlchemy-2.0.21-cp311-cp311-win_amd64.whl", hash = "sha256:141675dae56522126986fa4ca713739d00ed3a6f08f3c2eb92c39c6dfec463ce"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:56628ca27aa17b5890391ded4e385bf0480209726f198799b7e980c6bd473bd7"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db726be58837fe5ac39859e0fa40baafe54c6d54c02aba1d47d25536170b690f"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7421c1bfdbb7214313919472307be650bd45c4dc2fcb317d64d078993de045b"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632784f7a6f12cfa0e84bf2a5003b07660addccf5563c132cd23b7cc1d7371a9"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f6f7276cf26145a888f2182a98f204541b519d9ea358a65d82095d9c9e22f917"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2a1f7ffac934bc0ea717fa1596f938483fb8c402233f9b26679b4f7b38d6ab6e"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-win32.whl", hash = "sha256:bfece2f7cec502ec5f759bbc09ce711445372deeac3628f6fa1c16b7fb45b682"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-win_amd64.whl", hash = "sha256:526b869a0f4f000d8d8ee3409d0becca30ae73f494cbb48801da0129601f72c6"}, + {file = "SQLAlchemy-2.0.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7614f1eab4336df7dd6bee05bc974f2b02c38d3d0c78060c5faa4cd1ca2af3b8"}, + {file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d59cb9e20d79686aa473e0302e4a82882d7118744d30bb1dfb62d3c47141b3ec"}, + {file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a95aa0672e3065d43c8aa80080cdd5cc40fe92dc873749e6c1cf23914c4b83af"}, + {file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8c323813963b2503e54d0944813cd479c10c636e3ee223bcbd7bd478bf53c178"}, + {file = "SQLAlchemy-2.0.21-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:419b1276b55925b5ac9b4c7044e999f1787c69761a3c9756dec6e5c225ceca01"}, + {file = "SQLAlchemy-2.0.21-cp37-cp37m-win32.whl", hash = "sha256:4615623a490e46be85fbaa6335f35cf80e61df0783240afe7d4f544778c315a9"}, + {file = "SQLAlchemy-2.0.21-cp37-cp37m-win_amd64.whl", hash = "sha256:cca720d05389ab1a5877ff05af96551e58ba65e8dc65582d849ac83ddde3e231"}, + {file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b4eae01faee9f2b17f08885e3f047153ae0416648f8e8c8bd9bc677c5ce64be9"}, + {file = "SQLAlchemy-2.0.21-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3eb7c03fe1cd3255811cd4e74db1ab8dca22074d50cd8937edf4ef62d758cdf4"}, + {file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2d494b6a2a2d05fb99f01b84cc9af9f5f93bf3e1e5dbdafe4bed0c2823584c1"}, + {file = "SQLAlchemy-2.0.21-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b19ae41ef26c01a987e49e37c77b9ad060c59f94d3b3efdfdbf4f3daaca7b5fe"}, + {file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc6b15465fabccc94bf7e38777d665b6a4f95efd1725049d6184b3a39fd54880"}, + {file = "SQLAlchemy-2.0.21-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:014794b60d2021cc8ae0f91d4d0331fe92691ae5467a00841f7130fe877b678e"}, + {file = "SQLAlchemy-2.0.21-cp38-cp38-win32.whl", hash = "sha256:0268256a34806e5d1c8f7ee93277d7ea8cc8ae391f487213139018b6805aeaf6"}, + {file = "SQLAlchemy-2.0.21-cp38-cp38-win_amd64.whl", hash = "sha256:73c079e21d10ff2be54a4699f55865d4b275fd6c8bd5d90c5b1ef78ae0197301"}, + {file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:785e2f2c1cb50d0a44e2cdeea5fd36b5bf2d79c481c10f3a88a8be4cfa2c4615"}, + {file = "SQLAlchemy-2.0.21-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c111cd40910ffcb615b33605fc8f8e22146aeb7933d06569ac90f219818345ef"}, + {file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9cba4e7369de663611ce7460a34be48e999e0bbb1feb9130070f0685e9a6b66"}, + {file = "SQLAlchemy-2.0.21-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50a69067af86ec7f11a8e50ba85544657b1477aabf64fa447fd3736b5a0a4f67"}, + {file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ccb99c3138c9bde118b51a289d90096a3791658da9aea1754667302ed6564f6e"}, + {file = "SQLAlchemy-2.0.21-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:513fd5b6513d37e985eb5b7ed89da5fd9e72354e3523980ef00d439bc549c9e9"}, + {file = "SQLAlchemy-2.0.21-cp39-cp39-win32.whl", hash = "sha256:f9fefd6298433b6e9188252f3bff53b9ff0443c8fde27298b8a2b19f6617eeb9"}, + {file = "SQLAlchemy-2.0.21-cp39-cp39-win_amd64.whl", hash = "sha256:2e617727fe4091cedb3e4409b39368f424934c7faa78171749f704b49b4bb4ce"}, + {file = "SQLAlchemy-2.0.21-py3-none-any.whl", hash = "sha256:ea7da25ee458d8f404b93eb073116156fd7d8c2a776d8311534851f28277b4ce"}, + {file = "SQLAlchemy-2.0.21.tar.gz", hash = "sha256:05b971ab1ac2994a14c56b35eaaa91f86ba080e9ad481b20d99d77f381bb6258"}, ] [package.dependencies] @@ -2454,7 +2477,7 @@ mypy = {version = ">=0.910", optional = true, markers = "extra == \"mypy\""} typing-extensions = ">=4.2.0" [package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] @@ -2473,18 +2496,19 @@ postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = 
["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3-binary"] [[package]] name = "sqlalchemy-utils" -version = "0.41.0" +version = "0.41.1" description = "Various utility functions for SQLAlchemy." optional = false python-versions = ">=3.6" files = [ - {file = "SQLAlchemy-Utils-0.41.0.tar.gz", hash = "sha256:894cce255eea0bcc4fdcff628af30219d24a325526011586dd7f1e3d9dfebba0"}, - {file = "SQLAlchemy_Utils-0.41.0-py3-none-any.whl", hash = "sha256:986b4140f7740ff37244f6ed9182e8c997caa334150773de5932009b2490fb50"}, + {file = "SQLAlchemy-Utils-0.41.1.tar.gz", hash = "sha256:a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74"}, + {file = "SQLAlchemy_Utils-0.41.1-py3-none-any.whl", hash = "sha256:6c96b0768ea3f15c0dc56b363d386138c562752b84f647fb8d31a2223aaab801"}, ] [package.dependencies] @@ -2608,28 +2632,17 @@ types-pyOpenSSL = "*" [[package]] name = "types-requests" -version = "2.31.0.6" +version = "2.31.0.8" description = "Typing stubs for requests" optional = false python-versions = ">=3.7" files = [ - {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, - {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, + {file = "types-requests-2.31.0.8.tar.gz", hash = "sha256:e1b325c687b3494a2f528ab06e411d7092cc546cc9245c000bacc2fca5ae96d4"}, + {file = "types_requests-2.31.0.8-py3-none-any.whl", hash = "sha256:39894cbca3fb3d032ed8bdd02275b4273471aa5668564617cc1734b0a65ffdf8"}, ] [package.dependencies] -types-urllib3 = "*" - -[[package]] -name = "types-urllib3" -version = "1.26.25.14" -description = "Typing stubs for urllib3" -optional = false -python-versions = "*" -files = [ - {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, 
- {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, -] +urllib3 = ">=2" [[package]] name = "typing-extensions" @@ -2673,13 +2686,13 @@ test = ["pytest (>=2.8)", "requests-mock (>=1.6.0,<2.0)"] [[package]] name = "urllib3" -version = "2.0.5" +version = "2.0.6" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, - {file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, + {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, + {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, ] [package.extras] @@ -2805,13 +2818,13 @@ anyio = ">=3.0.0" [[package]] name = "wcwidth" -version = "0.2.7" +version = "0.2.8" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.7-py2.py3-none-any.whl", hash = "sha256:fabf3e32999d9b0dab7d19d845149f326f04fe29bac67709ee071dbd92640a36"}, - {file = "wcwidth-0.2.7.tar.gz", hash = "sha256:1b6d30a98ddd5ce9bbdb33658191fd2423fc9da203fe3ef1855407dcb7ee4e26"}, + {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, + {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, ] [[package]] @@ -2983,4 +2996,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "2274e7a2ce3832ce58c0c5da83235b466fa8b594102661ee33adbc620704921a" +content-hash = 
"7e4747c7a79f2a31c4766d05d18b791349e5fb8308a22cf9638df39dc2b793a0" diff --git a/pyproject.toml b/pyproject.toml index 1d154aae..6277b247 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,9 +18,9 @@ fastapi-mail = {extras = ["aioredis"], version = "^1.4.1"} pydantic = {version = "2.3.0", extras = ["email"]} python-multipart = "0.0.6" python-dotenv = "1.0.0" -PyMySQL = "1.0.2" -SQLAlchemy = {version = "2.0.7", extras = ["mypy", "asyncio"]} -SQLAlchemy-Utils = "0.41.0" +PyMySQL = "1.1.0" +SQLAlchemy = {version = "2.0.21", extras = ["mypy", "asyncio"]} +SQLAlchemy-Utils = "0.41.1" PyJWT = "^2.4.0" alembic = "1.10.3" asgi-correlation-id = "^4.1.0" @@ -59,7 +59,7 @@ mypy = "1.2.0" isort = "5.12.0" black = "23.3.0" flake8 = "6.0.0" -autoflake = "2.0.2" +autoflake = "2.2.1" makefun = "^1.15.1" asgi-lifespan = "^2.0.0" pytest-celery = "^0.0.0" diff --git a/requirements.txt b/requirements.txt index 8165703f..3dbdedc7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ -uvicorn[standard]==0.21.1 +uvicorn[standard]==0.23.2 poetry==1.6.1 diff --git a/tests/api/deps/test_deps.py b/tests/api/deps/test_deps.py index e24b04a8..b79e3a5b 100644 --- a/tests/api/deps/test_deps.py +++ b/tests/api/deps/test_deps.py @@ -8,7 +8,6 @@ ClientQueryParams, CommonClientQueryParams, CommonClientWebsiteQueryParams, - CommonQueryParams, CommonWebsiteMapQueryParams, CommonWebsitePageQueryParams, CommonWebsitePageSpeedInsightsQueryParams, @@ -106,12 +105,6 @@ def test_query_devices_param_validation() -> None: assert exc_info.value.detail == "Invalid strategy" -def test_query_common_valid_params() -> None: - params = CommonQueryParams(page=1) - - assert params.page == 1 - - def test_query_common_client_valid_params() -> None: test_uuid = get_uuid_str() params = CommonClientQueryParams(page=1, client_id=test_uuid) diff --git a/tests/api/deps/test_get_clients.py b/tests/api/deps/test_get_clients.py index b803a8d0..1a1293cd 100644 --- a/tests/api/deps/test_get_clients.py +++ 
b/tests/api/deps/test_get_clients.py @@ -1,9 +1,9 @@ import pytest -from fastapi.exceptions import HTTPException from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.clients import create_random_client from app.api.deps import get_client_or_404 +from app.api.exceptions.exceptions import ClientNotExists, InvalidID from app.core.utilities.uuids import get_uuid_str from app.models.client import Client from app.schemas.client import ClientRead @@ -17,10 +17,11 @@ async def test_get_client_or_404(db_session: AsyncSession) -> None: assert result.id == test_client.id # Test with invalid client_id - fake_clid: str = get_uuid_str() - with pytest.raises(HTTPException): + fake_clid: str = "1" + with pytest.raises(InvalidID): await get_client_or_404(db_session, fake_clid) - # Test with no client_id - result = await get_client_or_404(db_session) - assert result is None + # Test with invalid client_id + fake_clid = get_uuid_str() + with pytest.raises(ClientNotExists): + await get_client_or_404(db_session, fake_clid) diff --git a/tests/api/deps/test_get_current_user.py b/tests/api/deps/test_get_current_user.py index 48b1a5ec..67264800 100644 --- a/tests/api/deps/test_get_current_user.py +++ b/tests/api/deps/test_get_current_user.py @@ -6,8 +6,8 @@ from fastapi_permissions import Everyone from app.api.deps import get_current_user, get_current_user_permissions -from app.api.errors import ErrorCode -from app.core.auth import Auth0User +from app.api.exceptions import ErrorCode +from app.core.security import Auth0User from app.core.utilities.uuids import get_uuid_str diff --git a/tests/api/deps/test_get_website_page_speed_insights.py b/tests/api/deps/test_get_website_page_speed_insights.py index 486f59aa..c02de0d2 100644 --- a/tests/api/deps/test_get_website_page_speed_insights.py +++ b/tests/api/deps/test_get_website_page_speed_insights.py @@ -1,11 +1,11 @@ import pytest -from fastapi.exceptions import HTTPException from sqlalchemy.ext.asyncio import AsyncSession from 
tests.utils.website_pagespeedinsights import ( create_random_website_page_speed_insights, ) from app.api.deps import get_website_page_psi_or_404 +from app.api.exceptions.exceptions import InvalidID, WebsitePageSpeedInsightsNotExists from app.models.website_pagespeedinsights import WebsitePageSpeedInsights from app.schemas import WebsitePageSpeedInsightsRead @@ -24,10 +24,10 @@ async def test_get_website_page_psi_or_404(db_session: AsyncSession) -> None: # Test with invalid website_page_id fake_clid: str = "1" - with pytest.raises(HTTPException): + with pytest.raises(InvalidID): await get_website_page_psi_or_404(db_session, fake_clid) # Test with invalid website_page_id that looks like a valid uuid fake_clid_uuid: str = "00000000-0000-0000-0000-000000000000" - with pytest.raises(HTTPException): + with pytest.raises(WebsitePageSpeedInsightsNotExists): await get_website_page_psi_or_404(db_session, fake_clid_uuid) diff --git a/tests/api/deps/test_get_website_pages.py b/tests/api/deps/test_get_website_pages.py index 1a7157c1..bcd9cd48 100644 --- a/tests/api/deps/test_get_website_pages.py +++ b/tests/api/deps/test_get_website_pages.py @@ -1,9 +1,9 @@ import pytest -from fastapi.exceptions import HTTPException from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.website_pages import create_random_website_page from app.api.deps import get_website_page_or_404 +from app.api.exceptions.exceptions import InvalidID, WebsitePageNotExists from app.core.utilities.uuids import get_uuid_str from app.models.website_page import WebsitePage from app.schemas.website_page import WebsitePageRead @@ -18,7 +18,11 @@ async def test_get_website_page_or_404(db_session: AsyncSession) -> None: assert isinstance(result, WebsitePage) assert result.id == test_website_page.id + fake_clid: str = "1" + with pytest.raises(InvalidID): + await get_website_page_or_404(db_session, fake_clid) + # Test with invalid website_page_id - fake_clid: str = get_uuid_str() - with 
pytest.raises(HTTPException): + fake_clid = get_uuid_str() + with pytest.raises(WebsitePageNotExists): await get_website_page_or_404(db_session, fake_clid) diff --git a/tests/api/deps/test_get_website_sitemap.py b/tests/api/deps/test_get_website_sitemap.py index 7eab93dd..84d82a42 100644 --- a/tests/api/deps/test_get_website_sitemap.py +++ b/tests/api/deps/test_get_website_sitemap.py @@ -1,9 +1,9 @@ import pytest -from fastapi.exceptions import HTTPException from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.website_maps import create_random_website_map from app.api.deps import get_website_map_or_404 +from app.api.exceptions.exceptions import InvalidID, WebsiteMapNotExists from app.core.utilities.uuids import get_uuid_str from app.models.website_map import WebsiteMap from app.schemas.website_map import WebsiteMapRead @@ -19,6 +19,10 @@ async def test_get_website_map_or_404(db_session: AsyncSession) -> None: assert result.id == test_website_map.id # Test with invalid website_map_id - fake_clid: str = get_uuid_str() - with pytest.raises(HTTPException): + fake_clid: str = "1" + with pytest.raises(InvalidID): + await get_website_map_or_404(db_session, fake_clid) + + fake_clid = get_uuid_str() + with pytest.raises(WebsiteMapNotExists): await get_website_map_or_404(db_session, fake_clid) diff --git a/tests/api/deps/test_get_websites.py b/tests/api/deps/test_get_websites.py index e3e601e9..2063f2b7 100644 --- a/tests/api/deps/test_get_websites.py +++ b/tests/api/deps/test_get_websites.py @@ -1,9 +1,9 @@ import pytest -from fastapi.exceptions import HTTPException from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.websites import create_random_website from app.api.deps import get_website_or_404 +from app.api.exceptions.exceptions import InvalidID, WebsiteNotExists from app.core.utilities.uuids import get_uuid_str from app.models.website import Website from app.schemas.website import WebsiteRead @@ -17,6 +17,10 @@ async def 
test_get_website_or_404(db_session: AsyncSession) -> None: assert result.id == test_website.id # Test with invalid website_id - fake_clid: str = get_uuid_str() - with pytest.raises(HTTPException): + fake_clid: str = "1" + with pytest.raises(InvalidID): + await get_website_or_404(db_session, fake_clid) + + fake_clid = get_uuid_str() + with pytest.raises(WebsiteNotExists): await get_website_or_404(db_session, fake_clid) diff --git a/tests/api/v1/clients/test_clients_create.py b/tests/api/v1/clients/test_clients_create.py index 90cdd11c..e9a06886 100644 --- a/tests/api/v1/clients/test_clients_create.py +++ b/tests/api/v1/clients/test_clients_create.py @@ -4,7 +4,7 @@ from httpx import AsyncClient, Response from tests.utils.utils import random_lower_string -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode pytestmark = pytest.mark.asyncio diff --git a/tests/api/v1/clients/test_clients_delete.py b/tests/api/v1/clients/test_clients_delete.py index 157885f3..8adcaa58 100644 --- a/tests/api/v1/clients/test_clients_delete.py +++ b/tests/api/v1/clients/test_clients_delete.py @@ -5,7 +5,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.clients import create_random_client -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.schemas import ClientRead pytestmark = pytest.mark.asyncio diff --git a/tests/api/v1/clients/test_clients_read.py b/tests/api/v1/clients/test_clients_read.py index a37f736d..2fe3513a 100644 --- a/tests/api/v1/clients/test_clients_read.py +++ b/tests/api/v1/clients/test_clients_read.py @@ -5,7 +5,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.clients import create_random_client -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.core.utilities.uuids import get_uuid_str from app.crud import ClientRepository from app.schemas import ClientRead diff --git a/tests/api/v1/clients/test_clients_update.py 
b/tests/api/v1/clients/test_clients_update.py index 394798ab..37b2ea74 100644 --- a/tests/api/v1/clients/test_clients_update.py +++ b/tests/api/v1/clients/test_clients_update.py @@ -6,7 +6,7 @@ from tests.utils.clients import create_random_client from tests.utils.utils import random_lower_string -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.schemas import ClientRead, ClientUpdate pytestmark = pytest.mark.asyncio diff --git a/tests/api/v1/websites/test_websites_create.py b/tests/api/v1/websites/test_websites_create.py index 19fc44c7..d31e1895 100644 --- a/tests/api/v1/websites/test_websites_create.py +++ b/tests/api/v1/websites/test_websites_create.py @@ -4,7 +4,7 @@ from httpx import AsyncClient, Response from tests.utils.utils import random_boolean, random_domain -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode pytestmark = pytest.mark.asyncio diff --git a/tests/api/v1/websites/test_websites_delete.py b/tests/api/v1/websites/test_websites_delete.py index e6a83d77..0bf57ecf 100644 --- a/tests/api/v1/websites/test_websites_delete.py +++ b/tests/api/v1/websites/test_websites_delete.py @@ -5,7 +5,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.websites import create_random_website -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.schemas import WebsiteRead pytestmark = pytest.mark.asyncio diff --git a/tests/api/v1/websites/test_websites_read.py b/tests/api/v1/websites/test_websites_read.py index 3253bc65..1b8f2af2 100644 --- a/tests/api/v1/websites/test_websites_read.py +++ b/tests/api/v1/websites/test_websites_read.py @@ -5,7 +5,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.utils import random_boolean -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.core.utilities.uuids import get_uuid_str from app.schemas import WebsiteRead diff --git 
a/tests/api/v1/websites/test_websites_update.py b/tests/api/v1/websites/test_websites_update.py index 5bb56033..e17d442b 100644 --- a/tests/api/v1/websites/test_websites_update.py +++ b/tests/api/v1/websites/test_websites_update.py @@ -6,7 +6,7 @@ from tests.utils.utils import random_boolean, random_lower_string from tests.utils.websites import create_random_website -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.schemas import WebsiteRead, WebsiteUpdate pytestmark = pytest.mark.asyncio diff --git a/tests/api/v1/websites_pages/test_websites_pages_create.py b/tests/api/v1/websites_pages/test_websites_pages_create.py index 6b9fd3c4..f4448de1 100644 --- a/tests/api/v1/websites_pages/test_websites_pages_create.py +++ b/tests/api/v1/websites_pages/test_websites_pages_create.py @@ -9,7 +9,7 @@ from tests.utils.website_pages import create_random_website_page from tests.utils.websites import create_random_website -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.core.utilities.uuids import get_uuid from app.schemas import WebsiteMapRead, WebsitePageRead, WebsiteRead @@ -138,6 +138,6 @@ async def test_create_website_page_as_superuser_website_not_exists( headers=superuser_token_headers, json=data, ) - assert response.status_code == 400 + assert response.status_code == 404 entry: Dict[str, Any] = response.json() - assert entry["detail"] == ErrorCode.WEBSITE_PAGE_UNASSIGNED_WEBSITE_ID + assert entry["detail"] == ErrorCode.WEBSITE_NOT_FOUND diff --git a/tests/api/v1/websites_pages/test_websites_pages_delete.py b/tests/api/v1/websites_pages/test_websites_pages_delete.py index d784327f..a94a7518 100644 --- a/tests/api/v1/websites_pages/test_websites_pages_delete.py +++ b/tests/api/v1/websites_pages/test_websites_pages_delete.py @@ -5,7 +5,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.website_pages import create_random_website_page -from app.api.errors import ErrorCode +from 
app.api.exceptions import ErrorCode from app.schemas import WebsitePageRead pytestmark = pytest.mark.asyncio diff --git a/tests/api/v1/websites_pages/test_websites_pages_read.py b/tests/api/v1/websites_pages/test_websites_pages_read.py index 89a47288..f2b5ab78 100644 --- a/tests/api/v1/websites_pages/test_websites_pages_read.py +++ b/tests/api/v1/websites_pages/test_websites_pages_read.py @@ -5,7 +5,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.website_pages import create_random_website_page -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.core.utilities.uuids import get_uuid_str from app.crud import WebsitePageRepository from app.schemas import WebsitePageRead diff --git a/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_create.py b/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_create.py index edb064b9..6bac1d57 100644 --- a/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_create.py +++ b/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_create.py @@ -5,7 +5,7 @@ from tests.utils.utils import random_boolean from tests.utils.website_pagespeedinsights import generate_psi_base -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.core.utilities.uuids import get_uuid_str from app.schemas import ( WebsiteMapRead, diff --git a/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_delete.py b/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_delete.py index b171788b..ee49bbe3 100644 --- a/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_delete.py +++ b/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_delete.py @@ -7,7 +7,7 @@ create_random_website_page_speed_insights, ) -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.schemas import 
WebsitePageSpeedInsightsRead pytestmark = pytest.mark.asyncio diff --git a/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_read.py b/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_read.py index 04298a23..e68ee18d 100644 --- a/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_read.py +++ b/tests/api/v1/websites_pagespeedinsights/test_websites_pagespeedinsights_read.py @@ -7,7 +7,7 @@ create_random_website_page_speed_insights, ) -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.core.utilities.uuids import get_uuid_str from app.crud import WebsitePageSpeedInsightsRepository from app.models.website_pagespeedinsights import WebsitePageSpeedInsights diff --git a/tests/api/v1/websites_sitemaps/test_websites_sitemaps_create.py b/tests/api/v1/websites_sitemaps/test_websites_sitemaps_create.py index 59b56587..fa534b0d 100644 --- a/tests/api/v1/websites_sitemaps/test_websites_sitemaps_create.py +++ b/tests/api/v1/websites_sitemaps/test_websites_sitemaps_create.py @@ -7,7 +7,7 @@ from tests.utils.website_maps import create_random_website_map from tests.utils.websites import create_random_website -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.core.utilities.uuids import get_uuid_str from app.schemas import WebsiteMapRead, WebsiteRead @@ -72,9 +72,9 @@ async def test_create_website_sitemap_as_superuser_unassigned_website_id( headers=superuser_token_headers, json=data, ) - assert response.status_code == 400 + assert response.status_code == 404 entry: Dict[str, Any] = response.json() - assert entry["detail"] == ErrorCode.WEBSITE_MAP_UNASSIGNED_WEBSITE_ID + assert entry["detail"] == ErrorCode.WEBSITE_NOT_FOUND async def test_create_website_sitemap_as_superuser_url_too_short( diff --git a/tests/api/v1/websites_sitemaps/test_websites_sitemaps_delete.py b/tests/api/v1/websites_sitemaps/test_websites_sitemaps_delete.py index 
af92841a..a66af47d 100644 --- a/tests/api/v1/websites_sitemaps/test_websites_sitemaps_delete.py +++ b/tests/api/v1/websites_sitemaps/test_websites_sitemaps_delete.py @@ -5,7 +5,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.website_maps import create_random_website_map -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.schemas import WebsiteMapRead pytestmark = pytest.mark.asyncio diff --git a/tests/api/v1/websites_sitemaps/test_websites_sitemaps_read.py b/tests/api/v1/websites_sitemaps/test_websites_sitemaps_read.py index eb6ee5ab..63f6e07f 100644 --- a/tests/api/v1/websites_sitemaps/test_websites_sitemaps_read.py +++ b/tests/api/v1/websites_sitemaps/test_websites_sitemaps_read.py @@ -5,7 +5,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from tests.utils.website_maps import create_random_website_map -from app.api.errors import ErrorCode +from app.api.exceptions import ErrorCode from app.core.utilities.uuids import get_uuid_str from app.crud import WebsiteMapRepository from app.schemas import WebsiteMapRead