Skip to content

Commit

Permalink
Fix auto-fixable trunk errors (#263)
Browse files Browse the repository at this point in the history
* Add trunk to nav backend

* Add code quality job to CI

* Run trunk autoformatting on all failing files

* Add .git-blame-ignore-revs

* Remove broken pyright config from pyproject.toml

* Ignore JSON under test search fixtures folder

* Do not use mutable data structures for argument defaults

* Remove unnecessary quotes

* Export symbols using __all__

* Use double [[ ]] for bash

* Run isort

* Remove unnecessary double quotes

* Disable oxipng

* Wrap variables in curly brackets & double quotes

* Fix included packages in pyproject.toml

* Fix line length violations, spellings & heading numbers & add language to code block

* Absolute paths instead of relative to appease pyright

* Fix line length violations

* Make sure variables are in scope before closing

* Turn off B rules for ruff until later date

* Fix line length violations, duplicate headings & missing language from code block

* Tell trunk to ignore everything under /scripts for now

* Fix import paths

* Run isort & use caplog rather than incorrectly spying on function

* Don't run markdown linting on LICENSE file

* Update .git-blame-ignore-revs

* Do not include scripts in pyright

* Fix pyright unknown import symbol

* Move bats folder under test folder

* Fix import paths for pyright

* Remove old bats dockerfile as it's no longer used
  • Loading branch information
katybaulch authored Apr 9, 2024
1 parent bc8ceb9 commit 4eeb41c
Show file tree
Hide file tree
Showing 61 changed files with 628 additions and 667 deletions.
16 changes: 16 additions & 0 deletions .git-blame-ignore-revs
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
###############################################################################
# Ignore specific revisions when using Git Blame
#
# The file should contain the full (40 char) commit hashes. Lines starting with
# a hash are considered comments and can be used to explain what makes the
# given commit(s) unimportant.
#
# You can call git blame with the --ignore-revs-file option to ignore all the
# commits listed in this file.
###############################################################################

# Run trunk auto-formatting
220ab8dc9367b6cf4c6d41a491a32c3c4e5c8eb2

# Ignore LICENSE re-formatting
4e8229e076fec4c5013655a9950187bea9b354df
2 changes: 1 addition & 1 deletion .trunk/configs/bandit.yaml
Original file line number Diff line number Diff line change
@@ -1 +1 @@
exclude_dirs: ["tests"]
exclude_dirs: [tests]
2 changes: 1 addition & 1 deletion .trunk/configs/ruff.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# Generic, formatter-friendly config.
select = ["B", "D3", "E", "F"]
select = ["D3", "E", "F"]

# Never enforce `E501` (line length violations). This should be handled by formatters.
ignore = ["E501"]
Expand Down
7 changes: 6 additions & 1 deletion .trunk/trunk.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ runtimes:
# This is the section where you manage your linters.
# (https://docs.trunk.io/check/configuration)
lint:
disabled:
- oxipng
definitions:
- name: bandit
direct_configs: [bandit.yaml]
Expand All @@ -37,6 +39,10 @@ lint:
# Ignore test data JSON files
- tests/data/**/*.json
- tests/search_fixtures/**/*.json
- scripts/**
- linters: [markdownlint]
paths:
- LICENSE.md

enabled:
- [email protected]
Expand All @@ -48,7 +54,6 @@ lint:
- [email protected]
- [email protected]
- [email protected]
- [email protected]
- [email protected]:
commands:
- check-ast
Expand Down
366 changes: 183 additions & 183 deletions LICENSE.md

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
Provides API for the navigator app.

## Docs

- [Quickstart](docs/quickstart.md)
- [Database](docs/database.md)
- [Docker](docs/docker.md)
Expand Down
23 changes: 7 additions & 16 deletions app/api/api_v1/routers/admin.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,19 @@
import logging
from typing import cast

from fastapi import (
APIRouter,
Depends,
HTTPException,
Request,
status,
)
from sqlalchemy import update
from sqlalchemy import Column

from db_client.models.dfce import DocumentStatus
from app.api.api_v1.schemas.document import (
DocumentUpdateRequest,
)
from app.core.auth import get_superuser_details
from app.core.lookups import get_family_document_by_import_id_or_slug
from db_client.models.document.physical_document import (
Language,
LanguageSource,
PhysicalDocument,
Language,
PhysicalDocumentLanguage,
)
from fastapi import APIRouter, Depends, HTTPException, Request, status
from sqlalchemy import Column, update

from app.api.api_v1.schemas.document import DocumentUpdateRequest
from app.core.auth import get_superuser_details
from app.core.lookups import get_family_document_by_import_id_or_slug
from app.db.session import get_db

_LOGGER = logging.getLogger(__name__)
Expand Down
4 changes: 2 additions & 2 deletions app/api/api_v1/routers/auth.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm

from app.core.security import create_access_token
from app.core.auth import authenticate_user
from app.db.session import get_db
from app.core.security import create_access_token
from app.db.crud.user import get_app_user_authorisation
from app.db.session import get_db

auth_router = r = APIRouter()

Expand Down
18 changes: 6 additions & 12 deletions app/api/api_v1/routers/documents.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,18 @@
from http.client import NOT_FOUND
import logging
from http.client import NOT_FOUND
from typing import Union

from fastapi import (
APIRouter,
Depends,
HTTPException,
)
from fastapi import APIRouter, Depends, HTTPException

from app.api.api_v1.schemas.document import (
FamilyAndDocumentsResponse,
FamilyDocumentWithContextResponse,
)
from app.db.crud.document import (
get_family_and_documents,
get_family_document_and_context,
get_slugged_objects,
)

from app.api.api_v1.schemas.document import (
FamilyAndDocumentsResponse,
FamilyDocumentWithContextResponse,
)

from app.db.session import get_db

_LOGGER = logging.getLogger(__file__)
Expand Down
8 changes: 5 additions & 3 deletions app/api/api_v1/routers/lookups/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from .config import lookup_config
from .geo_stats import lookup_geo_stats
from .router import lookups_router
from app.api.api_v1.routers.lookups.config import lookup_config
from app.api.api_v1.routers.lookups.geo_stats import lookup_geo_stats
from app.api.api_v1.routers.lookups.router import lookups_router

__all__ = ("lookup_config", "lookup_geo_stats", "lookups_router")
7 changes: 2 additions & 5 deletions app/api/api_v1/routers/lookups/config.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,12 @@
from fastapi import Depends, Request

from app.api.api_v1.routers.lookups.router import lookups_router
from app.api.api_v1.schemas.metadata import ApplicationConfig
from app.core.lookups import get_config
from app.db.session import get_db
from .router import lookups_router


@lookups_router.get("/config", response_model=ApplicationConfig)
def lookup_config(
request: Request,
db=Depends(get_db),
):
def lookup_config(request: Request, db=Depends(get_db)):
"""Get the config for the metadata."""
return get_config(db)
4 changes: 2 additions & 2 deletions app/api/api_v1/routers/lookups/geo_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,13 @@
from http.client import NOT_FOUND
from typing import Any, Dict, Optional, Union

from db_client.models.dfce import Geography, GeoStatistics
from fastapi import Depends, HTTPException
from pydantic import BaseModel
from sqlalchemy import exc

from app.api.api_v1.routers.lookups.router import lookups_router
from app.db.session import get_db
from db_client.models.dfce import Geography, GeoStatistics
from .router import lookups_router

_LOGGER = logging.getLogger(__name__)

Expand Down
24 changes: 5 additions & 19 deletions app/api/api_v1/routers/pipeline_trigger.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,13 @@
import logging
from sqlalchemy.orm import Session
from app.core.aws import S3Client

from fastapi import (
APIRouter,
BackgroundTasks,
Depends,
Request,
status,
)
from fastapi import APIRouter, BackgroundTasks, Depends, Request, status
from sqlalchemy.orm import Session

from app.api.api_v1.schemas.document import (
BulkIngestResult,
)
from app.api.api_v1.schemas.document import BulkIngestResult
from app.core.auth import get_superuser_details
from app.core.aws import get_s3_client

from app.core.aws import S3Client, get_s3_client
from app.core.ingestion.pipeline import generate_pipeline_ingest_input

from app.core.validation.util import (
get_new_s3_prefix,
write_documents_to_s3,
)
from app.core.validation.util import get_new_s3_prefix, write_documents_to_s3
from app.db.session import get_db

_LOGGER = logging.getLogger(__name__)
Expand Down
1 change: 1 addition & 0 deletions app/api/api_v1/routers/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
its input. The individual endpoints will return different responses tailored
for the type of document search being performed.
"""

import logging
from io import BytesIO

Expand Down
4 changes: 2 additions & 2 deletions app/api/api_v1/schemas/document.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
from datetime import datetime
from typing import Any, Mapping, Optional, Sequence, Union

from pydantic import field_validator, ConfigDict, BaseModel
from pydantic import BaseModel, ConfigDict, field_validator

from . import CLIMATE_LAWS_MATCH
from app.api.api_v1.schemas import CLIMATE_LAWS_MATCH

Json = dict[str, Any]

Expand Down
1 change: 0 additions & 1 deletion app/api/api_v1/schemas/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

from pydantic import BaseModel


TaxonomyData = Mapping[str, Mapping[str, Union[bool, Sequence[str]]]]


Expand Down
15 changes: 5 additions & 10 deletions app/api/api_v1/schemas/search.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,13 @@
from enum import Enum
from typing import List, Mapping, Optional, Sequence

from pydantic import field_validator, Field, BaseModel, PrivateAttr, model_validator
from typing import Literal
from typing import List, Literal, Mapping, Optional, Sequence

from cpr_data_access.models.search import SearchParameters as DataAccessSearchParameters
from db_client.models.dfce import FamilyCategory
from . import CLIMATE_LAWS_MATCH
from pydantic import BaseModel, Field, PrivateAttr, field_validator, model_validator
from typing_extensions import Annotated
from cpr_data_access.models.search import SearchParameters as DataAccessSearchParameters

from app.core.config import (
VESPA_SEARCH_LIMIT,
VESPA_SEARCH_MATCHES_PER_DOC,
)
from app.api.api_v1.schemas import CLIMATE_LAWS_MATCH
from app.core.config import VESPA_SEARCH_LIMIT, VESPA_SEARCH_MATCHES_PER_DOC

Coord = tuple[float, float]

Expand Down
5 changes: 2 additions & 3 deletions app/core/auth.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
from typing import Any, Optional, cast

import jwt
from db_client.models.organisation import AppUser
from fastapi import Depends, HTTPException, status
from jwt import PyJWTError

from app.api.api_v1.schemas.user import JWTUser
from app.core import security
from app.db.crud.user import get_app_user_by_email
from db_client.models.organisation import AppUser
from app.api.api_v1.schemas.user import JWTUser


CREDENTIALS_EXCEPTION = HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
Expand Down
1 change: 1 addition & 0 deletions app/core/aws.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""AWS Helper classes."""

import logging
import os
import re
Expand Down
11 changes: 3 additions & 8 deletions app/core/browse.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@
from time import perf_counter_ns
from typing import Optional, Sequence, cast

from db_client.models.dfce.family import Corpus, Family, FamilyCorpus, FamilyStatus
from db_client.models.dfce.geography import Geography
from db_client.models.organisation import Organisation
from pydantic import BaseModel
from sqlalchemy.orm import Session

Expand All @@ -14,14 +17,6 @@
SortField,
SortOrder,
)
from db_client.models.dfce.family import (
Family,
FamilyStatus,
FamilyCorpus,
Corpus,
)
from db_client.models.dfce.geography import Geography
from db_client.models.organisation import Organisation

_LOGGER = getLogger(__name__)

Expand Down
6 changes: 5 additions & 1 deletion app/core/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import zipfile
from io import BytesIO, StringIO
from logging import getLogger
from typing import Optional

import pandas as pd
from fastapi import Depends
Expand Down Expand Up @@ -249,14 +250,17 @@ def get_whole_database_dump(ingest_cycle_start: str, db=Depends(get_db)):
def replace_slug_with_qualified_url(
df: pd.DataFrame,
public_app_url: str,
url_cols: list[str] = ["Family Slug", "Document Slug"],
url_cols: Optional[list[str]] = None,
) -> pd.DataFrame:
"""
Use the slug to create a fully qualified URL to the entity.
This functionality won't be included in the MVP for the data dump,
but will likely be included in future revisions.
"""
if url_cols is None:
url_cols = ["Family Slug", "Document Slug"]

url_base = f"{public_app_url}/documents/"

for col in url_cols:
Expand Down
14 changes: 6 additions & 8 deletions app/core/ingestion/pipeline.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,19 @@
import logging
from datetime import datetime, timezone
from typing import Any, Sequence, Tuple, cast
import logging


from sqlalchemy.orm import Session

from app.api.api_v1.schemas.document import DocumentParserInput
from db_client.models.organisation import Organisation
from db_client.models.dfce.family import (
Corpus,
Family,
FamilyCorpus,
FamilyDocument,
Geography,
FamilyCorpus,
Corpus,
)
from db_client.models.dfce.metadata import FamilyMetadata
from db_client.models.organisation import Organisation
from sqlalchemy.orm import Session

from app.api.api_v1.schemas.document import DocumentParserInput

_LOGGER = logging.getLogger(__name__)

Expand Down
Loading

0 comments on commit 4eeb41c

Please sign in to comment.