Skip to content

Commit

Permalink
fix: Lint Python code with ruff (#3799)
Browse files Browse the repository at this point in the history
  • Loading branch information
cclauss authored Aug 12, 2024
1 parent 65ece35 commit 432914e
Show file tree
Hide file tree
Showing 41 changed files with 112 additions and 120 deletions.
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,5 @@ repos:
# Ruff version.
rev: v0.5.7
hooks:
- id: ruff
- id: ruff-format
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,6 @@
"""

import sqlalchemy as sa

import mealie.db.migration_types
from alembic import op

# revision identifiers, used by Alembic.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from text_unidecode import unidecode

import mealie.db.migration_types
from alembic import op
from mealie.db.models._model_utils.guid import GUID

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@

import sqlalchemy as sa

import mealie.db.migration_types
from alembic import op

# revision identifiers, used by Alembic.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,7 @@
"""

import sqlalchemy as sa

import mealie.db.migration_types
from alembic import op
import alembic.context as context
from mealie.core.config import get_app_settings

# revision identifiers, used by Alembic.
revision = "b3dbb554ba53"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,15 +66,15 @@ def populate_shopping_list_users():
user_id = find_user_id_for_group(group_id)
if user_id:
session.execute(
sa.text(f"UPDATE shopping_lists SET user_id=:user_id WHERE id=:id").bindparams(
sa.text("UPDATE shopping_lists SET user_id=:user_id WHERE id=:id").bindparams(
user_id=user_id, id=list_id
)
)
else:
logger.warning(
f"No user found for shopping list {list_id} with group {group_id}; deleting shopping list"
)
session.execute(sa.text(f"DELETE FROM shopping_lists WHERE id=:id").bindparams(id=list_id))
session.execute(sa.text("DELETE FROM shopping_lists WHERE id=:id").bindparams(id=list_id))


def upgrade():
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,6 @@
"""

import sqlalchemy as sa

import mealie.db.migration_types
from alembic import op

# revision identifiers, used by Alembic.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def new_user_rating(user_id: Any, recipe_id: Any, rating: float | None = None, i
if is_postgres():
id = str(uuid4())
else:
id = "%.32x" % uuid4().int
id = "%.32x" % uuid4().int # noqa: UP031

now = datetime.now(timezone.utc).isoformat()
return {
Expand Down
2 changes: 1 addition & 1 deletion dev/code-generation/gen_py_pytest_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from fastapi import FastAPI
from jinja2 import Template
from pydantic import BaseModel, ConfigDict
from utils import PROJECT_DIR, CodeTemplates, HTTPRequest, RouteObject, RequestType
from utils import PROJECT_DIR, CodeTemplates, HTTPRequest, RouteObject

CWD = Path(__file__).parent

Expand Down
2 changes: 1 addition & 1 deletion mealie/core/settings/db_providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def db_path(self):

@property
def db_url(self) -> str:
return f"sqlite:///{str(self.db_path.absolute())}"
return f"sqlite:///{self.db_path.absolute()!s}"

@property
def db_url_public(self) -> str:
Expand Down
2 changes: 1 addition & 1 deletion mealie/routes/auth/auth.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ async def get_token(
@user_router.get("/refresh")
async def refresh_token(current_user: PrivateUser = Depends(get_current_user)):
"""Use a valid token to get another token"""
access_token = security.create_access_token(data=dict(sub=str(current_user.id)))
access_token = security.create_access_token(data={"sub": str(current_user.id)})
return MealieAuthToken.respond(access_token)


Expand Down
2 changes: 1 addition & 1 deletion mealie/schema/make_dependable.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def init_cls_and_handle_errors(*args, **kwargs):
return cls(*args, **kwargs)
except (ValidationError, RequestValidationError) as e:
for error in e.errors():
error["loc"] = ["query"] + list(error["loc"])
error["loc"] = ["query", *list(error["loc"])]
raise HTTPException(422, detail=[format_exception(ex) for ex in e.errors()]) from None

init_cls_and_handle_errors.__signature__ = signature(cls)
Expand Down
2 changes: 1 addition & 1 deletion mealie/services/group_services/group_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ def calculate_group_storage(self, group_id: None | UUID4 = None) -> GroupStorage
all_ids = self.repos.recipes.all_ids(target_id)

used_size = sum(
fs_stats.get_dir_size(f"{self.directories.RECIPE_DATA_DIR}/{str(recipe_id)}") for recipe_id in all_ids
fs_stats.get_dir_size(f"{self.directories.RECIPE_DATA_DIR}/{recipe_id!s}") for recipe_id in all_ids
)

return GroupStorage.bytes(used_size, ALLOWED_SIZE)
6 changes: 3 additions & 3 deletions mealie/services/group_services/shopping_lists.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,7 @@ def bulk_create_items(
created_items = self.list_items.create_many(filtered_create_items) if filtered_create_items else []
updated_items = self.list_items.update_many(update_items) if update_items else []

for list_id in set(item.shopping_list_id for item in created_items + updated_items):
for list_id in {item.shopping_list_id for item in created_items + updated_items}:
self.remove_unused_recipe_references(list_id)

return ShoppingListItemsCollectionOut(
Expand Down Expand Up @@ -278,7 +278,7 @@ def bulk_update_items(self, update_items: list[ShoppingListItemUpdateBulk]) -> S
self.list_items.delete_many(delete_items) if delete_items else [], # type: ignore
)

for list_id in set(item.shopping_list_id for item in updated_items + deleted_items):
for list_id in {item.shopping_list_id for item in updated_items + deleted_items}:
self.remove_unused_recipe_references(list_id)

return ShoppingListItemsCollectionOut(
Expand All @@ -291,7 +291,7 @@ def bulk_delete_items(self, delete_items: list[UUID4]) -> ShoppingListItemsColle
self.list_items.delete_many(set(delete_items)) if delete_items else [], # type: ignore
)

for list_id in set(item.shopping_list_id for item in deleted_items):
for list_id in {item.shopping_list_id for item in deleted_items}:
self.remove_unused_recipe_references(list_id)

return ShoppingListItemsCollectionOut(created_items=[], updated_items=[], deleted_items=deleted_items)
Expand Down
2 changes: 1 addition & 1 deletion mealie/services/parser_services/brute/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ def parse_ingredient(tokens) -> tuple[str, str]:
# no opening bracket anywhere -> just ignore the last bracket
ingredient, note = parse_ingredient_with_comma(tokens)
else:
# opening bracket found -> split in ingredient and note, remove brackets from note # noqa: E501
# opening bracket found -> split in ingredient and note, remove brackets from note
note = " ".join(tokens[start:])[1:-1]
ingredient = " ".join(tokens[:start])
else:
Expand Down
6 changes: 3 additions & 3 deletions mealie/services/parser_services/crfpp/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ def insideParenthesis(token, tokens):
else:
line = " ".join(tokens)
return (
re.match(r".*\(.*" + re.escape(token) + r".*\).*", line) is not None # noqa: W605 - invalid dscape sequence
re.match(r".*\(.*" + re.escape(token) + r".*\).*", line) is not None # - invalid escape sequence
)


Expand Down Expand Up @@ -188,7 +188,7 @@ def import_data(lines):

# turn B-NAME/123 back into "name"
tag, confidence = re.split(r"/", columns[-1], maxsplit=1)
tag = re.sub(r"^[BI]\-", "", tag).lower() # noqa: W605 - invalid dscape sequence
tag = re.sub(r"^[BI]\-", "", tag).lower() # - invalid escape sequence

# ====================
# Confidence Getter
Expand Down Expand Up @@ -261,6 +261,6 @@ def export_data(lines):

for i, token in enumerate(tokens):
features = getFeatures(token, i + 1, tokens)
output.append(joinLine([token] + features))
output.append(joinLine([token, *features]))
output.append("")
return "\n".join(output)
2 changes: 1 addition & 1 deletion mealie/services/recipe/recipe_data_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ async def scrape_image(self, image_url: str | dict[str, str] | list[str]) -> Non
if ext not in img.IMAGE_EXTENSIONS:
ext = "jpg" # Guess the extension

file_name = f"{str(self.recipe_id)}.{ext}"
file_name = f"{self.recipe_id!s}.{ext}"
file_path = Recipe.directory_from_id(self.recipe_id).joinpath("images", file_name)

async with AsyncClient(transport=AsyncSafeTransport()) as client:
Expand Down
4 changes: 2 additions & 2 deletions mealie/services/scraper/cleaner.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ def clean_instructions(steps_object: list | dict | str, default: list | None = N
# }
#
steps_object = typing.cast(dict, steps_object)
return clean_instructions([x for x in steps_object.values()])
return clean_instructions(list(steps_object.values()))
case str(step_as_str):
# Strings are weird, some sites return a single string with newlines
# others returns a json string for some reasons
Expand Down Expand Up @@ -481,7 +481,7 @@ def clean_tags(data: str | list[str]) -> list[str]:
case [str(), *_]:
return [tag.strip().title() for tag in data if tag.strip()]
case str(data):
return clean_tags([t for t in data.split(",")])
return clean_tags(data.split(","))
case _:
return []
# should probably raise exception
Expand Down
2 changes: 1 addition & 1 deletion mealie/services/scraper/scraper.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ async def create_from_url(url: str, translator: Translator) -> tuple[Recipe, Scr
new_recipe.image = "no image"

if new_recipe.name is None or new_recipe.name == "":
new_recipe.name = f"No Recipe Name Found - {str(uuid4())}"
new_recipe.name = f"No Recipe Name Found - {uuid4()!s}"
new_recipe.slug = slugify(new_recipe.name)

return new_recipe, extras
16 changes: 11 additions & 5 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -139,23 +139,29 @@ target-version = "py310"
# Enable Pyflakes `E` and `F` codes by default.
ignore = ["F403", "TID252", "B008"]
select = [
"B", # flake8-bugbear
"C4", # flake8-comprehensions
"C90", # McCabe complexity
"DTZ", # flake8-datetimez
"E", # pycodestyles
"F", # pyflakes
"I", # isort
"T", # flake8-print
"UP", # pyupgrade
"B", # flake8-bugbear
"DTZ", # flake8-datetimez
# "ANN", # flake8-annotations
# "C", # McCabe complexity
# "RUF", # Ruff specific
# "BLE", # blind-except
]

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["E402", "E501"]
"alembic/versions/2022*" = ["E501"]
"alembic/versions/2023*" = ["E501"]
"dev/scripts/all_recipes_stress_test.py" = ["E501"]
"ldap_provider.py" = ["UP032"]
"tests/conftest.py" = ["E402"]
"tests/utils/routes/__init__.py" = ["F401"]

[tool.ruff.lint.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10
max-complexity = 24 # Default is 10.
2 changes: 1 addition & 1 deletion tests/fixtures/fixture_users.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import json
from typing import Generator
from collections.abc import Generator

from pytest import fixture
from starlette.testclient import TestClient
Expand Down
2 changes: 1 addition & 1 deletion tests/integration_tests/admin_tests/test_admin_about.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def test_public_about_get_app_info(api_client: TestClient, is_private_group: boo
assert as_dict["allowSignup"] == settings.ALLOW_SIGNUP

if is_private_group:
assert as_dict["defaultGroupSlug"] == None
assert as_dict["defaultGroupSlug"] is None
else:
assert as_dict["defaultGroupSlug"] == group.slug

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def test_bad_mealie_alpha_data_is_ignored(api_client: TestClient, unique_user: T
with open(invalid_json_path, "w"):
pass # write nothing to the file, which is invalid JSON
except Exception:
raise Exception(os.listdir(tmpdir))
raise Exception(os.listdir(tmpdir)) # noqa: B904

modified_test_data = os.path.join(tmpdir, "modified-test-data.zip")
with ZipFile(modified_test_data, "w") as zf:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def test_group_recipe_actions_get_all(api_client: TestClient, unique_user: TestU

response = api_client.get(api_routes.groups_recipe_actions, headers=unique_user.token)
data = assert_deserialize(response, 200)
fetched_ids = set(item["id"] for item in data["items"])
fetched_ids = {item["id"] for item in data["items"]}
for expected_id in expected_ids:
assert expected_id in fetched_ids

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from collections.abc import Generator
from pathlib import Path
from typing import Generator

import pytest
import sqlalchemy
Expand Down
20 changes: 10 additions & 10 deletions tests/integration_tests/user_recipe_tests/test_recipe_crud.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
import random
import shutil
import tempfile
from collections.abc import Generator
from pathlib import Path
from typing import Generator
from uuid import uuid4
from zipfile import ZipFile

Expand Down Expand Up @@ -489,9 +489,9 @@ def test_duplicate(api_client: TestClient, recipe_data: RecipeSiteTestCase, uniq

# Ingredients should have the same texts, but different ids
assert duplicate_recipe["recipeIngredient"] != initial_recipe["recipeIngredient"]
assert list(map(lambda i: i["note"], duplicate_recipe["recipeIngredient"])) == list(
map(lambda i: i["note"], initial_recipe["recipeIngredient"])
)
assert [i["note"] for i in duplicate_recipe["recipeIngredient"]] == [
i["note"] for i in initial_recipe["recipeIngredient"]
]

previous_categories = initial_recipe["recipeCategory"]
assert duplicate_recipe["recipeCategory"] == previous_categories
Expand Down Expand Up @@ -748,21 +748,21 @@ def test_get_recipes_organizer_filter(
# get recipes by organizer
if organizer_type == "tags":
organizer = random.choice(tags)
expected_recipe_ids = set(
expected_recipe_ids = {
str(recipe.id) for recipe in recipes if organizer.id in [tag.id for tag in recipe.tags or []]
)
}
elif organizer_type == "categories":
organizer = random.choice(categories)
expected_recipe_ids = set(
expected_recipe_ids = {
str(recipe.id)
for recipe in recipes
if organizer.id in [category.id for category in recipe.recipe_category or []]
)
}
elif organizer_type == "tools":
organizer = random.choice(tools)
expected_recipe_ids = set(
expected_recipe_ids = {
str(recipe.id) for recipe in recipes if organizer.id in [tool.id for tool in recipe.tools or []]
)
}
else:
raise ValueError(f"Unknown organizer type: {organizer_type}")

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from io import BytesIO
import json
import zipfile
from io import BytesIO

from fastapi.testclient import TestClient

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def test_recipe_ingredients_parser_nlp(api_client: TestClient, unique_user: Test
response = api_client.post(api_routes.parser_ingredients, json=payload, headers=unique_user.token)
assert response.status_code == 200

for api_ingredient, test_ingredient in zip(response.json(), test_ingredients):
for api_ingredient, test_ingredient in zip(response.json(), test_ingredients, strict=False):
assert_ingredient(api_ingredient, test_ingredient)


Expand Down
10 changes: 5 additions & 5 deletions tests/integration_tests/user_recipe_tests/test_recipe_ratings.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import random
from typing import Generator
from collections.abc import Generator
from uuid import UUID

import pytest
Expand Down Expand Up @@ -71,8 +71,8 @@ def test_user_recipe_favorites(
ratings = response.json()["ratings"]

assert len(ratings) == len(recipes_to_favorite)
fetched_recipe_ids = set(rating["recipeId"] for rating in ratings)
favorited_recipe_ids = set(str(recipe.id) for recipe in recipes_to_favorite)
fetched_recipe_ids = {rating["recipeId"] for rating in ratings}
favorited_recipe_ids = {str(recipe.id) for recipe in recipes_to_favorite}
assert fetched_recipe_ids == favorited_recipe_ids

# remove favorites
Expand All @@ -87,8 +87,8 @@ def test_user_recipe_favorites(
ratings = response.json()["ratings"]

assert len(ratings) == len(recipes_to_favorite) - len(recipe_favorites_to_remove)
fetched_recipe_ids = set(rating["recipeId"] for rating in ratings)
removed_recipe_ids = set(str(recipe.id) for recipe in recipe_favorites_to_remove)
fetched_recipe_ids = {rating["recipeId"] for rating in ratings}
removed_recipe_ids = {str(recipe.id) for recipe in recipe_favorites_to_remove}
assert fetched_recipe_ids == favorited_recipe_ids - removed_recipe_ids


Expand Down
Loading

0 comments on commit 432914e

Please sign in to comment.