From 038273ac33ddd72d3ae2263e7d0c8547f639ac78 Mon Sep 17 00:00:00 2001 From: "Openverse (Bot)" <101814513+openverse-bot@users.noreply.github.com> Date: Mon, 2 Dec 2024 17:08:03 -0400 Subject: [PATCH] chore(deps): update workflows (#5242) Co-authored-by: Krystle Salazar --- .github/workflows/renovate.yml | 2 +- .pre-commit-config.yaml | 6 ++--- api/api/admin/media_report.py | 2 +- api/api/utils/image_proxy/__init__.py | 7 +++--- api/test/fixtures/media_type_config.py | 2 +- .../controllers/test_search_controller.py | 25 +++++++++++-------- api/test/unit/views/test_media_views.py | 18 +++++++------ .../data_augmentation/rekognition/types.py | 4 +-- .../dags/data_refresh/test_alter_data.py | 7 +++--- .../provider_api_scripts/test_phylopic.py | 5 ++-- .../provider_api_scripts/test_smithsonian.py | 18 ++++++++----- .../test_generate_media_properties.py | 5 ++-- documentation/_ext/link_issues.py | 8 +++--- .../test_distributed_reindex_scheduler.py | 23 +++++++++++------ 14 files changed, 77 insertions(+), 55 deletions(-) diff --git a/.github/workflows/renovate.yml b/.github/workflows/renovate.yml index ebc46917056..e8c8490eb37 100644 --- a/.github/workflows/renovate.yml +++ b/.github/workflows/renovate.yml @@ -62,7 +62,7 @@ jobs: sudo chown -R runneradmin:root /tmp/renovate/ ls -R "$cache_dir" - - uses: renovatebot/github-action@v41.0.2 + - uses: renovatebot/github-action@v41.0.5 with: # Renovate recommends _not_ to use any of # [these names](https://docs.renovatebot.com/configuration-options/). diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0c83ca63306..eb06cc7ba1c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: - id: requirements-txt-fixer - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.1 + rev: v0.8.1 hooks: - id: ruff # replaces Flake8, isort, pydocstyle, pyupgrade args: @@ -64,7 +64,7 @@ repos: - id: shfmt-docker - repo: https://github.com/rhysd/actionlint - rev: v1.7.3 + rev: v1.7.4 hooks: - id: actionlint-docker @@ -154,7 +154,7 @@ repos: files: (.vale/.*|.mdx?)$ - repo: https://github.com/renovatebot/pre-commit-hooks - rev: 39.17.0 + rev: 39.45.0 hooks: - id: renovate-config-validator args: diff --git a/api/api/admin/media_report.py b/api/api/admin/media_report.py index f1da55db51c..d8a2a2308e1 100644 --- a/api/api/admin/media_report.py +++ b/api/api/admin/media_report.py @@ -1,6 +1,6 @@ import json +from collections.abc import Sequence from functools import update_wrapper -from typing import Sequence from django import forms from django.conf import settings diff --git a/api/api/utils/image_proxy/__init__.py b/api/api/utils/image_proxy/__init__.py index 69535bf1404..5f3be690680 100644 --- a/api/api/utils/image_proxy/__init__.py +++ b/api/api/utils/image_proxy/__init__.py @@ -1,6 +1,5 @@ -from datetime import timedelta from functools import wraps -from typing import Literal, Type +from typing import Literal from urllib.parse import urlparse from django.conf import settings @@ -10,7 +9,6 @@ import aiohttp import django_redis import structlog -from aiohttp import client_exceptions from asgiref.sync import sync_to_async from redis.client import Redis from redis.exceptions import ConnectionError @@ -130,7 +128,8 @@ def _tally_client_response_errors(tallies, month: str, domain: str, status: int) logger.warning("Redis connect failed, thumbnail HTTP errors not tallied.") -# thmbfail == THuMBnail FAILures; this key path will exist for every thumbnail requested, so it needs to be space efficient +# thmbfail == 
THuMBnail FAILures; this key path will exist for every thumbnail +# requested, so it needs to be space efficient FAILURE_CACHE_KEY_TEMPLATE = "thmbfail:{ident}" diff --git a/api/test/fixtures/media_type_config.py b/api/test/fixtures/media_type_config.py index f5f26ded8eb..69d2b71f902 100644 --- a/api/test/fixtures/media_type_config.py +++ b/api/test/fixtures/media_type_config.py @@ -1,5 +1,5 @@ +from collections.abc import Iterable from dataclasses import dataclass -from typing import Iterable import pook import pytest diff --git a/api/test/unit/controllers/test_search_controller.py b/api/test/unit/controllers/test_search_controller.py index d8ff1da43ed..71c16403910 100644 --- a/api/test/unit/controllers/test_search_controller.py +++ b/api/test/unit/controllers/test_search_controller.py @@ -886,18 +886,21 @@ def test_get_sources_returns_stats(is_cache_reachable, cache_name, request, capl if is_cache_reachable: cache.set("sources-multimedia", value={"source_1": "1000", "source_2": "1000"}) - with capture_logs() as cap_logs, patch( - "api.controllers.search_controller.get_raw_es_response", - return_value={ - "aggregations": { - "unique_sources": { - "buckets": [ - {"key": "source_1", "doc_count": 1000}, - {"key": "source_2", "doc_count": 1000}, - ] + with ( + capture_logs() as cap_logs, + patch( + "api.controllers.search_controller.get_raw_es_response", + return_value={ + "aggregations": { + "unique_sources": { + "buckets": [ + {"key": "source_1", "doc_count": 1000}, + {"key": "source_2", "doc_count": 1000}, + ] + } } - } - }, + }, + ), ): assert search_controller.get_sources("multimedia") == { "source_1": 1000, diff --git a/api/test/unit/views/test_media_views.py b/api/test/unit/views/test_media_views.py index f55c807df38..f9219d94dbb 100644 --- a/api/test/unit/views/test_media_views.py +++ b/api/test/unit/views/test_media_views.py @@ -24,13 +24,17 @@ def test_list_query_count(api_client, media_type_config): num_results, {}, # search_context ) - with patch( - "api.views.media_views.search_controller", - query_media=MagicMock(return_value=controller_ret), - ), patch( - "api.serializers.media_serializers.search_controller", - get_sources=MagicMock(return_value={}), - ), pytest_django.asserts.assertNumQueries(1): + with ( + patch( + "api.views.media_views.search_controller", + query_media=MagicMock(return_value=controller_ret), + ), + patch( + "api.serializers.media_serializers.search_controller", + get_sources=MagicMock(return_value={}), + ), + pytest_django.asserts.assertNumQueries(1), + ): res = api_client.get(f"/v1/{media_type_config.url_prefix}/") assert res.status_code == 200 diff --git a/catalog/dags/data_augmentation/rekognition/types.py b/catalog/dags/data_augmentation/rekognition/types.py index 1a7d6a69d63..4ce50042a06 100644 --- a/catalog/dags/data_augmentation/rekognition/types.py +++ b/catalog/dags/data_augmentation/rekognition/types.py @@ -1,10 +1,10 @@ # Type definitions for the sample data -from typing import NamedTuple, Type, TypedDict +from typing import NamedTuple, TypedDict from psycopg2.extras import Json -TagsBuffer: Type = list[tuple[str, Json]] +TagsBuffer: type = list[tuple[str, Json]] class Label(TypedDict, total=False): diff --git a/catalog/tests/dags/data_refresh/test_alter_data.py b/catalog/tests/dags/data_refresh/test_alter_data.py index 5dba5d64851..9b63e28faef 100644 --- a/catalog/tests/dags/data_refresh/test_alter_data.py +++ b/catalog/tests/dags/data_refresh/test_alter_data.py @@ -80,9 +80,10 @@ def test_alter_data_batch(): (51, "bbb", True), (52, "ccc", 
False), ] - with mock.patch("data_refresh.alter_data.PostgresHook") as HookMock, mock.patch( - "data_refresh.alter_data.generate_tag_updates" - ) as tag_updates_mock: + with ( + mock.patch("data_refresh.alter_data.PostgresHook") as HookMock, + mock.patch("data_refresh.alter_data.generate_tag_updates") as tag_updates_mock, + ): mock_pg = HookMock.return_value mock_pg.run.return_value = sample_data mock_cursor = ( diff --git a/catalog/tests/dags/providers/provider_api_scripts/test_phylopic.py b/catalog/tests/dags/providers/provider_api_scripts/test_phylopic.py index 0c4af63cd8c..1eefab412cb 100644 --- a/catalog/tests/dags/providers/provider_api_scripts/test_phylopic.py +++ b/catalog/tests/dags/providers/provider_api_scripts/test_phylopic.py @@ -23,8 +23,9 @@ def image_data(): def test__get_initial_query_params(): - with patch.object(pp, "get_response_json", return_value={}), pytest.raises( - Exception + with ( + patch.object(pp, "get_response_json", return_value={}), + pytest.raises(Exception), ): pp._get_initial_query_params() diff --git a/catalog/tests/dags/providers/provider_api_scripts/test_smithsonian.py b/catalog/tests/dags/providers/provider_api_scripts/test_smithsonian.py index 08a4c64bf28..c514b17600a 100644 --- a/catalog/tests/dags/providers/provider_api_scripts/test_smithsonian.py +++ b/catalog/tests/dags/providers/provider_api_scripts/test_smithsonian.py @@ -58,10 +58,13 @@ def test_alert_new_unit_codes(): def test_validate_unit_codes_from_api_raises_exception( new_unit_codes, outdated_unit_codes ): - with patch.object(ingester, "_get_unit_codes_from_api"), patch.object( - ingester, - "_get_new_and_outdated_unit_codes", - return_value=(new_unit_codes, outdated_unit_codes), + with ( + patch.object(ingester, "_get_unit_codes_from_api"), + patch.object( + ingester, + "_get_new_and_outdated_unit_codes", + return_value=(new_unit_codes, outdated_unit_codes), + ), ): message = "^\n\\*Updates needed to the SMITHSONIAN_SUB_PROVIDERS dictionary\\**" with pytest.raises(AirflowException, match=message): @@ -69,8 +72,11 @@ def test_validate_unit_codes_from_api_raises_exception( def test_validate_unit_codes_from_api(): - with patch.object(ingester, "_get_unit_codes_from_api"), patch.object( - ingester, "_get_new_and_outdated_unit_codes", return_value=(set(), set()) + with ( + patch.object(ingester, "_get_unit_codes_from_api"), + patch.object( + ingester, "_get_new_and_outdated_unit_codes", return_value=(set(), set()) + ), ): # Validation should run without raising an exception ingester.validate_unit_codes_from_api() diff --git a/catalog/tests/utilities/media_props_gen/test_generate_media_properties.py b/catalog/tests/utilities/media_props_gen/test_generate_media_properties.py index 86eedbd5f1d..f9996175831 100644 --- a/catalog/tests/utilities/media_props_gen/test_generate_media_properties.py +++ b/catalog/tests/utilities/media_props_gen/test_generate_media_properties.py @@ -31,8 +31,9 @@ def test_generate_markdown_doc(props_source_mock): preamble_mock = "Preamble Content" postamble_mock = "Postamble Content" - with patch(f"{MODULE}.PREAMBLE", preamble_mock), patch( - f"{MODULE}.POSTAMBLE", postamble_mock + with ( + patch(f"{MODULE}.PREAMBLE", preamble_mock), + patch(f"{MODULE}.POSTAMBLE", postamble_mock), ): result = generate_markdown_doc() diff --git a/documentation/_ext/link_issues.py b/documentation/_ext/link_issues.py index baa7b92407c..4c1f45dc6fc 100644 --- a/documentation/_ext/link_issues.py +++ b/documentation/_ext/link_issues.py @@ -54,7 +54,7 @@ class 
IssueTrackerBuildEnvironment(BuildEnvironment): tracker_config: "TrackerConfig" issuetracker_cache: "IssueTrackerCache" - github_rate_limit: t.Tuple[float, bool] + github_rate_limit: tuple[float, bool] class Issue(t.NamedTuple): @@ -64,7 +64,7 @@ class Issue(t.NamedTuple): closed: bool -IssueTrackerCache = t.Dict[str, Issue] +IssueTrackerCache = dict[str, Issue] @dataclasses.dataclass @@ -355,7 +355,7 @@ def lookup_github_issue( return None -BUILTIN_ISSUE_TRACKERS: t.Dict[str, t.Any] = { +BUILTIN_ISSUE_TRACKERS: dict[str, t.Any] = { "github": lookup_github_issue, } @@ -371,7 +371,7 @@ def connect_builtin_tracker(app: Sphinx) -> None: app.connect("issuetracker-lookup-issue", tracker) -def setup(app: Sphinx) -> t.Dict[str, t.Any]: +def setup(app: Sphinx) -> dict[str, t.Any]: app.add_event("issuetracker-lookup-issue") app.connect("builder-inited", connect_builtin_tracker) app.add_config_value("issuetracker", None, "env") diff --git a/ingestion_server/test/unit_tests/test_distributed_reindex_scheduler.py b/ingestion_server/test/unit_tests/test_distributed_reindex_scheduler.py index 5f875a447d6..6c2805ff463 100644 --- a/ingestion_server/test/unit_tests/test_distributed_reindex_scheduler.py +++ b/ingestion_server/test/unit_tests/test_distributed_reindex_scheduler.py @@ -33,11 +33,15 @@ def test_assign_work(estimated_records, record_limit, workers, expected_ranges): estimated_records ] # Enable pook & mock other internal functions - with pook.use(), mock.patch( - "ingestion_server.distributed_reindex_scheduler.get_record_limit" - ) as mock_get_record_limit, mock.patch( - "ingestion_server.distributed_reindex_scheduler._wait_for_healthcheck" - ) as mock_wait_for_healthcheck: + with ( + pook.use(), + mock.patch( + "ingestion_server.distributed_reindex_scheduler.get_record_limit" + ) as mock_get_record_limit, + mock.patch( + "ingestion_server.distributed_reindex_scheduler._wait_for_healthcheck" + ) as mock_wait_for_healthcheck, + ): mock_wait_for_healthcheck.return_value = True mock_get_record_limit.return_value = record_limit @@ -68,9 +72,12 @@ def test_assign_work(estimated_records, record_limit, workers, expected_ranges): def test_assign_work_workers_fail(): mock_db = mock.MagicMock() mock_db.cursor.return_value.__enter__.return_value.fetchone.return_value = [100] - with mock.patch( - "ingestion_server.distributed_reindex_scheduler._wait_for_healthcheck" - ) as mock_wait_for_healthcheck, pook.use(False): + with ( + mock.patch( + "ingestion_server.distributed_reindex_scheduler._wait_for_healthcheck" + ) as mock_wait_for_healthcheck, + pook.use(False), + ): mock_wait_for_healthcheck.return_value = False with pytest.raises(ValueError, match="Some workers didn't respond"):
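
Reviewer note on the recurring Python changes above: besides the version bumps (Renovate action v41.0.5, Ruff v0.8.1, actionlint v1.7.4, renovate-config-validator 39.45.0), the hunks apply two mechanical clean-ups. ABCs such as Sequence and Iterable now come from collections.abc, with builtin generics (dict[...], tuple[...], type) replacing the deprecated typing aliases, and stacked context managers are grouped inside parentheses, a form available since Python 3.10. The sketch below is a minimal illustration of both patterns with hypothetical names (lengths and its test); it is not code from the Openverse repositories.

    import os
    from collections.abc import Sequence
    from unittest import mock

    import pytest


    def lengths(items: Sequence[str]) -> dict[str, int]:
        # collections.abc.Sequence and builtin dict[...] replace the deprecated
        # typing.Sequence / typing.Dict spellings used before this patch.
        return {item: len(item) for item in items}


    def test_lengths_with_grouped_context_managers():
        # Previously written comma-chained:
        #   with mock.patch("os.getcwd", side_effect=ValueError), pytest.raises(ValueError):
        # The parenthesized grouping below is the style the reformatted tests now use.
        with (
            mock.patch("os.getcwd", side_effect=ValueError),
            pytest.raises(ValueError),
        ):
            os.getcwd()

        assert lengths(["a", "bb"]) == {"a": 1, "bb": 2}

On interpreters older than Python 3.10 the parenthesized form is a syntax error, so the comma-chained form has to stay wherever an older runtime is still supported.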