chore(deps): update workflows (#5242)
Co-authored-by: Krystle Salazar <[email protected]>
openverse-bot and krysal authored Dec 2, 2024
1 parent 1f9f87b commit 038273a
Showing 14 changed files with 77 additions and 55 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/renovate.yml
@@ -62,7 +62,7 @@ jobs:
sudo chown -R runneradmin:root /tmp/renovate/
ls -R "$cache_dir"
-  - uses: renovatebot/[email protected]
+  - uses: renovatebot/[email protected]
with:
# Renovate recommends _not_ to use any of
# [these names](https://docs.renovatebot.com/configuration-options/).
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -46,7 +46,7 @@ repos:
- id: requirements-txt-fixer

- repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.7.1
+  rev: v0.8.1
hooks:
- id: ruff # replaces Flake8, isort, pydocstyle, pyupgrade
args:
@@ -64,7 +64,7 @@
- id: shfmt-docker

- repo: https://github.com/rhysd/actionlint
-  rev: v1.7.3
+  rev: v1.7.4
hooks:
- id: actionlint-docker

@@ -154,7 +154,7 @@ repos:
files: (.vale/.*|.mdx?)$

- repo: https://github.com/renovatebot/pre-commit-hooks
-  rev: 39.17.0
+  rev: 39.45.0
hooks:
- id: renovate-config-validator
args:
2 changes: 1 addition & 1 deletion api/api/admin/media_report.py
@@ -1,6 +1,6 @@
import json
+from collections.abc import Sequence
from functools import update_wrapper
-from typing import Sequence

from django import forms
from django.conf import settings
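The `Sequence` move above is the pattern the rest of this commit repeats: since Python 3.9 the container ABCs live in `collections.abc`, and the `typing` re-exports are deprecated (the kind of change ruff's pyupgrade rules, e.g. UP035, flag once the hook is bumped to v0.8.1). A minimal sketch of the convention, independent of this module:

# Before (deprecated re-export):
#     from typing import Sequence
# After:
from collections.abc import Sequence


def first_or_default(items: Sequence[str], default: str = "") -> str:
    # Sequence gives len() and indexing, which is all this needs
    return items[0] if len(items) else default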
7 changes: 3 additions & 4 deletions api/api/utils/image_proxy/__init__.py
@@ -1,6 +1,5 @@
from datetime import timedelta
from functools import wraps
-from typing import Literal, Type
+from typing import Literal
from urllib.parse import urlparse

from django.conf import settings
@@ -10,7 +9,6 @@
import aiohttp
import django_redis
import structlog
-from aiohttp import client_exceptions
from asgiref.sync import sync_to_async
from redis.client import Redis
from redis.exceptions import ConnectionError
@@ -130,7 +128,8 @@ def _tally_client_response_errors(tallies, month: str, domain: str, status: int)
logger.warning("Redis connect failed, thumbnail HTTP errors not tallied.")


-# thmbfail == THuMBnail FAILures; this key path will exist for every thumbnail requested, so it needs to be space efficient
+# thmbfail == THuMBnail FAILures; this key path will exist for every thumbnail
+# requested, so it needs to be space efficient
FAILURE_CACHE_KEY_TEMPLATE = "thmbfail:{ident}"


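The re-wrapped comment documents a deliberately terse key prefix: one such Redis key exists per requested thumbnail, so every byte counts. A minimal sketch of how a template like this is filled in; the identifier value is invented for illustration:

FAILURE_CACHE_KEY_TEMPLATE = "thmbfail:{ident}"

# Hypothetical media identifier (a UUID)
ident = "4bc43a04-ef46-4544-a0c1-63c63f56e276"
key = FAILURE_CACHE_KEY_TEMPLATE.format(ident=ident)
assert key == "thmbfail:4bc43a04-ef46-4544-a0c1-63c63f56e276"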
2 changes: 1 addition & 1 deletion api/test/fixtures/media_type_config.py
@@ -1,5 +1,5 @@
+from collections.abc import Iterable
from dataclasses import dataclass
-from typing import Iterable

import pook
import pytest
25 changes: 14 additions & 11 deletions api/test/unit/controllers/test_search_controller.py
@@ -886,18 +886,21 @@ def test_get_sources_returns_stats(is_cache_reachable, cache_name, request, capl
if is_cache_reachable:
cache.set("sources-multimedia", value={"source_1": "1000", "source_2": "1000"})

-    with capture_logs() as cap_logs, patch(
-        "api.controllers.search_controller.get_raw_es_response",
-        return_value={
-            "aggregations": {
-                "unique_sources": {
-                    "buckets": [
-                        {"key": "source_1", "doc_count": 1000},
-                        {"key": "source_2", "doc_count": 1000},
-                    ]
-                }
-            }
-        },
-    ):
+    with (
+        capture_logs() as cap_logs,
+        patch(
+            "api.controllers.search_controller.get_raw_es_response",
+            return_value={
+                "aggregations": {
+                    "unique_sources": {
+                        "buckets": [
+                            {"key": "source_1", "doc_count": 1000},
+                            {"key": "source_2", "doc_count": 1000},
+                        ]
+                    }
+                }
+            },
+        ),
+    ):
assert search_controller.get_sources("multimedia") == {
"source_1": 1000,
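This and the remaining test changes below all make the same edit: context managers chained after `with` are regrouped into the parenthesized form that Python supports from 3.10 onward, letting the formatter stack one manager per line instead of breaking at awkward call boundaries. A standalone sketch using only the standard library:

from contextlib import nullcontext

# Before: managers chained on one logical line, wrapped mid-call
with nullcontext("a") as a, nullcontext("b") as b:
    print(a, b)

# After: parenthesized context managers (Python 3.10+), one per line,
# with a trailing comma so the formatter keeps them stacked
with (
    nullcontext("a") as a,
    nullcontext("b") as b,
):
    print(a, b)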
18 changes: 11 additions & 7 deletions api/test/unit/views/test_media_views.py
@@ -24,13 +24,17 @@ def test_list_query_count(api_client, media_type_config):
num_results,
{}, # search_context
)
-    with patch(
-        "api.views.media_views.search_controller",
-        query_media=MagicMock(return_value=controller_ret),
-    ), patch(
-        "api.serializers.media_serializers.search_controller",
-        get_sources=MagicMock(return_value={}),
-    ), pytest_django.asserts.assertNumQueries(1):
+    with (
+        patch(
+            "api.views.media_views.search_controller",
+            query_media=MagicMock(return_value=controller_ret),
+        ),
+        patch(
+            "api.serializers.media_serializers.search_controller",
+            get_sources=MagicMock(return_value={}),
+        ),
+        pytest_django.asserts.assertNumQueries(1),
+    ):
res = api_client.get(f"/v1/{media_type_config.url_prefix}/")

assert res.status_code == 200
4 changes: 2 additions & 2 deletions catalog/dags/data_augmentation/rekognition/types.py
@@ -1,10 +1,10 @@
# Type definitions for the sample data
-from typing import NamedTuple, Type, TypedDict
+from typing import NamedTuple, TypedDict

from psycopg2.extras import Json


-TagsBuffer: Type = list[tuple[str, Json]]
+TagsBuffer: type = list[tuple[str, Json]]


class Label(TypedDict, total=False):
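`typing.Type` is deprecated in favor of the builtin `type`, the same builtin-generics migration as the import changes above. A minimal sketch of the annotation swap, unrelated to the Rekognition types:

# Before (deprecated): from typing import Type; cls: Type[Widget]
class Widget:
    pass


def make(cls: type[Widget]) -> Widget:
    # builtin type[...] replaces typing.Type[...]
    return cls()


assert isinstance(make(Widget), Widget)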
7 changes: 4 additions & 3 deletions catalog/tests/dags/data_refresh/test_alter_data.py
@@ -80,9 +80,10 @@ def test_alter_data_batch():
(51, "bbb", True),
(52, "ccc", False),
]
-    with mock.patch("data_refresh.alter_data.PostgresHook") as HookMock, mock.patch(
-        "data_refresh.alter_data.generate_tag_updates"
-    ) as tag_updates_mock:
+    with (
+        mock.patch("data_refresh.alter_data.PostgresHook") as HookMock,
+        mock.patch("data_refresh.alter_data.generate_tag_updates") as tag_updates_mock,
+    ):
mock_pg = HookMock.return_value
mock_pg.run.return_value = sample_data
mock_cursor = (
@@ -23,8 +23,9 @@ def image_data():


def test__get_initial_query_params():
-    with patch.object(pp, "get_response_json", return_value={}), pytest.raises(
-        Exception
-    ):
+    with (
+        patch.object(pp, "get_response_json", return_value={}),
+        pytest.raises(Exception),
+    ):
pp._get_initial_query_params()

@@ -58,19 +58,25 @@ def test_alert_new_unit_codes():
def test_validate_unit_codes_from_api_raises_exception(
new_unit_codes, outdated_unit_codes
):
-    with patch.object(ingester, "_get_unit_codes_from_api"), patch.object(
-        ingester,
-        "_get_new_and_outdated_unit_codes",
-        return_value=(new_unit_codes, outdated_unit_codes),
-    ):
+    with (
+        patch.object(ingester, "_get_unit_codes_from_api"),
+        patch.object(
+            ingester,
+            "_get_new_and_outdated_unit_codes",
+            return_value=(new_unit_codes, outdated_unit_codes),
+        ),
+    ):
message = "^\n\\*Updates needed to the SMITHSONIAN_SUB_PROVIDERS dictionary\\**"
with pytest.raises(AirflowException, match=message):
ingester.validate_unit_codes_from_api()


def test_validate_unit_codes_from_api():
-    with patch.object(ingester, "_get_unit_codes_from_api"), patch.object(
-        ingester, "_get_new_and_outdated_unit_codes", return_value=(set(), set())
-    ):
+    with (
+        patch.object(ingester, "_get_unit_codes_from_api"),
+        patch.object(
+            ingester, "_get_new_and_outdated_unit_codes", return_value=(set(), set())
+        ),
+    ):
# Validation should run without raising an exception
ingester.validate_unit_codes_from_api()
@@ -31,8 +31,9 @@ def test_generate_markdown_doc(props_source_mock):
preamble_mock = "Preamble Content"
postamble_mock = "Postamble Content"

with patch(f"{MODULE}.PREAMBLE", preamble_mock), patch(
f"{MODULE}.POSTAMBLE", postamble_mock
with (
patch(f"{MODULE}.PREAMBLE", preamble_mock),
patch(f"{MODULE}.POSTAMBLE", postamble_mock),
):
result = generate_markdown_doc()

8 changes: 4 additions & 4 deletions documentation/_ext/link_issues.py
@@ -54,7 +54,7 @@
class IssueTrackerBuildEnvironment(BuildEnvironment):
tracker_config: "TrackerConfig"
issuetracker_cache: "IssueTrackerCache"
-    github_rate_limit: t.Tuple[float, bool]
+    github_rate_limit: tuple[float, bool]


class Issue(t.NamedTuple):
@@ -64,7 +64,7 @@ class Issue(t.NamedTuple):
closed: bool


-IssueTrackerCache = t.Dict[str, Issue]
+IssueTrackerCache = dict[str, Issue]


@dataclasses.dataclass
@@ -355,7 +355,7 @@ def lookup_github_issue(
return None


-BUILTIN_ISSUE_TRACKERS: t.Dict[str, t.Any] = {
+BUILTIN_ISSUE_TRACKERS: dict[str, t.Any] = {
"github": lookup_github_issue,
}

@@ -371,7 +371,7 @@ def connect_builtin_tracker(app: Sphinx) -> None:
app.connect("issuetracker-lookup-issue", tracker)


-def setup(app: Sphinx) -> t.Dict[str, t.Any]:
+def setup(app: Sphinx) -> dict[str, t.Any]:
app.add_event("issuetracker-lookup-issue")
app.connect("builder-inited", connect_builtin_tracker)
app.add_config_value("issuetracker", None, "env")
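Since Python 3.9 (PEP 585), `dict` and `tuple` are subscriptable directly, so `t.Dict[str, t.Any]` and `t.Tuple[float, bool]` become plain `dict[...]` and `tuple[...]`; special forms like `t.Any` have no builtin counterpart and keep their `typing` import, which is why this module still imports `typing as t`. A minimal sketch, separate from the Sphinx extension:

import typing as t

# Builtin generics replace t.Tuple and t.Dict in aliases and signatures
RateLimit = tuple[float, bool]
TrackerRegistry = dict[str, t.Any]


def register(registry: TrackerRegistry, name: str, tracker: t.Any) -> None:
    # t.Any survives the migration; only the container aliases change
    registry[name] = tracker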
@@ -33,11 +33,15 @@ def test_assign_work(estimated_records, record_limit, workers, expected_ranges):
estimated_records
]
# Enable pook & mock other internal functions
-    with pook.use(), mock.patch(
-        "ingestion_server.distributed_reindex_scheduler.get_record_limit"
-    ) as mock_get_record_limit, mock.patch(
-        "ingestion_server.distributed_reindex_scheduler._wait_for_healthcheck"
-    ) as mock_wait_for_healthcheck:
+    with (
+        pook.use(),
+        mock.patch(
+            "ingestion_server.distributed_reindex_scheduler.get_record_limit"
+        ) as mock_get_record_limit,
+        mock.patch(
+            "ingestion_server.distributed_reindex_scheduler._wait_for_healthcheck"
+        ) as mock_wait_for_healthcheck,
+    ):
mock_wait_for_healthcheck.return_value = True
mock_get_record_limit.return_value = record_limit

@@ -68,9 +72,12 @@ def test_assign_work(estimated_records, record_limit, workers, expected_ranges):
def test_assign_work_workers_fail():
mock_db = mock.MagicMock()
mock_db.cursor.return_value.__enter__.return_value.fetchone.return_value = [100]
-    with mock.patch(
-        "ingestion_server.distributed_reindex_scheduler._wait_for_healthcheck"
-    ) as mock_wait_for_healthcheck, pook.use(False):
+    with (
+        mock.patch(
+            "ingestion_server.distributed_reindex_scheduler._wait_for_healthcheck"
+        ) as mock_wait_for_healthcheck,
+        pook.use(False),
+    ):
mock_wait_for_healthcheck.return_value = False

with pytest.raises(ValueError, match="Some workers didn't respond"):
