From 78f1757e3fe28fc4c571b5db0d3fbf1edbc272a4 Mon Sep 17 00:00:00 2001
From: Maria Khrustaleva
Date: Mon, 25 Mar 2024 12:07:54 +0100
Subject: [PATCH] Move logic for downloading resources from CS into main
 background process (#7551)

### Motivation and context

Using dependent jobs to download resources from cloud storage causes more problems than it solves. With that architecture the import queue can be stalled forever. For example: the RQ_1 job that downloads a file from cloud storage fails, so the dependent RQ_2 job that imports the file is never started ([1](https://github.dev/rq/rq/blob/3ad86083c33ec28b81a07f94dafdcf1cd56429ea/rq/worker.py#L1562), [2](https://github.dev/rq/rq/blob/3ad86083c33ec28b81a07f94dafdcf1cd56429ea/rq/worker.py#L691-L692)). RQ_2 then stays in the deferred registry forever, because nothing cleans that registry and, for some reason, such jobs are not moved to the failed registry after their dependencies fail. The next import request then creates a new RQ job that depends on RQ_2, and so on.

This PR fixes the described issue by downloading the resource inside the main import job instead of in a separate dependent job (a condensed sketch of the new flow is included at the end of this description).

### How has this been tested?

Manually

### Checklist

- [x] I submit my changes into the `develop` branch
- [ ] I have created a changelog fragment
- [ ] I have updated the documentation accordingly
- [ ] I have added tests to cover my changes
- [ ] I have linked related issues (see [GitHub docs](https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword))
~~- [ ] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning))~~

### License

- [x] I submit _my code changes_ under the same [MIT License](https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project.
  Feel free to contact the maintainers if that's a concern.
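For reviewers, here is a condensed sketch of the new flow. It is simplified from the diff below rather than a drop-in excerpt: `db_storage_to_storage_instance`, `import_resource_with_clean_up_after`, `get_rq_lock_by_user`, `define_dependent_job`, and `Location` are the existing CVAT helpers referenced in the patch, while `enqueue_import` is a hypothetical wrapper added here only to illustrate the pattern (the real code inlines this logic in `_import`, `_import_annotations`, and `_import_project_dataset`).

```python
# Sketch only: the download from cloud storage now runs inside the same RQ job
# that performs the import, so a failed download simply fails that one job
# instead of stranding a deferred dependent job.
from cvat.apps.engine.cloud_provider import db_storage_to_storage_instance
from cvat.apps.engine.models import Location
from cvat.apps.engine.utils import (
    define_dependent_job, get_rq_lock_by_user, import_resource_with_clean_up_after,
)


def import_resource_from_cloud_storage(db_storage, key, cleanup_func, import_func,
                                       filename, *args, **kwargs):
    # Download the object into the temporary file, then run the wrapped import
    # function on it -- all within a single RQ job.
    storage = db_storage_to_storage_instance(db_storage)
    with storage.download_fileobj(key) as data, open(filename, 'wb+') as f:
        f.write(data.getbuffer())
    return cleanup_func(import_func, filename, *args, **kwargs)


def enqueue_import(queue, rq_id, importer, filename, user_id, org_id,
                   location, db_storage=None, key=None):
    # Enqueue one job that (for cloud storage) downloads and then imports the resource.
    func = import_resource_with_clean_up_after
    func_args = (importer, filename, user_id, org_id)

    if location == Location.CLOUD_STORAGE:
        # Prepend the download arguments; the wrapper invokes the original func itself.
        func_args = (db_storage, key, func) + func_args
        func = import_resource_from_cloud_storage

    with get_rq_lock_by_user(queue, user_id):
        return queue.enqueue_call(
            func=func,
            args=func_args,
            job_id=rq_id,
            # Only the per-user ordering dependency remains; the separate download
            # job is gone. (meta, result_ttl, failure_ttl omitted for brevity.)
            depends_on=define_dependent_job(queue, user_id),
        )
```

Because a download failure now fails the import job itself, `process_failed_job` no longer needs to inspect or delete `rq_job.dependency`, and the status-check branches only have to distinguish `is_finished` from `is_failed`.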
--------- Co-authored-by: Andrey Zhavoronkov --- cvat/apps/engine/backup.py | 35 ++++------ cvat/apps/engine/cloud_provider.py | 16 ++++- cvat/apps/engine/utils.py | 47 +------------ cvat/apps/engine/views.py | 68 +++++++----------- cvat/requirements/base.in | 2 +- cvat/requirements/base.txt | 92 +++++++++++++------------ cvat/requirements/development.txt | 20 +++--- cvat/requirements/production.txt | 20 +++--- cvat/rq_patching.py | 2 +- utils/dataset_manifest/requirements.txt | 6 +- 10 files changed, 129 insertions(+), 179 deletions(-) diff --git a/cvat/apps/engine/backup.py b/cvat/apps/engine/backup.py index 42105687de1e..489ce8f58244 100644 --- a/cvat/apps/engine/backup.py +++ b/cvat/apps/engine/backup.py @@ -36,14 +36,14 @@ LabeledDataSerializer, SegmentSerializer, SimpleJobSerializer, TaskReadSerializer, ProjectReadSerializer, ProjectFileSerializer, TaskFileSerializer, RqIdSerializer) from cvat.apps.engine.utils import ( - av_scan_paths, process_failed_job, configure_dependent_job_to_download_from_cs, + av_scan_paths, process_failed_job, get_rq_job_meta, get_import_rq_id, import_resource_with_clean_up_after, sendfile, define_dependent_job, get_rq_lock_by_user, build_backup_file_name, ) from cvat.apps.engine.models import ( StorageChoice, StorageMethodChoice, DataChoice, Task, Project, Location) from cvat.apps.engine.task import JobFileMapping, _create_thread -from cvat.apps.engine.cloud_provider import download_file_from_bucket, export_resource_to_cloud_storage +from cvat.apps.engine.cloud_provider import import_resource_from_cloud_storage, export_resource_to_cloud_storage from cvat.apps.engine.location import StorageType, get_location_configuration from cvat.apps.engine.view_utils import get_cloud_storage_for_import_or_export from cvat.apps.dataset_manager.views import TASK_CACHE_TTL, PROJECT_CACHE_TTL, get_export_cache_dir, clear_export_cache, log_exception @@ -1051,9 +1051,8 @@ def _import(importer, request, queue, rq_id, Serializer, file_field_name, locati if not rq_job: org_id = getattr(request.iam_context['organization'], 'id', None) - dependent_job = None - location = location_conf.get('location') + if location == Location.LOCAL: if not filename: serializer = Serializer(data=request.data) @@ -1084,42 +1083,34 @@ def _import(importer, request, queue, rq_id, Serializer, file_field_name, locati with NamedTemporaryFile(prefix='cvat_', dir=settings.TMP_FILES_ROOT, delete=False) as tf: filename = tf.name - dependent_job = configure_dependent_job_to_download_from_cs( - queue=queue, - rq_id=rq_id, - rq_func=download_file_from_bucket, - db_storage=db_storage, - filename=filename, - key=key, - request=request, - result_ttl=settings.IMPORT_CACHE_SUCCESS_TTL.total_seconds(), - failure_ttl=settings.IMPORT_CACHE_FAILED_TTL.total_seconds() - ) + func = import_resource_with_clean_up_after + func_args = (importer, filename, request.user.id, org_id) + + if location == Location.CLOUD_STORAGE: + func_args = (db_storage, key, func) + func_args + func = import_resource_from_cloud_storage user_id = request.user.id with get_rq_lock_by_user(queue, user_id): rq_job = queue.enqueue_call( - func=import_resource_with_clean_up_after, - args=(importer, filename, request.user.id, org_id), + func=func, + args=func_args, job_id=rq_id, meta={ 'tmp_file': filename, **get_rq_job_meta(request=request, db_obj=None) }, - depends_on=dependent_job or define_dependent_job(queue, user_id), + depends_on=define_dependent_job(queue, user_id), result_ttl=settings.IMPORT_CACHE_SUCCESS_TTL.total_seconds(), 
failure_ttl=settings.IMPORT_CACHE_FAILED_TTL.total_seconds() ) else: if rq_job.is_finished: - if rq_job.dependency: - rq_job.dependency.delete() project_id = rq_job.return_value() rq_job.delete() return Response({'id': project_id}, status=status.HTTP_201_CREATED) - elif rq_job.is_failed or \ - rq_job.is_deferred and rq_job.dependency and rq_job.dependency.is_failed: + elif rq_job.is_failed: exc_info = process_failed_job(rq_job) # RQ adds a prefix with exception class name import_error_prefix = '{}.{}'.format( diff --git a/cvat/apps/engine/cloud_provider.py b/cvat/apps/engine/cloud_provider.py index 1ddf88c95bdb..63cfefcd6309 100644 --- a/cvat/apps/engine/cloud_provider.py +++ b/cvat/apps/engine/cloud_provider.py @@ -10,7 +10,7 @@ from enum import Enum from io import BytesIO from multiprocessing.pool import ThreadPool -from typing import Dict, List, Optional, Any, Callable +from typing import Dict, List, Optional, Any, Callable, TypeVar import boto3 from azure.core.exceptions import HttpResponseError, ResourceExistsError @@ -963,12 +963,24 @@ def db_storage_to_storage_instance(db_storage): } return get_cloud_storage_instance(cloud_provider=db_storage.provider_type, **details) -def download_file_from_bucket(db_storage: Any, filename: str, key: str) -> None: +T = TypeVar('T', Callable[[str, int, int], int], Callable[[str, int, str, bool], None]) + +def import_resource_from_cloud_storage( + db_storage: Any, + key: str, + cleanup_func: Callable[[T, str,], Any], + import_func: T, + filename: str, + *args, + **kwargs, +) -> Any: storage = db_storage_to_storage_instance(db_storage) with storage.download_fileobj(key) as data, open(filename, 'wb+') as f: f.write(data.getbuffer()) + return cleanup_func(import_func, filename, *args, **kwargs) + def export_resource_to_cloud_storage( db_storage: Any, key: str, diff --git a/cvat/apps/engine/utils.py b/cvat/apps/engine/utils.py index ee11a65bec94..efb6f0c8ba1c 100644 --- a/cvat/apps/engine/utils.py +++ b/cvat/apps/engine/utils.py @@ -144,9 +144,7 @@ def parse_exception_message(msg): return parsed_msg def process_failed_job(rq_job: Job): - exc_info = str(rq_job.exc_info or getattr(rq_job.dependency, 'exc_info', None) or '') - if rq_job.dependency: - rq_job.dependency.delete() + exc_info = str(rq_job.exc_info or '') rq_job.delete() msg = parse_exception_message(exc_info) @@ -204,50 +202,11 @@ def define_dependent_job( return Dependency(jobs=[sorted(user_jobs, key=lambda job: job.created_at)[-1]], allow_failure=True) if user_jobs else None -def get_rq_lock_by_user(queue: DjangoRQ, user_id: int, additional_condition: bool = True) -> Union[Lock, nullcontext]: - if settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER and additional_condition: +def get_rq_lock_by_user(queue: DjangoRQ, user_id: int) -> Union[Lock, nullcontext]: + if settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER: return queue.connection.lock(f'{queue.name}-lock-{user_id}', timeout=30) return nullcontext() - -def configure_dependent_job_to_download_from_cs( - queue: DjangoRQ, - rq_id: str, - rq_func: Callable[[Any, str, str], None], - db_storage: Any, - filename: str, - key: str, - request: HttpRequest, - result_ttl: float, - failure_ttl: float, - should_be_dependent: bool = settings.ONE_RUNNING_JOB_IN_QUEUE_PER_USER, -) -> Job: - rq_job_id_download_file = rq_id + f'?action=download_{key.replace("/", ".")}' - rq_job_download_file = queue.fetch_job(rq_job_id_download_file) - - if rq_job_download_file and (rq_job_download_file.is_finished or rq_job_download_file.is_failed): - rq_job_download_file.delete() - 
rq_job_download_file = None - - if not rq_job_download_file: - # note: boto3 resource isn't pickleable, so we can't use storage - user_id = request.user.id - - with get_rq_lock_by_user(queue, user_id): - rq_job_download_file = queue.enqueue_call( - func=rq_func, - args=(db_storage, filename, key), - job_id=rq_job_id_download_file, - meta={ - **get_rq_job_meta(request=request, db_obj=db_storage), - KEY_TO_EXCLUDE_FROM_DEPENDENCY: True, - }, - result_ttl=result_ttl, - failure_ttl=failure_ttl, - depends_on=define_dependent_job(queue, user_id, should_be_dependent, rq_id=rq_job_id_download_file) - ) - return rq_job_download_file - def get_rq_job_meta(request, db_obj): # to prevent circular import from cvat.apps.webhooks.signals import project_id, organization_id diff --git a/cvat/apps/engine/views.py b/cvat/apps/engine/views.py index f428e3516119..bf65fdb13408 100644 --- a/cvat/apps/engine/views.py +++ b/cvat/apps/engine/views.py @@ -42,7 +42,7 @@ import cvat.apps.dataset_manager as dm import cvat.apps.dataset_manager.views # pylint: disable=unused-import -from cvat.apps.engine.cloud_provider import db_storage_to_storage_instance, download_file_from_bucket, export_resource_to_cloud_storage +from cvat.apps.engine.cloud_provider import db_storage_to_storage_instance, import_resource_from_cloud_storage, export_resource_to_cloud_storage from cvat.apps.events.handlers import handle_dataset_export, handle_dataset_import from cvat.apps.dataset_manager.bindings import CvatImportError from cvat.apps.dataset_manager.serializers import DatasetFormatsSerializer @@ -70,7 +70,7 @@ from utils.dataset_manifest import ImageManifestManager from cvat.apps.engine.utils import ( - av_scan_paths, process_failed_job, configure_dependent_job_to_download_from_cs, + av_scan_paths, process_failed_job, parse_exception_message, get_rq_job_meta, get_import_rq_id, import_resource_with_clean_up_after, sendfile, define_dependent_job, get_rq_lock_by_user, build_annotations_file_name, @@ -376,12 +376,9 @@ def dataset(self, request, pk): if rq_job is None: return Response(status=status.HTTP_404_NOT_FOUND) elif rq_job.is_finished: - if rq_job.dependency: - rq_job.dependency.delete() rq_job.delete() return Response(status=status.HTTP_201_CREATED) - elif rq_job.is_failed or \ - rq_job.is_deferred and rq_job.dependency and rq_job.dependency.is_failed: + elif rq_job.is_failed: exc_info = process_failed_job(rq_job) return Response( @@ -2836,9 +2833,7 @@ def _import_annotations(request, rq_id_template, rq_func, db_obj, format_name, # If filename is specified we consider that file was uploaded via TUS, so it exists in filesystem # Then we dont need to create temporary file # Or filename specify key in cloud storage so we need to download file - dependent_job = None location = location_conf.get('location') if location_conf else Location.LOCAL - db_storage = None if not filename or location == Location.CLOUD_STORAGE: @@ -2873,27 +2868,22 @@ def _import_annotations(request, rq_id_template, rq_func, db_obj, format_name, delete=False) as tf: filename = tf.name - dependent_job = configure_dependent_job_to_download_from_cs( - queue=queue, - rq_id=rq_id, - rq_func=download_file_from_bucket, - db_storage=db_storage, - filename=filename, - key=key, - request=request, - result_ttl=settings.IMPORT_CACHE_SUCCESS_TTL.total_seconds(), - failure_ttl=settings.IMPORT_CACHE_FAILED_TTL.total_seconds() - ) + func = import_resource_with_clean_up_after + func_args = (rq_func, filename, db_obj.pk, format_name, conv_mask_to_poly) + + if location == 
Location.CLOUD_STORAGE: + func_args = (db_storage, key, func) + func_args + func = import_resource_from_cloud_storage av_scan_paths(filename) user_id = request.user.id - with get_rq_lock_by_user(queue, user_id, additional_condition=not dependent_job): + with get_rq_lock_by_user(queue, user_id): rq_job = queue.enqueue_call( - func=import_resource_with_clean_up_after, - args=(rq_func, filename, db_obj.pk, format_name, conv_mask_to_poly), + func=func, + args=func_args, job_id=rq_id, - depends_on=dependent_job or define_dependent_job(queue, user_id, rq_id=rq_id), + depends_on=define_dependent_job(queue, user_id, rq_id=rq_id), meta={ 'tmp_file': filename, **get_rq_job_meta(request=request, db_obj=db_obj), @@ -2910,12 +2900,9 @@ def _import_annotations(request, rq_id_template, rq_func, db_obj, format_name, return Response(serializer.data, status=status.HTTP_202_ACCEPTED) else: if rq_job.is_finished: - if rq_job.dependency: - rq_job.dependency.delete() rq_job.delete() return Response(status=status.HTTP_201_CREATED) - elif rq_job.is_failed or \ - rq_job.is_deferred and rq_job.dependency and rq_job.dependency.is_failed: + elif rq_job.is_failed: exc_info = process_failed_job(rq_job) import_error_prefix = f'{CvatImportError.__module__}.{CvatImportError.__name__}:' @@ -3090,7 +3077,7 @@ def _import_project_dataset(request, rq_id_template, rq_func, db_obj, format_nam # (e.g the user closed the browser tab when job has been created # but no one requests for checking status were not made) rq_job.delete() - dependent_job = None + location = location_conf.get('location') if location_conf else None db_storage = None @@ -3125,30 +3112,25 @@ def _import_project_dataset(request, rq_id_template, rq_func, db_obj, format_nam delete=False) as tf: filename = tf.name - dependent_job = configure_dependent_job_to_download_from_cs( - queue=queue, - rq_id=rq_id, - rq_func=download_file_from_bucket, - db_storage=db_storage, - filename=filename, - key=key, - request=request, - result_ttl=settings.IMPORT_CACHE_SUCCESS_TTL.total_seconds(), - failure_ttl=settings.IMPORT_CACHE_FAILED_TTL.total_seconds() - ) + func = import_resource_with_clean_up_after + func_args = (rq_func, filename, db_obj.pk, format_name, conv_mask_to_poly) + + if location == Location.CLOUD_STORAGE: + func_args = (db_storage, key, func) + func_args + func = import_resource_from_cloud_storage user_id = request.user.id - with get_rq_lock_by_user(queue, user_id, additional_condition=not dependent_job): + with get_rq_lock_by_user(queue, user_id): rq_job = queue.enqueue_call( - func=import_resource_with_clean_up_after, - args=(rq_func, filename, db_obj.pk, format_name, conv_mask_to_poly), + func=func, + args=func_args, job_id=rq_id, meta={ 'tmp_file': filename, **get_rq_job_meta(request=request, db_obj=db_obj), }, - depends_on=dependent_job or define_dependent_job(queue, user_id, rq_id=rq_id), + depends_on=define_dependent_job(queue, user_id, rq_id=rq_id), result_ttl=settings.IMPORT_CACHE_SUCCESS_TTL.total_seconds(), failure_ttl=settings.IMPORT_CACHE_FAILED_TTL.total_seconds() ) diff --git a/cvat/requirements/base.in b/cvat/requirements/base.in index e9b37657ed7e..91d594e89e18 100644 --- a/cvat/requirements/base.in +++ b/cvat/requirements/base.in @@ -48,6 +48,6 @@ pyunpack==0.2.1 redis==4.5.4 requests~=2.26 rq-scheduler==0.13.1 -rq==1.15.1 +rq==1.16.0 rules>=3.3 Shapely==1.7.1 diff --git a/cvat/requirements/base.txt b/cvat/requirements/base.txt index 3b10c67bc585..df10ef1b6685 100644 --- a/cvat/requirements/base.txt +++ b/cvat/requirements/base.txt @@ -1,4 +1,4 
@@ -# SHA1:55af6f61daa4ceab3e9aa358d3109c7af9660c0a +# SHA1:92683dac1b858a87e89ba736358e779ac8be666d # # This file is autogenerated by pip-compile-multi # To update, run: @@ -15,7 +15,7 @@ attrs==21.4.0 # -r cvat/requirements/base.in # datumaro # jsonschema -azure-core==1.29.4 +azure-core==1.30.1 # via # azure-storage-blob # msrest @@ -27,22 +27,22 @@ botocore==1.20.112 # via # boto3 # s3transfer -cachetools==5.3.1 +cachetools==5.3.3 # via google-auth -certifi==2023.7.22 +certifi==2024.2.2 # via # clickhouse-connect # msrest # requests cffi==1.16.0 # via cryptography -charset-normalizer==3.3.0 +charset-normalizer==3.3.2 # via requests click==8.1.7 # via rq clickhouse-connect==0.6.8 # via -r cvat/requirements/base.in -contourpy==1.1.1 +contourpy==1.2.0 # via matplotlib coreapi==2.3.3 # via -r cvat/requirements/base.in @@ -50,7 +50,7 @@ coreschema==0.0.4 # via coreapi crontab==1.0.1 # via rq-scheduler -cryptography==42.0.4 +cryptography==42.0.5 # via # azure-storage-blob # pyjwt @@ -66,9 +66,11 @@ deprecated==1.2.14 # via limits dj-pagination==2.5.0 # via -r cvat/requirements/base.in -dj-rest-auth[with_social]==2.2.7 - # via -r cvat/requirements/base.in -django==4.2.6 +dj-rest-auth[with-social]==2.2.7 + # via + # -r cvat/requirements/base.in + # dj-rest-auth +django==4.2.11 # via # -r cvat/requirements/base.in # dj-rest-auth @@ -87,7 +89,7 @@ django-allauth==0.52.0 # via # -r cvat/requirements/base.in # dj-rest-auth -django-appconf==1.0.5 +django-appconf==1.0.6 # via django-compressor django-auth-ldap==2.2.0 # via -r cvat/requirements/base.in @@ -116,38 +118,38 @@ easyprocess==1.1 # via pyunpack entrypoint2==1.1 # via pyunpack -fonttools==4.43.1 +fonttools==4.49.0 # via matplotlib -freezegun==1.2.2 +freezegun==1.4.0 # via rq-scheduler furl==2.1.0 # via -r cvat/requirements/base.in -google-api-core==2.12.0 +google-api-core==2.17.1 # via # google-cloud-core # google-cloud-storage -google-auth==2.23.3 +google-auth==2.28.1 # via # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 +google-cloud-core==2.4.1 # via google-cloud-storage google-cloud-storage==1.42.0 # via -r cvat/requirements/base.in google-crc32c==1.5.0 # via google-resumable-media -google-resumable-media==2.6.0 +google-resumable-media==2.7.0 # via google-cloud-storage -googleapis-common-protos==1.60.0 +googleapis-common-protos==1.62.0 # via google-api-core h5py==3.10.0 # via datumaro -idna==3.4 +idna==3.6 # via requests -importlib-metadata==6.8.0 +importlib-metadata==7.0.1 # via clickhouse-connect -importlib-resources==6.1.0 +importlib-resources==6.1.2 # via limits inflection==0.5.1 # via drf-spectacular @@ -165,29 +167,29 @@ jsonschema==4.17.3 # via drf-spectacular kiwisolver==1.4.5 # via matplotlib -limits==3.6.0 +limits==3.9.0 # via python-logstash-async -lxml==4.9.3 +lxml==5.1.0 # via datumaro -lz4==4.3.2 +lz4==4.3.3 # via clickhouse-connect -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 -matplotlib==3.8.0 +matplotlib==3.8.3 # via # datumaro # pycocotools msrest==0.7.1 # via azure-storage-blob -networkx==3.1 +networkx==3.2.1 # via datumaro -nibabel==5.1.0 +nibabel==5.2.1 # via datumaro oauthlib==3.2.2 # via requests-oauthlib orderedmultidict==1.0.1 # via furl -orjson==3.9.8 +orjson==3.9.15 # via datumaro packaging==23.2 # via @@ -195,13 +197,13 @@ packaging==23.2 # matplotlib # nibabel # tensorboardx -pandas==2.1.1 +pandas==2.2.1 # via datumaro patool==1.12 # via -r cvat/requirements/base.in pdf2image==1.14.0 # via -r cvat/requirements/base.in -protobuf==3.19.6 +protobuf==4.25.3 # via # 
google-api-core # googleapis-common-protos @@ -210,7 +212,7 @@ psutil==5.9.4 # via -r cvat/requirements/base.in psycopg2-binary==2.9.5 # via -r cvat/requirements/base.in -pyasn1==0.5.0 +pyasn1==0.5.1 # via # pyasn1-modules # python-ldap @@ -224,14 +226,16 @@ pycocotools==2.0.7 pycparser==2.21 # via cffi pyjwt[crypto]==2.8.0 - # via django-allauth + # via + # django-allauth + # pyjwt pylogbeat==2.0.1 # via python-logstash-async pyparsing==3.1.1 # via matplotlib -pyrsistent==0.19.3 +pyrsistent==0.20.0 # via jsonschema -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # botocore # freezegun @@ -246,7 +250,7 @@ python-logstash-async==2.5.0 # via -r cvat/requirements/base.in python3-openid==3.2.0 # via django-allauth -pytz==2020.1 +pytz==2024.1 # via # clickhouse-connect # djangorestframework @@ -282,7 +286,7 @@ requests-oauthlib==1.3.1 # msrest rjsmin==1.2.1 # via django-compressor -rq==1.15.1 +rq==1.16.0 # via # -r cvat/requirements/base.in # django-rq @@ -291,7 +295,7 @@ rq-scheduler==0.13.1 # via -r cvat/requirements/base.in rsa==4.9 # via google-auth -ruamel-yaml==0.17.35 +ruamel-yaml==0.18.6 # via datumaro ruamel-yaml-clib==0.2.8 # via ruamel-yaml @@ -299,7 +303,7 @@ rules==3.3 # via -r cvat/requirements/base.in s3transfer==0.4.2 # via boto3 -scipy==1.11.3 +scipy==1.12.0 # via datumaro shapely==1.7.1 # via -r cvat/requirements/base.in @@ -312,15 +316,15 @@ six==1.16.0 # python-dateutil sqlparse==0.4.4 # via django -tensorboardx==2.6 +tensorboardx==2.6.2.2 # via datumaro -typing-extensions==4.8.0 +typing-extensions==4.10.0 # via # asgiref # azure-core # datumaro # limits -tzdata==2023.3 +tzdata==2024.1 # via pandas uritemplate==4.1.1 # via @@ -331,9 +335,9 @@ urllib3==1.26.18 # botocore # clickhouse-connect # requests -wrapt==1.15.0 +wrapt==1.16.0 # via deprecated zipp==3.17.0 # via importlib-metadata -zstandard==0.21.0 +zstandard==0.22.0 # via clickhouse-connect diff --git a/cvat/requirements/development.txt b/cvat/requirements/development.txt index 995112dbc937..3625cd7745bd 100644 --- a/cvat/requirements/development.txt +++ b/cvat/requirements/development.txt @@ -10,9 +10,9 @@ astroid==2.11.7 # via pylint autopep8==2.0.4 # via django-silk -black==24.1.1 +black==24.2.0 # via -r cvat/requirements/development.in -dill==0.3.7 +dill==0.3.8 # via pylint django-extensions==3.0.8 # via -r cvat/requirements/development.in @@ -20,21 +20,21 @@ django-silk==5.0.3 # via -r cvat/requirements/development.in gprof2dot==2022.7.29 # via django-silk -isort==5.12.0 +isort==5.13.2 # via pylint -lazy-object-proxy==1.9.0 +lazy-object-proxy==1.10.0 # via astroid mccabe==0.7.0 # via pylint mypy-extensions==1.0.0 # via black -pathspec==0.11.2 +pathspec==0.12.1 # via black -platformdirs==3.11.0 +platformdirs==4.2.0 # via # black # pylint -pycodestyle==2.11.0 +pycodestyle==2.11.1 # via autopep8 pylint==2.14.5 # via @@ -56,11 +56,11 @@ tomli==2.0.1 # autopep8 # black # pylint -tomlkit==0.12.1 +tomlkit==0.12.4 # via pylint -tornado==6.3.3 +tornado==6.4 # via snakeviz # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 +setuptools==69.1.1 # via astroid diff --git a/cvat/requirements/production.txt b/cvat/requirements/production.txt index a919444f96cf..0bea0b968b15 100644 --- a/cvat/requirements/production.txt +++ b/cvat/requirements/production.txt @@ -6,25 +6,27 @@ # pip-compile-multi # -r base.txt -anyio==4.0.0 +anyio==4.3.0 # via watchfiles coverage==7.2.3 # via -r cvat/requirements/production.in -exceptiongroup==1.1.3 +exceptiongroup==1.2.0 # via anyio 
h11==0.14.0 # via uvicorn -httptools==0.6.0 +httptools==0.6.1 # via uvicorn -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via uvicorn -sniffio==1.3.0 +sniffio==1.3.1 # via anyio uvicorn[standard]==0.22.0 - # via -r cvat/requirements/production.in -uvloop==0.17.0 + # via + # -r cvat/requirements/production.in + # uvicorn +uvloop==0.19.0 # via uvicorn -watchfiles==0.20.0 +watchfiles==0.21.0 # via uvicorn -websockets==11.0.3 +websockets==12.0 # via uvicorn diff --git a/cvat/rq_patching.py b/cvat/rq_patching.py index fcf5bc2f6d89..cd8c1ac74225 100644 --- a/cvat/rq_patching.py +++ b/cvat/rq_patching.py @@ -71,5 +71,5 @@ def custom_started_job_registry_cleanup(self, timestamp: Optional[float] = None) def update_started_job_registry_cleanup() -> None: # don't forget to check if the issue https://github.com/rq/rq/issues/2006 has been resolved in upstream - assert VERSION == '1.15.1' + assert VERSION == '1.16.0' rq.registry.StartedJobRegistry.cleanup = custom_started_job_registry_cleanup diff --git a/utils/dataset_manifest/requirements.txt b/utils/dataset_manifest/requirements.txt index f5a34a49dc7c..6210dac1d64d 100644 --- a/utils/dataset_manifest/requirements.txt +++ b/utils/dataset_manifest/requirements.txt @@ -11,9 +11,9 @@ natsort==8.0.0 # via -r utils/dataset_manifest/requirements.in numpy==1.22.4 # via opencv-python-headless -opencv-python-headless==4.8.1.78 +opencv-python-headless==4.9.0.80 # via -r utils/dataset_manifest/requirements.in -pillow==10.1.0 +pillow==10.2.0 # via -r utils/dataset_manifest/requirements.in -tqdm==4.66.1 +tqdm==4.66.2 # via -r utils/dataset_manifest/requirements.in