From 9d5bb993717cd961ef476b7c73cdf70b5506d1c2 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Sat, 27 Apr 2024 18:20:25 +0300 Subject: [PATCH 01/29] Switch workflows back to the default runner (#7812) Analyzing recent runs shows that the savings in run duration from using larger runners are pretty small (on the order of 5%), which is not worth the price. Reverts cvat-ai/cvat#7723. --- .github/workflows/cache.yml | 2 +- .github/workflows/full.yml | 8 ++++---- .github/workflows/helm.yml | 2 +- .github/workflows/main.yml | 12 ++++++------ .github/workflows/schedule.yml | 8 ++++---- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/.github/workflows/cache.yml b/.github/workflows/cache.yml index 654001994316..9cfc7d8766cb 100644 --- a/.github/workflows/cache.yml +++ b/.github/workflows/cache.yml @@ -13,7 +13,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} REPO: ${{ github.repository }} - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/full.yml b/.github/workflows/full.yml index b2a99ae1e502..bd317dca1e81 100644 --- a/.github/workflows/full.yml +++ b/.github/workflows/full.yml @@ -22,7 +22,7 @@ jobs: build: needs: search_cache - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: @@ -94,7 +94,7 @@ jobs: rest_api_testing: needs: build - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: @@ -187,7 +187,7 @@ jobs: unit_testing: needs: build - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: @@ -247,7 +247,7 @@ jobs: e2e_testing: needs: build - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest strategy: fail-fast: false matrix: diff --git a/.github/workflows/helm.yml b/.github/workflows/helm.yml index 33c81b906663..d05bb5a24ee4 100644 --- a/.github/workflows/helm.yml +++ b/.github/workflows/helm.yml @@ -21,7 +21,7 @@ jobs: github.event.pull_request.draft == false && !startsWith(github.event.pull_request.title, '[WIP]') && !startsWith(github.event.pull_request.title, '[Dependent]') - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9273aeb9aa7d..583657e02731 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -28,7 +28,7 @@ jobs: build: needs: search_cache - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -127,7 +127,7 @@ jobs: rest_api_testing: needs: build - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -208,7 +208,7 @@ jobs: unit_testing: needs: build - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -274,7 +274,7 @@ jobs: e2e_testing: needs: build - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest strategy: fail-fast: false matrix: @@ -388,7 +388,7 @@ jobs: publish_dev_images: if: github.ref == 'refs/heads/develop' needs: [rest_api_testing, unit_testing, e2e_testing] - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -427,7 +427,7 @@ jobs: docker push "${UI_IMAGE_REPO}:dev" codecov: - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest needs: [unit_testing, e2e_testing, rest_api_testing] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/schedule.yml b/.github/workflows/schedule.yml index 5f4f11131989..516bff2ad7f2 100644 --- 
a/.github/workflows/schedule.yml +++ b/.github/workflows/schedule.yml @@ -11,7 +11,7 @@ env: jobs: check_updates: - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest env: REPO: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -44,7 +44,7 @@ jobs: build: needs: search_cache - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -107,7 +107,7 @@ jobs: unit_testing: needs: build - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -190,7 +190,7 @@ jobs: e2e_testing: needs: build - runs-on: ubuntu-latest-big + runs-on: ubuntu-latest strategy: fail-fast: false matrix: From e2fdd26d9b26ce648aedc843d542ec168aaa6ba2 Mon Sep 17 00:00:00 2001 From: "cvat-bot[bot]" <147643061+cvat-bot[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 09:06:53 +0000 Subject: [PATCH 02/29] Update develop after v2.12.1 --- cvat-cli/requirements/base.txt | 2 +- cvat-cli/src/cvat_cli/version.py | 2 +- cvat-sdk/gen/generate.sh | 2 +- cvat/__init__.py | 2 +- cvat/schema.yml | 2 +- docker-compose.yml | 18 +++++++++--------- helm-chart/values.yaml | 4 ++-- 7 files changed, 16 insertions(+), 16 deletions(-) diff --git a/cvat-cli/requirements/base.txt b/cvat-cli/requirements/base.txt index fac7d816ec89..31d1de00d5ab 100644 --- a/cvat-cli/requirements/base.txt +++ b/cvat-cli/requirements/base.txt @@ -1,3 +1,3 @@ -cvat-sdk~=2.12.1 +cvat-sdk~=2.13.0 Pillow>=10.3.0 setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability diff --git a/cvat-cli/src/cvat_cli/version.py b/cvat-cli/src/cvat_cli/version.py index 81d6ef39304a..e3b5cba094f6 100644 --- a/cvat-cli/src/cvat_cli/version.py +++ b/cvat-cli/src/cvat_cli/version.py @@ -1 +1 @@ -VERSION = "2.12.1" +VERSION = "2.13.0" diff --git a/cvat-sdk/gen/generate.sh b/cvat-sdk/gen/generate.sh index 6ec1ee255a41..ec7f6217145f 100755 --- a/cvat-sdk/gen/generate.sh +++ b/cvat-sdk/gen/generate.sh @@ -8,7 +8,7 @@ set -e GENERATOR_VERSION="v6.0.1" -VERSION="2.12.1" +VERSION="2.13.0" LIB_NAME="cvat_sdk" LAYER1_LIB_NAME="${LIB_NAME}/api_client" DST_DIR="$(cd "$(dirname -- "$0")/.." 
&& pwd)" diff --git a/cvat/__init__.py b/cvat/__init__.py index 1c653002b319..feafa26b59be 100644 --- a/cvat/__init__.py +++ b/cvat/__init__.py @@ -4,6 +4,6 @@ from cvat.utils.version import get_version -VERSION = (2, 12, 1, 'final', 0) +VERSION = (2, 13, 0, 'alpha', 0) __version__ = get_version(VERSION) diff --git a/cvat/schema.yml b/cvat/schema.yml index 57089e74b8ff..1474351f474c 100644 --- a/cvat/schema.yml +++ b/cvat/schema.yml @@ -1,7 +1,7 @@ openapi: 3.0.3 info: title: CVAT REST API - version: 2.12.1 + version: 2.13.0 description: REST API for Computer Vision Annotation Tool (CVAT) termsOfService: https://www.google.com/policies/terms/ contact: diff --git a/docker-compose.yml b/docker-compose.yml index 0b666b36cbbe..98bf519c20d5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -72,7 +72,7 @@ services: cvat_server: container_name: cvat_server - image: cvat/server:${CVAT_VERSION:-v2.12.1} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: <<: *backend-deps @@ -106,7 +106,7 @@ services: cvat_utils: container_name: cvat_utils - image: cvat/server:${CVAT_VERSION:-v2.12.1} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: *backend-deps environment: @@ -123,7 +123,7 @@ services: cvat_worker_import: container_name: cvat_worker_import - image: cvat/server:${CVAT_VERSION:-v2.12.1} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: *backend-deps environment: @@ -139,7 +139,7 @@ services: cvat_worker_export: container_name: cvat_worker_export - image: cvat/server:${CVAT_VERSION:-v2.12.1} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: *backend-deps environment: @@ -155,7 +155,7 @@ services: cvat_worker_annotation: container_name: cvat_worker_annotation - image: cvat/server:${CVAT_VERSION:-v2.12.1} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: *backend-deps environment: @@ -171,7 +171,7 @@ services: cvat_worker_webhooks: container_name: cvat_worker_webhooks - image: cvat/server:${CVAT_VERSION:-v2.12.1} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: *backend-deps environment: @@ -187,7 +187,7 @@ services: cvat_worker_quality_reports: container_name: cvat_worker_quality_reports - image: cvat/server:${CVAT_VERSION:-v2.12.1} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: *backend-deps environment: @@ -203,7 +203,7 @@ services: cvat_worker_analytics_reports: container_name: cvat_worker_analytics_reports - image: cvat/server:${CVAT_VERSION:-v2.12.1} + image: cvat/server:${CVAT_VERSION:-dev} restart: always depends_on: *backend-deps environment: @@ -219,7 +219,7 @@ services: cvat_ui: container_name: cvat_ui - image: cvat/ui:${CVAT_VERSION:-v2.12.1} + image: cvat/ui:${CVAT_VERSION:-dev} restart: always depends_on: - cvat_server diff --git a/helm-chart/values.yaml b/helm-chart/values.yaml index a972fd6171af..6ce3305242c1 100644 --- a/helm-chart/values.yaml +++ b/helm-chart/values.yaml @@ -113,7 +113,7 @@ cvat: additionalVolumeMounts: [] replicas: 1 image: cvat/server - tag: v2.12.1 + tag: dev imagePullPolicy: Always permissionFix: enabled: true @@ -137,7 +137,7 @@ cvat: frontend: replicas: 1 image: cvat/ui - tag: v2.12.1 + tag: dev imagePullPolicy: Always labels: {} # test: test From ab8674c0d37add997871a4f8cf833375d93a81b0 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Mon, 29 Apr 2024 16:55:39 +0300 Subject: [PATCH 03/29] Move rego files into their respective apps (#7806) This is the promised sequel to #7734. 
After this change, the `iam` app will no longer contain any code specific to other apps. To make this work, the `/api/auth/rules` endpoint will now construct the OPA bundle from a set of paths, which will be populated by `load_app_permissions`. Move OPA test files accordingly. Fortunately, `opa test` accepts multiple directories, so it is trivial to adapt the testing instructions. Make the necessary adaptations to `generate_tests.py` to search for test generators in every app. The original parameters of `generate_tests.py` don't really make sense when there are multiple `rules` directory, so remove them. Instead, add a new `--apps-dir` parameter. This parameter isn't really needed to test the open source version of CVAT, but I expect it to be useful for testing the Enterprise version. In addition, add some safety checks to `generate_tests.py`: * Make sure that we find at least one test generator. * Propagate exceptions from `call_generator` into the main thread. ### How has this been tested? I tested the updated commands from the documentation manually, and examined the rules bundle returned by `/api/auth/rules` to ensure that it still contains all the `.rego` files. --- .github/workflows/full.yml | 7 ++- .github/workflows/main.yml | 7 ++- .github/workflows/regallint.yml | 2 +- .github/workflows/schedule.yml | 7 ++- .gitignore | 2 + .../iam/rules/.regal => .regal}/config.yaml | 0 .../rules/analytics_reports.rego | 0 .../rules/annotationguides.rego | 0 .../{iam => engine}/rules/cloudstorages.rego | 0 cvat/apps/{iam => engine}/rules/comments.rego | 0 cvat/apps/{iam => engine}/rules/issues.rego | 0 cvat/apps/{iam => engine}/rules/jobs.rego | 0 cvat/apps/{iam => engine}/rules/labels.rego | 0 cvat/apps/{iam => engine}/rules/projects.rego | 0 cvat/apps/{iam => engine}/rules/server.rego | 0 cvat/apps/{iam => engine}/rules/tasks.rego | 0 .../rules/tests/configs/annotationguides.csv | 0 .../rules/tests/configs/cloudstorages.csv | 0 .../rules/tests/configs/comments.csv | 0 .../rules/tests/configs/issues.csv | 0 .../rules/tests/configs/jobs.csv | 0 .../rules/tests/configs/projects.csv | 0 .../rules/tests/configs/server.csv | 0 .../rules/tests/configs/tasks.csv | 0 .../rules/tests/configs/users.csv | 0 .../annotationguides_test.gen.rego.py | 0 .../generators/cloudstorages_test.gen.rego.py | 0 .../generators/comments_test.gen.rego.py | 0 .../tests/generators/issues_test.gen.rego.py | 0 .../tests/generators/jobs_test.gen.rego.py | 0 .../generators/projects_test.gen.rego.py | 0 .../tests/generators/server_test.gen.rego.py | 0 .../tests/generators/tasks_test.gen.rego.py | 0 .../tests/generators/users_test.gen.rego.py | 0 cvat/apps/{iam => engine}/rules/users.rego | 0 cvat/apps/{iam => events}/rules/events.rego | 0 .../rules/tests/configs/events.csv | 0 .../tests/generators/events_test.gen.rego.py | 0 cvat/apps/iam/permissions.py | 6 ++- cvat/apps/iam/rules/.gitignore | 2 - cvat/apps/iam/rules/tests/generate_tests.py | 48 +++++++------------ cvat/apps/iam/utils.py | 14 ++++-- .../{iam => lambda_manager}/rules/lambda.rego | 0 .../rules/tests/configs/lambda.csv | 0 .../tests/generators/lambda_test.gen.rego.py | 0 .../{iam => log_viewer}/rules/analytics.rego | 0 .../rules/tests/configs/analytics.csv | 0 .../generators/analytics_test.gen.rego.py | 0 .../rules/invitations.rego | 0 .../rules/memberships.rego | 0 .../rules/organizations.rego | 0 .../rules/tests/configs/invitations.csv | 0 .../rules/tests/configs/memberships.csv | 0 .../rules/tests/configs/organizations.csv | 0 
.../generators/invitations_test.gen.rego.py | 0 .../generators/memberships_test.gen.rego.py | 0 .../generators/organizations_test.gen.rego.py | 0 .../rules/conflicts.rego | 0 .../rules/quality_reports.rego | 0 .../rules/quality_settings.rego | 0 .../rules/tests/configs/webhooks.csv | 0 .../generators/webhooks_test.gen.rego.py | 0 .../{iam => webhooks}/rules/webhooks.rego | 0 cvat/settings/base.py | 1 - .../en/docs/contributing/running-tests.md | 16 +++---- .../en/docs/manual/advanced/iam_user_roles.md | 2 +- 66 files changed, 51 insertions(+), 63 deletions(-) rename {cvat/apps/iam/rules/.regal => .regal}/config.yaml (100%) rename cvat/apps/{iam => analytics_report}/rules/analytics_reports.rego (100%) rename cvat/apps/{iam => engine}/rules/annotationguides.rego (100%) rename cvat/apps/{iam => engine}/rules/cloudstorages.rego (100%) rename cvat/apps/{iam => engine}/rules/comments.rego (100%) rename cvat/apps/{iam => engine}/rules/issues.rego (100%) rename cvat/apps/{iam => engine}/rules/jobs.rego (100%) rename cvat/apps/{iam => engine}/rules/labels.rego (100%) rename cvat/apps/{iam => engine}/rules/projects.rego (100%) rename cvat/apps/{iam => engine}/rules/server.rego (100%) rename cvat/apps/{iam => engine}/rules/tasks.rego (100%) rename cvat/apps/{iam => engine}/rules/tests/configs/annotationguides.csv (100%) rename cvat/apps/{iam => engine}/rules/tests/configs/cloudstorages.csv (100%) rename cvat/apps/{iam => engine}/rules/tests/configs/comments.csv (100%) rename cvat/apps/{iam => engine}/rules/tests/configs/issues.csv (100%) rename cvat/apps/{iam => engine}/rules/tests/configs/jobs.csv (100%) rename cvat/apps/{iam => engine}/rules/tests/configs/projects.csv (100%) rename cvat/apps/{iam => engine}/rules/tests/configs/server.csv (100%) rename cvat/apps/{iam => engine}/rules/tests/configs/tasks.csv (100%) rename cvat/apps/{iam => engine}/rules/tests/configs/users.csv (100%) rename cvat/apps/{iam => engine}/rules/tests/generators/annotationguides_test.gen.rego.py (100%) rename cvat/apps/{iam => engine}/rules/tests/generators/cloudstorages_test.gen.rego.py (100%) rename cvat/apps/{iam => engine}/rules/tests/generators/comments_test.gen.rego.py (100%) rename cvat/apps/{iam => engine}/rules/tests/generators/issues_test.gen.rego.py (100%) rename cvat/apps/{iam => engine}/rules/tests/generators/jobs_test.gen.rego.py (100%) rename cvat/apps/{iam => engine}/rules/tests/generators/projects_test.gen.rego.py (100%) rename cvat/apps/{iam => engine}/rules/tests/generators/server_test.gen.rego.py (100%) rename cvat/apps/{iam => engine}/rules/tests/generators/tasks_test.gen.rego.py (100%) rename cvat/apps/{iam => engine}/rules/tests/generators/users_test.gen.rego.py (100%) rename cvat/apps/{iam => engine}/rules/users.rego (100%) rename cvat/apps/{iam => events}/rules/events.rego (100%) rename cvat/apps/{iam => events}/rules/tests/configs/events.csv (100%) rename cvat/apps/{iam => events}/rules/tests/generators/events_test.gen.rego.py (100%) delete mode 100644 cvat/apps/iam/rules/.gitignore rename cvat/apps/{iam => lambda_manager}/rules/lambda.rego (100%) rename cvat/apps/{iam => lambda_manager}/rules/tests/configs/lambda.csv (100%) rename cvat/apps/{iam => lambda_manager}/rules/tests/generators/lambda_test.gen.rego.py (100%) rename cvat/apps/{iam => log_viewer}/rules/analytics.rego (100%) rename cvat/apps/{iam => log_viewer}/rules/tests/configs/analytics.csv (100%) rename cvat/apps/{iam => log_viewer}/rules/tests/generators/analytics_test.gen.rego.py (100%) rename cvat/apps/{iam => 
organizations}/rules/invitations.rego (100%) rename cvat/apps/{iam => organizations}/rules/memberships.rego (100%) rename cvat/apps/{iam => organizations}/rules/organizations.rego (100%) rename cvat/apps/{iam => organizations}/rules/tests/configs/invitations.csv (100%) rename cvat/apps/{iam => organizations}/rules/tests/configs/memberships.csv (100%) rename cvat/apps/{iam => organizations}/rules/tests/configs/organizations.csv (100%) rename cvat/apps/{iam => organizations}/rules/tests/generators/invitations_test.gen.rego.py (100%) rename cvat/apps/{iam => organizations}/rules/tests/generators/memberships_test.gen.rego.py (100%) rename cvat/apps/{iam => organizations}/rules/tests/generators/organizations_test.gen.rego.py (100%) rename cvat/apps/{iam => quality_control}/rules/conflicts.rego (100%) rename cvat/apps/{iam => quality_control}/rules/quality_reports.rego (100%) rename cvat/apps/{iam => quality_control}/rules/quality_settings.rego (100%) rename cvat/apps/{iam => webhooks}/rules/tests/configs/webhooks.csv (100%) rename cvat/apps/{iam => webhooks}/rules/tests/generators/webhooks_test.gen.rego.py (100%) rename cvat/apps/{iam => webhooks}/rules/webhooks.rego (100%) diff --git a/.github/workflows/full.yml b/.github/workflows/full.yml index bd317dca1e81..c3401a744fda 100644 --- a/.github/workflows/full.yml +++ b/.github/workflows/full.yml @@ -207,11 +207,10 @@ jobs: - name: Running OPA tests run: | - python cvat/apps/iam/rules/tests/generate_tests.py \ - --output-dir cvat/apps/iam/rules/ + python cvat/apps/iam/rules/tests/generate_tests.py - docker compose run --rm -v "$PWD/cvat/apps/iam/rules/:/mnt/rules" \ - cvat_opa test /mnt/rules + docker compose run --rm -v "$PWD:/mnt/src:ro" -w /mnt/src \ + cvat_opa test cvat/apps/*/rules - name: Running unit tests env: diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 583657e02731..c2d7617f7b3f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -226,11 +226,10 @@ jobs: - name: Running OPA tests run: | - python cvat/apps/iam/rules/tests/generate_tests.py \ - --output-dir cvat/apps/iam/rules/ + python cvat/apps/iam/rules/tests/generate_tests.py - docker compose run --rm -v "$PWD/cvat/apps/iam/rules/:/mnt/rules" \ - cvat_opa test /mnt/rules + docker compose run --rm -v "$PWD:/mnt/src:ro" -w /mnt/src \ + cvat_opa test cvat/apps/*/rules - name: Running unit tests env: diff --git a/.github/workflows/regallint.yml b/.github/workflows/regallint.yml index 00dcec06885f..2e0c3dd89357 100644 --- a/.github/workflows/regallint.yml +++ b/.github/workflows/regallint.yml @@ -9,4 +9,4 @@ jobs: uses: StyraInc/setup-regal@v0.2.0 with: version: v0.11.0 - - run: regal lint --format=github cvat/apps/iam/rules + - run: regal lint --format=github cvat/apps/*/rules diff --git a/.github/workflows/schedule.yml b/.github/workflows/schedule.yml index 516bff2ad7f2..f7244d915c42 100644 --- a/.github/workflows/schedule.yml +++ b/.github/workflows/schedule.yml @@ -153,11 +153,10 @@ jobs: - name: OPA tests run: | - python cvat/apps/iam/rules/tests/generate_tests.py \ - --output-dir cvat/apps/iam/rules/ + python cvat/apps/iam/rules/tests/generate_tests.py - docker compose run --rm -v "$PWD/cvat/apps/iam/rules/:/mnt/rules" \ - cvat_opa test /mnt/rules + docker compose run --rm -v "$PWD:/mnt/src:ro" -w /mnt/src \ + cvat_opa test cvat/apps/*/rules - name: REST API and SDK tests run: | diff --git a/.gitignore b/.gitignore index 37b0b9b30b9a..9736baa80a3f 100644 --- a/.gitignore +++ b/.gitignore @@ -64,3 +64,5 @@ cvat-core/reports # 
produced by prepare in the root package.json script .husky +# produced by cvat/apps/iam/rules/tests/generate_tests.py +/cvat/apps/*/rules/*_test.gen.rego diff --git a/cvat/apps/iam/rules/.regal/config.yaml b/.regal/config.yaml similarity index 100% rename from cvat/apps/iam/rules/.regal/config.yaml rename to .regal/config.yaml diff --git a/cvat/apps/iam/rules/analytics_reports.rego b/cvat/apps/analytics_report/rules/analytics_reports.rego similarity index 100% rename from cvat/apps/iam/rules/analytics_reports.rego rename to cvat/apps/analytics_report/rules/analytics_reports.rego diff --git a/cvat/apps/iam/rules/annotationguides.rego b/cvat/apps/engine/rules/annotationguides.rego similarity index 100% rename from cvat/apps/iam/rules/annotationguides.rego rename to cvat/apps/engine/rules/annotationguides.rego diff --git a/cvat/apps/iam/rules/cloudstorages.rego b/cvat/apps/engine/rules/cloudstorages.rego similarity index 100% rename from cvat/apps/iam/rules/cloudstorages.rego rename to cvat/apps/engine/rules/cloudstorages.rego diff --git a/cvat/apps/iam/rules/comments.rego b/cvat/apps/engine/rules/comments.rego similarity index 100% rename from cvat/apps/iam/rules/comments.rego rename to cvat/apps/engine/rules/comments.rego diff --git a/cvat/apps/iam/rules/issues.rego b/cvat/apps/engine/rules/issues.rego similarity index 100% rename from cvat/apps/iam/rules/issues.rego rename to cvat/apps/engine/rules/issues.rego diff --git a/cvat/apps/iam/rules/jobs.rego b/cvat/apps/engine/rules/jobs.rego similarity index 100% rename from cvat/apps/iam/rules/jobs.rego rename to cvat/apps/engine/rules/jobs.rego diff --git a/cvat/apps/iam/rules/labels.rego b/cvat/apps/engine/rules/labels.rego similarity index 100% rename from cvat/apps/iam/rules/labels.rego rename to cvat/apps/engine/rules/labels.rego diff --git a/cvat/apps/iam/rules/projects.rego b/cvat/apps/engine/rules/projects.rego similarity index 100% rename from cvat/apps/iam/rules/projects.rego rename to cvat/apps/engine/rules/projects.rego diff --git a/cvat/apps/iam/rules/server.rego b/cvat/apps/engine/rules/server.rego similarity index 100% rename from cvat/apps/iam/rules/server.rego rename to cvat/apps/engine/rules/server.rego diff --git a/cvat/apps/iam/rules/tasks.rego b/cvat/apps/engine/rules/tasks.rego similarity index 100% rename from cvat/apps/iam/rules/tasks.rego rename to cvat/apps/engine/rules/tasks.rego diff --git a/cvat/apps/iam/rules/tests/configs/annotationguides.csv b/cvat/apps/engine/rules/tests/configs/annotationguides.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/annotationguides.csv rename to cvat/apps/engine/rules/tests/configs/annotationguides.csv diff --git a/cvat/apps/iam/rules/tests/configs/cloudstorages.csv b/cvat/apps/engine/rules/tests/configs/cloudstorages.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/cloudstorages.csv rename to cvat/apps/engine/rules/tests/configs/cloudstorages.csv diff --git a/cvat/apps/iam/rules/tests/configs/comments.csv b/cvat/apps/engine/rules/tests/configs/comments.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/comments.csv rename to cvat/apps/engine/rules/tests/configs/comments.csv diff --git a/cvat/apps/iam/rules/tests/configs/issues.csv b/cvat/apps/engine/rules/tests/configs/issues.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/issues.csv rename to cvat/apps/engine/rules/tests/configs/issues.csv diff --git a/cvat/apps/iam/rules/tests/configs/jobs.csv b/cvat/apps/engine/rules/tests/configs/jobs.csv 
similarity index 100% rename from cvat/apps/iam/rules/tests/configs/jobs.csv rename to cvat/apps/engine/rules/tests/configs/jobs.csv diff --git a/cvat/apps/iam/rules/tests/configs/projects.csv b/cvat/apps/engine/rules/tests/configs/projects.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/projects.csv rename to cvat/apps/engine/rules/tests/configs/projects.csv diff --git a/cvat/apps/iam/rules/tests/configs/server.csv b/cvat/apps/engine/rules/tests/configs/server.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/server.csv rename to cvat/apps/engine/rules/tests/configs/server.csv diff --git a/cvat/apps/iam/rules/tests/configs/tasks.csv b/cvat/apps/engine/rules/tests/configs/tasks.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/tasks.csv rename to cvat/apps/engine/rules/tests/configs/tasks.csv diff --git a/cvat/apps/iam/rules/tests/configs/users.csv b/cvat/apps/engine/rules/tests/configs/users.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/users.csv rename to cvat/apps/engine/rules/tests/configs/users.csv diff --git a/cvat/apps/iam/rules/tests/generators/annotationguides_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/annotationguides_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/annotationguides_test.gen.rego.py rename to cvat/apps/engine/rules/tests/generators/annotationguides_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/cloudstorages_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/cloudstorages_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/cloudstorages_test.gen.rego.py rename to cvat/apps/engine/rules/tests/generators/cloudstorages_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/comments_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/comments_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/comments_test.gen.rego.py rename to cvat/apps/engine/rules/tests/generators/comments_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/issues_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/issues_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/issues_test.gen.rego.py rename to cvat/apps/engine/rules/tests/generators/issues_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/jobs_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/jobs_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/jobs_test.gen.rego.py rename to cvat/apps/engine/rules/tests/generators/jobs_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/projects_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/projects_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/projects_test.gen.rego.py rename to cvat/apps/engine/rules/tests/generators/projects_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/server_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/server_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/server_test.gen.rego.py rename to cvat/apps/engine/rules/tests/generators/server_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/tasks_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/tasks_test.gen.rego.py similarity index 100% rename from 
cvat/apps/iam/rules/tests/generators/tasks_test.gen.rego.py rename to cvat/apps/engine/rules/tests/generators/tasks_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/users_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/users_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/users_test.gen.rego.py rename to cvat/apps/engine/rules/tests/generators/users_test.gen.rego.py diff --git a/cvat/apps/iam/rules/users.rego b/cvat/apps/engine/rules/users.rego similarity index 100% rename from cvat/apps/iam/rules/users.rego rename to cvat/apps/engine/rules/users.rego diff --git a/cvat/apps/iam/rules/events.rego b/cvat/apps/events/rules/events.rego similarity index 100% rename from cvat/apps/iam/rules/events.rego rename to cvat/apps/events/rules/events.rego diff --git a/cvat/apps/iam/rules/tests/configs/events.csv b/cvat/apps/events/rules/tests/configs/events.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/events.csv rename to cvat/apps/events/rules/tests/configs/events.csv diff --git a/cvat/apps/iam/rules/tests/generators/events_test.gen.rego.py b/cvat/apps/events/rules/tests/generators/events_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/events_test.gen.rego.py rename to cvat/apps/events/rules/tests/generators/events_test.gen.rego.py diff --git a/cvat/apps/iam/permissions.py b/cvat/apps/iam/permissions.py index 8c9ec2da9ce0..b4e802378f96 100644 --- a/cvat/apps/iam/permissions.py +++ b/cvat/apps/iam/permissions.py @@ -9,6 +9,7 @@ import operator from abc import ABCMeta, abstractmethod from enum import Enum +from pathlib import Path from typing import Any, Dict, List, Optional, Sequence, TypeVar from attrs import define, field @@ -20,6 +21,7 @@ from cvat.apps.organizations.models import Membership, Organization from cvat.utils.http import make_requests_session +from .utils import add_opa_rules_path class StrEnum(str, Enum): def __str__(self) -> str: @@ -243,7 +245,7 @@ def has_object_permission(self, request, view, obj) -> bool: def load_app_permissions(config: AppConfig) -> None: """ - Ensures that permissions from the given app are loaded. + Ensures that permissions and OPA rules from the given app are loaded. This function should be called from the AppConfig.ready() method of every app that defines a permissions module. 
@@ -254,3 +256,5 @@ def load_app_permissions(config: AppConfig) -> None: isinstance(attr, type) and issubclass(attr, OpenPolicyAgentPermission) for attr in vars(permissions_module).values() ) + + add_opa_rules_path(Path(config.path, "rules")) diff --git a/cvat/apps/iam/rules/.gitignore b/cvat/apps/iam/rules/.gitignore deleted file mode 100644 index b29dc890bb39..000000000000 --- a/cvat/apps/iam/rules/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -# Autogenerated files -/*_test.gen.rego diff --git a/cvat/apps/iam/rules/tests/generate_tests.py b/cvat/apps/iam/rules/tests/generate_tests.py index 72f016badaae..b6f95e63af4b 100755 --- a/cvat/apps/iam/rules/tests/generate_tests.py +++ b/cvat/apps/iam/rules/tests/generate_tests.py @@ -4,37 +4,24 @@ # # SPDX-License-Identifier: MIT -import os -import os.path as osp import subprocess import sys from argparse import ArgumentParser, Namespace from concurrent.futures import ThreadPoolExecutor from functools import partial -from glob import glob from typing import Optional, Sequence +from pathlib import Path +REPO_ROOT = Path(__file__).resolve().parents[5] def create_arg_parser() -> ArgumentParser: parser = ArgumentParser(add_help=True) parser.add_argument( - "-c", - "--config-dir", - default=None, - help="The directory with test configs in CSV format (default: the default location)", - ) - parser.add_argument( - "-g", - "--gen-dir", - default=None, - help="The directory with test generators (default: the default location)", - ) - parser.add_argument( - "-o", - "--output-dir", - default=".", - type=osp.abspath, - help="The output directory for rego files (default: current dir)", + "-a", + "--apps-dir", + type=Path, + default=REPO_ROOT / "cvat/apps", + help="The directory with Django apps (default: cvat/apps)", ) return parser @@ -45,28 +32,27 @@ def parse_args(args: Optional[Sequence[str]] = None) -> Namespace: return parsed_args -def call_generator(module_path: str, gen_params: Namespace): +def call_generator(generator_path: Path, gen_params: Namespace) -> None: + rules_dir = generator_path.parents[2] subprocess.check_call( - ["python3", module_path, gen_params.config_dir], cwd=gen_params.output_dir + [sys.executable, generator_path, rules_dir / 'tests/configs'], cwd=rules_dir ) def main(args: Optional[Sequence[str]] = None) -> int: args = parse_args(args) - args.config_dir = osp.abspath(args.config_dir or osp.join(osp.dirname(__file__), "configs")) - args.gen_dir = osp.abspath(args.gen_dir or osp.join(osp.dirname(__file__), "generators")) - - assert osp.isdir(args.config_dir) - assert osp.isdir(args.gen_dir) + generator_paths = list(args.apps_dir.glob("*/rules/tests/generators/*_test.gen.rego.py")) - os.makedirs(args.output_dir, exist_ok=True) + if not generator_paths: + sys.exit("error: no generators found") with ThreadPoolExecutor() as pool: - pool.map( + for _ in pool.map( partial(call_generator, gen_params=args), - glob(osp.join(args.gen_dir, "*_test.gen.rego.py")), - ) + generator_paths, + ): + pass # consume all results in order to propagate exceptions if __name__ == "__main__": diff --git a/cvat/apps/iam/utils.py b/cvat/apps/iam/utils.py index 5f9c8170b244..9cd122ab1ba3 100644 --- a/cvat/apps/iam/utils.py +++ b/cvat/apps/iam/utils.py @@ -5,16 +5,16 @@ import io import tarfile -from django.conf import settings +_OPA_RULES_PATHS = { + Path(__file__).parent / 'rules', +} -@functools.lru_cache() +@functools.lru_cache(maxsize=None) def get_opa_bundle() -> Tuple[bytes, str]: - rules_paths = [Path(settings.BASE_DIR) / rel_path for rel_path in 
settings.IAM_OPA_RULES_PATH.strip(':').split(':')] - bundle_file = io.BytesIO() with tarfile.open(fileobj=bundle_file, mode='w:gz') as tar: - for p in rules_paths: + for p in _OPA_RULES_PATHS: for f in p.glob('*[!.gen].rego'): tar.add(name=f, arcname=f.relative_to(p.parent)) @@ -22,6 +22,10 @@ def get_opa_bundle() -> Tuple[bytes, str]: etag = hashlib.blake2b(bundle).hexdigest() return bundle, etag +def add_opa_rules_path(path: Path) -> None: + _OPA_RULES_PATHS.add(path) + get_opa_bundle.cache_clear() + def get_dummy_user(email): from allauth.account.models import EmailAddress from allauth.account import app_settings diff --git a/cvat/apps/iam/rules/lambda.rego b/cvat/apps/lambda_manager/rules/lambda.rego similarity index 100% rename from cvat/apps/iam/rules/lambda.rego rename to cvat/apps/lambda_manager/rules/lambda.rego diff --git a/cvat/apps/iam/rules/tests/configs/lambda.csv b/cvat/apps/lambda_manager/rules/tests/configs/lambda.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/lambda.csv rename to cvat/apps/lambda_manager/rules/tests/configs/lambda.csv diff --git a/cvat/apps/iam/rules/tests/generators/lambda_test.gen.rego.py b/cvat/apps/lambda_manager/rules/tests/generators/lambda_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/lambda_test.gen.rego.py rename to cvat/apps/lambda_manager/rules/tests/generators/lambda_test.gen.rego.py diff --git a/cvat/apps/iam/rules/analytics.rego b/cvat/apps/log_viewer/rules/analytics.rego similarity index 100% rename from cvat/apps/iam/rules/analytics.rego rename to cvat/apps/log_viewer/rules/analytics.rego diff --git a/cvat/apps/iam/rules/tests/configs/analytics.csv b/cvat/apps/log_viewer/rules/tests/configs/analytics.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/analytics.csv rename to cvat/apps/log_viewer/rules/tests/configs/analytics.csv diff --git a/cvat/apps/iam/rules/tests/generators/analytics_test.gen.rego.py b/cvat/apps/log_viewer/rules/tests/generators/analytics_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/analytics_test.gen.rego.py rename to cvat/apps/log_viewer/rules/tests/generators/analytics_test.gen.rego.py diff --git a/cvat/apps/iam/rules/invitations.rego b/cvat/apps/organizations/rules/invitations.rego similarity index 100% rename from cvat/apps/iam/rules/invitations.rego rename to cvat/apps/organizations/rules/invitations.rego diff --git a/cvat/apps/iam/rules/memberships.rego b/cvat/apps/organizations/rules/memberships.rego similarity index 100% rename from cvat/apps/iam/rules/memberships.rego rename to cvat/apps/organizations/rules/memberships.rego diff --git a/cvat/apps/iam/rules/organizations.rego b/cvat/apps/organizations/rules/organizations.rego similarity index 100% rename from cvat/apps/iam/rules/organizations.rego rename to cvat/apps/organizations/rules/organizations.rego diff --git a/cvat/apps/iam/rules/tests/configs/invitations.csv b/cvat/apps/organizations/rules/tests/configs/invitations.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/invitations.csv rename to cvat/apps/organizations/rules/tests/configs/invitations.csv diff --git a/cvat/apps/iam/rules/tests/configs/memberships.csv b/cvat/apps/organizations/rules/tests/configs/memberships.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/memberships.csv rename to cvat/apps/organizations/rules/tests/configs/memberships.csv diff --git a/cvat/apps/iam/rules/tests/configs/organizations.csv 
b/cvat/apps/organizations/rules/tests/configs/organizations.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/organizations.csv rename to cvat/apps/organizations/rules/tests/configs/organizations.csv diff --git a/cvat/apps/iam/rules/tests/generators/invitations_test.gen.rego.py b/cvat/apps/organizations/rules/tests/generators/invitations_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/invitations_test.gen.rego.py rename to cvat/apps/organizations/rules/tests/generators/invitations_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/memberships_test.gen.rego.py b/cvat/apps/organizations/rules/tests/generators/memberships_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/memberships_test.gen.rego.py rename to cvat/apps/organizations/rules/tests/generators/memberships_test.gen.rego.py diff --git a/cvat/apps/iam/rules/tests/generators/organizations_test.gen.rego.py b/cvat/apps/organizations/rules/tests/generators/organizations_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/organizations_test.gen.rego.py rename to cvat/apps/organizations/rules/tests/generators/organizations_test.gen.rego.py diff --git a/cvat/apps/iam/rules/conflicts.rego b/cvat/apps/quality_control/rules/conflicts.rego similarity index 100% rename from cvat/apps/iam/rules/conflicts.rego rename to cvat/apps/quality_control/rules/conflicts.rego diff --git a/cvat/apps/iam/rules/quality_reports.rego b/cvat/apps/quality_control/rules/quality_reports.rego similarity index 100% rename from cvat/apps/iam/rules/quality_reports.rego rename to cvat/apps/quality_control/rules/quality_reports.rego diff --git a/cvat/apps/iam/rules/quality_settings.rego b/cvat/apps/quality_control/rules/quality_settings.rego similarity index 100% rename from cvat/apps/iam/rules/quality_settings.rego rename to cvat/apps/quality_control/rules/quality_settings.rego diff --git a/cvat/apps/iam/rules/tests/configs/webhooks.csv b/cvat/apps/webhooks/rules/tests/configs/webhooks.csv similarity index 100% rename from cvat/apps/iam/rules/tests/configs/webhooks.csv rename to cvat/apps/webhooks/rules/tests/configs/webhooks.csv diff --git a/cvat/apps/iam/rules/tests/generators/webhooks_test.gen.rego.py b/cvat/apps/webhooks/rules/tests/generators/webhooks_test.gen.rego.py similarity index 100% rename from cvat/apps/iam/rules/tests/generators/webhooks_test.gen.rego.py rename to cvat/apps/webhooks/rules/tests/generators/webhooks_test.gen.rego.py diff --git a/cvat/apps/iam/rules/webhooks.rego b/cvat/apps/webhooks/rules/webhooks.rego similarity index 100% rename from cvat/apps/iam/rules/webhooks.rego rename to cvat/apps/webhooks/rules/webhooks.rego diff --git a/cvat/settings/base.py b/cvat/settings/base.py index d96a569a9c53..9818a19cbf36 100644 --- a/cvat/settings/base.py +++ b/cvat/settings/base.py @@ -236,7 +236,6 @@ def generate_secret_key(): IAM_ROLES = [IAM_ADMIN_ROLE, 'business', 'user', 'worker'] IAM_OPA_HOST = 'http://opa:8181' IAM_OPA_DATA_URL = f'{IAM_OPA_HOST}/v1/data' -IAM_OPA_RULES_PATH = 'cvat/apps/iam/rules:' LOGIN_URL = 'rest_login' LOGIN_REDIRECT_URL = '/' diff --git a/site/content/en/docs/contributing/running-tests.md b/site/content/en/docs/contributing/running-tests.md index e6e68f6f85f4..b5faaca89f36 100644 --- a/site/content/en/docs/contributing/running-tests.md +++ b/site/content/en/docs/contributing/running-tests.md @@ -204,24 +204,22 @@ of the corresponding task in `./vscode/launch.json`, for 
example: ### Generate tests ```bash -python cvat/apps/iam/rules/tests/generate_tests.py \ - --output-dir cvat/apps/iam/rules/ +python cvat/apps/iam/rules/tests/generate_tests.py ``` ### Run testing - In a Docker container ```bash -docker run --rm -v ${PWD}/cvat/apps/iam/rules:/rules \ - openpolicyagent/opa:0.63.0 \ - test /rules -v +docker compose run --rm -v "$PWD:/mnt/src:ro" -w /mnt/src \ + cvat_opa test -v cvat/apps/*/rules ``` - or execute OPA directly ```bash curl -L -o opa https://openpolicyagent.org/downloads/v0.63.0/opa_linux_amd64_static chmod +x ./opa -./opa test cvat/apps/iam/rules +./opa test cvat/apps/*/rules ``` ### Linting Rego @@ -230,14 +228,14 @@ The Rego policies in this project are linted using [Regal](https://github.com/st - In a Docker container ```bash -docker run --rm -v ${PWD}/cvat/apps/iam/rules:/rules \ +docker run --rm -v ${PWD}:/mnt/src:ro -w /mnt/src \ ghcr.io/styrainc/regal:0.11.0 \ - lint /rules + lint cvat/apps/*/rules ``` - or execute Regal directly ```bash curl -L -o regal https://github.com/StyraInc/regal/releases/download/v0.11.0/regal_Linux_x86_64 chmod +x ./regal -./regal lint cvat/apps/iam/rules +./regal lint cvat/apps/*/rules ``` diff --git a/site/content/en/docs/manual/advanced/iam_user_roles.md b/site/content/en/docs/manual/advanced/iam_user_roles.md index ce111dcb50da..e2e723cf8f5f 100644 --- a/site/content/en/docs/manual/advanced/iam_user_roles.md +++ b/site/content/en/docs/manual/advanced/iam_user_roles.md @@ -21,7 +21,7 @@ using the [**Free plan**](https://www.cvat.ai/pricing/cloud) and can be lifted u All roles are predefined and cannot be modified through the user interface. However, within the _self-hosted solution_, roles can be adjusted using `.rego` -files stored in `cvat/apps/iam/rules/`. +files stored in `cvat/apps/*/rules/`. Rego is a declarative language employed for defining OPA (Open Policy Agent) policies, and its syntax is detailed in the [**OPA documentation**](https://www.openpolicyagent.org/docs/latest/policy-language/). 
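For reference, a minimal sketch of how an individual app opts into the per-app rule loading introduced by this patch. This is not code from the patch itself: the app name `widgets` is hypothetical, but the pattern follows the `load_app_permissions()` docstring and the `add_opa_rules_path()` helper shown in the diffs above — each app calls `load_app_permissions(self)` from `AppConfig.ready()`, which validates the app's permissions module and registers its `rules/` directory with the bundle served by `/api/auth/rules`.

```python
# Hypothetical example app config (cvat/apps/widgets/apps.py) — a sketch of the
# registration pattern assumed by this patch, not code taken from it.
from django.apps import AppConfig


class WidgetsConfig(AppConfig):
    name = "cvat.apps.widgets"

    def ready(self) -> None:
        # Imported lazily, as is usual for app setup code, to avoid import cycles.
        from cvat.apps.iam.permissions import load_app_permissions

        # Checks that cvat.apps.widgets.permissions defines at least one
        # OpenPolicyAgentPermission subclass, then registers
        # cvat/apps/widgets/rules/ so its *.rego files are included in the
        # OPA bundle returned by /api/auth/rules.
        load_app_permissions(self)
```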
From f2924d44a5a73f34c78c4ecf71cde134d1725164 Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Tue, 30 Apr 2024 12:36:36 +0300 Subject: [PATCH 04/29] Fixed incorrect Cloud Storage request by ID (#7823) --- .../20240429_145914_boris_fixed_incorrect_cs_request.md | 4 ++++ cvat-core/src/server-proxy.ts | 9 ++++++--- 2 files changed, 10 insertions(+), 3 deletions(-) create mode 100644 changelog.d/20240429_145914_boris_fixed_incorrect_cs_request.md diff --git a/changelog.d/20240429_145914_boris_fixed_incorrect_cs_request.md b/changelog.d/20240429_145914_boris_fixed_incorrect_cs_request.md new file mode 100644 index 000000000000..af3a917889b7 --- /dev/null +++ b/changelog.d/20240429_145914_boris_fixed_incorrect_cs_request.md @@ -0,0 +1,4 @@ +### Fixed + +- Infinite loading cloud storage update page when a lot of cloud storages are available for a user + () diff --git a/cvat-core/src/server-proxy.ts b/cvat-core/src/server-proxy.ts index 594b714fef10..6d9e9f118021 100644 --- a/cvat-core/src/server-proxy.ts +++ b/cvat-core/src/server-proxy.ts @@ -1937,18 +1937,21 @@ async function getCloudStorages(filter = {}): Promise Date: Tue, 30 Apr 2024 14:14:10 +0300 Subject: [PATCH 05/29] Opening update CS page sends infinite requests when CS id does not exist (#7828) = --- ...s_fixed_infinite_loading_nonexisting_cs.md | 4 +++ .../update-cloud-storage-page.tsx | 25 ++++++++++++------- 2 files changed, 20 insertions(+), 9 deletions(-) create mode 100644 changelog.d/20240430_115445_boris_fixed_infinite_loading_nonexisting_cs.md diff --git a/changelog.d/20240430_115445_boris_fixed_infinite_loading_nonexisting_cs.md b/changelog.d/20240430_115445_boris_fixed_infinite_loading_nonexisting_cs.md new file mode 100644 index 000000000000..09b53cdc4f93 --- /dev/null +++ b/changelog.d/20240430_115445_boris_fixed_infinite_loading_nonexisting_cs.md @@ -0,0 +1,4 @@ +### Fixed + +- Opening update CS page sends infinite requests when CS id does not exist + () diff --git a/cvat-ui/src/components/update-cloud-storage-page/update-cloud-storage-page.tsx b/cvat-ui/src/components/update-cloud-storage-page/update-cloud-storage-page.tsx index 85db9d8990ed..52afa4c97377 100644 --- a/cvat-ui/src/components/update-cloud-storage-page/update-cloud-storage-page.tsx +++ b/cvat-ui/src/components/update-cloud-storage-page/update-cloud-storage-page.tsx @@ -3,8 +3,8 @@ // SPDX-License-Identifier: MIT import './styles.scss'; -import React, { useEffect } from 'react'; -import { useDispatch, useSelector } from 'react-redux'; +import React, { useEffect, useState } from 'react'; +import { shallowEqual, useDispatch, useSelector } from 'react-redux'; import { useParams } from 'react-router-dom'; import { Row, Col } from 'antd/lib/grid'; import Spin from 'antd/lib/spin'; @@ -22,16 +22,23 @@ interface ParamType { export default function UpdateCloudStoragePageComponent(): JSX.Element { const dispatch = useDispatch(); const cloudStorageId = +useParams().id; - const isFetching = useSelector((state: CombinedState) => state.cloudStorages.fetching); - const isInitialized = useSelector((state: CombinedState) => state.cloudStorages.initialized); - const cloudStorages = useSelector((state: CombinedState) => state.cloudStorages.current); - const [cloudStorage] = cloudStorages.filter((_cloudStorage) => _cloudStorage.id === cloudStorageId); + const [requested, setRequested] = useState(false); + const { + isFetching, + isInitialized, + cloudStorage, + } = useSelector((state: CombinedState) => ({ + isFetching: state.cloudStorages.fetching, + isInitialized: 
state.cloudStorages.initialized, + cloudStorage: state.cloudStorages.current.find((_cloudStorage) => _cloudStorage.id === cloudStorageId), + }), shallowEqual); useEffect(() => { - if (!cloudStorage && !isFetching) { + if (!cloudStorage && !requested && !isFetching) { + setRequested(true); dispatch(getCloudStoragesAsync({ id: cloudStorageId })); } - }, [isFetching]); + }, [requested, cloudStorage, isFetching]); if (!cloudStorage && !isInitialized) { return ; @@ -42,7 +49,7 @@ export default function UpdateCloudStoragePageComponent(): JSX.Element { ); From 5f71ab754ea1af429bd607f761f5e380df67ce4d Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Tue, 30 Apr 2024 14:19:07 +0300 Subject: [PATCH 06/29] Fixed duration of 'change:frame' event (#7817) --- .../20240429_110227_boris_fixed_change_frame_duration.md | 4 ++++ cvat-ui/package.json | 2 +- cvat-ui/src/reducers/annotation-reducer.ts | 4 ++++ 3 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 changelog.d/20240429_110227_boris_fixed_change_frame_duration.md diff --git a/changelog.d/20240429_110227_boris_fixed_change_frame_duration.md b/changelog.d/20240429_110227_boris_fixed_change_frame_duration.md new file mode 100644 index 000000000000..19bc00474b21 --- /dev/null +++ b/changelog.d/20240429_110227_boris_fixed_change_frame_duration.md @@ -0,0 +1,4 @@ +### Fixed + +- Incorrect duration of `change:frame` event + () diff --git a/cvat-ui/package.json b/cvat-ui/package.json index c0eb9e443e96..408f19a9c6d7 100644 --- a/cvat-ui/package.json +++ b/cvat-ui/package.json @@ -1,6 +1,6 @@ { "name": "cvat-ui", - "version": "1.63.7", + "version": "1.63.8", "description": "CVAT single-page application", "main": "src/index.tsx", "scripts": { diff --git a/cvat-ui/src/reducers/annotation-reducer.ts b/cvat-ui/src/reducers/annotation-reducer.ts index 3f60717d9788..c18d042c19db 100644 --- a/cvat-ui/src/reducers/annotation-reducer.ts +++ b/cvat-ui/src/reducers/annotation-reducer.ts @@ -445,6 +445,10 @@ export default (state = defaultState, action: AnyAction): AnnotationState => { player: { ...state.player, ranges: ranges || state.player.ranges, + frame: { + ...state.player.frame, + changeFrameEvent: null, + }, }, canvas: { ...state.canvas, From a5146319193cfcac70f978e2f7f20d56ef62c4c9 Mon Sep 17 00:00:00 2001 From: zaha Date: Tue, 30 Apr 2024 15:08:31 +0300 Subject: [PATCH 07/29] Save video if test failed (#7807) --- .github/workflows/main.yml | 7 +++++++ tests/cypress.config.js | 2 +- tests/cypress/plugins/index.js | 6 ++++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c2d7617f7b3f..99b9d0dc79bd 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -384,6 +384,13 @@ jobs: name: cypress_screenshots_${{ matrix.specs }} path: ${{ github.workspace }}/tests/cypress/screenshots + - name: Uploading cypress videos as an artifact + if: failure() + uses: actions/upload-artifact@v3.1.1 + with: + name: cypress_videos_${{ matrix.specs }} + path: ${{ github.workspace }}/tests/cypress/videos + publish_dev_images: if: github.ref == 'refs/heads/develop' needs: [rest_api_testing, unit_testing, e2e_testing] diff --git a/tests/cypress.config.js b/tests/cypress.config.js index 37400e0c3601..f6d327466adc 100644 --- a/tests/cypress.config.js +++ b/tests/cypress.config.js @@ -2,7 +2,7 @@ const { defineConfig } = require('cypress'); const plugins = require('./cypress/plugins/index'); module.exports = defineConfig({ - video: false, + video: true, viewportWidth: 1300, 
viewportHeight: 960, defaultCommandTimeout: 25000, diff --git a/tests/cypress/plugins/index.js b/tests/cypress/plugins/index.js index 9d51227c08a3..790aef6a81bc 100644 --- a/tests/cypress/plugins/index.js +++ b/tests/cypress/plugins/index.js @@ -41,5 +41,11 @@ module.exports = (on, config) => { } return launchOptions; }); + + on('after:spec', (spec, results) => { + if (results && results.stats.failures === 0 && results.video) { + fs.unlinkSync(results.video); + } + }); return config; }; From bae657bc1206c2f7c437c3b032aebcea93eb0318 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Tue, 30 Apr 2024 17:40:29 +0300 Subject: [PATCH 08/29] Modernize Rego syntax (#7824) Open Policy Agent v0.59 introduced a new directive (`import rego.v1`) that ensures that the file is compatible with OPA v1 (to be released in the future). Add this directive to all Rego files and update the syntax accordingly. Which involves the following: * Rewrite all rules to use the `if` keyword, which is now mandatory. * Where appropriate, use the `in` keyword, which is now available without a future import. It's not mandatory, but it looks much nicer. In addition, update Regal to the latest version, which now enforces the use of `import rego.v1` by default. --- .github/workflows/regallint.yml | 4 +- .../rules/analytics_reports.rego | 6 +- cvat/apps/engine/rules/annotationguides.rego | 32 ++-- cvat/apps/engine/rules/cloudstorages.rego | 47 +++--- cvat/apps/engine/rules/comments.rego | 83 +++++----- cvat/apps/engine/rules/issues.rego | 81 +++++----- cvat/apps/engine/rules/jobs.rego | 131 ++++++++------- cvat/apps/engine/rules/labels.rego | 19 ++- cvat/apps/engine/rules/projects.rego | 91 ++++++----- cvat/apps/engine/rules/server.rego | 7 +- cvat/apps/engine/rules/tasks.rego | 151 ++++++++++-------- .../annotationguides_test.gen.rego.py | 4 +- .../generators/cloudstorages_test.gen.rego.py | 4 +- .../generators/comments_test.gen.rego.py | 4 +- .../tests/generators/issues_test.gen.rego.py | 4 +- .../tests/generators/jobs_test.gen.rego.py | 4 +- .../generators/projects_test.gen.rego.py | 4 +- .../tests/generators/server_test.gen.rego.py | 4 +- .../tests/generators/tasks_test.gen.rego.py | 4 +- .../tests/generators/users_test.gen.rego.py | 4 +- cvat/apps/engine/rules/users.rego | 23 +-- cvat/apps/events/rules/events.rego | 21 +-- .../tests/generators/events_test.gen.rego.py | 4 +- cvat/apps/iam/rules/utils.rego | 20 +-- cvat/apps/lambda_manager/rules/lambda.rego | 22 +-- .../tests/generators/lambda_test.gen.rego.py | 4 +- cvat/apps/log_viewer/rules/analytics.rego | 7 +- .../generators/analytics_test.gen.rego.py | 4 +- .../apps/organizations/rules/invitations.rego | 53 +++--- .../apps/organizations/rules/memberships.rego | 39 ++--- .../organizations/rules/organizations.rego | 37 +++-- .../generators/invitations_test.gen.rego.py | 4 +- .../generators/memberships_test.gen.rego.py | 4 +- .../generators/organizations_test.gen.rego.py | 4 +- .../apps/quality_control/rules/conflicts.rego | 19 ++- .../rules/quality_reports.rego | 19 ++- .../rules/quality_settings.rego | 19 ++- .../generators/webhooks_test.gen.rego.py | 4 +- cvat/apps/webhooks/rules/webhooks.rego | 61 +++---- 39 files changed, 565 insertions(+), 491 deletions(-) diff --git a/.github/workflows/regallint.yml b/.github/workflows/regallint.yml index 2e0c3dd89357..b35a1a862b34 100644 --- a/.github/workflows/regallint.yml +++ b/.github/workflows/regallint.yml @@ -6,7 +6,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Regal - uses: StyraInc/setup-regal@v0.2.0 + 
uses: StyraInc/setup-regal@v1 with: - version: v0.11.0 + version: v0.21.3 - run: regal lint --format=github cvat/apps/*/rules diff --git a/cvat/apps/analytics_report/rules/analytics_reports.rego b/cvat/apps/analytics_report/rules/analytics_reports.rego index e260760f7e36..b57dc764fcde 100644 --- a/cvat/apps/analytics_report/rules/analytics_reports.rego +++ b/cvat/apps/analytics_report/rules/analytics_reports.rego @@ -1,5 +1,7 @@ package analytics_reports +import rego.v1 + import data.utils import data.organizations @@ -24,11 +26,11 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST utils.has_perm(utils.WORKER) } diff --git a/cvat/apps/engine/rules/annotationguides.rego b/cvat/apps/engine/rules/annotationguides.rego index 3acb74954fb7..dd512af6d79a 100644 --- a/cvat/apps/engine/rules/annotationguides.rego +++ b/cvat/apps/engine/rules/annotationguides.rego @@ -1,5 +1,7 @@ package annotationguides +import rego.v1 + import data.utils import data.organizations @@ -31,72 +33,72 @@ import data.organizations # } # } -is_target_owner { +is_target_owner if { input.resource.target.owner.id == input.auth.user.id } -is_target_assignee { +is_target_assignee if { input.resource.target.assignee.id == input.auth.user.id } -is_target_staff { +is_target_staff if { is_target_owner } -is_target_staff { +is_target_staff if { is_target_assignee } default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.VIEW utils.is_sandbox utils.has_perm(utils.WORKER) input.resource.target.is_job_staff } -allow { +allow if { input.scope == utils.VIEW utils.is_sandbox utils.has_perm(utils.WORKER) is_target_staff } -allow { - { utils.CREATE, utils.DELETE, utils.UPDATE }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.DELETE, utils.UPDATE} utils.is_sandbox utils.has_perm(utils.USER) is_target_staff } -allow { - { utils.CREATE, utils.DELETE, utils.UPDATE, utils.VIEW }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.DELETE, utils.UPDATE, utils.VIEW} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.CREATE, utils.DELETE, utils.UPDATE }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.DELETE, utils.UPDATE} input.auth.organization.id == input.resource.organization.id organizations.is_member utils.has_perm(utils.USER) is_target_staff } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id organizations.is_member is_target_staff } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id organizations.is_member diff --git a/cvat/apps/engine/rules/cloudstorages.rego b/cvat/apps/engine/rules/cloudstorages.rego index 1b57bdee015c..3e278a35a7d5 100644 --- a/cvat/apps/engine/rules/cloudstorages.rego +++ b/cvat/apps/engine/rules/cloudstorages.rego @@ -1,4 +1,7 @@ package cloudstorages + +import rego.v1 + import data.utils import data.organizations @@ -29,80 +32,80 @@ import data.organizations default allow := false # Admin has no restrictions -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.CREATE utils.has_perm(utils.USER) utils.is_sandbox } -allow { +allow if { input.scope == utils.CREATE input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } 
-allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin qobject := [ {"organization": input.auth.organization.id} ] -} else := qobject { +} else := qobject if { utils.has_perm(utils.USER) organizations.has_perm(organizations.SUPERVISOR) qobject := [ {"organization": input.auth.organization.id} ] -} else := qobject { +} else := qobject if { utils.is_sandbox qobject := [ {"owner": input.auth.user.id} ] -} else := qobject { +} else := qobject if { utils.is_organization qobject := [ {"owner": input.auth.user.id}, {"organization": input.auth.organization.id}, "&" ] } -allow { - { utils.VIEW, utils.LIST_CONTENT }[input.scope] +allow if { + input.scope in {utils.VIEW, utils.LIST_CONTENT} utils.is_sandbox utils.is_resource_owner } -allow { - { utils.VIEW, utils.LIST_CONTENT }[input.scope] +allow if { + input.scope in {utils.VIEW, utils.LIST_CONTENT} input.auth.organization.id == input.resource.organization.id organizations.is_member utils.is_resource_owner } -allow { - { utils.VIEW, utils.LIST_CONTENT }[input.scope] +allow if { + input.scope in {utils.VIEW, utils.LIST_CONTENT} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.SUPERVISOR) } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} utils.is_sandbox utils.has_perm(utils.WORKER) utils.is_resource_owner } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} input.auth.organization.id == input.resource.organization.id organizations.is_member utils.has_perm(utils.WORKER) @@ -110,8 +113,8 @@ allow { } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) diff --git a/cvat/apps/engine/rules/comments.rego b/cvat/apps/engine/rules/comments.rego index cd5b987a50b6..019a5ebcecc4 100644 --- a/cvat/apps/engine/rules/comments.rego +++ b/cvat/apps/engine/rules/comments.rego @@ -1,4 +1,7 @@ package comments + +import rego.v1 + import data.utils import data.organizations @@ -41,100 +44,100 @@ import data.organizations # } # } -is_comment_owner { +is_comment_owner if { input.resource.owner.id == input.auth.user.id } -is_issue_owner { +is_issue_owner if { input.resource.issue.owner.id == input.auth.user.id } -is_issue_assignee { +is_issue_assignee if { input.resource.issue.assignee.id == input.auth.user.id } -is_job_assignee { +is_job_assignee if { input.resource.job.assignee.id == input.auth.user.id } -is_task_owner { +is_task_owner if { input.resource.task.owner.id == input.auth.user.id } -is_task_assignee { +is_task_assignee if { input.resource.task.assignee.id == input.auth.user.id } -is_project_owner { +is_project_owner if { input.resource.project.owner.id == input.auth.user.id } -is_project_assignee { +is_project_assignee if { input.resource.project.assignee.id == input.auth.user.id } -is_project_staff { +is_project_staff if { is_project_owner } -is_project_staff { +is_project_staff if { is_project_assignee } -is_task_staff { +is_task_staff if { is_project_staff } 
-is_task_staff { +is_task_staff if { is_task_owner } -is_task_staff { +is_task_staff if { is_task_assignee } -is_job_staff { +is_job_staff if { is_task_staff } -is_job_staff { +is_job_staff if { is_job_assignee } -is_issue_staff { +is_issue_staff if { is_job_staff } -is_issue_staff { +is_issue_staff if { is_issue_owner } -is_issue_staff { +is_issue_staff if { is_issue_assignee } -is_comment_staff { +is_comment_staff if { is_issue_staff } -is_comment_staff { +is_comment_staff if { is_comment_owner } default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.CREATE_IN_ISSUE utils.is_sandbox utils.has_perm(utils.WORKER) is_issue_staff } -allow { +allow if { input.scope == utils.CREATE_IN_ISSUE input.auth.organization.id == input.resource.organization.id utils.is_organization @@ -142,7 +145,7 @@ allow { organizations.has_perm(organizations.MAINTAINER) } -allow { +allow if { input.scope == utils.CREATE_IN_ISSUE input.auth.organization.id == input.resource.organization.id utils.is_organization @@ -151,20 +154,20 @@ allow { is_issue_staff } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization org := input.auth.organization @@ -172,7 +175,7 @@ filter := [] { # Django Q object to filter list of entries {"issue__job__segment__task__organization": org.id}, {"issue__job__segment__task__project__organization": org.id}, "|" ] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ @@ -185,7 +188,7 @@ filter := [] { # Django Q object to filter list of entries {"issue__job__segment__task__project__owner": user.id}, "|", {"issue__job__segment__task__project__assignee": user.id}, "|" ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) @@ -194,7 +197,7 @@ filter := [] { # Django Q object to filter list of entries {"issue__job__segment__task__organization": org.id}, {"issue__job__segment__task__project__organization": org.id}, "|" ] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user org := input.auth.organization @@ -212,42 +215,42 @@ filter := [] { # Django Q object to filter list of entries ] } -allow { +allow if { input.scope == utils.VIEW utils.is_sandbox is_comment_staff } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id organizations.is_member is_comment_staff } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} utils.is_sandbox utils.has_perm(utils.WORKER) is_comment_staff } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} 
input.auth.organization.id == input.resource.organization.id is_comment_staff utils.has_perm(utils.WORKER) diff --git a/cvat/apps/engine/rules/issues.rego b/cvat/apps/engine/rules/issues.rego index 0475f832ad53..803dab16c019 100644 --- a/cvat/apps/engine/rules/issues.rego +++ b/cvat/apps/engine/rules/issues.rego @@ -1,4 +1,7 @@ package issues + +import rego.v1 + import data.utils import data.organizations @@ -38,96 +41,96 @@ import data.organizations # } # } -is_issue_owner { +is_issue_owner if { input.resource.owner.id == input.auth.user.id } -is_issue_assignee { +is_issue_assignee if { input.resource.assignee.id == input.auth.user.id } -is_job_assignee { +is_job_assignee if { input.resource.job.assignee.id == input.auth.user.id } -is_task_owner { +is_task_owner if { input.resource.task.owner.id == input.auth.user.id } -is_task_assignee { +is_task_assignee if { input.resource.task.assignee.id == input.auth.user.id } -is_project_owner { +is_project_owner if { input.resource.project.owner.id == input.auth.user.id } -is_project_assignee { +is_project_assignee if { input.resource.project.assignee.id == input.auth.user.id } -is_project_staff { +is_project_staff if { is_project_owner } -is_project_staff { +is_project_staff if { is_project_assignee } -is_task_staff { +is_task_staff if { is_project_staff } -is_task_staff { +is_task_staff if { is_task_owner } -is_task_staff { +is_task_staff if { is_task_assignee } -is_job_staff { +is_job_staff if { is_task_staff } -is_job_staff { +is_job_staff if { is_job_assignee } -is_issue_admin { +is_issue_admin if { is_task_staff } -is_issue_admin { +is_issue_admin if { is_issue_owner } -is_issue_staff { +is_issue_staff if { is_job_staff } -is_issue_staff { +is_issue_staff if { is_issue_admin } -is_issue_staff { +is_issue_staff if { is_issue_assignee } default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.CREATE_IN_JOB utils.is_sandbox utils.has_perm(utils.WORKER) is_job_staff } -allow { +allow if { input.scope == utils.CREATE_IN_JOB input.auth.organization.id == input.resource.organization.id utils.is_organization @@ -135,7 +138,7 @@ allow { organizations.has_perm(organizations.MAINTAINER) } -allow { +allow if { input.scope == utils.CREATE_IN_JOB input.auth.organization.id == input.resource.organization.id utils.is_organization @@ -144,20 +147,20 @@ allow { is_job_staff } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization org := input.auth.organization @@ -165,7 +168,7 @@ filter := [] { # Django Q object to filter list of entries {"job__segment__task__organization": org.id}, {"job__segment__task__project__organization": org.id}, "|" ] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ @@ -176,7 +179,7 @@ filter := [] { # Django Q object to filter list of entries {"job__segment__task__project__owner": user.id}, "|", {"job__segment__task__project__assignee": user.id}, "|" ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) @@ -185,7 +188,7 @@ filter := [] { # Django Q object to filter list of entries {"job__segment__task__organization": org.id}, 
{"job__segment__task__project__organization": org.id}, "|" ] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user org := input.auth.organization @@ -201,34 +204,34 @@ filter := [] { # Django Q object to filter list of entries ] } -allow { +allow if { input.scope == utils.VIEW utils.is_sandbox is_issue_staff } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id organizations.is_member is_issue_staff } -allow { +allow if { input.scope == utils.UPDATE utils.is_sandbox utils.has_perm(utils.WORKER) is_issue_staff } -allow { +allow if { input.scope == utils.UPDATE input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) @@ -236,14 +239,14 @@ allow { is_issue_staff } -allow { +allow if { input.scope == utils.DELETE utils.is_sandbox utils.has_perm(utils.WORKER) is_issue_admin } -allow { +allow if { input.scope == utils.DELETE input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) @@ -251,8 +254,8 @@ allow { is_issue_admin } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) diff --git a/cvat/apps/engine/rules/jobs.rego b/cvat/apps/engine/rules/jobs.rego index 4aaa02dad444..22b91a3a1050 100644 --- a/cvat/apps/engine/rules/jobs.rego +++ b/cvat/apps/engine/rules/jobs.rego @@ -1,4 +1,7 @@ package jobs + +import rego.v1 + import data.utils import data.organizations @@ -36,81 +39,81 @@ import data.organizations # } # } -is_job_assignee { +is_job_assignee if { input.resource.assignee.id == input.auth.user.id } -is_task_owner { +is_task_owner if { input.resource.task.owner.id == input.auth.user.id } -is_task_assignee { +is_task_assignee if { input.resource.task.assignee.id == input.auth.user.id } -is_project_owner { +is_project_owner if { input.resource.project.owner.id == input.auth.user.id } -is_project_assignee { +is_project_assignee if { input.resource.project.assignee.id == input.auth.user.id } -is_project_staff { +is_project_staff if { is_project_owner } -is_project_staff { +is_project_staff if { is_project_assignee } -is_task_staff { +is_task_staff if { is_project_staff } -is_task_staff { +is_task_staff if { is_task_owner } -is_task_staff { +is_task_staff if { is_task_assignee } -is_job_staff { +is_job_staff if { is_task_staff } -is_job_staff { +is_job_staff if { is_job_assignee } default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization qobject := [ {"segment__task__organization": input.auth.organization.id}, {"segment__task__project__organization": input.auth.organization.id}, "|" ] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ @@ -119,14 +122,14 @@ filter := [] { # Django Q object to filter list of entries 
{"segment__task__assignee_id": user.id}, "|", {"segment__task__project__owner_id": user.id}, "|", {"segment__task__project__assignee_id": user.id}, "|"] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) qobject := [ {"segment__task__organization": input.auth.organization.id}, {"segment__task__project__organization": input.auth.organization.id}, "|"] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user qobject := [ @@ -139,102 +142,112 @@ filter := [] { # Django Q object to filter list of entries {"segment__task__project__organization": input.auth.organization.id}, "|", "&"] } -allow { - { utils.CREATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.DELETE} utils.has_perm(utils.USER) utils.is_sandbox is_task_staff } -allow { - { utils.CREATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.DELETE} input.auth.organization.id == input.resource.organization.id organizations.has_perm(organizations.SUPERVISOR) utils.has_perm(utils.USER) is_task_staff } -allow { - { utils.VIEW, - utils.EXPORT_DATASET, utils.EXPORT_ANNOTATIONS, - utils.VIEW_ANNOTATIONS, utils.VIEW_DATA, utils.VIEW_METADATA - }[input.scope] +allow if { + input.scope in { + utils.VIEW, + utils.EXPORT_DATASET, utils.EXPORT_ANNOTATIONS, + utils.VIEW_ANNOTATIONS, utils.VIEW_DATA, utils.VIEW_METADATA + } utils.is_sandbox is_job_staff } -allow { - { utils.CREATE, utils.DELETE, utils.VIEW, - utils.EXPORT_DATASET, utils.EXPORT_ANNOTATIONS, - utils.VIEW_ANNOTATIONS, utils.VIEW_DATA, utils.VIEW_METADATA - }[input.scope] +allow if { + input.scope in { + utils.CREATE, utils.DELETE, utils.VIEW, + utils.EXPORT_DATASET, utils.EXPORT_ANNOTATIONS, + utils.VIEW_ANNOTATIONS, utils.VIEW_DATA, utils.VIEW_METADATA + } input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.VIEW, - utils.EXPORT_DATASET, utils.EXPORT_ANNOTATIONS, - utils.VIEW_ANNOTATIONS, utils.VIEW_DATA, utils.VIEW_METADATA - }[input.scope] +allow if { + input.scope in { + utils.VIEW, + utils.EXPORT_DATASET, utils.EXPORT_ANNOTATIONS, + utils.VIEW_ANNOTATIONS, utils.VIEW_DATA, utils.VIEW_METADATA + } input.auth.organization.id == input.resource.organization.id organizations.has_perm(organizations.WORKER) is_job_staff } -allow { - { utils.UPDATE_STATE, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, - utils.IMPORT_ANNOTATIONS, utils.UPDATE_METADATA }[input.scope] +allow if { + input.scope in { + utils.UPDATE_STATE, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, + utils.IMPORT_ANNOTATIONS, utils.UPDATE_METADATA + } utils.is_sandbox utils.has_perm(utils.WORKER) is_job_staff } -allow { - { utils.UPDATE_STATE, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, - utils.IMPORT_ANNOTATIONS, utils.UPDATE_METADATA }[input.scope] +allow if { + input.scope in { + utils.UPDATE_STATE, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, + utils.IMPORT_ANNOTATIONS, utils.UPDATE_METADATA + } input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.UPDATE_STATE, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, - utils.IMPORT_ANNOTATIONS, utils.UPDATE_METADATA }[input.scope] +allow if { + input.scope in { + utils.UPDATE_STATE, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, + 
utils.IMPORT_ANNOTATIONS, utils.UPDATE_METADATA + } input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) is_job_staff } -allow { - { utils.VIEW, utils.VIEW_ANNOTATIONS, utils.VIEW_DATA, utils.VIEW_METADATA, - utils.UPDATE_STATE, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, - utils.IMPORT_ANNOTATIONS, utils.UPDATE_METADATA - }[input.scope] +allow if { + input.scope in { + utils.VIEW, utils.VIEW_ANNOTATIONS, utils.VIEW_DATA, utils.VIEW_METADATA, + utils.UPDATE_STATE, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, + utils.IMPORT_ANNOTATIONS, utils.UPDATE_METADATA + } input.auth.organization.id == input.resource.organization.id input.auth.user.privilege == utils.WORKER input.auth.organization.user.role == null is_job_assignee } -allow { - { utils.UPDATE_STAGE, utils.UPDATE_ASSIGNEE }[input.scope] +allow if { + input.scope in {utils.UPDATE_STAGE, utils.UPDATE_ASSIGNEE} utils.is_sandbox utils.has_perm(utils.WORKER) is_task_staff } -allow { - { utils.UPDATE_STAGE, utils.UPDATE_ASSIGNEE }[input.scope] +allow if { + input.scope in {utils.UPDATE_STAGE, utils.UPDATE_ASSIGNEE} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.UPDATE_STAGE, utils.UPDATE_ASSIGNEE }[input.scope] +allow if { + input.scope in {utils.UPDATE_STAGE, utils.UPDATE_ASSIGNEE} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) diff --git a/cvat/apps/engine/rules/labels.rego b/cvat/apps/engine/rules/labels.rego index 773147636495..a50296377683 100644 --- a/cvat/apps/engine/rules/labels.rego +++ b/cvat/apps/engine/rules/labels.rego @@ -1,7 +1,6 @@ package labels -import future.keywords.if -import future.keywords.in +import rego.v1 import data.utils import data.organizations @@ -44,24 +43,24 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization org := input.auth.organization @@ -69,7 +68,7 @@ filter := [] { # Django Q object to filter list of entries {"task__organization": org.id}, {"project__organization": org.id}, "|", ] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ @@ -78,7 +77,7 @@ filter := [] { # Django Q object to filter list of entries {"project__owner_id": user.id}, "|", {"project__assignee_id": user.id}, "|", ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) @@ -87,7 +86,7 @@ filter := [] { # Django Q object to filter list of entries {"task__organization": org.id}, {"project__organization": org.id}, "|", ] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user qobject := [ diff --git a/cvat/apps/engine/rules/projects.rego b/cvat/apps/engine/rules/projects.rego index 642574529d07..dadebdc894ad 100644 --- a/cvat/apps/engine/rules/projects.rego +++ b/cvat/apps/engine/rules/projects.rego @@ -1,4 +1,7 @@ package projects + +import rego.v1 + 
import data.utils import data.organizations @@ -31,91 +34,91 @@ import data.organizations default allow := false -is_project_staff { +is_project_staff if { utils.is_resource_owner } -is_project_staff { +is_project_staff if { utils.is_resource_assignee } -allow { +allow if { utils.is_admin } -allow { - { utils.CREATE, utils.IMPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.IMPORT_BACKUP} utils.is_sandbox utils.has_perm(utils.USER) } -allow { - { utils.CREATE, utils.IMPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.IMPORT_BACKUP} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.SUPERVISOR) } -allow { - { utils.CREATE, utils.IMPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.IMPORT_BACKUP} utils.is_sandbox utils.has_perm(utils.BUSINESS) } -allow { - { utils.CREATE, utils.IMPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.IMPORT_BACKUP} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.BUSINESS) organizations.has_perm(organizations.SUPERVISOR) } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization qobject := [ {"organization": input.auth.organization.id} ] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ {"owner_id": user.id}, {"assignee_id": user.id}, "|" ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) qobject := [ {"organization": input.auth.organization.id} ] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user qobject := [ {"owner_id": user.id}, {"assignee_id": user.id}, "|", {"organization": input.auth.organization.id}, "&" ] } -allow { +allow if { input.scope == utils.VIEW utils.is_sandbox is_project_staff } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id organizations.has_perm(organizations.WORKER) @@ -123,58 +126,58 @@ allow { } -allow { - { utils.DELETE, utils.UPDATE_ORG }[input.scope] +allow if { + input.scope in {utils.DELETE, utils.UPDATE_ORG} utils.is_sandbox utils.has_perm(utils.WORKER) utils.is_resource_owner } -allow { - { utils.DELETE, utils.UPDATE_ORG }[input.scope] +allow if { + input.scope in {utils.DELETE, utils.UPDATE_ORG} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.is_member utils.is_resource_owner } -allow { - { utils.DELETE, utils.UPDATE_ORG }[input.scope] +allow if { + input.scope in {utils.DELETE, utils.UPDATE_ORG} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.is_staff } -allow { - { utils.UPDATE_DESC, utils.IMPORT_DATASET }[input.scope] +allow if { + input.scope in {utils.UPDATE_DESC, utils.IMPORT_DATASET} utils.is_sandbox is_project_staff utils.has_perm(utils.WORKER) } 
-allow { - { utils.UPDATE_DESC, utils.IMPORT_DATASET }[input.scope] +allow if { + input.scope in {utils.UPDATE_DESC, utils.IMPORT_DATASET} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.is_staff } -allow { - { utils.UPDATE_DESC, utils.IMPORT_DATASET }[input.scope] +allow if { + input.scope in {utils.UPDATE_DESC, utils.IMPORT_DATASET} is_project_staff input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.is_member } -allow { +allow if { input.scope == utils.UPDATE_ASSIGNEE utils.is_sandbox utils.is_resource_owner utils.has_perm(utils.WORKER) } -allow { +allow if { input.scope == utils.UPDATE_ASSIGNEE input.auth.organization.id == input.resource.organization.id utils.is_resource_owner @@ -182,14 +185,14 @@ allow { organizations.is_member } -allow { +allow if { input.scope == utils.UPDATE_ASSIGNEE input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.is_staff } -allow { +allow if { input.scope == utils.UPDATE_OWNER input.auth.organization.id == input.resource.organization.id utils.is_resource_owner @@ -197,28 +200,28 @@ allow { organizations.is_staff } -allow { +allow if { input.scope == utils.UPDATE_OWNER input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.is_staff } -allow { - { utils.EXPORT_ANNOTATIONS, utils.EXPORT_DATASET, utils.EXPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.EXPORT_ANNOTATIONS, utils.EXPORT_DATASET, utils.EXPORT_BACKUP} utils.is_sandbox is_project_staff } -allow { - { utils.EXPORT_ANNOTATIONS, utils.EXPORT_DATASET, utils.EXPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.EXPORT_ANNOTATIONS, utils.EXPORT_DATASET, utils.EXPORT_BACKUP} input.auth.organization.id == input.resource.organization.id organizations.is_member is_project_staff } -allow { - { utils.EXPORT_ANNOTATIONS, utils.EXPORT_DATASET, utils.EXPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.EXPORT_ANNOTATIONS, utils.EXPORT_DATASET, utils.EXPORT_BACKUP} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) diff --git a/cvat/apps/engine/rules/server.rego b/cvat/apps/engine/rules/server.rego index 0aa94d42d1c4..bfe3b47a0d46 100644 --- a/cvat/apps/engine/rules/server.rego +++ b/cvat/apps/engine/rules/server.rego @@ -1,4 +1,7 @@ package server + +import rego.v1 + import data.utils # input: { @@ -22,11 +25,11 @@ import data.utils default allow := false -allow { +allow if { input.scope == utils.VIEW } -allow { +allow if { input.scope == utils.LIST_CONTENT utils.has_perm(utils.USER) } diff --git a/cvat/apps/engine/rules/tasks.rego b/cvat/apps/engine/rules/tasks.rego index 79c057db434b..9f1b7fa951a9 100644 --- a/cvat/apps/engine/rules/tasks.rego +++ b/cvat/apps/engine/rules/tasks.rego @@ -1,7 +1,6 @@ package tasks -import future.keywords.if -import future.keywords.in +import rego.v1 import data.utils import data.organizations @@ -39,89 +38,89 @@ import data.organizations # } # } -is_task_owner { +is_task_owner if { input.resource.owner.id == input.auth.user.id } -is_task_assignee { +is_task_assignee if { input.resource.assignee.id == input.auth.user.id } -is_project_owner { +is_project_owner if { input.resource.project.owner.id == input.auth.user.id } -is_project_assignee { +is_project_assignee if { input.resource.project.assignee.id == input.auth.user.id } -is_project_staff { +is_project_staff 
if { is_project_owner } -is_project_staff { +is_project_staff if { is_project_assignee } -is_task_staff { +is_task_staff if { is_project_staff } -is_task_staff { +is_task_staff if { is_task_owner } -is_task_staff { +is_task_staff if { is_task_assignee } default allow := false -allow { +allow if { utils.is_admin } -allow { - { utils.CREATE, utils.IMPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.IMPORT_BACKUP} utils.is_sandbox utils.has_perm(utils.USER) } -allow { - { utils.CREATE, utils.IMPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.IMPORT_BACKUP} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.SUPERVISOR) } -allow { - { utils.CREATE, utils.IMPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.IMPORT_BACKUP} utils.is_sandbox utils.has_perm(utils.BUSINESS) } -allow { - { utils.CREATE, utils.IMPORT_BACKUP }[input.scope] +allow if { + input.scope in {utils.CREATE, utils.IMPORT_BACKUP} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.BUSINESS) organizations.has_perm(organizations.SUPERVISOR) } -allow { +allow if { input.scope == utils.CREATE_IN_PROJECT utils.is_sandbox utils.has_perm(utils.USER) is_project_staff } -allow { +allow if { input.scope == utils.CREATE_IN_PROJECT input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.SUPERVISOR) } -allow { +allow if { input.scope == utils.CREATE_IN_PROJECT input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) @@ -129,50 +128,50 @@ allow { is_project_staff } -allow { +allow if { input.scope == utils.CREATE_IN_PROJECT utils.is_sandbox utils.has_perm(utils.BUSINESS) is_project_staff } -allow { +allow if { input.scope == utils.CREATE_IN_PROJECT input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.BUSINESS) organizations.has_perm(organizations.SUPERVISOR) } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization qobject := [ {"organization": input.auth.organization.id}, {"project__organization": input.auth.organization.id}, "|"] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ {"owner_id": user.id}, {"assignee_id": user.id}, "|", {"project__owner_id": user.id}, "|", {"project__assignee_id": user.id}, "|"] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) qobject := [ {"organization": input.auth.organization.id}, {"project__organization": input.auth.organization.id}, "|"] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user qobject := [ {"owner_id": user.id}, {"assignee_id": user.id}, "|", @@ -181,90 +180,112 @@ filter := [] { # Django Q object to filter list of entries {"project__organization": input.auth.organization.id}, "|", "&"] } -allow { - { utils.VIEW, utils.VIEW_ANNOTATIONS, utils.EXPORT_DATASET, utils.VIEW_METADATA, - utils.VIEW_DATA, utils.EXPORT_ANNOTATIONS, utils.EXPORT_BACKUP }[input.scope] +allow if { + 
input.scope in { + utils.VIEW, utils.VIEW_ANNOTATIONS, utils.EXPORT_DATASET, utils.VIEW_METADATA, + utils.VIEW_DATA, utils.EXPORT_ANNOTATIONS, utils.EXPORT_BACKUP + } utils.is_sandbox is_task_staff } -allow { - { utils.VIEW, utils.VIEW_ANNOTATIONS, utils.EXPORT_DATASET, utils.VIEW_METADATA, - utils.VIEW_DATA, utils.EXPORT_ANNOTATIONS, utils.EXPORT_BACKUP }[input.scope] +allow if { + input.scope in { + utils.VIEW, utils.VIEW_ANNOTATIONS, utils.EXPORT_DATASET, utils.VIEW_METADATA, + utils.VIEW_DATA, utils.EXPORT_ANNOTATIONS, utils.EXPORT_BACKUP + } input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.VIEW, utils.VIEW_ANNOTATIONS, utils.EXPORT_DATASET, utils.VIEW_METADATA, - utils.VIEW_DATA, utils.EXPORT_ANNOTATIONS, utils.EXPORT_BACKUP }[input.scope] +allow if { + input.scope in { + utils.VIEW, utils.VIEW_ANNOTATIONS, utils.EXPORT_DATASET, utils.VIEW_METADATA, + utils.VIEW_DATA, utils.EXPORT_ANNOTATIONS, utils.EXPORT_BACKUP + } input.auth.organization.id == input.resource.organization.id organizations.has_perm(organizations.WORKER) is_task_staff } -allow { - { utils.UPDATE_DESC, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, - utils.UPLOAD_DATA, utils.UPDATE_METADATA, utils.IMPORT_ANNOTATIONS }[input.scope] +allow if { + input.scope in { + utils.UPDATE_DESC, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, + utils.UPLOAD_DATA, utils.UPDATE_METADATA, utils.IMPORT_ANNOTATIONS + } utils.is_sandbox is_task_staff utils.has_perm(utils.WORKER) } -allow { - { utils.UPDATE_DESC, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, - utils.UPLOAD_DATA, utils.UPDATE_METADATA, utils.IMPORT_ANNOTATIONS }[input.scope] +allow if { + input.scope in { + utils.UPDATE_DESC, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, + utils.UPLOAD_DATA, utils.UPDATE_METADATA, utils.IMPORT_ANNOTATIONS + } input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.UPDATE_DESC, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, - utils.UPLOAD_DATA, utils.UPDATE_METADATA, utils.IMPORT_ANNOTATIONS }[input.scope] +allow if { + input.scope in { + utils.UPDATE_DESC, utils.UPDATE_ANNOTATIONS, utils.DELETE_ANNOTATIONS, + utils.UPLOAD_DATA, utils.UPDATE_METADATA, utils.IMPORT_ANNOTATIONS + } is_task_staff input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) } -allow { - { utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, - utils.DELETE, utils.UPDATE_ORG }[input.scope] +allow if { + input.scope in { + utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, + utils.DELETE, utils.UPDATE_ORG + } utils.is_sandbox is_project_staff utils.has_perm(utils.WORKER) } -allow { - { utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, - utils.DELETE, utils.UPDATE_ORG }[input.scope] +allow if { + input.scope in { + utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, + utils.DELETE, utils.UPDATE_ORG + } utils.is_sandbox is_task_owner utils.has_perm(utils.WORKER) } -allow { - { utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, - utils.DELETE, utils.UPDATE_ORG }[input.scope] +allow if { + input.scope in { + utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, + utils.DELETE, utils.UPDATE_ORG + } input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) 
organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, - utils.DELETE, utils.UPDATE_ORG }[input.scope] +allow if { + input.scope in { + utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, + utils.DELETE, utils.UPDATE_ORG + } input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) is_task_owner } -allow { - { utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, - utils.DELETE, utils.UPDATE_ORG }[input.scope] +allow if { + input.scope in { + utils.UPDATE_OWNER, utils.UPDATE_ASSIGNEE, utils.UPDATE_PROJECT, + utils.DELETE, utils.UPDATE_ORG + } input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) diff --git a/cvat/apps/engine/rules/tests/generators/annotationguides_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/annotationguides_test.gen.rego.py index c12c56ff54a6..4cf562741677 100644 --- a/cvat/apps/engine/rules/tests/generators/annotationguides_test.gen.rego.py +++ b/cvat/apps/engine/rules/tests/generators/annotationguides_test.gen.rego.py @@ -178,7 +178,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org, in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG, ): @@ -196,7 +196,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/engine/rules/tests/generators/cloudstorages_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/cloudstorages_test.gen.rego.py index 04802e5e966b..63460df540b2 100644 --- a/cvat/apps/engine/rules/tests/generators/cloudstorages_test.gen.rego.py +++ b/cvat/apps/engine/rules/tests/generators/cloudstorages_test.gen.rego.py @@ -158,7 +158,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG ): @@ -176,7 +176,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/engine/rules/tests/generators/comments_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/comments_test.gen.rego.py index b8c2eff1b7c2..f36c8a7dfa0d 100644 --- a/cvat/apps/engine/rules/tests/generators/comments_test.gen.rego.py +++ b/cvat/apps/engine/rules/tests/generators/comments_test.gen.rego.py @@ -223,7 +223,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", 
"wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org, has_proj in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG, HAS_PROJ ): @@ -241,7 +241,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/engine/rules/tests/generators/issues_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/issues_test.gen.rego.py index 7951bc92892e..0a35d83880eb 100644 --- a/cvat/apps/engine/rules/tests/generators/issues_test.gen.rego.py +++ b/cvat/apps/engine/rules/tests/generators/issues_test.gen.rego.py @@ -214,7 +214,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org, has_proj in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG, HAS_PROJ ): @@ -232,7 +232,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/engine/rules/tests/generators/jobs_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/jobs_test.gen.rego.py index 7136d358ea75..ca799f953cd3 100644 --- a/cvat/apps/engine/rules/tests/generators/jobs_test.gen.rego.py +++ b/cvat/apps/engine/rules/tests/generators/jobs_test.gen.rego.py @@ -207,7 +207,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG ): @@ -225,7 +225,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/engine/rules/tests/generators/projects_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/projects_test.gen.rego.py index ba325f95cad4..6657f21d2994 100644 --- a/cvat/apps/engine/rules/tests/generators/projects_test.gen.rego.py +++ b/cvat/apps/engine/rules/tests/generators/projects_test.gen.rego.py @@ -174,7 +174,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG ): @@ -192,7 +192,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, 
ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/engine/rules/tests/generators/server_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/server_test.gen.rego.py index 84c9f469c783..8e9b57a814d8 100644 --- a/cvat/apps/engine/rules/tests/generators/server_test.gen.rego.py +++ b/cvat/apps/engine/rules/tests/generators/server_test.gen.rego.py @@ -121,7 +121,7 @@ def is_valid(scope, context, ownership, privilege, membership): def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES ): @@ -132,7 +132,7 @@ def gen_test_rego(name): test_name = get_name(scope, context, ownership, privilege, membership) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/engine/rules/tests/generators/tasks_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/tasks_test.gen.rego.py index 05cc890cd673..61da5c8520de 100644 --- a/cvat/apps/engine/rules/tests/generators/tasks_test.gen.rego.py +++ b/cvat/apps/engine/rules/tests/generators/tasks_test.gen.rego.py @@ -201,7 +201,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG ): @@ -219,7 +219,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/engine/rules/tests/generators/users_test.gen.rego.py b/cvat/apps/engine/rules/tests/generators/users_test.gen.rego.py index 83b70e1ad707..595cbaae4ee4 100644 --- a/cvat/apps/engine/rules/tests/generators/users_test.gen.rego.py +++ b/cvat/apps/engine/rules/tests/generators/users_test.gen.rego.py @@ -138,7 +138,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource): def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES ): @@ -150,7 +150,7 @@ def gen_test_rego(name): test_name = get_name(scope, context, ownership, privilege, membership, resource) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git 
a/cvat/apps/engine/rules/users.rego b/cvat/apps/engine/rules/users.rego index 929ee1b5b1cd..63469228e11a 100644 --- a/cvat/apps/engine/rules/users.rego +++ b/cvat/apps/engine/rules/users.rego @@ -1,4 +1,7 @@ package users + +import rego.v1 + import data.utils import data.organizations @@ -29,42 +32,42 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_sandbox qobject := [ {"id": input.auth.user.id} ] -} else := qobject { +} else := qobject if { org_id := input.auth.organization.id qobject := [ {"memberships__organization": org_id} ] } -allow { +allow if { input.scope == utils.VIEW input.resource.id == input.auth.user.id } -allow { +allow if { input.scope == utils.VIEW input.resource.membership.role != null } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} input.auth.user.id == input.resource.id } diff --git a/cvat/apps/events/rules/events.rego b/cvat/apps/events/rules/events.rego index 903c5453af25..0152ec721ba8 100644 --- a/cvat/apps/events/rules/events.rego +++ b/cvat/apps/events/rules/events.rego @@ -1,4 +1,7 @@ package events + +import rego.v1 + import data.utils import data.organizations @@ -23,42 +26,42 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.SEND_EVENTS } -allow { +allow if { input.scope == utils.DUMP_EVENTS utils.is_sandbox utils.has_perm(utils.WORKER) } -allow { +allow if { input.scope == utils.DUMP_EVENTS utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) } -filter := [] { +filter := [] if { utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization qobject := [ {"org_id": input.auth.organization.id} ] -} else := qobject { +} else := qobject if { utils.is_sandbox qobject := [ {"user_id": input.auth.user.id} ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) qobject := [ {"org_id": input.auth.organization.id} ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.WORKER) diff --git a/cvat/apps/events/rules/tests/generators/events_test.gen.rego.py b/cvat/apps/events/rules/tests/generators/events_test.gen.rego.py index b6db2d79fa2d..da9d54d79e22 100644 --- a/cvat/apps/events/rules/tests/generators/events_test.gen.rego.py +++ b/cvat/apps/events/rules/tests/generators/events_test.gen.rego.py @@ -140,7 +140,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") print("scopes", SCOPES) for scope, context, ownership, privilege, membership, same_org in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG @@ -159,7 +159,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if 
{{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/iam/rules/utils.rego b/cvat/apps/iam/rules/utils.rego index 7ad2c70d5a1f..c0f719c63957 100644 --- a/cvat/apps/iam/rules/utils.rego +++ b/cvat/apps/iam/rules/utils.rego @@ -1,5 +1,7 @@ package utils +import rego.v1 + # Groups ADMIN := "admin" BUSINESS := "business" @@ -65,38 +67,38 @@ get_priority(privilege) := { null: 1000 }[privilege] -has_perm(group) { +has_perm(group) if { get_priority(input.auth.user.privilege) <= get_priority(group) } -is_admin { +is_admin if { input.auth.user.privilege == ADMIN } -is_business { +is_business if { input.auth.user.privilege == BUSINESS } -is_user { +is_user if { input.auth.user.privilege == USER } -is_worker { +is_worker if { input.auth.user.privilege == WORKER } -is_resource_owner { +is_resource_owner if { input.resource.owner.id == input.auth.user.id } -is_resource_assignee { +is_resource_assignee if { input.resource.assignee.id == input.auth.user.id } -is_sandbox { +is_sandbox if { input.auth.organization == null } -is_organization { +is_organization if { input.auth.organization != null } diff --git a/cvat/apps/lambda_manager/rules/lambda.rego b/cvat/apps/lambda_manager/rules/lambda.rego index 90d30ee9aa81..2829860c0932 100644 --- a/cvat/apps/lambda_manager/rules/lambda.rego +++ b/cvat/apps/lambda_manager/rules/lambda.rego @@ -1,5 +1,7 @@ package lambda +import rego.v1 + import data.utils import data.organizations @@ -24,43 +26,43 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST } -allow { +allow if { input.scope == utils.VIEW } -allow { - { utils.CALL_ONLINE, utils.CALL_OFFLINE, utils.LIST_OFFLINE }[input.scope] +allow if { + input.scope in {utils.CALL_ONLINE, utils.CALL_OFFLINE, utils.LIST_OFFLINE} utils.has_perm(utils.WORKER) } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization qobject := [ {"organization": input.auth.organization.id}, {"project__organization": input.auth.organization.id}, "|"] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ {"owner_id": user.id}, {"assignee_id": user.id}, "|", {"project__owner_id": user.id}, "|", {"project__assignee_id": user.id}, "|"] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) qobject := [ {"organization": input.auth.organization.id}, {"project__organization": input.auth.organization.id}, "|"] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user qobject := [ {"owner_id": user.id}, {"assignee_id": user.id}, "|", diff --git a/cvat/apps/lambda_manager/rules/tests/generators/lambda_test.gen.rego.py b/cvat/apps/lambda_manager/rules/tests/generators/lambda_test.gen.rego.py index b2a9a7659707..5a669c5f49fc 100644 --- a/cvat/apps/lambda_manager/rules/tests/generators/lambda_test.gen.rego.py +++ b/cvat/apps/lambda_manager/rules/tests/generators/lambda_test.gen.rego.py @@ -134,7 +134,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource): def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package 
{name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES ): @@ -146,7 +146,7 @@ def gen_test_rego(name): test_name = get_name(scope, context, ownership, privilege, membership, resource) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/log_viewer/rules/analytics.rego b/cvat/apps/log_viewer/rules/analytics.rego index ef36929639ff..970a6a3e97d1 100644 --- a/cvat/apps/log_viewer/rules/analytics.rego +++ b/cvat/apps/log_viewer/rules/analytics.rego @@ -1,4 +1,7 @@ package analytics + +import rego.v1 + import data.utils # input: { @@ -25,11 +28,11 @@ import data.utils default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.resource.visibility == utils.PUBLIC input.scope == utils.VIEW utils.has_perm(utils.BUSINESS) diff --git a/cvat/apps/log_viewer/rules/tests/generators/analytics_test.gen.rego.py b/cvat/apps/log_viewer/rules/tests/generators/analytics_test.gen.rego.py index e2fc73ebc314..ce4b50a7c8fb 100644 --- a/cvat/apps/log_viewer/rules/tests/generators/analytics_test.gen.rego.py +++ b/cvat/apps/log_viewer/rules/tests/generators/analytics_test.gen.rego.py @@ -138,7 +138,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource): def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES ): @@ -150,7 +150,7 @@ def gen_test_rego(name): test_name = get_name(scope, context, ownership, privilege, membership, resource) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/organizations/rules/invitations.rego b/cvat/apps/organizations/rules/invitations.rego index 9471ec84157f..3a51f76128e5 100644 --- a/cvat/apps/organizations/rules/invitations.rego +++ b/cvat/apps/organizations/rules/invitations.rego @@ -1,4 +1,7 @@ package invitations + +import rego.v1 + import data.utils import data.organizations @@ -29,37 +32,37 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_sandbox utils.is_admin -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ {"owner": user.id}, {"membership__user": user.id}, "|" ] -} else := qobject { +} else := qobject if { utils.is_organization utils.is_admin qobject := [ {"membership__organization": input.auth.organization.id} ] -} else := qobject { +} else := qobject if { utils.is_organization organizations.is_staff utils.has_perm(utils.USER) qobject := [ {"membership__organization": input.auth.organization.id} ] -} else := qobject { +} else := qobject 
if { utils.is_organization user := input.auth.user org_id := input.auth.organization.id @@ -67,7 +70,7 @@ filter := [] { # Django Q object to filter list of entries {"membership__organization": org_id}, "&" ] } -allow { +allow if { input.scope == utils.CREATE input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) @@ -78,7 +81,7 @@ allow { } -allow { +allow if { input.scope == utils.CREATE input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) @@ -87,73 +90,73 @@ allow { input.resource.role != organizations.OWNER } -allow { +allow if { input.scope == utils.VIEW utils.is_sandbox utils.is_resource_owner } -allow { +allow if { input.scope == utils.VIEW utils.is_sandbox input.resource.invitee.id == input.auth.user.id } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.is_staff } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id utils.is_resource_owner } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id input.resource.invitee.id == input.auth.user.id } -allow { +allow if { input.scope == utils.RESEND utils.has_perm(utils.WORKER) utils.is_sandbox utils.is_resource_owner } -allow { +allow if { input.scope == utils.RESEND input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.is_staff } -allow { +allow if { input.scope == utils.RESEND input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) utils.is_resource_owner } -allow { +allow if { input.scope == utils.DELETE utils.is_sandbox utils.has_perm(utils.WORKER) utils.is_resource_owner } -allow { +allow if { input.scope == utils.DELETE input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.USER) organizations.is_staff } -allow { +allow if { input.scope == utils.DELETE input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) @@ -161,14 +164,14 @@ allow { } -allow { - { utils.ACCEPT, utils.DECLINE }[input.scope] +allow if { + input.scope in {utils.ACCEPT, utils.DECLINE} input.resource.invitee.id == input.auth.user.id utils.is_sandbox } -allow { - { utils.ACCEPT, utils.DECLINE }[input.scope] +allow if { + input.scope in {utils.ACCEPT, utils.DECLINE} input.auth.organization.id == input.resource.organization.id input.resource.invitee.id == input.auth.user.id } diff --git a/cvat/apps/organizations/rules/memberships.rego b/cvat/apps/organizations/rules/memberships.rego index 497b6fe58ebe..c23f3039ff16 100644 --- a/cvat/apps/organizations/rules/memberships.rego +++ b/cvat/apps/organizations/rules/memberships.rego @@ -1,4 +1,7 @@ package memberships + +import rego.v1 + import data.utils import data.organizations @@ -29,53 +32,53 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_sandbox qobject := [ {"user": input.auth.user.id}, {"is_active": true}, "&" ] -} else := qobject { +} else := qobject if { utils.is_admin org_id := input.auth.organization.id qobject := [ 
{"organization": org_id} ] -} else := qobject { +} else := qobject if { organizations.is_staff org_id := input.auth.organization.id qobject := [ {"organization": org_id} ] -} else := qobject { +} else := qobject if { org_id := input.auth.organization.id qobject := [ {"organization": org_id}, {"is_active": true}, "&" ] } -allow { +allow if { input.scope == utils.VIEW input.resource.is_active utils.is_sandbox input.resource.user.id == input.auth.user.id } -allow { +allow if { input.scope == utils.VIEW organizations.is_staff input.resource.organization.id == input.auth.organization.id } -allow { +allow if { input.scope == utils.VIEW input.resource.is_active organizations.is_member @@ -84,22 +87,22 @@ allow { # maintainer of the organization can change the role of any member and remove any member except # himself/another maintainer/owner -allow { - { utils.CHANGE_ROLE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.CHANGE_ROLE, utils.DELETE} input.resource.organization.id == input.auth.organization.id utils.has_perm(utils.USER) organizations.is_maintainer - not { + not input.resource.role in { organizations.OWNER, organizations.MAINTAINER - }[input.resource.role] + } input.resource.user.id != input.auth.user.id } # owner of the organization can change the role of any member and remove any member except himself -allow { - { utils.CHANGE_ROLE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.CHANGE_ROLE, utils.DELETE} input.resource.organization.id == input.auth.organization.id utils.has_perm(utils.USER) organizations.is_owner @@ -108,7 +111,7 @@ allow { } # member can leave the organization except case when member is the owner -allow { +allow if { input.scope == utils.DELETE input.resource.is_active organizations.is_member diff --git a/cvat/apps/organizations/rules/organizations.rego b/cvat/apps/organizations/rules/organizations.rego index af9533c481dc..24643feab703 100644 --- a/cvat/apps/organizations/rules/organizations.rego +++ b/cvat/apps/organizations/rules/organizations.rego @@ -1,4 +1,7 @@ package organizations + +import rego.v1 + import data.utils # input: { @@ -23,24 +26,24 @@ MAINTAINER := "maintainer" SUPERVISOR := "supervisor" WORKER := "worker" -is_owner { +is_owner if { input.auth.organization.owner.id == input.auth.user.id input.auth.organization.user.role == OWNER } -is_maintainer { +is_maintainer if { input.auth.organization.user.role == MAINTAINER } -is_staff { +is_staff if { is_owner } -is_staff { +is_staff if { is_maintainer } -is_member { +is_member if { input.auth.organization.user.role != null } @@ -51,60 +54,60 @@ get_priority(role) := { WORKER: 100 }[role] -has_perm(role) { +has_perm(role) if { get_priority(input.auth.organization.user.role) <= get_priority(role) } default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.CREATE utils.has_perm(utils.USER) } -allow { +allow if { input.scope == utils.CREATE utils.has_perm(utils.BUSINESS) } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin -} else := qobject { +} else := qobject if { user := input.auth.user qobject := [{"members__user_id": user.id}, {"members__is_active": true}, "&", {"owner_id": user.id}, "|" ] } -allow { +allow if { input.scope == utils.LIST } -allow { +allow if { input.scope == utils.VIEW utils.is_resource_owner } -allow { +allow if { input.scope == utils.VIEW input.resource.user.role != null } -allow { +allow if { input.scope == 
utils.UPDATE utils.has_perm(utils.WORKER) utils.is_resource_owner } -allow { +allow if { input.scope == utils.UPDATE utils.has_perm(utils.WORKER) input.resource.user.role == MAINTAINER } -allow { +allow if { input.scope == utils.DELETE utils.has_perm(utils.WORKER) utils.is_resource_owner diff --git a/cvat/apps/organizations/rules/tests/generators/invitations_test.gen.rego.py b/cvat/apps/organizations/rules/tests/generators/invitations_test.gen.rego.py index 1865236b1fc6..c3ba86abb75f 100644 --- a/cvat/apps/organizations/rules/tests/generators/invitations_test.gen.rego.py +++ b/cvat/apps/organizations/rules/tests/generators/invitations_test.gen.rego.py @@ -170,7 +170,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG ): @@ -188,7 +188,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/organizations/rules/tests/generators/memberships_test.gen.rego.py b/cvat/apps/organizations/rules/tests/generators/memberships_test.gen.rego.py index 7cf9cfca255e..b86548142da7 100644 --- a/cvat/apps/organizations/rules/tests/generators/memberships_test.gen.rego.py +++ b/cvat/apps/organizations/rules/tests/generators/memberships_test.gen.rego.py @@ -174,7 +174,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG ): @@ -192,7 +192,7 @@ def gen_test_rego(name): ) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/organizations/rules/tests/generators/organizations_test.gen.rego.py b/cvat/apps/organizations/rules/tests/generators/organizations_test.gen.rego.py index d4acedb42f69..a6c111bfef40 100644 --- a/cvat/apps/organizations/rules/tests/generators/organizations_test.gen.rego.py +++ b/cvat/apps/organizations/rules/tests/generators/organizations_test.gen.rego.py @@ -127,7 +127,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource): def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES ): @@ -139,7 +139,7 @@ def gen_test_rego(name): data = get_data(scope, context, ownership, privilege, membership, resource) result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n 
{allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/quality_control/rules/conflicts.rego b/cvat/apps/quality_control/rules/conflicts.rego index e0c94f0c86fd..f8e570b58826 100644 --- a/cvat/apps/quality_control/rules/conflicts.rego +++ b/cvat/apps/quality_control/rules/conflicts.rego @@ -1,7 +1,6 @@ package conflicts -import future.keywords.if -import future.keywords.in +import rego.v1 import data.utils import data.organizations @@ -44,24 +43,24 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization org := input.auth.organization @@ -71,7 +70,7 @@ filter := [] { # Django Q object to filter list of entries {"report__task__organization": org.id}, "|", {"report__task__project__organization": org.id}, "|", ] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ @@ -84,7 +83,7 @@ filter := [] { # Django Q object to filter list of entries {"report__task__project__owner_id": user.id}, "|", {"report__task__project__assignee_id": user.id}, "|", ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) @@ -95,7 +94,7 @@ filter := [] { # Django Q object to filter list of entries {"report__task__organization": org.id}, "|", {"report__task__project__organization": org.id}, "|", ] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user org := input.auth.organization diff --git a/cvat/apps/quality_control/rules/quality_reports.rego b/cvat/apps/quality_control/rules/quality_reports.rego index 025a869e472c..d7fff8ac7e74 100644 --- a/cvat/apps/quality_control/rules/quality_reports.rego +++ b/cvat/apps/quality_control/rules/quality_reports.rego @@ -1,7 +1,6 @@ package quality_reports -import future.keywords.if -import future.keywords.in +import rego.v1 import data.utils import data.organizations @@ -44,24 +43,24 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization org := input.auth.organization @@ -71,7 +70,7 @@ filter := [] { # Django Q object to filter list of entries {"task__organization": org.id}, "|", {"task__project__organization": org.id}, "|", ] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ @@ -84,7 +83,7 @@ filter := [] { # Django Q object to filter list of entries {"task__project__owner_id": user.id}, "|", {"task__project__assignee_id": user.id}, "|", ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) @@ -95,7 +94,7 @@ filter := [] { # Django Q object to 
filter list of entries {"task__organization": org.id}, "|", {"task__project__organization": org.id}, "|", ] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user org := input.auth.organization diff --git a/cvat/apps/quality_control/rules/quality_settings.rego b/cvat/apps/quality_control/rules/quality_settings.rego index 1ed7a6bded37..ec2d1c307a6b 100644 --- a/cvat/apps/quality_control/rules/quality_settings.rego +++ b/cvat/apps/quality_control/rules/quality_settings.rego @@ -1,7 +1,6 @@ package quality_settings -import future.keywords.if -import future.keywords.in +import rego.v1 import data.utils import data.organizations @@ -44,24 +43,24 @@ import data.organizations default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization org := input.auth.organization @@ -69,7 +68,7 @@ filter := [] { # Django Q object to filter list of entries {"task__organization": org.id}, {"task__project__organization": org.id}, "|", ] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ @@ -78,7 +77,7 @@ filter := [] { # Django Q object to filter list of entries {"task__project__owner_id": user.id}, "|", {"task__project__assignee_id": user.id}, "|", ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.USER) organizations.has_perm(organizations.MAINTAINER) @@ -87,7 +86,7 @@ filter := [] { # Django Q object to filter list of entries {"task__organization": org.id}, {"task__project__organization": org.id}, "|", ] -} else := qobject { +} else := qobject if { organizations.has_perm(organizations.WORKER) user := input.auth.user org := input.auth.organization diff --git a/cvat/apps/webhooks/rules/tests/generators/webhooks_test.gen.rego.py b/cvat/apps/webhooks/rules/tests/generators/webhooks_test.gen.rego.py index 764e6610f5a6..c367a42cc98b 100644 --- a/cvat/apps/webhooks/rules/tests/generators/webhooks_test.gen.rego.py +++ b/cvat/apps/webhooks/rules/tests/generators/webhooks_test.gen.rego.py @@ -193,7 +193,7 @@ def is_valid(scope, context, ownership, privilege, membership, resource, same_or def gen_test_rego(name): with open(f"{name}_test.gen.rego", "wt") as f: - f.write(f"package {name}\n\n") + f.write(f"package {name}\nimport rego.v1\n\n") for scope, context, ownership, privilege, membership, same_org in product( SCOPES, CONTEXTS, OWNERSHIPS, GROUPS, ORG_ROLES, SAME_ORG ): @@ -213,7 +213,7 @@ def gen_test_rego(name): result = eval_rule(scope, context, ownership, privilege, membership, data) f.write( - "{test_name} {{\n {allow} with input as {data}\n}}\n\n".format( + "{test_name} if {{\n {allow} with input as {data}\n}}\n\n".format( test_name=test_name, allow="allow" if result else "not allow", data=json.dumps(data), diff --git a/cvat/apps/webhooks/rules/webhooks.rego b/cvat/apps/webhooks/rules/webhooks.rego index 144e3ad14d1d..a74a88c6a965 100644 --- a/cvat/apps/webhooks/rules/webhooks.rego +++ b/cvat/apps/webhooks/rules/webhooks.rego @@ -1,4 +1,7 @@ package webhooks + +import rego.v1 + import data.utils import data.organizations @@ -31,54 +34,54 @@ import data.organizations # } # -is_project_owner { 
+is_project_owner if { input.resource.project.owner.id == input.auth.user.id } -is_webhook_owner { +is_webhook_owner if { input.resource.owner.id == input.auth.user.id } default allow := false -allow { +allow if { utils.is_admin } -allow { +allow if { input.scope == utils.CREATE_IN_PROJECT utils.is_sandbox utils.has_perm(utils.USER) is_project_owner } -allow { +allow if { input.scope == utils.LIST utils.is_sandbox } -allow { +allow if { input.scope == utils.LIST organizations.is_member } -filter := [] { # Django Q object to filter list of entries +filter := [] if { # Django Q object to filter list of entries utils.is_admin utils.is_sandbox -} else := qobject { +} else := qobject if { utils.is_admin utils.is_organization qobject := [ {"organization": input.auth.organization.id} ] -} else := qobject { +} else := qobject if { utils.is_sandbox user := input.auth.user qobject := [ {"owner_id": user.id}, {"project__owner_id": user.id}, "|" ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.WORKER) organizations.has_perm(organizations.MAINTAINER) qobject := [ {"organization": input.auth.organization.id} ] -} else := qobject { +} else := qobject if { utils.is_organization utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) @@ -88,48 +91,48 @@ filter := [] { # Django Q object to filter list of entries } -allow { +allow if { input.scope == utils.VIEW utils.is_sandbox utils.is_resource_owner } -allow { +allow if { input.scope == utils.VIEW utils.is_sandbox is_project_owner } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} utils.is_sandbox utils.has_perm(utils.WORKER) utils.is_resource_owner } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} utils.is_sandbox utils.has_perm(utils.WORKER) is_project_owner } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id organizations.has_perm(organizations.WORKER) utils.is_resource_owner } -allow { +allow if { input.scope == utils.VIEW input.auth.organization.id == input.resource.organization.id organizations.has_perm(organizations.WORKER) is_project_owner } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) @@ -137,30 +140,30 @@ allow { } -allow { - { utils.UPDATE, utils.DELETE, utils.VIEW }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE, utils.VIEW} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.CREATE_IN_PROJECT, utils.CREATE_IN_ORGANIZATION }[input.scope] +allow if { + input.scope in {utils.CREATE_IN_PROJECT, utils.CREATE_IN_ORGANIZATION} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.MAINTAINER) } -allow { - { utils.UPDATE, utils.DELETE }[input.scope] +allow if { + input.scope in {utils.UPDATE, utils.DELETE} input.auth.organization.id == input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) is_project_owner } -allow { - { utils.CREATE_IN_PROJECT }[input.scope] +allow if { + input.scope in {utils.CREATE_IN_PROJECT} input.auth.organization.id == 
input.resource.organization.id utils.has_perm(utils.WORKER) organizations.has_perm(organizations.WORKER) From f3247fa5a8bc2b44fc3f459b22fb99fad34f3f8f Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Wed, 1 May 2024 09:33:52 +0300 Subject: [PATCH 09/29] Optimized analytics requests to ClickHouse (#7804) --- .vscode/settings.json | 7 +- ..._boris_requests_clickhouse_optimization.md | 4 + cvat/apps/analytics_report/report/create.py | 314 ++++++++++-------- .../report/derived_metrics/__init__.py | 3 +- .../report/derived_metrics/base.py | 10 +- .../report/primary_metrics/__init__.py | 10 +- .../primary_metrics/annotation_speed.py | 42 ++- .../report/primary_metrics/annotation_time.py | 35 +- .../report/primary_metrics/base.py | 38 ++- .../report/primary_metrics/objects.py | 32 +- cvat/apps/analytics_report/views.py | 61 ++-- 11 files changed, 304 insertions(+), 252 deletions(-) create mode 100644 changelog.d/20240426_101506_boris_requests_clickhouse_optimization.md diff --git a/.vscode/settings.json b/.vscode/settings.json index a0caaf036765..75ca0eb512af 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -39,5 +39,10 @@ "python.testing.cwd": "${workspaceFolder}/tests", "cSpell.words": [ "crowdsourcing" - ] + ], + "isort.args":["--profile", "black"], + "[python]": { + "editor.defaultFormatter": "ms-python.black-formatter", + "editor.formatOnSave": true + } } diff --git a/changelog.d/20240426_101506_boris_requests_clickhouse_optimization.md b/changelog.d/20240426_101506_boris_requests_clickhouse_optimization.md new file mode 100644 index 000000000000..183d39b51fc4 --- /dev/null +++ b/changelog.d/20240426_101506_boris_requests_clickhouse_optimization.md @@ -0,0 +1,4 @@ +### Fixed + +- Analytics report calculation fails with timeout because of redundant number of requests to ClickHouse + () diff --git a/cvat/apps/analytics_report/report/create.py b/cvat/apps/analytics_report/report/create.py index 738cd9cfc069..85d27ce96f6f 100644 --- a/cvat/apps/analytics_report/report/create.py +++ b/cvat/apps/analytics_report/report/create.py @@ -14,6 +14,7 @@ from cvat.apps.analytics_report.models import AnalyticsReport from cvat.apps.analytics_report.report.derived_metrics import ( + DerivedMetricBase, JobTotalAnnotationSpeed, JobTotalObjectCount, ProjectAnnotationSpeed, @@ -29,8 +30,12 @@ ) from cvat.apps.analytics_report.report.primary_metrics import ( JobAnnotationSpeed, + JobAnnotationSpeedExtractor, JobAnnotationTime, + JobAnnotationTimeExtractor, JobObjects, + JobObjectsExtractor, + PrimaryMetricBase, ) from cvat.apps.engine.models import Job, Project, Task from cvat.utils.background_jobs import schedule_job_with_throttling @@ -41,8 +46,8 @@ def get_empty_report(): JobObjects(None), JobAnnotationSpeed(None), JobAnnotationTime(None), - JobTotalObjectCount(None, []), - JobTotalAnnotationSpeed(None, []), + JobTotalObjectCount(None), + JobTotalAnnotationSpeed(None), ] statistics = [AnalyticsReportUpdateManager._get_empty_statistics_entry(dm) for dm in metrics] @@ -85,9 +90,6 @@ def _get_last_report_time(cls, obj): except ObjectDoesNotExist: return None - class AnalyticsReportsNotAvailable(Exception): - pass - def schedule_analytics_report_autoupdate_job(self, *, job=None, task=None, project=None): assert sum(map(bool, (job, task, project))) == 1, "Expected only 1 argument" @@ -175,142 +177,137 @@ def _get_analytics_report(db_obj: Union[Job, Task, Project]) -> AnalyticsReport: def _check_analytics_report( cls, *, cvat_job_id: int = None, cvat_task_id: int = None, cvat_project_id: int = 
None ) -> bool: - if cvat_job_id is not None: - queryset = Job.objects.select_related("analytics_report") - with transaction.atomic(): - # The Job could have been deleted during scheduling - try: - db_job = queryset.get(pk=cvat_job_id) - except Job.DoesNotExist: - return False + try: + if cvat_job_id is not None: + queryset = Job.objects.select_related("analytics_report") + db_job = queryset.get(pk=cvat_job_id) db_report = cls._get_analytics_report(db_job) + primary_metric_extractors = dict( + ( + (JobObjects.key(), JobObjectsExtractor(cvat_job_id)), + (JobAnnotationSpeed.key(), JobAnnotationSpeedExtractor(cvat_job_id)), + (JobAnnotationTime.key(), JobAnnotationTimeExtractor(cvat_job_id)), + ) + ) + db_report = cls()._compute_report_for_job( + db_job, db_report, primary_metric_extractors + ) - db_report = cls()._compute_report_for_job(db_job=db_job, db_report=db_report) - - with transaction.atomic(): - # The job could have been deleted during processing - try: + with transaction.atomic(): actual_job = queryset.get(pk=db_job.id) - except Job.DoesNotExist: - return False - - actual_report = getattr(actual_job, "analytics_report", None) - actual_created_date = ( - getattr(actual_report, "created_date", None) - if actual_report is not None - else None + actual_report = getattr(actual_job, "analytics_report", None) + actual_created_date = getattr(actual_report, "created_date", None) + # The report has been updated during processing + if db_report.created_date != actual_created_date: + return False + db_report.save() + return True + elif cvat_task_id is not None: + queryset = Task.objects.select_related("analytics_report").prefetch_related( + "segment_set__job_set" ) - # The report has been updated during processing - if db_report.created_date != actual_created_date: - return False - - db_report.save() - return True - - elif cvat_task_id is not None: - queryset = Task.objects.select_related("analytics_report").prefetch_related( - "segment_set__job_set" - ) - with transaction.atomic(): - try: - db_task = queryset.get(pk=cvat_task_id) - except Task.DoesNotExist: - return False - - db_report = cls._get_analytics_report(db_task) - db_report, job_reports = cls()._compute_report_for_task( - db_task=db_task, db_report=db_report - ) - - with transaction.atomic(): - # The task could have been deleted during processing - try: - actual_task = queryset.get(pk=cvat_task_id) - except Task.DoesNotExist: - return False - - actual_report = getattr(actual_task, "analytics_report", None) - actual_created_date = ( - actual_report.created_date if actual_report is not None else None + db_task = queryset.get(pk=cvat_task_id) + + db_report = cls._get_analytics_report(db_task) + primary_metric_extractors = dict( + ( + (JobObjects.key(), JobObjectsExtractor(task_ids=[cvat_task_id])), + ( + JobAnnotationSpeed.key(), + JobAnnotationSpeedExtractor(task_ids=[cvat_task_id]), + ), + ( + JobAnnotationTime.key(), + JobAnnotationTimeExtractor(task_ids=[cvat_task_id]), + ), + ) + ) + db_report, job_reports = cls()._compute_report_for_task( + db_task, db_report, primary_metric_extractors ) - # The report has been updated during processing - if db_report.created_date != actual_created_date: - return False - - actual_job_report_created_dates = {} - for db_segment in db_task.segment_set.all(): - for db_job in db_segment.job_set.all(): - ar = getattr(db_job, "analytics_report", None) - acd = ar.created_date if ar is not None else None - actual_job_report_created_dates[db_job.id] = acd - - for jr in job_reports: - if jr.created_date != 
actual_job_report_created_dates[jr.job_id]: - return False - db_report.save() - for jr in job_reports: - jr.save() - return True - - elif cvat_project_id is not None: - queryset = Project.objects.select_related("analytics_report").prefetch_related( - "tasks__segment_set__job_set" - ) - with transaction.atomic(): - try: - db_project = queryset.get(pk=cvat_project_id) - except Project.DoesNotExist: - return False - - db_report = cls._get_analytics_report(db_project) - db_report, task_reports, job_reports = cls()._compute_report_for_project( - db_project=db_project, db_report=db_report - ) - - with transaction.atomic(): - # The Project could have been deleted during processing - try: - actual_project = queryset.get(pk=cvat_project_id) - except Project.DoesNotExist: - return False + with transaction.atomic(): + actual_task = queryset.get(pk=cvat_task_id) + actual_report = getattr(actual_task, "analytics_report", None) + actual_created_date = getattr(actual_report, "created_date", None) + # The report has been updated during processing + if db_report.created_date != actual_created_date: + return False - actual_report = getattr(actual_project, "analytics_report", None) - actual_created_date = ( - actual_report.created_date if actual_report is not None else None - ) - # The report has been updated during processing - if db_report.created_date != actual_created_date: - return False - - actual_job_report_created_dates = {} - actual_tasks_report_created_dates = {} - for db_task in db_project.tasks.all(): - task_ar = getattr(db_task, "analytics_report", None) - task_ar_created_date = task_ar.created_date if task_ar else None - actual_tasks_report_created_dates[db_task.id] = task_ar_created_date + actual_job_report_created_dates = {} for db_segment in db_task.segment_set.all(): for db_job in db_segment.job_set.all(): ar = getattr(db_job, "analytics_report", None) acd = ar.created_date if ar is not None else None actual_job_report_created_dates[db_job.id] = acd - for tr in task_reports: - if tr.created_date != actual_tasks_report_created_dates[tr.task_id]: - return False + for jr in job_reports: + if jr.created_date != actual_job_report_created_dates[jr.job_id]: + return False - for jr in job_reports: - if jr.created_date != actual_job_report_created_dates[jr.job_id]: - return False + db_report.save() + for jr in job_reports: + jr.save() + return True + + elif cvat_project_id is not None: + queryset = Project.objects.select_related("analytics_report").prefetch_related( + "tasks__segment_set__job_set" + ) - db_report.save() - for tr in task_reports: - tr.save() + db_project = queryset.get(pk=cvat_project_id) + db_report = cls._get_analytics_report(db_project) + task_ids = [item["id"] for item in db_project.tasks.values("id")] + primary_metric_extractors = dict( + ( + (JobObjects.key(), JobObjectsExtractor(task_ids=task_ids)), + (JobAnnotationSpeed.key(), JobAnnotationSpeedExtractor(task_ids=task_ids)), + (JobAnnotationTime.key(), JobAnnotationTimeExtractor(task_ids=task_ids)), + ) + ) + db_report, task_reports, job_reports = cls()._compute_report_for_project( + db_project, db_report, primary_metric_extractors + ) - for jr in job_reports: - jr.save() - return True + with transaction.atomic(): + actual_project = queryset.get(pk=cvat_project_id) + actual_report = getattr(actual_project, "analytics_report", None) + actual_created_date = getattr(actual_report, "created_date", None) + # The report has been updated during processing + if db_report.created_date != actual_created_date: + return False + + 
actual_job_report_created_dates = {} + actual_tasks_report_created_dates = {} + for db_task in db_project.tasks.all(): + task_ar = getattr(db_task, "analytics_report", None) + task_ar_created_date = task_ar.created_date if task_ar else None + actual_tasks_report_created_dates[db_task.id] = task_ar_created_date + for db_segment in db_task.segment_set.all(): + for db_job in db_segment.job_set.all(): + ar = getattr(db_job, "analytics_report", None) + acd = ar.created_date if ar is not None else None + actual_job_report_created_dates[db_job.id] = acd + + for tr in task_reports: + if tr.created_date != actual_tasks_report_created_dates[tr.task_id]: + return False + + for jr in job_reports: + if jr.created_date != actual_job_report_created_dates[jr.job_id]: + return False + + db_report.save() + for tr in task_reports: + tr.save() + + for jr in job_reports: + jr.save() + return True + except ObjectDoesNotExist: + # The resource may have been deleted while rq job was queued + return False @staticmethod def _get_statistics_entry_props(statistics_object): @@ -325,14 +322,14 @@ def _get_statistics_entry_props(statistics_object): } @staticmethod - def _get_statistics_entry(statistics_object): + def _get_statistics_entry(statistics_object: PrimaryMetricBase | DerivedMetricBase): return { **AnalyticsReportUpdateManager._get_statistics_entry_props(statistics_object), **{"data_series": statistics_object.calculate()}, } @staticmethod - def _get_empty_statistics_entry(statistics_object): + def _get_empty_statistics_entry(statistics_object: PrimaryMetricBase | DerivedMetricBase): return { **AnalyticsReportUpdateManager._get_statistics_entry_props(statistics_object), **{"data_series": statistics_object.get_empty()}, @@ -342,13 +339,22 @@ def _get_empty_statistics_entry(statistics_object): def _get_metric_by_key(key, statistics): return next(filter(lambda s: s["name"] == key, statistics)) - def _compute_report_for_job(self, db_job: Job, db_report: AnalyticsReport) -> AnalyticsReport: + def _compute_report_for_job( + self, + db_job: Job, + db_report: AnalyticsReport, + data_extractors: dict, + ) -> AnalyticsReport: # recalculate the report if there is no report or the existing one is outdated if db_report.created_date is None or db_report.created_date < db_job.updated_date: primary_metrics = [ - JobObjects(db_job), - JobAnnotationSpeed(db_job), - JobAnnotationTime(db_job), + JobObjects(db_job, data_extractor=data_extractors.get(JobObjects.key())), + JobAnnotationSpeed( + db_job, data_extractor=data_extractors.get(JobAnnotationSpeed.key()) + ), + JobAnnotationTime( + db_job, data_extractor=data_extractors.get(JobAnnotationTime.key()) + ), ] primary_statistics = { @@ -357,10 +363,14 @@ def _compute_report_for_job(self, db_job: Job, db_report: AnalyticsReport) -> An derived_metrics = [ JobTotalObjectCount( - db_job, primary_statistics=primary_statistics[JobAnnotationSpeed.key()] + db_job, + data_extractor=None, + primary_statistics=primary_statistics[JobAnnotationSpeed.key()], ), JobTotalAnnotationSpeed( - db_job, primary_statistics=primary_statistics[JobAnnotationSpeed.key()] + db_job, + data_extractor=None, + primary_statistics=primary_statistics[JobAnnotationSpeed.key()], ), ] @@ -377,48 +387,54 @@ def _compute_report_for_task( self, db_task: Task, db_report: AnalyticsReport, + data_extractors: dict, ) -> tuple[AnalyticsReport, list[AnalyticsReport]]: job_reports = [] for db_segment in db_task.segment_set.all(): for db_job in db_segment.job_set.all(): job_report = self._get_analytics_report(db_job) 
job_reports.append( - self._compute_report_for_job(db_job=db_job, db_report=job_report) + self._compute_report_for_job(db_job, job_report, data_extractors) ) # recalculate the report if there is no report or the existing one is outdated if db_report.created_date is None or db_report.created_date < db_task.updated_date: derived_metrics = [ TaskObjects( db_task, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobObjects.key(), jr.statistics) for jr in job_reports ], ), TaskAnnotationSpeed( db_task, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobAnnotationSpeed.key(), jr.statistics) for jr in job_reports ], ), TaskAnnotationTime( db_task, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobAnnotationTime.key(), jr.statistics) for jr in job_reports ], ), TaskTotalObjectCount( db_task, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobAnnotationSpeed.key(), jr.statistics) for jr in job_reports ], ), TaskTotalAnnotationSpeed( db_task, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobAnnotationSpeed.key(), jr.statistics) for jr in job_reports ], @@ -431,13 +447,16 @@ def _compute_report_for_task( return db_report, job_reports def _compute_report_for_project( - self, db_project: Project, db_report: AnalyticsReport + self, + db_project: Project, + db_report: AnalyticsReport, + data_extractors: dict, ) -> tuple[AnalyticsReport, list[AnalyticsReport], list[AnalyticsReport]]: job_reports = [] task_reports = [] for db_task in db_project.tasks.all(): db_task_report = self._get_analytics_report(db_task) - tr, jrs = self._compute_report_for_task(db_task, db_task_report) + tr, jrs = self._compute_report_for_task(db_task, db_task_report, data_extractors) task_reports.append(tr) job_reports.extend(jrs) # recalculate the report if there is no report or the existing one is outdated @@ -445,35 +464,40 @@ def _compute_report_for_project( derived_metrics = [ ProjectObjects( db_project, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobObjects.key(), jr.statistics) for jr in job_reports ], ), ProjectAnnotationSpeed( db_project, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobAnnotationSpeed.key(), jr.statistics) for jr in job_reports ], ), ProjectAnnotationTime( db_project, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobAnnotationTime.key(), jr.statistics) for jr in job_reports ], ), ProjectTotalObjectCount( db_project, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobAnnotationSpeed.key(), jr.statistics) for jr in job_reports ], ), ProjectTotalAnnotationSpeed( db_project, - [ + data_extractor=None, + primary_statistics=[ self._get_metric_by_key(JobAnnotationSpeed.key(), jr.statistics) for jr in job_reports ], diff --git a/cvat/apps/analytics_report/report/derived_metrics/__init__.py b/cvat/apps/analytics_report/report/derived_metrics/__init__.py index 4c2875a4dbae..f502235d1a0e 100644 --- a/cvat/apps/analytics_report/report/derived_metrics/__init__.py +++ b/cvat/apps/analytics_report/report/derived_metrics/__init__.py @@ -1,9 +1,10 @@ -# Copyright (C) 2023 CVAT.ai Corporation +# Copyright (C) 2023-2024 CVAT.ai Corporation # # SPDX-License-Identifier: MIT from .annotation_speed import ProjectAnnotationSpeed, TaskAnnotationSpeed from .annotation_time import ProjectAnnotationTime, TaskAnnotationTime +from .base import DerivedMetricBase from .objects import 
ProjectObjects, TaskObjects from .total_annotation_speed import ( JobTotalAnnotationSpeed, diff --git a/cvat/apps/analytics_report/report/derived_metrics/base.py b/cvat/apps/analytics_report/report/derived_metrics/base.py index 6d6d3d8e38c7..a5dc31292517 100644 --- a/cvat/apps/analytics_report/report/derived_metrics/base.py +++ b/cvat/apps/analytics_report/report/derived_metrics/base.py @@ -1,12 +1,12 @@ -# Copyright (C) 2023 CVAT.ai Corporation +# Copyright (C) 2023-2024 CVAT.ai Corporation # # SPDX-License-Identifier: MIT -from cvat.apps.analytics_report.report.primary_metrics import PrimaryMetricBase +from cvat.apps.analytics_report.report.primary_metrics import DataExtractorBase, PrimaryMetricBase class DerivedMetricBase(PrimaryMetricBase): - def __init__(self, db_obj, primary_statistics): - super().__init__(db_obj) + def __init__(self, db_obj, data_extractor: DataExtractorBase = None, primary_statistics=None): + super().__init__(db_obj, data_extractor) - self._primary_statistics = primary_statistics + self._primary_statistics = primary_statistics or [] diff --git a/cvat/apps/analytics_report/report/primary_metrics/__init__.py b/cvat/apps/analytics_report/report/primary_metrics/__init__.py index 36c9c6be52e4..a0c2e98efad9 100644 --- a/cvat/apps/analytics_report/report/primary_metrics/__init__.py +++ b/cvat/apps/analytics_report/report/primary_metrics/__init__.py @@ -1,8 +1,8 @@ -# Copyright (C) 2023 CVAT.ai Corporation +# Copyright (C) 2023-2024 CVAT.ai Corporation # # SPDX-License-Identifier: MIT -from .annotation_speed import JobAnnotationSpeed -from .annotation_time import JobAnnotationTime -from .base import PrimaryMetricBase -from .objects import JobObjects +from .annotation_speed import JobAnnotationSpeed, JobAnnotationSpeedExtractor +from .annotation_time import JobAnnotationTime, JobAnnotationTimeExtractor +from .base import DataExtractorBase, PrimaryMetricBase +from .objects import JobObjects, JobObjectsExtractor diff --git a/cvat/apps/analytics_report/report/primary_metrics/annotation_speed.py b/cvat/apps/analytics_report/report/primary_metrics/annotation_speed.py index 98a273451bde..148d7bcabcaa 100644 --- a/cvat/apps/analytics_report/report/primary_metrics/annotation_speed.py +++ b/cvat/apps/analytics_report/report/primary_metrics/annotation_speed.py @@ -1,4 +1,4 @@ -# Copyright (C) 2023 CVAT.ai Corporation +# Copyright (C) 2023-2024 CVAT.ai Corporation # # SPDX-License-Identifier: MIT @@ -11,17 +11,28 @@ TransformOperationType, ViewChoice, ) -from cvat.apps.analytics_report.report.primary_metrics.base import PrimaryMetricBase +from cvat.apps.analytics_report.report.primary_metrics.base import ( + DataExtractorBase, + PrimaryMetricBase, +) from cvat.apps.engine.models import SourceType +class JobAnnotationSpeedExtractor(DataExtractorBase): + def __init__(self, job_id: int = None, task_ids: list[int] = None): + super().__init__(job_id, task_ids) + + if task_ids is not None: + self._query = "SELECT job_id, sum(JSONExtractUInt(payload, 'working_time')) as wt FROM events WHERE task_id IN ({task_ids:Array(UInt64)}) AND timestamp >= {start_datetime:DateTime64} AND timestamp < {end_datetime:DateTime64} GROUP BY job_id" + elif job_id is not None: + self._query = "SELECT job_id, sum(JSONExtractUInt(payload, 'working_time')) as wt FROM events WHERE job_id={job_id:UInt64} AND timestamp >= {start_datetime:DateTime64} AND timestamp < {end_datetime:DateTime64} GROUP BY job_id" + + class JobAnnotationSpeed(PrimaryMetricBase): + _key = "annotation_speed" _title = "Annotation speed (objects 
per hour)" _description = "Metric shows the annotation speed in objects per hour." _default_view = ViewChoice.HISTOGRAM - _key = "annotation_speed" - # Raw SQL queries are used to execute ClickHouse queries, as there is no ORM available here - _query = "SELECT sum(JSONExtractUInt(payload, 'working_time')) / 1000 / 3600 as wt FROM events WHERE job_id={job_id:UInt64} AND timestamp >= {start_datetime:DateTime64} AND timestamp < {end_datetime:DateTime64}" _granularity = GranularityChoice.DAY _is_filterable_by_date = False _transformations = [ @@ -64,7 +75,6 @@ def get_default(): } # Calculate object count - annotations = dm.task.get_job_data(self._db_obj.id) object_count = 0 object_count += get_tags_count(annotations) @@ -109,17 +119,17 @@ def get_default(): ) # Calculate working time + rows = list( + self._data_extractor.extract_for_job( + self._db_obj.id, + { + "start_datetime": start_datetime, + "end_datetime": self._get_utc_now(), + }, + ) + ) - parameters = { - "job_id": self._db_obj.id, - "start_datetime": start_datetime, - "end_datetime": self._get_utc_now(), - } - - result = self._make_clickhouse_query(parameters) - value = 0 - if (wt := next(iter(result.result_rows))[0]) is not None: - value = wt + value = (rows[0][0] if len(rows) else 0) / (1000 * 3600) data_series["working_time"].append( { "value": value, diff --git a/cvat/apps/analytics_report/report/primary_metrics/annotation_time.py b/cvat/apps/analytics_report/report/primary_metrics/annotation_time.py index 919766e0ffaf..95d32100a563 100644 --- a/cvat/apps/analytics_report/report/primary_metrics/annotation_time.py +++ b/cvat/apps/analytics_report/report/primary_metrics/annotation_time.py @@ -1,37 +1,42 @@ -# Copyright (C) 2023 CVAT.ai Corporation +# Copyright (C) 2023-2024 CVAT.ai Corporation # # SPDX-License-Identifier: MIT from cvat.apps.analytics_report.models import ViewChoice -from cvat.apps.analytics_report.report.primary_metrics.base import PrimaryMetricBase +from cvat.apps.analytics_report.report.primary_metrics.base import ( + DataExtractorBase, + PrimaryMetricBase, +) + + +class JobAnnotationTimeExtractor(DataExtractorBase): + def __init__(self, job_id: int = None, task_ids: list[int] = None): + super().__init__(job_id, task_ids) + + if task_ids is not None: + self._query = "SELECT job_id, timestamp, obj_val FROM events WHERE scope='update:job' AND task_id IN ({task_ids:Array(UInt64)}) AND obj_name='state' ORDER BY timestamp ASC" + elif job_id is not None: + self._query = "SELECT job_id, timestamp, obj_val FROM events WHERE scope='update:job' AND job_id={job_id:UInt64} AND obj_name='state' ORDER BY timestamp ASC" class JobAnnotationTime(PrimaryMetricBase): + _key = "annotation_time" _title = "Annotation time (hours)" _description = "Metric shows how long the Job is in progress state." 
_default_view = ViewChoice.NUMERIC - _key = "annotation_time" - # Raw SQL queries are used to execute ClickHouse queries, as there is no ORM available here - _query = "SELECT timestamp, obj_val FROM cvat.events WHERE scope='update:job' AND job_id={job_id:UInt64} AND obj_name='state' ORDER BY timestamp ASC" _is_filterable_by_date = False def calculate(self): - results = self._make_clickhouse_query( - { - "job_id": self._db_obj.id, - } - ) + rows = list(self._data_extractor.extract_for_job(self._db_obj.id)) total_annotating_time = 0 last_change = None - for prev_row, cur_row in zip(results.result_rows, results.result_rows[1:]): + for prev_row, cur_row in zip(rows, rows[1:]): if prev_row[1] == "in progress": total_annotating_time += int((cur_row[0] - prev_row[0]).total_seconds()) last_change = cur_row[0] - if results.result_rows and results.result_rows[-1][1] == "in progress": - total_annotating_time += int( - (self._db_obj.updated_date - results.result_rows[-1][0]).total_seconds() - ) + if rows and rows[-1][1] == "in progress": + total_annotating_time += int((self._db_obj.updated_date - rows[-1][0]).total_seconds()) if not last_change: last_change = self._get_utc_now() diff --git a/cvat/apps/analytics_report/report/primary_metrics/base.py b/cvat/apps/analytics_report/report/primary_metrics/base.py index 2de8bd606ba2..35004632d8e9 100644 --- a/cvat/apps/analytics_report/report/primary_metrics/base.py +++ b/cvat/apps/analytics_report/report/primary_metrics/base.py @@ -1,26 +1,51 @@ -# Copyright (C) 2023 CVAT.ai Corporation +# Copyright (C) 2023-2024 CVAT.ai Corporation # # SPDX-License-Identifier: MIT from abc import ABCMeta, abstractmethod +from collections import ChainMap from datetime import datetime, timezone from cvat.apps.analytics_report.report.primary_metrics.utils import make_clickhouse_query +class DataExtractorBase: + def __init__(self, job_id: int = None, task_ids: list[int] = None): + # Raw SQL queries are used to execute ClickHouse queries, as there is no ORM available here + self._query = None + self._parameters = {} + self._rows = [] + self._initialized = False + + if task_ids is not None: + self._parameters["task_ids"] = task_ids + elif job_id is not None: + self._parameters["job_id"] = job_id + + def _make_clickhouse_query(self, parameters): + return make_clickhouse_query(query=self._query, parameters=parameters) + + def extract_for_job(self, job_id: int, extras: dict = None): + if not self._initialized: + self._rows = self._make_clickhouse_query( + ChainMap(self._parameters, extras or {}) + ).result_rows + self._initialized = True + return map(lambda x: x[1:], filter(lambda x: x[0] == job_id, self._rows)) + + class PrimaryMetricBase(metaclass=ABCMeta): + _key = None _title = None _description = None - # Raw SQL queries are used to execute ClickHouse queries, as there is no ORM available here - _query = None _granularity = None _default_view = None - _key = None _transformations = [] _is_filterable_by_date = True - def __init__(self, db_obj): + def __init__(self, db_obj, data_extractor: DataExtractorBase = None): self._db_obj = db_obj + self._data_extractor = data_extractor @classmethod def description(cls): @@ -56,9 +81,6 @@ def calculate(self): ... @abstractmethod def get_empty(self): ... 
- def _make_clickhouse_query(self, parameters): - return make_clickhouse_query(query=self._query, parameters=parameters) - @staticmethod def _get_utc_now(): return datetime.now(timezone.utc) diff --git a/cvat/apps/analytics_report/report/primary_metrics/objects.py b/cvat/apps/analytics_report/report/primary_metrics/objects.py index 80c59b6457bb..be5bfca35334 100644 --- a/cvat/apps/analytics_report/report/primary_metrics/objects.py +++ b/cvat/apps/analytics_report/report/primary_metrics/objects.py @@ -1,18 +1,29 @@ -# Copyright (C) 2023 CVAT.ai Corporation +# Copyright (C) 2023-2024 CVAT.ai Corporation # # SPDX-License-Identifier: MIT from cvat.apps.analytics_report.models import GranularityChoice, ViewChoice -from cvat.apps.analytics_report.report.primary_metrics.base import PrimaryMetricBase +from cvat.apps.analytics_report.report.primary_metrics.base import ( + DataExtractorBase, + PrimaryMetricBase, +) + + +class JobObjectsExtractor(DataExtractorBase): + def __init__(self, job_id: int = None, task_ids: list[int] = None): + super().__init__(job_id, task_ids) + + if task_ids is not None: + self._query = "SELECT job_id, toStartOfDay(timestamp) as day, scope, sum(count) FROM events WHERE scope IN ({scopes:Array(String)}) AND task_id IN ({task_ids:Array(UInt64)}) GROUP BY scope, day, job_id ORDER BY day ASC" + elif job_id is not None: + self._query = "SELECT job_id, toStartOfDay(timestamp) as day, scope, sum(count) FROM events WHERE scope IN ({scopes:Array(String)}) AND job_id = {job_id:UInt64} GROUP BY scope, day, job_id ORDER BY day ASC" class JobObjects(PrimaryMetricBase): + _key = "objects" _title = "Objects" _description = "Metric shows number of added/changed/deleted objects for the Job." _default_view = ViewChoice.HISTOGRAM - _key = "objects" - # Raw SQL queries are used to execute ClickHouse queries, as there is no ORM available here - _query = "SELECT toStartOfDay(timestamp) as day, scope, sum(count) FROM events WHERE scope IN ({scopes:Array(String)}) AND job_id = {job_id:UInt64} GROUP BY scope, day ORDER BY day ASC" _granularity = GranularityChoice.DAY def calculate(self): @@ -25,17 +36,10 @@ def calculate(self): for obj_type in obj_types: statistics[action][obj_type] = {} - result = self._make_clickhouse_query( - { - "scopes": scopes, - "job_id": self._db_obj.id, - } - ) - - for day, scope, count in result.result_rows: + rows = self._data_extractor.extract_for_job(self._db_obj.id, {"scopes": scopes}) + for day, scope, count in rows: action, obj_type = scope.split(":") statistics[action][obj_type][day] = count - objects_statistics = self.get_empty() dates = set() diff --git a/cvat/apps/analytics_report/views.py b/cvat/apps/analytics_report/views.py index 4723a705cf27..5a59f5a99ba5 100644 --- a/cvat/apps/analytics_report/views.py +++ b/cvat/apps/analytics_report/views.py @@ -1,9 +1,10 @@ -# Copyright (C) 2023 CVAT.ai Corporation +# Copyright (C) 2023-2024 CVAT.ai Corporation # # SPDX-License-Identifier: MIT import textwrap +from django.core.exceptions import ObjectDoesNotExist from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema from rest_framework import status, viewsets @@ -76,47 +77,23 @@ def create(self, request, *args, **kwargs): task_id = input_serializer.validated_data.get("task_id") project_id = input_serializer.validated_data.get("project_id") - if job_id is not None: - try: - job = Job.objects.get(pk=int(job_id)) - except Job.DoesNotExist as ex: - raise NotFound(f"Job {job_id} does not exist") 
from ex - - try: - rq_id = AnalyticsReportUpdateManager().schedule_analytics_check_job( - job=job, user_id=request.user.id - ) - serializer = RqIdSerializer({"rq_id": rq_id}) - return Response(serializer.data, status=status.HTTP_202_ACCEPTED) - except AnalyticsReportUpdateManager.AnalyticsReportsNotAvailable as ex: - raise ValidationError(str(ex)) - elif task_id is not None: - try: - task = Task.objects.get(pk=int(task_id)) - except Task.DoesNotExist as ex: - raise NotFound(f"Task {task_id} does not exist") from ex - - try: - rq_id = AnalyticsReportUpdateManager().schedule_analytics_check_job( - task=task, user_id=request.user.id - ) - serializer = RqIdSerializer({"rq_id": rq_id}) - return Response(serializer.data, status=status.HTTP_202_ACCEPTED) - except AnalyticsReportUpdateManager.AnalyticsReportsNotAvailable as ex: - raise ValidationError(str(ex)) - elif project_id is not None: - try: - project = Project.objects.get(pk=int(project_id)) - except Project.DoesNotExist as ex: - raise NotFound(f"Project {project_id} does not exist") from ex - try: - rq_id = AnalyticsReportUpdateManager().schedule_analytics_check_job( - project=project, user_id=request.user.id - ) - serializer = RqIdSerializer({"rq_id": rq_id}) - return Response(serializer.data, status=status.HTTP_202_ACCEPTED) - except AnalyticsReportUpdateManager.AnalyticsReportsNotAvailable as ex: - raise ValidationError(str(ex)) + try: + params = {"user_id": request.user.id} + rq_id = None + if job_id is not None: + params["job"] = Job.objects.get(pk=int(job_id)) + elif task_id is not None: + params["task"] = Task.objects.get(pk=int(task_id)) + elif project_id is not None: + params["project"] = Project.objects.get(pk=int(project_id)) + + rq_id = AnalyticsReportUpdateManager().schedule_analytics_check_job(**params) + serializer = RqIdSerializer({"rq_id": rq_id}) + return Response(serializer.data, status=status.HTTP_202_ACCEPTED) + except ObjectDoesNotExist as ex: + raise NotFound( + "The specified resource does not exist. Please check the provided identifiers" + ) from ex else: serializer = RqIdSerializer(data={"rq_id": rq_id}) serializer.is_valid(raise_exception=True) From 57085e8850f8c23166f587e42606a84034b6ebd3 Mon Sep 17 00:00:00 2001 From: zaha Date: Thu, 2 May 2024 12:55:25 +0300 Subject: [PATCH 10/29] Update the Nuclio version (#7787) The old version of Nuclio has some vulnerabilities and needs to be updated. Function dependencies have also been updated. The `mask_rcnn` function has been removed because it uses Python 3.6. In the new version of Nuclio, Python 3.6 is no longer supported; Nuclio officially recommends using Python 3.9. Running `mask_rcnn` on Python 3.9 causes errors within the function and package conflicts.
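Among the dependency-driven edits below, `Image.ANTIALIAS` is replaced with `Image.LANCZOS` in the faster_rcnn model loader: Pillow 10 removed the `ANTIALIAS` constant, and `LANCZOS` is the equivalent resampling filter. A minimal sketch of the updated resize call follows (illustrative only; the image path is a placeholder, not part of the patch):

    from PIL import Image

    # Pillow 10 dropped Image.ANTIALIAS; Image.LANCZOS is the equivalent
    # high-quality resampling filter, so the call keeps the same behaviour.
    image = Image.open("frame.png")  # placeholder input
    if image.width > 1920 or image.height > 1080:
        image = image.resize((image.width // 2, image.height // 2), Image.LANCZOS)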
--- ...125117_zahadhamov_update_nuclio_version.md | 10 ++ .../serverless/docker-compose.serverless.yml | 2 +- helm-chart/Chart.yaml | 2 +- .../siammask/nuclio/function-gpu.yaml | 10 +- .../foolwood/siammask/nuclio/function.yaml | 10 +- .../saic-vul/fbrs/nuclio/function.yaml | 3 +- .../nuclio/function-gpu.yaml | 4 +- .../nuclio/function.yaml | 4 +- .../nuclio/model_loader.py | 2 +- .../mask_rcnn/nuclio/function-gpu.yaml | 135 ------------------ .../matterport/mask_rcnn/nuclio/function.yaml | 132 ----------------- .../matterport/mask_rcnn/nuclio/main.py | 33 ----- .../mask_rcnn/nuclio/model_loader.py | 87 ----------- .../manual/advanced/serverless-tutorial.md | 28 ++-- 14 files changed, 43 insertions(+), 419 deletions(-) create mode 100644 changelog.d/20240429_125117_zahadhamov_update_nuclio_version.md delete mode 100644 serverless/tensorflow/matterport/mask_rcnn/nuclio/function-gpu.yaml delete mode 100644 serverless/tensorflow/matterport/mask_rcnn/nuclio/function.yaml delete mode 100644 serverless/tensorflow/matterport/mask_rcnn/nuclio/main.py delete mode 100644 serverless/tensorflow/matterport/mask_rcnn/nuclio/model_loader.py diff --git a/changelog.d/20240429_125117_zahadhamov_update_nuclio_version.md b/changelog.d/20240429_125117_zahadhamov_update_nuclio_version.md new file mode 100644 index 000000000000..052192aea85e --- /dev/null +++ b/changelog.d/20240429_125117_zahadhamov_update_nuclio_version.md @@ -0,0 +1,10 @@ +### Changed + +- Update the Nuclio version and related packages/libraries + () + +### Removed + +- The `mask_rcnn` function has been removed because it was using python3.6. + In new version of Nuclio python3.6 is no longer supported. Nuclio officially recommends using python3.9. + Running `mask_rcnn` on python3.9 causes errors within the function and package conflicts. 
() diff --git a/components/serverless/docker-compose.serverless.yml b/components/serverless/docker-compose.serverless.yml index 778dc2759c11..6450ac222423 100644 --- a/components/serverless/docker-compose.serverless.yml +++ b/components/serverless/docker-compose.serverless.yml @@ -1,7 +1,7 @@ services: nuclio: container_name: nuclio - image: quay.io/nuclio/dashboard:1.11.24-amd64 + image: quay.io/nuclio/dashboard:1.13.0-amd64 restart: always networks: - cvat diff --git a/helm-chart/Chart.yaml b/helm-chart/Chart.yaml index e3ff3a20d81e..93e98a2116f1 100644 --- a/helm-chart/Chart.yaml +++ b/helm-chart/Chart.yaml @@ -32,7 +32,7 @@ dependencies: condition: postgresql.enabled - name: nuclio - version: 0.12.1 + version: 0.19.0 repository: https://nuclio.github.io/nuclio/charts condition: nuclio.enabled diff --git a/serverless/pytorch/foolwood/siammask/nuclio/function-gpu.yaml b/serverless/pytorch/foolwood/siammask/nuclio/function-gpu.yaml index 23fb2eea9494..3ee8b43640bb 100644 --- a/serverless/pytorch/foolwood/siammask/nuclio/function-gpu.yaml +++ b/serverless/pytorch/foolwood/siammask/nuclio/function-gpu.yaml @@ -9,7 +9,7 @@ metadata: spec: description: Fast Online Object Tracking and Segmentation - runtime: 'python:3.6' + runtime: 'python:3.9' handler: main:handler eventTimeout: 30s env: @@ -27,7 +27,7 @@ spec: - kind: ARG value: PATH="/root/miniconda3/bin:${PATH}" - kind: RUN - value: apt update && apt install -y --no-install-recommends wget git ca-certificates libglib2.0-0 libsm6 libxrender1 libxext6 && rm -rf /var/lib/apt/lists/* + value: apt update && apt install -y --no-install-recommends wget git ca-certificates libgl1 libglib2.0-0 libsm6 libxrender1 libxext6 && rm -rf /var/lib/apt/lists/* - kind: RUN value: wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh && chmod +x Miniconda3-latest-Linux-x86_64.sh && bash Miniconda3-latest-Linux-x86_64.sh -b && @@ -35,13 +35,13 @@ spec: - kind: WORKDIR value: /opt/nuclio - kind: RUN - value: conda create -y -n siammask python=3.7 + value: conda create -y -n siammask python=3.9 - kind: SHELL value: '["conda", "run", "-n", "siammask", "/bin/bash", "-c"]' - kind: RUN value: git clone https://github.com/foolwood/SiamMask.git - kind: RUN - value: pip install Cython==0.29.4 colorama==0.3.9 numpy==1.15.4 requests==2.21.0 fire==0.1.3 matplotlib==2.2.3 numba==0.39.0 scipy==1.1.0 h5py==2.8.0 pandas==0.23.4 tqdm==4.29.1 tensorboardX==1.6 opencv_python==3.4.3.18 jsonpickle + value: pip install Cython colorama 'numpy<1.20' requests fire matplotlib numba scipy h5py pandas tqdm tensorboardX opencv_python jsonpickle - kind: RUN value: pip install torch==1.9.0+cu111 torchvision==0.10.0+cu111 torchaudio==0.9.0 -f https://download.pytorch.org/whl/torch_stable.html - kind: RUN @@ -51,7 +51,7 @@ spec: - kind: RUN value: wget -P SiamMask/experiments/siammask_sharp http://www.robots.ox.ac.uk/~qwang/SiamMask_DAVIS.pth - kind: ENTRYPOINT - value: '["conda", "run", "-n", "siammask"]' + value: '["conda", "run", "--no-capture-output", "-n", "siammask"]' triggers: myHttpTrigger: diff --git a/serverless/pytorch/foolwood/siammask/nuclio/function.yaml b/serverless/pytorch/foolwood/siammask/nuclio/function.yaml index 57bc1f63cca0..d36e00d3e969 100644 --- a/serverless/pytorch/foolwood/siammask/nuclio/function.yaml +++ b/serverless/pytorch/foolwood/siammask/nuclio/function.yaml @@ -9,7 +9,7 @@ metadata: spec: description: Fast Online Object Tracking and Segmentation - runtime: 'python:3.6' + runtime: 'python:3.9' handler: main:handler eventTimeout: 30s env: 
@@ -27,7 +27,7 @@ spec: - kind: ARG value: PATH="/root/miniconda3/bin:${PATH}" - kind: RUN - value: apt update && apt install -y --no-install-recommends wget git ca-certificates libglib2.0-0 libsm6 libxrender1 libxext6 && rm -rf /var/lib/apt/lists/* + value: apt update && apt install -y --no-install-recommends wget git ca-certificates libgl1 libglib2.0-0 libsm6 libxrender1 libxext6 && rm -rf /var/lib/apt/lists/* - kind: RUN value: wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh && chmod +x Miniconda3-latest-Linux-x86_64.sh && bash Miniconda3-latest-Linux-x86_64.sh -b && @@ -35,13 +35,13 @@ spec: - kind: WORKDIR value: /opt/nuclio - kind: RUN - value: conda create -y -n siammask python=3.6 + value: conda create -y -n siammask python=3.9 - kind: SHELL value: '["conda", "run", "-n", "siammask", "/bin/bash", "-c"]' - kind: RUN value: git clone https://github.com/foolwood/SiamMask.git - kind: RUN - value: pip install Cython==0.29.4 colorama==0.3.9 numpy==1.15.4 requests==2.21.0 fire==0.1.3 matplotlib==2.2.3 numba==0.39.0 scipy==1.1.0 h5py==2.8.0 pandas==0.23.4 tqdm==4.29.1 tensorboardX==1.6 opencv_python==3.4.3.18 torch==1.9.0 torchvision==0.2.1 jsonpickle + value: pip install Cython colorama 'numpy<1.20' requests fire matplotlib numba scipy h5py pandas tqdm tensorboardX opencv_python torch torchvision jsonpickle - kind: RUN value: conda install -y gcc_linux-64 - kind: RUN @@ -49,7 +49,7 @@ spec: - kind: RUN value: wget -P SiamMask/experiments/siammask_sharp http://www.robots.ox.ac.uk/~qwang/SiamMask_DAVIS.pth - kind: ENTRYPOINT - value: '["conda", "run", "-n", "siammask"]' + value: '["conda", "run", "--no-capture-output", "-n", "siammask"]' triggers: myHttpTrigger: diff --git a/serverless/pytorch/saic-vul/fbrs/nuclio/function.yaml b/serverless/pytorch/saic-vul/fbrs/nuclio/function.yaml index 5b9bd7433a82..9e8bfb6f4d6a 100644 --- a/serverless/pytorch/saic-vul/fbrs/nuclio/function.yaml +++ b/serverless/pytorch/saic-vul/fbrs/nuclio/function.yaml @@ -14,7 +14,7 @@ metadata: spec: description: f-BRS interactive segmentation - runtime: 'python:3.6' + runtime: 'python:3.9' handler: main:handler eventTimeout: 30s env: @@ -23,7 +23,6 @@ spec: build: image: cvat.pth.saic-vul.fbrs - baseImage: python:3.6.11 directives: preCopy: diff --git a/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/function-gpu.yaml b/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/function-gpu.yaml index 10f23d4f586b..3fa0f8b0a421 100644 --- a/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/function-gpu.yaml +++ b/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/function-gpu.yaml @@ -91,13 +91,13 @@ metadata: spec: description: Faster RCNN from Tensorflow Object Detection API optimized for GPU - runtime: 'python:3.6' + runtime: 'python:3.9' handler: main:handler eventTimeout: 30s build: image: cvat.tf.faster_rcnn_inception_v2_coco:latest-gpu - baseImage: tensorflow/tensorflow:2.1.1-gpu + baseImage: tensorflow/tensorflow:2.16.1-gpu directives: preCopy: diff --git a/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/function.yaml b/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/function.yaml index 6ebe8af61390..656d4a6c8a20 100644 --- a/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/function.yaml +++ b/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/function.yaml @@ -91,13 +91,13 @@ metadata: spec: description: Faster RCNN from Tensorflow Object Detection API - runtime: 'python:3.6' + runtime: 'python:3.9' handler: 
main:handler eventTimeout: 30s build: image: cvat.tf.faster_rcnn_inception_v2_coco - baseImage: tensorflow/tensorflow:2.1.1 + baseImage: tensorflow/tensorflow:2.16.1 directives: preCopy: diff --git a/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/model_loader.py b/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/model_loader.py index 36b5188eca41..b41b3ec33481 100644 --- a/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/model_loader.py +++ b/serverless/tensorflow/faster_rcnn_inception_v2_coco/nuclio/model_loader.py @@ -35,7 +35,7 @@ def __del__(self): def infer(self, image): width, height = image.size if width > 1920 or height > 1080: - image = image.resize((width // 2, height // 2), Image.ANTIALIAS) + image = image.resize((width // 2, height // 2), Image.LANCZOS) image_np = np.array(image.getdata())[:, :3].reshape( (image.height, image.width, -1)).astype(np.uint8) image_np = np.expand_dims(image_np, axis=0) diff --git a/serverless/tensorflow/matterport/mask_rcnn/nuclio/function-gpu.yaml b/serverless/tensorflow/matterport/mask_rcnn/nuclio/function-gpu.yaml deleted file mode 100644 index 906dd6ede599..000000000000 --- a/serverless/tensorflow/matterport/mask_rcnn/nuclio/function-gpu.yaml +++ /dev/null @@ -1,135 +0,0 @@ -metadata: - name: tf-matterport-mask-rcnn - namespace: cvat - annotations: - name: Mask RCNN via Tensorflow - type: detector - framework: tensorflow - spec: | - [ - { "id": 0, "name": "BG", "type": "mask" }, - { "id": 1, "name": "person", "type": "mask" }, - { "id": 2, "name": "bicycle", "type": "mask" }, - { "id": 3, "name": "car", "type": "mask" }, - { "id": 4, "name": "motorcycle", "type": "mask" }, - { "id": 5, "name": "airplane", "type": "mask" }, - { "id": 6, "name": "bus", "type": "mask" }, - { "id": 7, "name": "train", "type": "mask" }, - { "id": 8, "name": "truck", "type": "mask" }, - { "id": 9, "name": "boat", "type": "mask" }, - { "id": 10, "name": "traffic_light", "type": "mask" }, - { "id": 11, "name": "fire_hydrant", "type": "mask" }, - { "id": 12, "name": "stop_sign", "type": "mask" }, - { "id": 13, "name": "parking_meter", "type": "mask" }, - { "id": 14, "name": "bench", "type": "mask" }, - { "id": 15, "name": "bird", "type": "mask" }, - { "id": 16, "name": "cat", "type": "mask" }, - { "id": 17, "name": "dog", "type": "mask" }, - { "id": 18, "name": "horse", "type": "mask" }, - { "id": 19, "name": "sheep", "type": "mask" }, - { "id": 20, "name": "cow", "type": "mask" }, - { "id": 21, "name": "elephant", "type": "mask" }, - { "id": 22, "name": "bear", "type": "mask" }, - { "id": 23, "name": "zebra", "type": "mask" }, - { "id": 24, "name": "giraffe", "type": "mask" }, - { "id": 25, "name": "backpack", "type": "mask" }, - { "id": 26, "name": "umbrella", "type": "mask" }, - { "id": 27, "name": "handbag", "type": "mask" }, - { "id": 28, "name": "tie", "type": "mask" }, - { "id": 29, "name": "suitcase", "type": "mask" }, - { "id": 30, "name": "frisbee", "type": "mask" }, - { "id": 31, "name": "skis", "type": "mask" }, - { "id": 32, "name": "snowboard", "type": "mask" }, - { "id": 33, "name": "sports_ball", "type": "mask" }, - { "id": 34, "name": "kite", "type": "mask" }, - { "id": 35, "name": "baseball_bat", "type": "mask" }, - { "id": 36, "name": "baseball_glove", "type": "mask" }, - { "id": 37, "name": "skateboard", "type": "mask" }, - { "id": 38, "name": "surfboard", "type": "mask" }, - { "id": 39, "name": "tennis_racket", "type": "mask" }, - { "id": 40, "name": "bottle", "type": "mask" }, - { "id": 41, "name": "wine_glass", 
"type": "mask" }, - { "id": 42, "name": "cup", "type": "mask" }, - { "id": 43, "name": "fork", "type": "mask" }, - { "id": 44, "name": "knife", "type": "mask" }, - { "id": 45, "name": "spoon", "type": "mask" }, - { "id": 46, "name": "bowl", "type": "mask" }, - { "id": 47, "name": "banana", "type": "mask" }, - { "id": 48, "name": "apple", "type": "mask" }, - { "id": 49, "name": "sandwich", "type": "mask" }, - { "id": 50, "name": "orange", "type": "mask" }, - { "id": 51, "name": "broccoli", "type": "mask" }, - { "id": 52, "name": "carrot", "type": "mask" }, - { "id": 53, "name": "hot_dog", "type": "mask" }, - { "id": 54, "name": "pizza", "type": "mask" }, - { "id": 55, "name": "donut", "type": "mask" }, - { "id": 56, "name": "cake", "type": "mask" }, - { "id": 57, "name": "chair", "type": "mask" }, - { "id": 58, "name": "couch", "type": "mask" }, - { "id": 59, "name": "potted_plant", "type": "mask" }, - { "id": 60, "name": "bed", "type": "mask" }, - { "id": 61, "name": "dining_table", "type": "mask" }, - { "id": 62, "name": "toilet", "type": "mask" }, - { "id": 63, "name": "tv", "type": "mask" }, - { "id": 64, "name": "laptop", "type": "mask" }, - { "id": 65, "name": "mouse", "type": "mask" }, - { "id": 66, "name": "remote", "type": "mask" }, - { "id": 67, "name": "keyboard", "type": "mask" }, - { "id": 68, "name": "cell_phone", "type": "mask" }, - { "id": 69, "name": "microwave", "type": "mask" }, - { "id": 70, "name": "oven", "type": "mask" }, - { "id": 71, "name": "toaster", "type": "mask" }, - { "id": 72, "name": "sink", "type": "mask" }, - { "id": 73, "name": "refrigerator", "type": "mask" }, - { "id": 74, "name": "book", "type": "mask" }, - { "id": 75, "name": "clock", "type": "mask" }, - { "id": 76, "name": "vase", "type": "mask" }, - { "id": 77, "name": "scissors", "type": "mask" }, - { "id": 78, "name": "teddy_bear", "type": "mask" }, - { "id": 79, "name": "hair_drier", "type": "mask" }, - { "id": 80, "name": "toothbrush", "type": "mask" } - ] - -spec: - description: Mask RCNN optimized for GPU - - runtime: 'python:3.6' - handler: main:handler - eventTimeout: 30s - env: - - name: MASK_RCNN_DIR - value: /opt/nuclio/Mask_RCNN - build: - image: cvat.tf.matterport.mask_rcnn:latest-gpu - baseImage: tensorflow/tensorflow:1.15.5-gpu-py3 - directives: - postCopy: - - kind: WORKDIR - value: /opt/nuclio - - kind: RUN - value: apt update && apt install --no-install-recommends -y git curl - - kind: RUN - value: git clone --depth 1 https://github.com/matterport/Mask_RCNN.git - - kind: RUN - value: curl -L https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5 -o Mask_RCNN/mask_rcnn_coco.h5 - - kind: RUN - value: pip3 install numpy cython pyyaml keras==2.1.0 scikit-image Pillow - - triggers: - myHttpTrigger: - maxWorkers: 1 - kind: 'http' - workerAvailabilityTimeoutMilliseconds: 10000 - attributes: - maxRequestBodySize: 33554432 # 32MB - - resources: - limits: - nvidia.com/gpu: 1 - - platform: - attributes: - restartPolicy: - name: always - maximumRetryCount: 3 - mountMode: volume diff --git a/serverless/tensorflow/matterport/mask_rcnn/nuclio/function.yaml b/serverless/tensorflow/matterport/mask_rcnn/nuclio/function.yaml deleted file mode 100644 index c620486d0628..000000000000 --- a/serverless/tensorflow/matterport/mask_rcnn/nuclio/function.yaml +++ /dev/null @@ -1,132 +0,0 @@ -metadata: - name: tf-matterport-mask-rcnn - namespace: cvat - annotations: - name: Mask RCNN via Tensorflow - type: detector - framework: tensorflow - spec: | - [ - { "id": 0, "name": "BG", 
"type": "mask" }, - { "id": 1, "name": "person", "type": "mask" }, - { "id": 2, "name": "bicycle", "type": "mask" }, - { "id": 3, "name": "car", "type": "mask" }, - { "id": 4, "name": "motorcycle", "type": "mask" }, - { "id": 5, "name": "airplane", "type": "mask" }, - { "id": 6, "name": "bus", "type": "mask" }, - { "id": 7, "name": "train", "type": "mask" }, - { "id": 8, "name": "truck", "type": "mask" }, - { "id": 9, "name": "boat", "type": "mask" }, - { "id": 10, "name": "traffic_light", "type": "mask" }, - { "id": 11, "name": "fire_hydrant", "type": "mask" }, - { "id": 12, "name": "stop_sign", "type": "mask" }, - { "id": 13, "name": "parking_meter", "type": "mask" }, - { "id": 14, "name": "bench", "type": "mask" }, - { "id": 15, "name": "bird", "type": "mask" }, - { "id": 16, "name": "cat", "type": "mask" }, - { "id": 17, "name": "dog", "type": "mask" }, - { "id": 18, "name": "horse", "type": "mask" }, - { "id": 19, "name": "sheep", "type": "mask" }, - { "id": 20, "name": "cow", "type": "mask" }, - { "id": 21, "name": "elephant", "type": "mask" }, - { "id": 22, "name": "bear", "type": "mask" }, - { "id": 23, "name": "zebra", "type": "mask" }, - { "id": 24, "name": "giraffe", "type": "mask" }, - { "id": 25, "name": "backpack", "type": "mask" }, - { "id": 26, "name": "umbrella", "type": "mask" }, - { "id": 27, "name": "handbag", "type": "mask" }, - { "id": 28, "name": "tie", "type": "mask" }, - { "id": 29, "name": "suitcase", "type": "mask" }, - { "id": 30, "name": "frisbee", "type": "mask" }, - { "id": 31, "name": "skis", "type": "mask" }, - { "id": 32, "name": "snowboard", "type": "mask" }, - { "id": 33, "name": "sports_ball", "type": "mask" }, - { "id": 34, "name": "kite", "type": "mask" }, - { "id": 35, "name": "baseball_bat", "type": "mask" }, - { "id": 36, "name": "baseball_glove", "type": "mask" }, - { "id": 37, "name": "skateboard", "type": "mask" }, - { "id": 38, "name": "surfboard", "type": "mask" }, - { "id": 39, "name": "tennis_racket", "type": "mask" }, - { "id": 40, "name": "bottle", "type": "mask" }, - { "id": 41, "name": "wine_glass", "type": "mask" }, - { "id": 42, "name": "cup", "type": "mask" }, - { "id": 43, "name": "fork", "type": "mask" }, - { "id": 44, "name": "knife", "type": "mask" }, - { "id": 45, "name": "spoon", "type": "mask" }, - { "id": 46, "name": "bowl", "type": "mask" }, - { "id": 47, "name": "banana", "type": "mask" }, - { "id": 48, "name": "apple", "type": "mask" }, - { "id": 49, "name": "sandwich", "type": "mask" }, - { "id": 50, "name": "orange", "type": "mask" }, - { "id": 51, "name": "broccoli", "type": "mask" }, - { "id": 52, "name": "carrot", "type": "mask" }, - { "id": 53, "name": "hot_dog", "type": "mask" }, - { "id": 54, "name": "pizza", "type": "mask" }, - { "id": 55, "name": "donut", "type": "mask" }, - { "id": 56, "name": "cake", "type": "mask" }, - { "id": 57, "name": "chair", "type": "mask" }, - { "id": 58, "name": "couch", "type": "mask" }, - { "id": 59, "name": "potted_plant", "type": "mask" }, - { "id": 60, "name": "bed", "type": "mask" }, - { "id": 61, "name": "dining_table", "type": "mask" }, - { "id": 62, "name": "toilet", "type": "mask" }, - { "id": 63, "name": "tv", "type": "mask" }, - { "id": 64, "name": "laptop", "type": "mask" }, - { "id": 65, "name": "mouse", "type": "mask" }, - { "id": 66, "name": "remote", "type": "mask" }, - { "id": 67, "name": "keyboard", "type": "mask" }, - { "id": 68, "name": "cell_phone", "type": "mask" }, - { "id": 69, "name": "microwave", "type": "mask" }, - { "id": 70, "name": "oven", "type": "mask" 
}, - { "id": 71, "name": "toaster", "type": "mask" }, - { "id": 72, "name": "sink", "type": "mask" }, - { "id": 73, "name": "refrigerator", "type": "mask" }, - { "id": 74, "name": "book", "type": "mask" }, - { "id": 75, "name": "clock", "type": "mask" }, - { "id": 76, "name": "vase", "type": "mask" }, - { "id": 77, "name": "scissors", "type": "mask" }, - { "id": 78, "name": "teddy_bear", "type": "mask" }, - { "id": 79, "name": "hair_drier", "type": "mask" }, - { "id": 80, "name": "toothbrush", "type": "mask" } - ] - -spec: - description: | - An implementation of Mask RCNN on Python 3, Keras, and TensorFlow. - - runtime: 'python:3.6' - handler: main:handler - eventTimeout: 30s - env: - - name: MASK_RCNN_DIR - value: /opt/nuclio/Mask_RCNN - build: - image: cvat.tf.matterport.mask_rcnn - baseImage: tensorflow/tensorflow:1.13.1-py3 - directives: - postCopy: - - kind: WORKDIR - value: /opt/nuclio - - kind: RUN - value: apt update && apt install --no-install-recommends -y git curl - - kind: RUN - value: git clone --depth 1 https://github.com/matterport/Mask_RCNN.git - - kind: RUN - value: curl -L https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5 -o Mask_RCNN/mask_rcnn_coco.h5 - - kind: RUN - value: pip3 install numpy cython pyyaml keras==2.1.0 scikit-image 'imageio<=2.9.0' Pillow - - triggers: - myHttpTrigger: - maxWorkers: 2 - kind: 'http' - workerAvailabilityTimeoutMilliseconds: 10000 - attributes: - maxRequestBodySize: 33554432 # 32MB - - platform: - attributes: - restartPolicy: - name: always - maximumRetryCount: 3 - mountMode: volume diff --git a/serverless/tensorflow/matterport/mask_rcnn/nuclio/main.py b/serverless/tensorflow/matterport/mask_rcnn/nuclio/main.py deleted file mode 100644 index 99c533b0eeee..000000000000 --- a/serverless/tensorflow/matterport/mask_rcnn/nuclio/main.py +++ /dev/null @@ -1,33 +0,0 @@ -import json -import base64 -from PIL import Image -import io -from model_loader import ModelLoader -import numpy as np -import yaml - - -def init_context(context): - context.logger.info("Init context... 
0%") - - with open("/opt/nuclio/function.yaml", 'rb') as function_file: - functionconfig = yaml.safe_load(function_file) - labels_spec = functionconfig['metadata']['annotations']['spec'] - labels = {item['id']: item['name'] for item in json.loads(labels_spec)} - - model_handler = ModelLoader(labels) # pylint: disable=no-value-for-parameter - context.user_data.model_handler = model_handler - - context.logger.info("Init context...100%") - -def handler(context, event): - context.logger.info("Run tf.matterport.mask_rcnn model") - data = event.body - buf = io.BytesIO(base64.b64decode(data["image"])) - threshold = float(data.get("threshold", 0.2)) - image = Image.open(buf) - - results = context.user_data.model_handler.infer(np.array(image), threshold) - - return context.Response(body=json.dumps(results), headers={}, - content_type='application/json', status_code=200) diff --git a/serverless/tensorflow/matterport/mask_rcnn/nuclio/model_loader.py b/serverless/tensorflow/matterport/mask_rcnn/nuclio/model_loader.py deleted file mode 100644 index 196cab8663d1..000000000000 --- a/serverless/tensorflow/matterport/mask_rcnn/nuclio/model_loader.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (C) 2020-2022 Intel Corporation -# -# SPDX-License-Identifier: MIT - -import os -import numpy as np -import sys -from skimage.measure import find_contours, approximate_polygon -import tensorflow as tf -MASK_RCNN_DIR = os.path.abspath(os.environ.get('MASK_RCNN_DIR')) -if MASK_RCNN_DIR: - sys.path.append(MASK_RCNN_DIR) # To find local version of the library -from mrcnn import model as modellib -from mrcnn.config import Config - -def to_cvat_mask(box: list, mask): - xtl, ytl, xbr, ybr = box - flattened = mask[ytl:ybr + 1, xtl:xbr + 1].flat[:].tolist() - flattened.extend([xtl, ytl, xbr, ybr]) - return flattened - -class ModelLoader: - def __init__(self, labels): - COCO_MODEL_PATH = os.path.join(MASK_RCNN_DIR, "mask_rcnn_coco.h5") - if COCO_MODEL_PATH is None: - raise OSError('Model path env not found in the system.') - - class InferenceConfig(Config): - NAME = "coco" - NUM_CLASSES = 1 + 80 # COCO has 80 classes - GPU_COUNT = 1 - IMAGES_PER_GPU = 1 - - # Limit gpu memory to 30% to allow for other nuclio gpu functions. 
Increase fraction as you like - import keras.backend.tensorflow_backend as ktf - def get_session(gpu_fraction=0.333): - gpu_options = tf.GPUOptions( - per_process_gpu_memory_fraction=gpu_fraction, - allow_growth=True) - return tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) - - ktf.set_session(get_session()) - # Print config details - self.config = InferenceConfig() - self.config.display() - - self.model = modellib.MaskRCNN(mode="inference", - config=self.config, model_dir=MASK_RCNN_DIR) - self.model.load_weights(COCO_MODEL_PATH, by_name=True) - self.labels = labels - - def infer(self, image, threshold): - output = self.model.detect([image], verbose=1)[0] - - results = [] - MASK_THRESHOLD = 0.5 - for i in range(len(output["rois"])): - score = output["scores"][i] - class_id = output["class_ids"][i] - mask = output["masks"][:, :, i] - if score >= threshold: - mask = mask.astype(np.uint8) - contours = find_contours(mask, MASK_THRESHOLD) - # only one contour exist in our case - contour = contours[0] - contour = np.flip(contour, axis=1) - # Approximate the contour and reduce the number of points - contour = approximate_polygon(contour, tolerance=2.5) - if len(contour) < 6: - continue - label = self.labels[class_id] - - Xmin = int(np.min(contour[:,0])) - Xmax = int(np.max(contour[:,0])) - Ymin = int(np.min(contour[:,1])) - Ymax = int(np.max(contour[:,1])) - cvat_mask = to_cvat_mask((Xmin, Ymin, Xmax, Ymax), mask) - - results.append({ - "confidence": str(score), - "label": label, - "points": contour.ravel().tolist(), - "mask": cvat_mask, - "type": "mask", - }) - - return results diff --git a/site/content/en/docs/manual/advanced/serverless-tutorial.md b/site/content/en/docs/manual/advanced/serverless-tutorial.md index b906fc59de4c..bb83acf9cd04 100644 --- a/site/content/en/docs/manual/advanced/serverless-tutorial.md +++ b/site/content/en/docs/manual/advanced/serverless-tutorial.md @@ -110,19 +110,21 @@ nuctl create project cvat nuctl deploy --project-name cvat --path "./serverless/pytorch/foolwood/siammask/nuclio" --platform local ``` ``` -21.05.07 13:00:22.233 nuctl (I) Deploying function {"name": ""} -21.05.07 13:00:22.233 nuctl (I) Building {"versionInfo": "Label: 1.5.16, Git commit: ae43a6a560c2bec42d7ccfdf6e8e11a1e3cc3774, OS: linux, Arch: amd64, Go version: go1.14.3", "name": ""} -21.05.07 13:00:22.652 nuctl (I) Cleaning up before deployment {"functionName": "pth-foolwood-siammask"} -21.05.07 13:00:22.705 nuctl (I) Staging files and preparing base images -21.05.07 13:00:22.706 nuctl (I) Building processor image {"imageName": "cvat/pth.foolwood.siammask:latest"} -21.05.07 13:00:22.706 nuctl.platform.docker (I) Pulling image {"imageName": "quay.io/nuclio/handler-builder-python-onbuild:1.5.16-amd64"} -21.05.07 13:00:26.351 nuctl.platform.docker (I) Pulling image {"imageName": "quay.io/nuclio/uhttpc:0.0.1-amd64"} -21.05.07 13:00:29.819 nuctl.platform (I) Building docker image {"image": "cvat/pth.foolwood.siammask:latest"} -21.05.07 13:00:30.103 nuctl.platform (I) Pushing docker image into registry {"image": "cvat/pth.foolwood.siammask:latest", "registry": ""} -21.05.07 13:00:30.103 nuctl.platform (I) Docker image was successfully built and pushed into docker registry {"image": "cvat/pth.foolwood.siammask:latest"} -21.05.07 13:00:30.104 nuctl (I) Build complete {"result": 
{"Image":"cvat/pth.foolwood.siammask:latest","UpdatedFunctionConfig":{"metadata":{"name":"pth-foolwood-siammask","namespace":"nuclio","labels":{"nuclio.io/project-name":"cvat"},"annotations":{"framework":"pytorch","name":"SiamMask","spec":"","type":"tracker"}},"spec":{"description":"Fast Online Object Tracking and Segmentation","handler":"main:handler","runtime":"python:3.6","env":[{"name":"PYTHONPATH","value":"/opt/nuclio/SiamMask:/opt/nuclio/SiamMask/experiments/siammask_sharp"}],"resources":{},"image":"cvat/pth.foolwood.siammask:latest","targetCPU":75,"triggers":{"myHttpTrigger":{"class":"","kind":"http","name":"myHttpTrigger","maxWorkers":2,"workerAvailabilityTimeoutMilliseconds":10000,"attributes":{"maxRequestBodySize":33554432}}},"build":{"image":"cvat/pth.foolwood.siammask","baseImage":"continuumio/miniconda3","directives":{"preCopy":[{"kind":"WORKDIR","value":"/opt/nuclio"},{"kind":"RUN","value":"conda create -y -n siammask python=3.6"},{"kind":"SHELL","value":"[\"conda\", \"run\", \"-n\", \"siammask\", \"/bin/bash\", \"-c\"]"},{"kind":"RUN","value":"git clone https://github.com/foolwood/SiamMask.git"},{"kind":"RUN","value":"pip install -r SiamMask/requirements.txt jsonpickle"},{"kind":"RUN","value":"conda install -y gcc_linux-64"},{"kind":"RUN","value":"cd SiamMask \u0026\u0026 bash make.sh \u0026\u0026 cd -"},{"kind":"RUN","value":"wget -P SiamMask/experiments/siammask_sharp http://www.robots.ox.ac.uk/~qwang/SiamMask_DAVIS.pth"},{"kind":"ENTRYPOINT","value":"[\"conda\", \"run\", \"-n\", \"siammask\"]"}]},"codeEntryType":"image"},"platform":{"attributes":{"mountMode":"volume","restartPolicy":{"maximumRetryCount":3,"name":"always"}}},"readinessTimeoutSeconds":60,"securityContext":{},"eventTimeout":"30s"}}}} -21.05.07 13:00:31.387 nuctl.platform (I) Waiting for function to be ready {"timeout": 60} -21.05.07 13:00:32.796 nuctl (I) Function deploy complete {"functionName": "pth-foolwood-siammask", "httpPort": 49155} +24.04.18 20:52:47.910 (I) nuctl Deploying function {"name": "pth-foolwood-siammask"} +24.04.18 20:52:47.910 (I) nuctl Building {"builderKind": "docker", "versionInfo": "Label: 1.13.0, Git commit: c4422eb772781fb50fbf017698aae96199d81388, OS: linux, Arch: amd64, Go version: go1.21.7", "name": "pth-foolwood-siammask"} +24.04.18 20:52:47.929 (W) nuctl.platform MaxWorkers is deprecated and will be removed in v1.15.x, use NumWorkers instead +24.04.18 20:52:48.044 (I) nuctl Staging files and preparing base images +24.04.18 20:52:48.044 (W) nuctl Using user provided base image, runtime interpreter version is provided by the base image {"baseImage": "ubuntu:20.04"} +24.04.18 20:52:48.044 (I) nuctl Building processor image {"registryURL": "", "taggedImageName": "cvat.pth.foolwood.siammask:latest"} +24.04.18 20:52:48.044 (I) nuctl.platform.docker Pulling image {"imageName": "quay.io/nuclio/handler-builder-python-onbuild:1.13.0-amd64"} +24.04.18 20:52:49.717 (I) nuctl.platform.docker Pulling image {"imageName": "quay.io/nuclio/uhttpc:0.0.1-amd64"} +24.04.18 20:52:51.363 (I) nuctl.platform Building docker image {"image": "cvat.pth.foolwood.siammask:latest"} +24.04.18 20:55:58.853 (I) nuctl.platform Pushing docker image into registry {"image": "cvat.pth.foolwood.siammask:latest", "registry": ""} +24.04.18 20:55:58.853 (I) nuctl.platform Docker image was successfully built and pushed into docker registry {"image": "cvat.pth.foolwood.siammask:latest"} +24.04.18 20:55:58.853 (I) nuctl Build complete {"image": "cvat.pth.foolwood.siammask:latest"} +24.04.18 20:55:58.861 (I) nuctl Cleaning 
up before deployment {"functionName": "pth-foolwood-siammask"} +24.04.18 20:55:59.593 (I) nuctl.platform Waiting for function to be ready {"timeout": 120} +24.04.18 20:56:01.315 (I) nuctl Function deploy complete {"functionName": "pth-foolwood-siammask", "httpPort": 33453, "internalInvocationURLs": ["172.17.0.5:8080"], "externalInvocationURLs": ["0.0.0.0:33453"]} ``` ```bash From 3e29537995bc12d1166566944cf73642c2b7580d Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Thu, 2 May 2024 13:23:45 +0300 Subject: [PATCH 11/29] Fixed: Cannot read properties of undefined (reading 'addClass') (#7834) --- ...44_boris_reworked_empty_first_frame_fix.md | 4 ++++ cvat-canvas/package.json | 2 +- cvat-canvas/src/typescript/canvasModel.ts | 23 ++++++++----------- 3 files changed, 15 insertions(+), 14 deletions(-) create mode 100644 changelog.d/20240501_111444_boris_reworked_empty_first_frame_fix.md diff --git a/changelog.d/20240501_111444_boris_reworked_empty_first_frame_fix.md b/changelog.d/20240501_111444_boris_reworked_empty_first_frame_fix.md new file mode 100644 index 000000000000..11d0468b5e5f --- /dev/null +++ b/changelog.d/20240501_111444_boris_reworked_empty_first_frame_fix.md @@ -0,0 +1,4 @@ +### Fixed + +- Cannot read properties of undefined (reading 'addClass') + () diff --git a/cvat-canvas/package.json b/cvat-canvas/package.json index 97302daace22..74341ac5d2ec 100644 --- a/cvat-canvas/package.json +++ b/cvat-canvas/package.json @@ -1,6 +1,6 @@ { "name": "cvat-canvas", - "version": "2.20.0", + "version": "2.20.1", "type": "module", "description": "Part of Computer Vision Annotation Tool which presents its canvas library", "main": "src/canvas.ts", diff --git a/cvat-canvas/src/typescript/canvasModel.ts b/cvat-canvas/src/typescript/canvasModel.ts index a4bb033b91ba..60b3330af3fd 100644 --- a/cvat-canvas/src/typescript/canvasModel.ts +++ b/cvat-canvas/src/typescript/canvasModel.ts @@ -555,13 +555,8 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel { this.data.imageID = frameData.number; - // We set objects immideately to avoid outdated data in case if setup() is called - // multiple times before the frameData.data() promise is resolved. 
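The removed comments above describe the old approach of applying new data immediately and rolling it back on failure; the rest of this hunk (below) switches to applying data only after the image resolves and only if the request is still the latest one. As a rough illustration of that guard pattern — a hedged sketch with hypothetical names, not the actual `CanvasModelImpl` code:

```typescript
// Hedged sketch (hypothetical names): the "latest request wins" guard used by the reworked setup().
class FrameRenderer<TImage> {
    private requestedFrame: number | null = null;
    private lastImage: TImage | null = null;

    public async setup(frame: number, fetchImage: (frame: number) => Promise<TImage>): Promise<void> {
        // remember which frame is wanted before awaiting the (potentially slow) fetch
        this.requestedFrame = frame;
        const image = await fetchImage(frame);
        if (frame !== this.requestedFrame) {
            // another setup() call superseded this one while the image was loading; drop the stale result
            return;
        }
        this.lastImage = image;
    }
}
```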
- // If promise is rejected we restore previous objects - const prevZLayer = this.data.zLayer; - const prevObjects = this.data.objects; - this.data.zLayer = zLayer; - this.data.objects = objectStates; + const { zLayer: prevZLayer, objects: prevObjects } = this.data; + frameData .data((): void => { this.data.image = null; @@ -569,7 +564,7 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel { }) .then((data: Image): void => { if (frameData.number !== this.data.imageID) { - // already another image + // check that request is still relevant after async image data fetching return; } @@ -604,6 +599,13 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel { } this.notify(UpdateReasons.IMAGE_CHANGED); + + if (prevZLayer === this.data.zLayer && prevObjects === this.data.objects) { + // check the request is relevant, other setup() may have been called while promise resolving + this.data.zLayer = zLayer; + this.data.objects = objectStates; + } + this.notify(UpdateReasons.OBJECTS_UPDATED); }) .catch((exception: unknown): void => { @@ -614,11 +616,6 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel { } else { this.data.exception = new Error('Unknown error occured when fetching image data'); } - // Restore only relevant data in case if setup() is called multiple times - if (this.data.objects === objectStates && this.data.zLayer === zLayer) { - this.data.objects = prevObjects; - this.data.zLayer = prevZLayer; - } this.notify(UpdateReasons.DATA_FAILED); } }); From ce5e07ccfab94ceb455621ab23773a3d5e1b6984 Mon Sep 17 00:00:00 2001 From: Jacky Lam Date: Thu, 2 May 2024 17:05:06 +0100 Subject: [PATCH 12/29] fix[security]: Disable nginx server signature by default (#7814) --- changelog.d/20240429_124319_jackylamhk_patch_2.md | 4 ++++ cvat-ui/react_nginx.conf | 5 +++++ cvat/nginx.conf | 5 +++++ 3 files changed, 14 insertions(+) create mode 100644 changelog.d/20240429_124319_jackylamhk_patch_2.md diff --git a/changelog.d/20240429_124319_jackylamhk_patch_2.md b/changelog.d/20240429_124319_jackylamhk_patch_2.md new file mode 100644 index 000000000000..029f7148be04 --- /dev/null +++ b/changelog.d/20240429_124319_jackylamhk_patch_2.md @@ -0,0 +1,4 @@ +### Security + +- Disable the nginx server signature by default to make it slightly harder for attackers to find known vulnerabilities. + () diff --git a/cvat-ui/react_nginx.conf b/cvat-ui/react_nginx.conf index 29ae133f3978..c3d51866beab 100644 --- a/cvat-ui/react_nginx.conf +++ b/cvat-ui/react_nginx.conf @@ -1,6 +1,11 @@ server { root /usr/share/nginx/html; + # Disable server signature to make it slighty harder for + # attackers to find known vulnerabilities. See + # https://datatracker.ietf.org/doc/html/rfc9110#name-server + server_tokens off; + gzip on; gzip_comp_level 6; gzip_http_version 1.1; diff --git a/cvat/nginx.conf b/cvat/nginx.conf index a0ea97a07d00..5c67e4b1acd1 100644 --- a/cvat/nginx.conf +++ b/cvat/nginx.conf @@ -50,6 +50,11 @@ http { server_name _; + # Disable server signature to make it slighty harder for + # attackers to find known vulnerabilities. 
See + # https://datatracker.ietf.org/doc/html/rfc9110#name-server + server_tokens off; + location /static/ { gzip on; gzip_comp_level 6; From 04f3c845e15547795108598e3d77f6ff10b6c8a1 Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Fri, 3 May 2024 12:20:23 +0300 Subject: [PATCH 13/29] Enhanced uploading files with tus protocol, enabled retries (#7830) --- .../20240430_172649_boris_enable_tus_retries.md | 4 ++++ cvat-core/package.json | 2 +- cvat-core/src/server-proxy.ts | 15 ++++++++++++++- 3 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 changelog.d/20240430_172649_boris_enable_tus_retries.md diff --git a/changelog.d/20240430_172649_boris_enable_tus_retries.md b/changelog.d/20240430_172649_boris_enable_tus_retries.md new file mode 100644 index 000000000000..3d0ad6318c2d --- /dev/null +++ b/changelog.d/20240430_172649_boris_enable_tus_retries.md @@ -0,0 +1,4 @@ +### Fixed + +Uploading files with TUS immediately failed when one of the requests failed + () diff --git a/cvat-core/package.json b/cvat-core/package.json index 5ae406892349..98adef48e9ca 100644 --- a/cvat-core/package.json +++ b/cvat-core/package.json @@ -1,6 +1,6 @@ { "name": "cvat-core", - "version": "15.0.3", + "version": "15.0.4", "type": "module", "description": "Part of Computer Vision Tool which presents an interface for client-side integration", "main": "src/api.ts", diff --git a/cvat-core/src/server-proxy.ts b/cvat-core/src/server-proxy.ts index 6d9e9f118021..c5a84a3b0afb 100644 --- a/cvat-core/src/server-proxy.ts +++ b/cvat-core/src/server-proxy.ts @@ -132,7 +132,20 @@ async function chunkUpload(file: File, uploadConfig): Promise<{ uploadSentSize: Authorization: Axios.defaults.headers.common.Authorization, }, chunkSize, - retryDelays: null, + retryDelays: [2000, 4000, 8000, 16000, 32000, 64000], + onShouldRetry(err: tus.DetailedError | Error): boolean { + if (err instanceof tus.DetailedError) { + const { originalResponse } = (err as tus.DetailedError); + const code = (originalResponse?.getStatus() || 0); + + // do not retry if (code >= 400 && code < 500) is default tus behaviour + // retry if code === 409 or 423 is default tus behaviour + // additionally handle codes 429 and 0 + return !(code >= 400 && code < 500) || [409, 423, 429, 0].includes(code); + } + + return false; + }, onError(error) { reject(error); }, From af182a358937446836c22cbe97582842269af506 Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Fri, 3 May 2024 14:35:42 +0300 Subject: [PATCH 14/29] Fixed exception when copy/paste a skeleton point (#7843) --- .../20240503_101921_boris_fixed_length_of_undefined.md | 4 ++++ .../standard-workspace/objects-side-bar/objects-list.tsx | 8 ++++---- 2 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 changelog.d/20240503_101921_boris_fixed_length_of_undefined.md diff --git a/changelog.d/20240503_101921_boris_fixed_length_of_undefined.md b/changelog.d/20240503_101921_boris_fixed_length_of_undefined.md new file mode 100644 index 000000000000..39d41c007e95 --- /dev/null +++ b/changelog.d/20240503_101921_boris_fixed_length_of_undefined.md @@ -0,0 +1,4 @@ +### Fixed + +- Fixed exception 'Could not read property length of undefined' when copy/paste a skeleton point + () diff --git a/cvat-ui/src/containers/annotation-page/standard-workspace/objects-side-bar/objects-list.tsx b/cvat-ui/src/containers/annotation-page/standard-workspace/objects-side-bar/objects-list.tsx index d0e7c78247d0..a8c45ca203c2 100644 --- 
a/cvat-ui/src/containers/annotation-page/standard-workspace/objects-side-bar/objects-list.tsx +++ b/cvat-ui/src/containers/annotation-page/standard-workspace/objects-side-bar/objects-list.tsx @@ -408,7 +408,7 @@ class ObjectsListContainer extends React.PureComponent { }, SWITCH_PINNED: (event: KeyboardEvent | undefined) => { preventDefault(event); - const state = activatedState(); + const state = activatedState(true); if (state && !readonly) { state.pinned = !state.pinned; updateAnnotations([state]); @@ -456,7 +456,7 @@ class ObjectsListContainer extends React.PureComponent { }, TO_BACKGROUND: (event: KeyboardEvent | undefined) => { preventDefault(event); - const state = activatedState(); + const state = activatedState(true); if (state && !readonly && state.objectType !== ObjectType.TAG) { state.zOrder = minZLayer - 1; updateAnnotations([state]); @@ -464,14 +464,14 @@ class ObjectsListContainer extends React.PureComponent { }, TO_FOREGROUND: (event: KeyboardEvent | undefined) => { preventDefault(event); - const state = activatedState(); + const state = activatedState(true); if (state && !readonly && state.objectType !== ObjectType.TAG) { state.zOrder = maxZLayer + 1; updateAnnotations([state]); } }, COPY_SHAPE: () => { - const state = activatedState(); + const state = activatedState(true); if (state && !readonly) { copyShape(state); } From 39afcd443fdbd160e0a6242bd6addb6f46de1103 Mon Sep 17 00:00:00 2001 From: Boris Sekachev Date: Fri, 3 May 2024 16:33:11 +0300 Subject: [PATCH 15/29] Added ability to call analytics report manually (#7805) --- ...26_143800_boris_manual_analytics_report.md | 4 + cvat-core/src/analytics-report.ts | 7 +- cvat-core/src/api-implementation.ts | 36 ++- cvat-core/src/api.ts | 8 + cvat-core/src/index.ts | 14 +- cvat-core/src/server-proxy.ts | 121 ++++++++-- cvat-core/src/server-response-types.ts | 24 +- .../analytics-page/analytics-page.tsx | 214 +++++++++++------- .../analytics-page/analytics-performance.tsx | 88 +++++-- .../src/components/analytics-page/styles.scss | 10 + cvat/apps/analytics_report/apps.py | 10 - .../apps/analytics_report/default_settings.py | 8 - cvat/apps/analytics_report/permissions.py | 68 +++--- cvat/apps/analytics_report/report/create.py | 84 ++----- cvat/apps/analytics_report/report/get.py | 20 +- .../rules/analytics_reports.rego | 5 + cvat/apps/analytics_report/signals.py | 40 ---- cvat/settings/development.py | 1 - cvat/settings/testing_rest.py | 1 - docker-compose.dev.yml | 1 - .../e2e/features/analytics_pipeline.js | 97 +++----- .../cypress/e2e/features/ground_truth_jobs.js | 2 +- tests/cypress/support/commands.js | 7 + 23 files changed, 481 insertions(+), 389 deletions(-) create mode 100644 changelog.d/20240426_143800_boris_manual_analytics_report.md delete mode 100644 cvat/apps/analytics_report/default_settings.py delete mode 100644 cvat/apps/analytics_report/signals.py diff --git a/changelog.d/20240426_143800_boris_manual_analytics_report.md b/changelog.d/20240426_143800_boris_manual_analytics_report.md new file mode 100644 index 000000000000..e616757cca37 --- /dev/null +++ b/changelog.d/20240426_143800_boris_manual_analytics_report.md @@ -0,0 +1,4 @@ +### Changed + +- Analytics reports calculation may be initiated manually instead of automatic scheduling + () diff --git a/cvat-core/src/analytics-report.ts b/cvat-core/src/analytics-report.ts index 4d3637c7ad67..895ecb128c2e 100644 --- a/cvat-core/src/analytics-report.ts +++ b/cvat-core/src/analytics-report.ts @@ -1,9 +1,10 @@ -// Copyright (C) 2023 CVAT.ai Corporation +// Copyright 
(C) 2023-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT import { - SerializedAnalyticsEntry, SerializedAnalyticsReport, SerializedDataEntry, SerializedTransformationEntry, + SerializedAnalyticsEntry, SerializedAnalyticsReport, + SerializedDataEntry, SerializedTransformationEntry, } from './server-response-types'; import { ArgumentError } from './exceptions'; @@ -126,7 +127,7 @@ export default class AnalyticsReport { #statistics: AnalyticsEntry[]; constructor(initialData: SerializedAnalyticsReport) { - this.#id = initialData.id; + this.#id = initialData.job_id || initialData.task_id || initialData.project_id; this.#target = initialData.target as AnalyticsReportTarget; this.#createdDate = initialData.created_date; this.#statistics = []; diff --git a/cvat-core/src/api-implementation.ts b/cvat-core/src/api-implementation.ts index 331ea9e5d8da..b9d3d1f450d5 100644 --- a/cvat-core/src/api-implementation.ts +++ b/cvat-core/src/api-implementation.ts @@ -29,7 +29,10 @@ import CloudStorage from './cloud-storage'; import Organization, { Invitation } from './organization'; import Webhook from './webhook'; import { ArgumentError } from './exceptions'; -import { SerializedAsset } from './server-response-types'; +import { + AnalyticsReportFilter, QualityConflictsFilter, QualityReportsFilter, + QualitySettingsFilter, SerializedAsset, +} from './server-response-types'; import QualityReport from './quality-report'; import QualityConflict, { ConflictSeverity } from './quality-conflict'; import QualitySettings from './quality-settings'; @@ -403,7 +406,7 @@ export default function implementAPI(cvat: CVATCore): CVATCore { return webhooks; }); - implementationMixin(cvat.analytics.quality.reports, async (filter) => { + implementationMixin(cvat.analytics.quality.reports, async (filter: QualityReportsFilter) => { checkFilter(filter, { page: isInteger, pageSize: isPageSize, @@ -426,7 +429,7 @@ export default function implementAPI(cvat: CVATCore): CVATCore { ); return reports; }); - implementationMixin(cvat.analytics.quality.conflicts, async (filter) => { + implementationMixin(cvat.analytics.quality.conflicts, async (filter: QualityConflictsFilter) => { checkFilter(filter, { reportID: isInteger, }); @@ -502,7 +505,7 @@ export default function implementAPI(cvat: CVATCore): CVATCore { return mergedConflicts; }); - implementationMixin(cvat.analytics.quality.settings.get, async (filter) => { + implementationMixin(cvat.analytics.quality.settings.get, async (filter: QualitySettingsFilter) => { checkFilter(filter, { taskID: isInteger, }); @@ -512,7 +515,7 @@ export default function implementAPI(cvat: CVATCore): CVATCore { const settings = await serverProxy.analytics.quality.settings.get(params); return new QualitySettings({ ...settings }); }); - implementationMixin(cvat.analytics.performance.reports, async (filter) => { + implementationMixin(cvat.analytics.performance.reports, async (filter: AnalyticsReportFilter) => { checkFilter(filter, { jobID: isInteger, taskID: isInteger, @@ -527,9 +530,30 @@ export default function implementAPI(cvat: CVATCore): CVATCore { const reportData = await serverProxy.analytics.performance.reports(params); return new AnalyticsReport(reportData); }); + implementationMixin(cvat.analytics.performance.calculate, async ( + body: Parameters[0], + onUpdate: Parameters[1], + ) => { + checkFilter(body, { + jobID: isInteger, + taskID: isInteger, + projectID: isInteger, + }); + + checkExclusiveFields(body, ['jobID', 'taskID', 'projectID'], []); + if (!('jobID' in body || 'taskID' in body || 
'projectID' in body)) { + throw new ArgumentError('One of "jobID", "taskID", "projectID" is required, but not provided'); + } + + const params = fieldsToSnakeCase(body); + await serverProxy.analytics.performance.calculate(params, onUpdate); + }); implementationMixin(cvat.frames.getMeta, async (type, id) => { const result = await serverProxy.frames.getMeta(type, id); - return new FramesMetaData({ ...result }); + return new FramesMetaData({ + ...result, + deleted_frames: Object.fromEntries(result.deleted_frames.map((_frame) => [_frame, true])), + }); }); return cvat; diff --git a/cvat-core/src/api.ts b/cvat-core/src/api.ts index d4c6a21e9bf2..20e9ce8577fa 100644 --- a/cvat-core/src/api.ts +++ b/cvat-core/src/api.ts @@ -348,6 +348,14 @@ function build(): CVATCore { const result = await PluginRegistry.apiWrapper(cvat.analytics.performance.reports, filter); return result; }, + async calculate(body, onUpdate) { + const result = await PluginRegistry.apiWrapper( + cvat.analytics.performance.calculate, + body, + onUpdate, + ); + return result; + }, }, quality: { async reports(filter = {}) { diff --git a/cvat-core/src/index.ts b/cvat-core/src/index.ts index 9db3b5fca9e1..402ea4a69d9b 100644 --- a/cvat-core/src/index.ts +++ b/cvat-core/src/index.ts @@ -105,11 +105,11 @@ export default interface CVATCore { projects: { get: ( filter: { - id: number; - page: number; - search: string; - sort: string; - filter: string; + id?: number; + page?: number; + search?: string; + sort?: string; + filter?: string; } ) => Promise>; searchNames: any; @@ -141,6 +141,10 @@ export default interface CVATCore { }; performance: { reports: (filter: AnalyticsReportFilter) => Promise; + calculate: ( + body: { jobID?: number; taskID?: number; projectID?: number; }, + onUpdate: (status: enums.RQStatus, progress: number, message: string) => void, + ) => Promise; }; }; frames: { diff --git a/cvat-core/src/server-proxy.ts b/cvat-core/src/server-proxy.ts index c5a84a3b0afb..63759e33f8ed 100644 --- a/cvat-core/src/server-proxy.ts +++ b/cvat-core/src/server-proxy.ts @@ -1,5 +1,5 @@ // Copyright (C) 2019-2022 Intel Corporation -// Copyright (C) 2022-2023 CVAT.ai Corporation +// Copyright (C) 2022-2024 CVAT.ai Corporation // // SPDX-License-Identifier: MIT @@ -16,8 +16,8 @@ import { SerializedAbout, SerializedRemoteFile, SerializedUserAgreement, SerializedRegister, JobsFilter, SerializedJob, SerializedGuide, SerializedAsset, SerializedAPISchema, SerializedInvitationData, SerializedCloudStorage, SerializedFramesMetaData, SerializedCollection, - SerializedQualitySettingsData, ApiQualitySettingsFilter, SerializedQualityConflictData, ApiQualityConflictsFilter, - SerializedQualityReportData, ApiQualityReportsFilter, SerializedAnalyticsReport, ApiAnalyticsReportFilter, + SerializedQualitySettingsData, APIQualitySettingsFilter, SerializedQualityConflictData, APIQualityConflictsFilter, + SerializedQualityReportData, APIQualityReportsFilter, SerializedAnalyticsReport, APIAnalyticsReportFilter, } from './server-response-types'; import { PaginatedResource } from './core-types'; import { Storage } from './storage'; @@ -1185,18 +1185,20 @@ async function restoreProject(storage: Storage, file: File | string) { return wait(); } -const listenToCreateCallbacks: Record; +type LongProcessListener = Record; onUpdate: ((state: string, progress: number, message: string) => void)[]; -}> = {}; +}>; + +const listenToCreateTaskCallbacks: LongProcessListener = {}; function listenToCreateTask( id, onUpdate: (state: RQStatus, progress: number, message: string) => 
void, ): Promise { - if (id in listenToCreateCallbacks) { - listenToCreateCallbacks[id].onUpdate.push(onUpdate); + if (id in listenToCreateTaskCallbacks) { + listenToCreateTaskCallbacks[id].onUpdate.push(onUpdate); // to avoid extra status check requests we do not create any more promises - return listenToCreateCallbacks[id].promise; + return listenToCreateTaskCallbacks[id].promise; } const promise = new Promise((resolve, reject) => { @@ -1208,7 +1210,7 @@ function listenToCreateTask( const state = response.data.state?.toLowerCase(); if ([RQStatus.QUEUED, RQStatus.STARTED].includes(state)) { // notify all the subscribtions when data status changed - listenToCreateCallbacks[id].onUpdate.forEach((callback) => { + listenToCreateTaskCallbacks[id].onUpdate.forEach((callback) => { callback( state, response.data.progress || 0, @@ -1223,14 +1225,14 @@ function listenToCreateTask( resolve(createdTask); } else if (state === RQStatus.FAILED) { const failMessage = 'Images processing failed'; - listenToCreateCallbacks[id].onUpdate.forEach((callback) => { + listenToCreateTaskCallbacks[id].onUpdate.forEach((callback) => { callback(state, 0, failMessage); }); reject(new ServerError(filterPythonTraceback(response.data.message), 400)); } else { const failMessage = 'Unknown status received'; - listenToCreateCallbacks[id].onUpdate.forEach((callback) => { + listenToCreateTaskCallbacks[id].onUpdate.forEach((callback) => { callback(state || RQStatus.UNKNOWN, 0, failMessage); }); reject( @@ -1241,7 +1243,7 @@ function listenToCreateTask( ); } } catch (errorData) { - listenToCreateCallbacks[id].onUpdate.forEach((callback) => { + listenToCreateTaskCallbacks[id].onUpdate.forEach((callback) => { callback('failed', 0, 'Server request failed'); }); reject(generateError(errorData)); @@ -1251,13 +1253,13 @@ function listenToCreateTask( setTimeout(checkStatus, 100); }); - listenToCreateCallbacks[id] = { + listenToCreateTaskCallbacks[id] = { promise, onUpdate: [onUpdate], }; promise.catch(() => { // do nothing, avoid uncaught promise exceptions - }).finally(() => delete listenToCreateCallbacks[id]); + }).finally(() => delete listenToCreateTaskCallbacks[id]); return promise; } @@ -2337,7 +2339,7 @@ async function createAsset(file: File, guideId: number): Promise { const { backendAPI } = config; @@ -2373,7 +2375,7 @@ async function updateQualitySettings( } async function getQualityConflicts( - filter: ApiQualityConflictsFilter, + filter: APIQualityConflictsFilter, ): Promise { const params = enableOrganization(); const { backendAPI } = config; @@ -2391,7 +2393,7 @@ async function getQualityConflicts( } async function getQualityReports( - filter: ApiQualityReportsFilter, + filter: APIQualityReportsFilter, ): Promise> { const { backendAPI } = config; @@ -2410,7 +2412,7 @@ async function getQualityReports( } async function getAnalyticsReports( - filter: ApiAnalyticsReportFilter, + filter: APIAnalyticsReportFilter, ): Promise { const { backendAPI } = config; @@ -2427,6 +2429,86 @@ async function getAnalyticsReports( } } +const listenToCreateAnalyticsReportCallbacks: { + job: LongProcessListener; + task: LongProcessListener; + project: LongProcessListener; +} = { + job: {}, + task: {}, + project: {}, +}; + +async function calculateAnalyticsReport( + body: { + job_id?: number; + task_id?: number; + project_id?: number; + }, + onUpdate: (state: string, progress: number, message: string) => void, +): Promise { + const id = body.job_id || body.task_id || body.project_id; + const { backendAPI } = config; + const params = 
enableOrganization(); + let listenerStorage: LongProcessListener = null; + + if (Number.isInteger(body.job_id)) { + listenerStorage = listenToCreateAnalyticsReportCallbacks.job; + } else if (Number.isInteger(body.task_id)) { + listenerStorage = listenToCreateAnalyticsReportCallbacks.task; + } else if (Number.isInteger(body.project_id)) { + listenerStorage = listenToCreateAnalyticsReportCallbacks.project; + } + + if (listenerStorage[id]) { + listenerStorage[id].onUpdate.push(onUpdate); + return listenerStorage[id].promise; + } + + const promise = new Promise((resolve, reject) => { + Axios.post(`${backendAPI}/analytics/reports`, { + ...body, + ...params, + }).then(({ data: { rq_id: rqID } }) => { + listenerStorage[id].onUpdate.forEach((_onUpdate) => _onUpdate(RQStatus.QUEUED, 0, 'Analytics report request sent')); + const checkStatus = (): void => { + Axios.post(`${backendAPI}/analytics/reports`, { + ...body, + ...params, + }, { params: { rq_id: rqID } }).then((response) => { + // TODO: rewrite server logic, now it returns 202, 201 codes, but we need RQ statuses and details + // after this patch is merged https://github.com/cvat-ai/cvat/pull/7537 + if (response.status === 201) { + listenerStorage[id].onUpdate.forEach((_onUpdate) => _onUpdate(RQStatus.FINISHED, 0, 'Done')); + resolve(); + return; + } + + listenerStorage[id].onUpdate.forEach((_onUpdate) => _onUpdate(RQStatus.QUEUED, 0, 'Analytics report calculation is in progress')); + setTimeout(checkStatus, 10000); + }).catch((errorData) => { + reject(generateError(errorData)); + }); + }; + + setTimeout(checkStatus, 2500); + }).catch((errorData) => { + reject(generateError(errorData)); + }); + }); + + listenerStorage[id] = { + promise, + onUpdate: [onUpdate], + }; + + promise.finally(() => { + delete listenerStorage[id]; + }); + + return promise; +} + export default Object.freeze({ server: Object.freeze({ setAuthData, @@ -2578,6 +2660,7 @@ export default Object.freeze({ analytics: Object.freeze({ performance: Object.freeze({ reports: getAnalyticsReports, + calculate: calculateAnalyticsReport, }), quality: Object.freeze({ reports: getQualityReports, diff --git a/cvat-core/src/server-response-types.ts b/cvat-core/src/server-response-types.ts index 1894b17078f5..0bcce7cfb67e 100644 --- a/cvat-core/src/server-response-types.ts +++ b/cvat-core/src/server-response-types.ts @@ -26,7 +26,7 @@ export interface SerializedAnnotationFormats { exporters: SerializedAnnotationExporter[]; } -export interface ApiCommonFilterParams { +export interface APICommonFilterParams { page?: number; page_size?: number | 'all'; filter?: string; @@ -36,7 +36,7 @@ export interface ApiCommonFilterParams { search?: string; } -export interface ProjectsFilter extends ApiCommonFilterParams { +export interface ProjectsFilter extends APICommonFilterParams { id?: number; } @@ -233,10 +233,10 @@ export interface SerializedOrganization { contact?: SerializedOrganizationContact, } -export interface ApiQualitySettingsFilter extends ApiCommonFilterParams { +export interface APIQualitySettingsFilter extends APICommonFilterParams { task_id?: number; } -export type QualitySettingsFilter = Camelized; +export type QualitySettingsFilter = Camelized; export interface SerializedQualitySettingsData { id?: number; @@ -255,10 +255,10 @@ export interface SerializedQualitySettingsData { compare_attributes?: boolean; } -export interface ApiQualityConflictsFilter extends ApiCommonFilterParams { +export interface APIQualityConflictsFilter extends APICommonFilterParams { report_id?: number; } -export 
type QualityConflictsFilter = Camelized; +export type QualityConflictsFilter = Camelized; export interface SerializedAnnotationConflictData { job_id?: number; @@ -279,14 +279,14 @@ export interface SerializedQualityConflictData { description?: string; } -export interface ApiQualityReportsFilter extends ApiCommonFilterParams { +export interface APIQualityReportsFilter extends APICommonFilterParams { parent_id?: number; peoject_id?: number; task_id?: number; job_id?: number; target?: string; } -export type QualityReportsFilter = Camelized; +export type QualityReportsFilter = Camelized; export interface SerializedQualityReportData { id?: number; @@ -345,17 +345,19 @@ export interface SerializedAnalyticsEntry { transformations?: SerializedTransformationEntry[]; } -export interface ApiAnalyticsReportFilter extends ApiCommonFilterParams { +export interface APIAnalyticsReportFilter { project_id?: number; task_id?: number; job_id?: number; start_date?: string; end_date?: string; } -export type AnalyticsReportFilter = Camelized; +export type AnalyticsReportFilter = Camelized; export interface SerializedAnalyticsReport { - id?: number; + job_id?: number; + task_id?: number; + project_id?: number; target?: string; created_date?: string; statistics?: SerializedAnalyticsEntry[]; diff --git a/cvat-ui/src/components/analytics-page/analytics-page.tsx b/cvat-ui/src/components/analytics-page/analytics-page.tsx index 8284130052f9..410fd3007f0b 100644 --- a/cvat-ui/src/components/analytics-page/analytics-page.tsx +++ b/cvat-ui/src/components/analytics-page/analytics-page.tsx @@ -13,7 +13,9 @@ import Title from 'antd/lib/typography/Title'; import notification from 'antd/lib/notification'; import { useIsMounted } from 'utils/hooks'; import { Project, Task } from 'reducers'; -import { AnalyticsReport, Job, getCore } from 'cvat-core-wrapper'; +import { + AnalyticsReport, Job, RQStatus, getCore, +} from 'cvat-core-wrapper'; import moment from 'moment'; import CVATLoadingSpinner from 'components/common/loading-spinner'; import GoBackButton from 'components/common/go-back-button'; @@ -53,106 +55,121 @@ function handleTimePeriod(interval: DateIntervals): [string, string] { } } +function readInstanceType(location: ReturnType): InstanceType { + if (location.pathname.includes('projects')) { + return 'project'; + } + if (location.pathname.includes('jobs')) { + return 'job'; + } + return 'task'; +} + +function readInstanceId(type: InstanceType): number { + if (type === 'project') { + return +useParams<{ pid: string }>().pid; + } + if (type === 'job') { + return +useParams<{ jid: string }>().jid; + } + return +useParams<{ tid: string }>().tid; +} + type InstanceType = 'project' | 'task' | 'job'; function AnalyticsPage(): JSX.Element { const location = useLocation(); - const requestedInstanceType: InstanceType = (() => { - if (location.pathname.includes('projects')) { - return 'project'; - } - if (location.pathname.includes('jobs')) { - return 'job'; - } - return 'task'; - })(); - - const requestedInstanceID: number = (() => { - if (requestedInstanceType === 'project') { - return +useParams<{ pid: string }>().pid; - } - if (requestedInstanceType === 'job') { - return +useParams<{ jid: string }>().jid; - } - return +useParams<{ tid: string }>().tid; - })(); + const requestedInstanceType: InstanceType = readInstanceType(location); + const requestedInstanceID = readInstanceId(requestedInstanceType); const [activeTab, setTab] = useState(getTabFromHash()); - const [instanceType, setInstanceType] = useState(null); const 
[instance, setInstance] = useState(null); const [analyticsReport, setAnalyticsReport] = useState(null); const [timePeriod, setTimePeriod] = useState(DateIntervals.LAST_WEEK); + const [reportRefreshingStatus, setReportRefreshingStatus] = useState(null); const [fetching, setFetching] = useState(true); const isMounted = useIsMounted(); - const receiveInstance = (type: InstanceType, id: number): Promise => { - if (type === 'project') { - return core.projects.get({ id }); - } + const receiveInstance = async (type: InstanceType, id: number): Promise => { + let receivedInstance: Task | Project | Job | null = null; - if (type === 'task') { - return core.tasks.get({ id }); - } + try { + switch (type) { + case 'project': { + [receivedInstance] = await core.projects.get({ id }); + break; + } + case 'task': { + [receivedInstance] = await core.tasks.get({ id }); + break; + } + case 'job': { + [receivedInstance] = await core.jobs.get({ jobID: id }); + break; + } + default: + return; + } - return core.jobs.get({ jobID: id }); + if (isMounted()) { + setInstance(receivedInstance); + setInstanceType(type); + } + } catch (error: unknown) { + notification.error({ + message: `Could not receive requested ${type}`, + description: `${error instanceof Error ? error.message : ''}`, + }); + } }; - const receiveReport = (timeInterval: DateIntervals, type: InstanceType, id: number): Promise => { + const receiveReport = async (timeInterval: DateIntervals, type: InstanceType, id: number): Promise => { const [endDate, startDate] = handleTimePeriod(timeInterval); - if (type === 'project') { - return core.analytics.performance.reports({ - projectID: id, - endDate, - startDate, - }); - } + let report: AnalyticsReport | null = null; + + try { + const body = { endDate, startDate }; + switch (type) { + case 'project': { + report = await core.analytics.performance.reports({ ...body, projectID: id }); + break; + } + case 'task': { + report = await core.analytics.performance.reports({ ...body, taskID: id }); + break; + } + case 'job': { + report = await core.analytics.performance.reports({ ...body, jobID: id }); + break; + } + default: + return; + } - if (type === 'task') { - return core.analytics.performance.reports({ - taskID: id, - endDate, - startDate, + if (isMounted()) { + setAnalyticsReport(report); + } + } catch (error: unknown) { + notification.error({ + message: 'Could not receive requested report', + description: `${error instanceof Error ? 
error.message : ''}`, }); } - - return core.analytics.performance.reports({ - jobID: id, - endDate, - startDate, - }); }; useEffect(() => { - setFetching(true); - if (Number.isInteger(requestedInstanceID) && ['project', 'task', 'job'].includes(requestedInstanceType)) { + setFetching(true); Promise.all([ receiveInstance(requestedInstanceType, requestedInstanceID), receiveReport(timePeriod, requestedInstanceType, requestedInstanceID), - ]) - .then(([instanceResponse, report]) => { - const receivedInstance: Task | Project | Job = instanceResponse[0]; - if (receivedInstance && isMounted()) { - setInstance(receivedInstance); - setInstanceType(requestedInstanceType); - } - if (report && isMounted()) { - setAnalyticsReport(report); - } - }) - .catch((error: Error) => { - notification.error({ - message: 'Could not receive requested resources', - description: `${error.toString()}`, - }); - }) - .finally(() => { - if (isMounted()) { - setFetching(false); - } - }); + ]).finally(() => { + if (isMounted()) { + setFetching(false); + } + }); } else { notification.error({ message: 'Could not load this page', @@ -168,6 +185,42 @@ function AnalyticsPage(): JSX.Element { }; }, [requestedInstanceType, requestedInstanceID, timePeriod]); + useEffect(() => { + window.addEventListener('hashchange', () => { + const hash = getTabFromHash(); + setTab(hash); + }); + }, []); + + useEffect(() => { + window.location.hash = activeTab; + }, [activeTab]); + + const onCreateReport = useCallback(() => { + const onUpdate = (status: RQStatus, progress: number, message: string): void => { + setReportRefreshingStatus(message); + }; + + const body = { + ...(requestedInstanceType === 'project' ? { projectID: requestedInstanceID } : {}), + ...(requestedInstanceType === 'task' ? { taskID: requestedInstanceID } : {}), + ...(requestedInstanceType === 'job' ? { jobID: requestedInstanceID } : {}), + }; + + core.analytics.performance.calculate(body, onUpdate).then(() => { + receiveReport(timePeriod, requestedInstanceType, requestedInstanceID); + }).finally(() => { + setReportRefreshingStatus(null); + }).catch((error: unknown) => { + if (isMounted()) { + notification.error({ + message: 'Error occurred during requesting performance report', + description: error instanceof Error ? 
error.message : '', + }); + } + }); + }, [requestedInstanceType, requestedInstanceID, timePeriod]); + const onJobUpdate = useCallback((job: Job): void => { setFetching(true); @@ -185,20 +238,9 @@ function AnalyticsPage(): JSX.Element { }); }, []); - useEffect(() => { - window.addEventListener('hashchange', () => { - const hash = getTabFromHash(); - setTab(hash); - }); - }, []); - - const onTabKeyChange = (key: string): void => { + const onTabKeyChange = useCallback((key: string): void => { setTab(key as AnalyticsTabs); - }; - - useEffect(() => { - window.location.hash = activeTab; - }, [activeTab]); + }, []); let backNavigation: JSX.Element | null = null; let title: JSX.Element | null = null; @@ -238,7 +280,9 @@ function AnalyticsPage(): JSX.Element { {instanceType === 'task' && ( diff --git a/cvat-ui/src/components/analytics-page/analytics-performance.tsx b/cvat-ui/src/components/analytics-page/analytics-performance.tsx index 164504f7816c..afeab8ccbfd4 100644 --- a/cvat-ui/src/components/analytics-page/analytics-performance.tsx +++ b/cvat-ui/src/components/analytics-page/analytics-performance.tsx @@ -8,8 +8,14 @@ import RGL, { WidthProvider } from 'react-grid-layout'; import Text from 'antd/lib/typography/Text'; import Select from 'antd/lib/select'; import Notification from 'antd/lib/notification'; -import { AnalyticsReport, AnalyticsEntryViewType } from 'cvat-core-wrapper'; import { Col, Row } from 'antd/lib/grid'; +import Button from 'antd/lib/button'; +import Card from 'antd/lib/card'; +import Title from 'antd/lib/typography/Title'; +import { ReloadOutlined, LoadingOutlined } from '@ant-design/icons'; + +import { AnalyticsReport, AnalyticsEntryViewType } from 'cvat-core-wrapper'; +import CVATTooltip from 'components/common/cvat-tooltip'; import HistogramView from './views/histogram-view'; import AnalyticsCard from './views/analytics-card'; @@ -25,7 +31,9 @@ export enum DateIntervals { interface Props { report: AnalyticsReport | null; timePeriod: DateIntervals; + reportRefreshingStatus: string | null; onTimePeriodChange: (val: DateIntervals) => void; + onCreateReport: () => void; } const colors = [ @@ -37,12 +45,44 @@ const colors = [ ]; function AnalyticsOverview(props: Props): JSX.Element | null { - const { report, timePeriod, onTimePeriodChange } = props; + const { + report, timePeriod, reportRefreshingStatus, + onTimePeriodChange, onCreateReport, + } = props; - if (!report) return null; const layout: any = []; let histogramCount = 0; let numericCount = 0; + + if (report === null) { + return null; + } + + if (!report.id) { + return ( +
+ {reportRefreshingStatus ? {reportRefreshingStatus} : + Performance report was not calculated yet... } + +
+ ); + } + const views: { view: React.JSX.Element, key: string }[] = []; report.statistics.forEach((entry) => { const tooltip = ( @@ -138,12 +178,21 @@ function AnalyticsOverview(props: Props): JSX.Element | null { } } }); + return (
+ +
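The new `calculate` wrapper above implements a request-then-poll flow: it POSTs the report request, then re-POSTs with the returned `rq_id` until the server answers 201. A hedged usage sketch of the resulting client API, assuming the `cvat-core-wrapper` exports used in the UI code above (time-range options and error handling omitted; not code from the patch itself):

```typescript
import { getCore, RQStatus } from 'cvat-core-wrapper';

const core = getCore();

// Sketch only: request a fresh analytics report for one task, then re-read it.
async function refreshTaskReport(taskID: number): Promise<void> {
    await core.analytics.performance.calculate(
        { taskID },
        (status: RQStatus, progress: number, message: string): void => {
            // called while the server prepares the report (queued -> finished)
            console.log(status, progress, message);
        },
    );

    const report = await core.analytics.performance.reports({ taskID });
    console.log('statistics entries:', report.statistics.length);
}
```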