diff --git a/.editorconfig b/.editorconfig
index fb43e5c2954..ced8784480f 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -1,4 +1,5 @@
 root = true
+
 [*]
 charset = utf-8
 indent_style = space
@@ -6,3 +7,9 @@ indent_size = 2
 end_of_line = lf
 insert_final_newline = true
 trim_trailing_whitespace = true
+
+[*.py]
+indent_size = 4
+
+[justfile]
+indent_size = 4
diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 970f4d99649..00000000000
--- a/.flake8
+++ /dev/null
@@ -1,15 +0,0 @@
-[flake8]
-# match black formatter's behavior
-# https://www.flake8rules.com/rules/E203.html
-# https://www.flake8rules.com/rules/W503.html
-ignore = E203, W503
-per-file-ignores =
-    # Ignore maximum line length rule for test files
-    *test*:E501
-    # https://www.flake8rules.com/rules/F401.html; init files act as re-exporters
-    *__init__*:F401
-    # https://www.flake8rules.com/rules/E402.html; patches are applied before all imports are finished
-    *wsgi.py:E402
-    # https://www.flake8rules.com/rules/F401.html; django settings holds global values
-    *settings.py:F401
-max-line-length = 88
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 2a8d9885b07..86831213032 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -29,6 +29,7 @@ rfcs/ @WordPress/openverse-maintainers
 templates/ @WordPress/openverse-maintainers
 .gitattributes @WordPress/openverse-maintainers
 .gitignore @WordPress/openverse-maintainers
+.github/ @WordPress/openverse-maintainers
 .pre-commit-config.yaml @WordPress/openverse-maintainers
 CODE_OF_CONDUCT.md @WordPress/openverse-maintainers
 CONTRIBUTING.md @WordPress/openverse-maintainers
diff --git a/.github/GITHUB.md b/.github/GITHUB.md
index 060af47fb48..9c84fe3c03c 100644
--- a/.github/GITHUB.md
+++ b/.github/GITHUB.md
@@ -61,8 +61,11 @@ truth, it creates PRs to resolve any differences.

 ### New issue automation

-This workflow adds issues to the "Backlog" column in the Openverse project as
-soon as they are created.
+This workflow adds issues to the "Backlog" columns of both the
+[(old) Openverse project](https://github.com/orgs/WordPress/projects/3/) and
+the [Openverse project](https://github.com/orgs/WordPress/projects/75/) as
+soon as they are created. It also sets a priority field value on the new
+project item based on the priority label applied to the issue.

 **Issue:** opened
 **Dispatch:** disabled
diff --git a/.github/actions/get-changes/action.yml b/.github/actions/get-changes/action.yml
index 297b955cc91..bf5c3351c04 100644
--- a/.github/actions/get-changes/action.yml
+++ b/.github/actions/get-changes/action.yml
@@ -2,6 +2,9 @@ name: openverse/get-changes
 description: Determine where changes happened in the repository

 outputs:
+  changes:
+    description: "JSON array of keys from `.github/filters.yml`"
+    value: ${{ steps.paths-filter.outputs.changes }}
   api:
     description: "'true' if API changes are present"
     value: ${{ steps.paths-filter.outputs.api }}
diff --git a/.github/filters.yml b/.github/filters.yml
index 2773b0e5c19..86d3255c733 100644
--- a/.github/filters.yml
+++ b/.github/filters.yml
@@ -1,15 +1,12 @@
 api:
-  - .github/workflows/ci_cd.yml
   - api/**
+  # A change to the CI + CD workflow should trigger the complete workflow.
+  - .github/workflows/ci_cd.yml
 ingestion_server:
-  - .github/workflows/ci_cd.yml
   - ingestion_server/**
+  # A change to the CI + CD workflow should trigger the complete workflow.
- .github/workflows/ci_cd.yml frontend: - - .github/workflows/ci_cd.yml - frontend/** - package.json - pnpm-lock.yaml diff --git a/.github/release-drafter-api.yml b/.github/release-drafter-api.yml new file mode 100644 index 00000000000..2e6e2961bb5 --- /dev/null +++ b/.github/release-drafter-api.yml @@ -0,0 +1,27 @@ +# This file is generated from templates/release-drafter.yml.jinja +# Changes to this file must be made in the template first + +tag-prefix: api- +categories: + - title: New Features + label: "🌟 goal: addition" + - title: Improvements + label: "✨ goal: improvement" + - title: Internal Improvements + labels: + - "🤖 aspect: dx" + - "🧰 goal: internal improvement" + - title: Bug Fixes + label: "🛠 goal: fix" +# Identical to the default except it uses `-` instead of `*` to match our Markdown linter +change-template: "- $TITLE (#$NUMBER) @$AUTHOR" +exclude-labels: + - "skip-changelog" +include-labels: + - "🧱 stack: api" +template: | + $CHANGES + + ## Credits + + Thanks to $CONTRIBUTORS for their contributions! diff --git a/.github/release-drafter-frontend.yml b/.github/release-drafter-frontend.yml new file mode 100644 index 00000000000..251b3f9257a --- /dev/null +++ b/.github/release-drafter-frontend.yml @@ -0,0 +1,27 @@ +# This file is generated from templates/release-drafter.yml.jinja +# Changes to this file must be made in the template first + +tag-prefix: frontend- +categories: + - title: New Features + label: "🌟 goal: addition" + - title: Improvements + label: "✨ goal: improvement" + - title: Internal Improvements + labels: + - "🤖 aspect: dx" + - "🧰 goal: internal improvement" + - title: Bug Fixes + label: "🛠 goal: fix" +# Identical to the default except it uses `-` instead of `*` to match our Markdown linter +change-template: "- $TITLE (#$NUMBER) @$AUTHOR" +exclude-labels: + - "skip-changelog" +include-labels: + - "🧱 stack: frontend" +template: | + $CHANGES + + ## Credits + + Thanks to $CONTRIBUTORS for their contributions! diff --git a/.github/release-drafter-ingestion_server.yml b/.github/release-drafter-ingestion_server.yml new file mode 100644 index 00000000000..576fc63b158 --- /dev/null +++ b/.github/release-drafter-ingestion_server.yml @@ -0,0 +1,27 @@ +# This file is generated from templates/release-drafter.yml.jinja +# Changes to this file must be made in the template first + +tag-prefix: ingestion_server- +categories: + - title: New Features + label: "🌟 goal: addition" + - title: Improvements + label: "✨ goal: improvement" + - title: Internal Improvements + labels: + - "🤖 aspect: dx" + - "🧰 goal: internal improvement" + - title: Bug Fixes + label: "🛠 goal: fix" +# Identical to the default except it uses `-` instead of `*` to match our Markdown linter +change-template: "- $TITLE (#$NUMBER) @$AUTHOR" +exclude-labels: + - "skip-changelog" +include-labels: + - "🧱 stack: ingestion server" +template: | + $CHANGES + + ## Credits + + Thanks to $CONTRIBUTORS for their contributions! 
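The three release-drafter configs above are identical apart from `tag-prefix` and the `include-labels` entry, and their headers say they are rendered from `templates/release-drafter.yml.jinja` (kept in sync by the `render-release-drafter` pre-commit hook added later in this diff). A minimal sketch of such a render step is below; the template variable names `tag_prefix` and `stack_label` are assumptions, since the actual template (and the repo's JS-based render recipe) is not part of this diff.

```python
# Sketch only: render per-app release-drafter configs from the Jinja template.
# `tag_prefix` and `stack_label` are assumed variable names, not the real ones.
from pathlib import Path

from jinja2 import Template

# App name (used in file and tag names) -> label used by the include-labels filter.
APPS = {
    "api": "api",
    "frontend": "frontend",
    "ingestion_server": "ingestion server",
}

template = Template(Path("templates/release-drafter.yml.jinja").read_text())

for app, label in APPS.items():
    config = template.render(
        tag_prefix=f"{app}-",
        stack_label=f"🧱 stack: {label}",
    )
    Path(f".github/release-drafter-{app}.yml").write_text(config + "\n")
```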
diff --git a/.github/release_drafter.yml b/.github/release_drafter.yml deleted file mode 100644 index 1925e58a2d3..00000000000 --- a/.github/release_drafter.yml +++ /dev/null @@ -1,39 +0,0 @@ -# yaml-language-server: $schema=https://raw.githubusercontent.com/release-drafter/release-drafter/master/schema.json -# -# Configuration for the action `release-drafter/release-drafter` -# Docs: https://github.com/release-drafter/release-drafter -# Workflow: Draft release - -name-template: "v$RESOLVED_VERSION" -tag-template: "v$RESOLVED_VERSION" -categories: - - title: New Features - label: "🌟 goal: addition" - - title: Improvements - label: "✨ goal: improvement" - - title: Internal Improvements - labels: - - "🤖 aspect: dx" - - "🧰 goal: internal improvement" - - title: Bug Fixes - label: "🛠 goal: fix" -change-template: "- $TITLE (#$NUMBER) @$AUTHOR" -exclude-labels: - - "skip-changelog" -version-resolver: - major: - labels: - - "💥 versioning: major" - minor: - labels: - - "🎉 versioning: minor" - patch: - labels: - - "🐛 versioning: patch" - default: patch -template: | - $CHANGES - - ## Credits - - Thanks to $CONTRIBUTORS for their contributions! diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index e267e3507e8..841dc8604e7 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -5,9 +5,6 @@ on: push: branches: - main - release: - types: - - published workflow_dispatch: inputs: image-tag: @@ -36,12 +33,14 @@ jobs: api: ${{ steps.paths-filter.outputs.api }} ingestion_server: ${{ steps.paths-filter.outputs.ingestion_server }} frontend: ${{ steps.paths-filter.outputs.frontend }} + changes: ${{ steps.paths-filter.outputs.changes }} steps: - name: Checkout repository uses: actions/checkout@v3 + - name: Get changes - uses: ./.github/actions/get-changes id: paths-filter + uses: ./.github/actions/get-changes get-image-tag: name: Get image tag @@ -61,6 +60,66 @@ jobs: echo "image-tag=${{ github.sha }}" >> "$GITHUB_OUTPUT" fi + determine-images: + name: Determine images to build and publish + runs-on: ubuntu-latest + outputs: + do-build: ${{ steps.set-matrix.outputs.do-build }} + build-matrix: ${{ steps.set-matrix.outputs.build-matrix }} + do-publish: ${{ steps.set-matrix.outputs.do-publish }} + publish-matrix: ${{ steps.set-matrix.outputs.publish-matrix }} + needs: + - get-changes + + steps: + - name: Set matrix images + id: set-matrix + env: + CHANGES: ${{ needs.get-changes.outputs.changes }} + shell: python + run: | + import os + import json + + changes = json.loads(os.environ.get('CHANGES')) + + build_matrix = {"image": [], "include": []} + publish_matrix = {"image": []} + + if "frontend" in changes: + build_matrix["image"].append("frontend") + build_matrix["include"].append({"image": "frontend", "context": "frontend", "target": "app"}) + publish_matrix["image"].append("frontend") + if "api" in changes or "ingestion_server" in changes: + # Always build the ingestion server and API images for either changeset + build_matrix["image"] += ["api", "ingestion_server"] + build_matrix["include"] += [ + {"image": "ingestion_server", "context": "ingestion_server", "target": "ing"}, + {"image": "api", "context": "api", "target": "api"}, + ] + if "api" in changes: + build_matrix["image"].append("api_nginx") + build_matrix["include"].append({"image": "api_nginx", "context": "api", "target": "nginx"}) + publish_matrix["image"] += ["api", "api_nginx"] + if "ingestion_server" in changes: + publish_matrix["image"].append("ingestion_server") + + do_build = 'true' if 
len(build_matrix["image"]) else 'false' + build_matrix = json.dumps(build_matrix) + do_publish = 'true' if len(publish_matrix["image"]) else 'false' + publish_matrix = json.dumps(publish_matrix) + + print(f"do-build={do_build}") + print(f"build-matrix={build_matrix}") + print(f"do-publish={do_publish}") + print(f"publish-matrix={publish_matrix}") + + with open(os.environ.get("GITHUB_OUTPUT"), "a") as gh_out: + print(f"do-build={do_build}", file=gh_out) + print(f"build-matrix={build_matrix}", file=gh_out) + print(f"do-publish={do_publish}", file=gh_out) + print(f"publish-matrix={publish_matrix}", file=gh_out) + ############# # Universal # ############# @@ -122,26 +181,46 @@ jobs: with: labels: "🧱 stack: frontend" - ############################ - # API and ingestion server # - ############################ - build-images: name: Build Docker images + if: needs.determine-images.outputs.do-build == 'true' runs-on: ubuntu-latest strategy: - matrix: - image: - - api - - ingestion_server + matrix: ${{ fromJson(needs.determine-images.outputs.build-matrix) }} needs: - get-image-tag - lint + - determine-images steps: - name: Checkout repository uses: actions/checkout@v3 + # ℹ️Step only applies for frontend image. + - name: Setup CI env + if: matrix.image == 'frontend' + uses: ./.github/actions/setup-env + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + setup_python: false + # Python is not needed to build the image. + install_recipe: node-install + + # ℹ️Step only applies for frontend image. + # This step + # - downloads translation strings from GlotPress so that they can be + # bundled inside the Docker image + # - copies pnpm config files from the root to the `frontend/` directory + # so that they can be used to mock a workspace inside the Docker image + - name: Prepare frontend for building + if: matrix.image == 'frontend' + run: | + just frontend/run i18n + cp .npmrc .pnpmfile.cjs pnpm-lock.yaml frontend/ + env: + GLOTPRESS_USERNAME: ${{ secrets.MAKE_USERNAME }} + GLOTPRESS_PASSWORD: ${{ secrets.MAKE_LOGIN_PASSWORD }} + - name: Setup Docker Buildx uses: docker/setup-buildx-action@v2 with: @@ -150,7 +229,8 @@ jobs: - name: Build image `${{ matrix.image }}` uses: docker/build-push-action@v4 with: - context: ${{ matrix.image }} + context: ${{ matrix.context }} + target: ${{ matrix.target }} push: false tags: openverse-${{ matrix.image }} cache-from: type=gha,scope=${{ matrix.image }} @@ -182,12 +262,6 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 - - id: paths-filter - name: Conditionally skip job - uses: dorny/paths-filter@v2 - with: - filters: .github/filters.yml - - name: Setup CI env uses: ./.github/actions/setup-env with: @@ -247,7 +321,7 @@ jobs: uses: ./.github/actions/load-img - name: Start API, ingest and index test data - run: just init + run: just api/init - name: Run API tests run: just api/test @@ -267,8 +341,10 @@ jobs: test-redoc: name: Check for API consumer docs + if: needs.get-changes.outputs.api == 'true' runs-on: ubuntu-latest needs: + - get-changes - build-images steps: @@ -293,8 +369,10 @@ jobs: validate-openapi-spec: name: Validate Open API spec + if: needs.get-changes.outputs.api == 'true' runs-on: ubuntu-latest needs: + - get-changes - build-images steps: @@ -328,8 +406,10 @@ jobs: django-check: name: Run Django check + if: needs.get-changes.outputs.api == 'true' runs-on: ubuntu-latest needs: + - get-changes - build-images steps: @@ -353,9 +433,11 @@ jobs: run: just api/dj check check-migrations: - name: Check for uncommited Django migrations + name: 
Check for uncommitted Django migrations + if: needs.get-changes.outputs.api == 'true' runs-on: ubuntu-latest needs: + - get-changes - build-images steps: @@ -378,73 +460,16 @@ jobs: - name: Run makemigrations run: just api/dj makemigrations --check --noinput --merge - ######### - # NGINX # - ######### - - build-nginx: - # This requires a separate job due to the dependency on the other image builds - name: Build `nginx` Docker image - runs-on: ubuntu-latest - needs: - - build-images - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Setup CI env - uses: ./.github/actions/setup-env - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - setup_python: false - # Python is not needed to build the image. - setup_nodejs: false - # Node.js is not needed to build the image. - install_recipe: "" - - - name: Load Docker images - uses: ./.github/actions/load-img - - - name: collectstatic - run: | - just _all-up - just api/collectstatic - env: - DC_USER: root - - - name: Setup Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - install: true - - - name: Build image `api_nginx` - uses: docker/build-push-action@v4 - with: - context: api - target: nginx # from `api/Dockerfile` - push: false - tags: openverse-api_nginx - cache-from: type=gha,scope=nginx - cache-to: type=gha,scope=nginx - outputs: type=docker,dest=/tmp/api_nginx.tar - build-args: | - SEMANTIC_VERSION=${{ needs.get-image-tag.outputs.image-tag }} - - - name: Upload image `api_nginx` - uses: actions/upload-artifact@v3 - with: - name: api_nginx - path: /tmp/api_nginx.tar - ############ # Frontend # ############ frontend-unit: name: Run frontend unit tests + if: needs.get-changes.outputs.frontend == 'true' runs-on: ubuntu-latest needs: + - get-changes - lint steps: @@ -463,8 +488,10 @@ jobs: storybook-smoke: name: Check Storybook smoke test + if: needs.get-changes.outputs.frontend == 'true' runs-on: ubuntu-latest needs: + - get-changes - lint steps: @@ -483,8 +510,10 @@ jobs: nuxt-build: name: Check Nuxt build + if: needs.get-changes.outputs.frontend == 'true' runs-on: ubuntu-latest needs: + - get-changes - lint steps: @@ -642,62 +671,6 @@ jobs: Read more about how to use this artifact here: - build-frontend: - # This requires a separate job due to a difference in build steps - name: Build `frontend` Docker image - runs-on: ubuntu-latest - needs: - - build-images - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Setup CI env - uses: ./.github/actions/setup-env - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - setup_python: false - # Python is not needed to build the image. - install_recipe: node-install - - - name: Download translation strings - run: just frontend/run i18n - env: - GLOTPRESS_USERNAME: ${{ secrets.MAKE_USERNAME }} - GLOTPRESS_PASSWORD: ${{ secrets.MAKE_LOGIN_PASSWORD }} - - - name: Copy files into Docker context - run: | - cp .npmrc .pnpmfile.cjs pnpm-lock.yaml frontend/ - # These files are copied into the `frontend/` folder so that they can be - # included in the Docker context. The `Dockerfile` will use them to mock - # up a monorepo inside the Docker image. 
- - - name: Setup Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - install: true - - - name: Build image `frontend` - uses: docker/build-push-action@v4 - with: - context: frontend - target: app # from `frontend/Dockerfile` - push: false - tags: openverse-frontend - cache-from: type=gha,scope=frontend - cache-to: type=gha,scope=frontend - outputs: type=docker,dest=/tmp/frontend.tar - build-args: | - SEMANTIC_VERSION=${{ needs.get-image-tag.outputs.image-tag }} - - - name: Upload image `frontend` - uses: actions/upload-artifact@v3 - with: - name: frontend - path: /tmp/frontend.tar - ################# # Documentation # ################# @@ -726,9 +699,6 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} - - name: Load Docker images - uses: ./.github/actions/load-img - - name: Compile documentation uses: ./.github/actions/build-docs # Docs will be located at `/tmp/docs`. @@ -747,6 +717,7 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: /tmp/docs force_orphan: true + cname: docs.openverse.org - name: Checkout repository # again, to enable cleaning uses: actions/checkout@v3 @@ -804,9 +775,6 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} - - name: Load Docker images - uses: ./.github/actions/load-img - - name: Compile documentation uses: ./.github/actions/build-docs # Docs will be located at `/tmp/docs`. @@ -838,6 +806,7 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: /tmp/gh-pages force_orphan: true + cname: docs.openverse.org - uses: peter-evans/find-comment@v2 id: final-preview-comment @@ -872,26 +841,23 @@ jobs: # prevent running on fork PRs if: | !failure() && !cancelled() && + needs.determine-images.outputs.do-publish == 'true' && (needs.test-ing.result == 'success' || needs.test-ing.result == 'skipped') && (needs.test-api.result == 'success' || needs.test-api.result == 'skipped') && + (needs.nuxt-build.result == 'success' || needs.nuxt-build.result == 'skipped') && ((github.event_name == 'push' && github.repository == 'WordPress/openverse') || (github.event_name == 'release' && github.repository == 'WordPress/openverse')) needs: - # `build-images` is automatically included in `test-ing` and `test-api`. 
- - test-ing - - test-api - - build-nginx - - build-frontend + - determine-images - get-image-tag + - build-images + - test-ing # test for ingestion server + - test-api # test for API + - nuxt-build # test for frontend permissions: packages: write contents: read strategy: - matrix: - image: - - api - - ingestion_server - - api_nginx - - frontend + matrix: ${{ fromJson(needs.determine-images.outputs.publish-matrix) }} steps: - name: Log in to GitHub Docker Registry @@ -925,7 +891,7 @@ jobs: if: | !failure() && !cancelled() && github.event_name == 'push' && needs.get-changes.outputs.frontend == 'true' && - (needs.nuxt-playwright-e2e.result == 'success' && needs.nuxt-playwright-vr.result == 'success' && needs.storybook-playwright.result == 'success') + (needs.nuxt-build.result == 'success' && needs.nuxt-playwright-e2e.result == 'success' && needs.nuxt-playwright-vr.result == 'success' && needs.storybook-playwright.result == 'success') needs: - get-image-tag - get-changes @@ -944,7 +910,7 @@ jobs: if: | !failure() && !cancelled() && github.event_name == 'push' && needs.get-changes.outputs.api == 'true' && - (needs.test-ing.result == 'success' && needs.publish-images.result == 'success') + (needs.test-api.result == 'success' && needs.publish-images.result == 'success') needs: - get-image-tag - get-changes diff --git a/.github/workflows/deploy-production-api.yml b/.github/workflows/deploy-production-api.yml index 6b4f22ae8dc..87c78ede38e 100644 --- a/.github/workflows/deploy-production-api.yml +++ b/.github/workflows/deploy-production-api.yml @@ -8,6 +8,12 @@ name: "Deployment: production-api" on: + workflow_dispatch: + inputs: + tag: + type: string + required: true + description: Image tag to deploy. workflow_call: inputs: tag: @@ -15,6 +21,9 @@ on: required: true description: Image tag to deploy. secrets: + ACCESS_TOKEN: + required: true + description: GitHub access token. AWS_ACCESS_KEY_ID: required: true description: AWS access key ID. @@ -28,6 +37,11 @@ on: required: true description: JSON mapping of GitHub usernames to Slack user IDs. +# Only allow a single deployment workflow (service + environment) to happen at a time +# If you need to stop an in-progress deployment of a service to force another for +# the same environment, you'll need to manually cancel it +concurrency: ${{ github.workflow }} + jobs: deploy: name: Deploy production-api @@ -37,6 +51,24 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 + - name: Validate initiation user + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.ACCESS_TOKEN }} + script: | + const { data: members } = await github.rest.teams.listMembersInOrg({ + org: 'WordPress', + team_slug: 'openverse-maintainers', + }); + const isAllowed = members.some(m => m.login === "${{ github.actor }}") + if (!isAllowed) { + throw new Error( + "Only GitHub users in the @WordPress/openverse-maintainers " + + "team are allowed to run this workflow. If you need to run " + + "this workflow, please reach out to that group for help." + ) + } + - name: Set the Slack user shell: python env: @@ -62,7 +94,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":spinning-cd: A deployment of production-api triggered by <@${{ env.SLACK_USER_ID }}> is starting using the `${{ inputs.tag }}` tag." + "text": ":spinning-cd: A deployment of *production-api* triggered by <@${{ env.SLACK_USER_ID }}> is starting using the `${{ inputs.tag }}` tag." 
} }, { @@ -188,7 +220,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":tadaco: The deployment of production-api triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *succeeded*." + "text": ":tadaco: The deployment of *production-api* triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *succeeded*." } }, { @@ -216,7 +248,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":alert: The deployment of production-api triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *failed* :alert:" + "text": ":alert: The deployment of *production-api* triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *failed* :alert:" } }, { diff --git a/.github/workflows/deploy-production-nuxt.yml b/.github/workflows/deploy-production-nuxt.yml index 2a169acda7f..431679f8208 100644 --- a/.github/workflows/deploy-production-nuxt.yml +++ b/.github/workflows/deploy-production-nuxt.yml @@ -8,6 +8,12 @@ name: "Deployment: production-nuxt" on: + workflow_dispatch: + inputs: + tag: + type: string + required: true + description: Image tag to deploy. workflow_call: inputs: tag: @@ -15,6 +21,9 @@ on: required: true description: Image tag to deploy. secrets: + ACCESS_TOKEN: + required: true + description: GitHub access token. AWS_ACCESS_KEY_ID: required: true description: AWS access key ID. @@ -28,6 +37,11 @@ on: required: true description: JSON mapping of GitHub usernames to Slack user IDs. +# Only allow a single deployment workflow (service + environment) to happen at a time +# If you need to stop an in-progress deployment of a service to force another for +# the same environment, you'll need to manually cancel it +concurrency: ${{ github.workflow }} + jobs: deploy: name: Deploy production-nuxt @@ -37,6 +51,24 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 + - name: Validate initiation user + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.ACCESS_TOKEN }} + script: | + const { data: members } = await github.rest.teams.listMembersInOrg({ + org: 'WordPress', + team_slug: 'openverse-maintainers', + }); + const isAllowed = members.some(m => m.login === "${{ github.actor }}") + if (!isAllowed) { + throw new Error( + "Only GitHub users in the @WordPress/openverse-maintainers " + + "team are allowed to run this workflow. If you need to run " + + "this workflow, please reach out to that group for help." + ) + } + - name: Set the Slack user shell: python env: @@ -62,7 +94,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":spinning-cd: A deployment of production-nuxt triggered by <@${{ env.SLACK_USER_ID }}> is starting using the `${{ inputs.tag }}` tag." + "text": ":spinning-cd: A deployment of *production-nuxt* triggered by <@${{ env.SLACK_USER_ID }}> is starting using the `${{ inputs.tag }}` tag." } }, { @@ -152,7 +184,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":tadaco: The deployment of production-nuxt triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *succeeded*." + "text": ":tadaco: The deployment of *production-nuxt* triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *succeeded*." 
} }, { @@ -180,7 +212,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":alert: The deployment of production-nuxt triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *failed* :alert:" + "text": ":alert: The deployment of *production-nuxt* triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *failed* :alert:" } }, { diff --git a/.github/workflows/deploy-staging-api.yml b/.github/workflows/deploy-staging-api.yml index 5774693df2c..8aa8e9b6b19 100644 --- a/.github/workflows/deploy-staging-api.yml +++ b/.github/workflows/deploy-staging-api.yml @@ -8,6 +8,12 @@ name: "Deployment: staging-api" on: + workflow_dispatch: + inputs: + tag: + type: string + required: true + description: Image tag to deploy. workflow_call: inputs: tag: @@ -15,6 +21,9 @@ on: required: true description: Image tag to deploy. secrets: + ACCESS_TOKEN: + required: true + description: GitHub access token. AWS_ACCESS_KEY_ID: required: true description: AWS access key ID. @@ -28,6 +37,11 @@ on: required: true description: JSON mapping of GitHub usernames to Slack user IDs. +# Only allow a single deployment workflow (service + environment) to happen at a time +# If you need to stop an in-progress deployment of a service to force another for +# the same environment, you'll need to manually cancel it +concurrency: ${{ github.workflow }} + jobs: deploy: name: Deploy staging-api @@ -37,6 +51,24 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 + - name: Validate initiation user + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.ACCESS_TOKEN }} + script: | + const { data: members } = await github.rest.teams.listMembersInOrg({ + org: 'WordPress', + team_slug: 'openverse-maintainers', + }); + const isAllowed = members.some(m => m.login === "${{ github.actor }}") + if (!isAllowed) { + throw new Error( + "Only GitHub users in the @WordPress/openverse-maintainers " + + "team are allowed to run this workflow. If you need to run " + + "this workflow, please reach out to that group for help." + ) + } + - name: Set the Slack user shell: python env: @@ -62,7 +94,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":spinning-cd: A deployment of staging-api triggered by <@${{ env.SLACK_USER_ID }}> is starting using the `${{ inputs.tag }}` tag." + "text": ":spinning-cd: A deployment of *staging-api* triggered by <@${{ env.SLACK_USER_ID }}> is starting using the `${{ inputs.tag }}` tag." } }, { @@ -188,7 +220,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":tadaco: The deployment of staging-api triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *succeeded*." + "text": ":tadaco: The deployment of *staging-api* triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *succeeded*." 
} }, { @@ -216,7 +248,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":alert: The deployment of staging-api triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *failed* :alert:" + "text": ":alert: The deployment of *staging-api* triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *failed* :alert:" } }, { diff --git a/.github/workflows/deploy-staging-nuxt.yml b/.github/workflows/deploy-staging-nuxt.yml index 626970c541c..231a346d911 100644 --- a/.github/workflows/deploy-staging-nuxt.yml +++ b/.github/workflows/deploy-staging-nuxt.yml @@ -8,6 +8,12 @@ name: "Deployment: staging-nuxt" on: + workflow_dispatch: + inputs: + tag: + type: string + required: true + description: Image tag to deploy. workflow_call: inputs: tag: @@ -15,6 +21,9 @@ on: required: true description: Image tag to deploy. secrets: + ACCESS_TOKEN: + required: true + description: GitHub access token. AWS_ACCESS_KEY_ID: required: true description: AWS access key ID. @@ -28,6 +37,11 @@ on: required: true description: JSON mapping of GitHub usernames to Slack user IDs. +# Only allow a single deployment workflow (service + environment) to happen at a time +# If you need to stop an in-progress deployment of a service to force another for +# the same environment, you'll need to manually cancel it +concurrency: ${{ github.workflow }} + jobs: deploy: name: Deploy staging-nuxt @@ -37,6 +51,24 @@ jobs: - name: Checkout repository uses: actions/checkout@v3 + - name: Validate initiation user + uses: actions/github-script@v6 + with: + github-token: ${{ secrets.ACCESS_TOKEN }} + script: | + const { data: members } = await github.rest.teams.listMembersInOrg({ + org: 'WordPress', + team_slug: 'openverse-maintainers', + }); + const isAllowed = members.some(m => m.login === "${{ github.actor }}") + if (!isAllowed) { + throw new Error( + "Only GitHub users in the @WordPress/openverse-maintainers " + + "team are allowed to run this workflow. If you need to run " + + "this workflow, please reach out to that group for help." + ) + } + - name: Set the Slack user shell: python env: @@ -62,7 +94,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":spinning-cd: A deployment of staging-nuxt triggered by <@${{ env.SLACK_USER_ID }}> is starting using the `${{ inputs.tag }}` tag." + "text": ":spinning-cd: A deployment of *staging-nuxt* triggered by <@${{ env.SLACK_USER_ID }}> is starting using the `${{ inputs.tag }}` tag." } }, { @@ -152,7 +184,7 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": ":tadaco: The deployment of staging-nuxt triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *succeeded*." + "text": ":tadaco: The deployment of *staging-nuxt* triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *succeeded*." 
              }
            },
            {
              "type": "section",
              "text": {
                "type": "mrkdwn",
-                "text": ":alert: The deployment of staging-nuxt triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *failed* :alert:"
+                "text": ":alert: The deployment of *staging-nuxt* triggered by <@${{ env.SLACK_USER_ID }}> using the `${{ inputs.tag }}` tag *failed* :alert:"
              }
            },
            {
diff --git a/.github/workflows/draft_release.yml b/.github/workflows/draft_release.yml
deleted file mode 100644
index 4312caf83c6..00000000000
--- a/.github/workflows/draft_release.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-name: Draft release
-# ℹ️ https://github.com/WordPress/openverse/blob/main/.github/GITHUB.md#draft-release
-
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  update_draft_release:
-    name: Update draft release
-    runs-on: ubuntu-latest
-    steps:
-      - name: Update draft release
-        uses: release-drafter/release-drafter@v5
-        with:
-          config-name: release_drafter.yml
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/generate_pot.yml b/.github/workflows/generate_pot.yml
index e31573275e1..8a92a5848da 100644
--- a/.github/workflows/generate_pot.yml
+++ b/.github/workflows/generate_pot.yml
@@ -2,6 +2,9 @@ name: Generate POT file

 on:
   push:
+    # The workflow will only run when both filters are satisfied.
+    paths:
+      - frontend/src/locales/scripts/en.json5
     branches:
       - main
diff --git a/.github/workflows/new_issues.yml b/.github/workflows/new_issues.yml
index e7e0eb5864f..b30cfd7f5fb 100644
--- a/.github/workflows/new_issues.yml
+++ b/.github/workflows/new_issues.yml
@@ -5,6 +5,10 @@ on:
   issues:
     types:
       - opened
+env:
+  # TODO: Switch to GITHUB_TOKEN if the project is moved to a repo; the repo-scoped token cannot modify an org-level project.
+  GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
+  ISSUE_ID: ${{ github.event.issue.node_id }} # The global issue ID that works in both REST and GraphQL APIs.
+  PROJECT_ID: "PVT_kwDOAAQ2Js4AMZdL" # The ID for the Openverse project (#75).

 jobs:
   add_issue:
@@ -18,3 +22,75 @@ jobs:
           column: Backlog
           # TODO: Switch to GITHUB_TOKEN if the project is moved to a repo
           repo-token: ${{ secrets.ACCESS_TOKEN }}
+
+  add_issue_to_project:
+    name: Add new issue to project
+    runs-on: ubuntu-latest
+    outputs:
+      # Step outputs (unlike `$GITHUB_ENV` values, which are scoped to a single
+      # job) can cross into `set_issue_priority_field` below.
+      item-id: ${{ steps.add-to-backlog.outputs.item-id }}
+      priority-value-id: ${{ steps.add-to-backlog.outputs.priority-value-id }}
+    steps:
+      - name: Add issue to "Backlog"
+        id: add-to-backlog
+        run: |
+          # shellcheck disable=SC2016
+          gh api graphql -f query='
+            mutation($project:ID!, $issue:ID!) {
+              addProjectV2ItemById(input: {projectId: $project, contentId: $issue}) {
+                item {
+                  id
+                  content {
+                    ... on Issue {
+                      labels(first: 10) {
+                        nodes {
+                          name
+                        }
+                      }
+                    }
+                  }
+                }
+              }
+            }' -f project="$PROJECT_ID" -f issue="$ISSUE_ID" > issue_data.json
+
+          echo "item-id=$(jq -r '.data.addProjectV2ItemById.item.id' issue_data.json)" >> "$GITHUB_OUTPUT"
+          ITEM_PRIORITY="$(jq -r '.data.addProjectV2ItemById.item.content.labels.nodes[] | select(.name | contains("priority")).name | split(": ")[1]' issue_data.json)"
+          # The IDs for the project's Priority custom field options.
+          # These IDs were manually retrieved from the GitHub API.
+          if [[ $ITEM_PRIORITY == "low" ]]; then
+            PRIORITY_VALUE_ID="279ae886"
+          elif [[ $ITEM_PRIORITY == "medium" ]]; then
+            PRIORITY_VALUE_ID="333b3c1d"
+          elif [[ $ITEM_PRIORITY == "high" ]]; then
+            PRIORITY_VALUE_ID="03fe8945"
+          else
+            PRIORITY_VALUE_ID="fb76bdbc"
+          fi
+          echo "priority-value-id=$PRIORITY_VALUE_ID" >> "$GITHUB_OUTPUT"
+
+  set_issue_priority_field:
+    name: Set issue priority
+    runs-on: ubuntu-latest
+    needs:
+      - add_issue_to_project
+    steps:
+      - name: Set issue priority
+        env:
+          PRIORITY_FIELD_ID: "PVTSSF_lADOAAQ2Js4AMZdLzgH6Kbo" # The ID for the project Priority custom field.
+          ITEM_ID: ${{ needs.add_issue_to_project.outputs.item-id }}
+          PRIORITY_VALUE_ID: ${{ needs.add_issue_to_project.outputs.priority-value-id }}
+        run: |
+          # shellcheck disable=SC2016
+          gh api graphql -f query='
+            mutation (
+              $project: ID!
+              $item: ID!
+              $priority_field: ID!
+              $priority_value: String!
+            ) {
+              set_priority_field: updateProjectV2ItemFieldValue(input: {
+                projectId: $project
+                itemId: $item
+                fieldId: $priority_field
+                value: {
+                  singleSelectOptionId: $priority_value
+                }
+              }) {
+                projectV2Item {
+                  id
+                }
+              }
+            }' -f project="$PROJECT_ID" -f item="$ITEM_ID" -f priority_field="$PRIORITY_FIELD_ID" -f priority_value="$PRIORITY_VALUE_ID"
diff --git a/.github/workflows/release-app.yml b/.github/workflows/release-app.yml
new file mode 100644
index 00000000000..b46ba9c2192
--- /dev/null
+++ b/.github/workflows/release-app.yml
@@ -0,0 +1,165 @@
+name: Release app
+
+on:
+  workflow_dispatch:
+    inputs:
+      app:
+        type: choice
+        options:
+          - api
+          - ingestion_server
+          - frontend
+        required: true
+        description: Application to release. If `api` or `frontend`, the deployment workflow will automatically be dispatched for you.
+      image-sha:
+        type: string
+        required: true
+        description: The SHA of the staging image to tag.
+
+concurrency: ${{ github.workflow }}-${{ inputs.app }}
+
+jobs:
+  release-app:
+    name: Release app
+    runs-on: ubuntu-latest
+    outputs:
+      # Consumed by the deployment jobs at the end of this workflow.
+      image-tag: ${{ steps.tag.outputs.image-tag }}
+    permissions:
+      # Needed for pushing the new docker image tag
+      packages: write
+      # Needed to open the changelog PR
+      pull-requests: write
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          # Creating the tag requires having the whole history of `main`
+          fetch-depth: 0
+
+      - name: Validate `image-sha` input
+        uses: actions/github-script@v6
+        with:
+          script: |
+            let exists = undefined,
+              page = 0
+            while (!exists) {
+              page += 1
+              const { data: versions } =
+                await github.rest.packages.getAllPackageVersionsForPackageOwnedByOrg({
+                  package_type: 'container',
+                  // We do not have to validate that auxiliary images also exist as they're built at the same
+                  // time as the "main" image, e.g. `api_nginx` is always built when `api` is built and they'll
+                  // have the same set of tags.
+ package_name: 'openverse-${{ inputs.app }}', + org: 'WordPress', + page, + // max of `per_page` + per_page: 100, + }) + if (!versions.length) { + break + } + exists = versions.some((v) => v.metadata.container.tags.includes('${{ inputs.image-sha }}')) + } + if (!exists) { + throw new Error( + `'${{ inputs.image-sha }}' does not appear to be a valid SHA tag for ${{ inputs.app }}.` + ) + } + + - name: Calculate tag name + id: tag + run: | + # Format example: 2023.03.22.04.56.29 + # `-u` forces UTC + formatted_date="$(date -u +%Y.%m.%d.%H.%M.%S)" + + # Split image and git tag to avoid app name duplicated in the fully qualified image name + { + echo "date=$formatted_date"; + echo "git-tag=${{ inputs.app }}-$formatted_date"; + echo "image-tag=rel-$formatted_date"; + } >> "$GITHUB_OUTPUT" + + - name: Log in to GitHub Docker Registry + uses: docker/login-action@v2 + with: + registry: https://ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Add new tag to existing docker image + run: | + docker buildx imagetools create ghcr.io/wordpress/openverse-${{ inputs.app }}:${{ inputs.image-sha }} --tag ghcr.io/wordpress/openverse-${{ inputs.app }}:${{ steps.tag.outputs.image-tag }} + + if [ "${{ inputs.app }}" = "api" ]; then + docker buildx imagetools create ghcr.io/wordpress/openverse-api_nginx:${{ inputs.image-sha }} --tag ghcr.io/wordpress/openverse-api_nginx:${{ steps.tag.outputs.image-tag }} + fi + + - name: Create and publish release + uses: release-drafter/release-drafter@v5 + id: release-drafter + with: + config-name: release-drafter-${{ inputs.app }}.yml + version: ${{ steps.tag.outputs.date }} + tag: ${{ steps.tag.outputs.git-tag }} + name: ${{ steps.tag.outputs.git-tag }} + publish: true + commitish: main + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Add new changelog file to documentation + run: | + cat << EOF > documentation/changelogs/${{ inputs.app }}/${{ steps.tag.outputs.date }}.md + # ${{ steps.tag.outputs.date }} + + ${{ steps.release-drafter.outputs.body }} + EOF + + - name: Setup CI env + uses: ./.github/actions/setup-env + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + # Python is not needed to run pre-commit. + setup_python: false + # Node.js is needed by lint actions. + install_recipe: "node-install" + + - name: Cache pre-commit envs + uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} + + - name: Lint the changelog file so that it passes CI + run: | + just precommit + just lint + + - name: Open changelog PR + uses: peter-evans/create-pull-request@v4 + with: + # Access token necessary for PRs to run with CI + token: ${{ secrets.ACCESS_TOKEN }} + base: main + branch: changelog/${{ steps.tag.outputs.git-tag }} + commit-message: Publish changelog for ${{ steps.tag.outputs.git-tag }} + title: Publish changelog for ${{ steps.tag.outputs.git-tag }} + # Add labels to pass CI + labels: | + 🧱 stack: ${{ inputs.app == 'ingestion_server' && 'ingestion server' || inputs.app }} + 🌟 goal: addition + 📄 aspect: text + 🟩 priority: low + body: | + This changelog PR was automatically generated for @${{ github.actor }} as a result of the ${{ github.workflow }} workflow. 
+
+  # Reusable workflows cannot be called from a step, so the deployments run as
+  # follow-up jobs that read the tag from the `release-app` job's outputs.
+  deploy-production-api:
+    name: Deploy production API
+    needs: release-app
+    if: inputs.app == 'api'
+    uses: ./.github/workflows/deploy-production-api.yml
+    with:
+      tag: ${{ needs.release-app.outputs.image-tag }}
+    secrets: inherit
+
+  deploy-production-frontend:
+    name: Deploy production frontend
+    needs: release-app
+    if: inputs.app == 'frontend'
+    uses: ./.github/workflows/deploy-production-nuxt.yml
+    with:
+      tag: ${{ needs.release-app.outputs.image-tag }}
+    secrets: inherit
diff --git a/.github/workflows/weekly_updates.yml b/.github/workflows/weekly_updates.yml
index e7cdd8bf7b6..f04b8538ff4 100644
--- a/.github/workflows/weekly_updates.yml
+++ b/.github/workflows/weekly_updates.yml
@@ -26,5 +26,5 @@ jobs:
           setup_python: false
           install_recipe: node-install

-      - name: Create draft post
+      - name: Publish post
         run: just automations/js/report
diff --git a/.github/workflows_downstream/draft_release.yml b/.github/workflows_downstream/draft_release.yml
deleted file mode 100644
index 4312caf83c6..00000000000
--- a/.github/workflows_downstream/draft_release.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-name: Draft release
-# ℹ️ https://github.com/WordPress/openverse/blob/main/.github/GITHUB.md#draft-release
-
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  update_draft_release:
-    name: Update draft release
-    runs-on: ubuntu-latest
-    steps:
-      - name: Update draft release
-        uses: release-drafter/release-drafter@v5
-        with:
-          config-name: release_drafter.yml
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 4d7fc01a6e0..b19e0cfe935 100644
--- a/.gitignore
+++ b/.gitignore
@@ -49,3 +49,5 @@ grafana.db
 # Ingestion server
 # TODO: This shouldn't be in root
 ingestion_server.log
+
+*.tsbuildinfo
diff --git a/.isort.cfg b/.isort.cfg
index 21c126ddb8c..74597f1f38b 100644
--- a/.isort.cfg
+++ b/.isort.cfg
@@ -3,7 +3,7 @@ profile=black
 sections=FUTURE,STDLIB,DJANGO,THIRDPARTY,FIRSTPARTY,LOCALFOLDER

 known_django=django,rest_framework
-known_first_party=catalog
+known_first_party=catalog,ingestion_server

 lines_after_imports=2
 multi_line_output=3
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b419f69842d..575f62212cb 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -58,14 +58,10 @@ repos:
         args:
           - --py310-plus

-  # Use the `.flake8` file to configure additional project-specific requirements.
-  - repo: https://github.com/PyCQA/flake8
-    rev: 3.9.2
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: "v0.0.257"
     hooks:
-      - id: flake8
-        args:
-          - --extend-ignore=E203,W503
-          - --max-line-length=88
+      - id: ruff

   - repo: https://github.com/ambv/black
     rev: 22.3.0
@@ -95,7 +91,7 @@ repos:
     hooks:
       - id: eslint
         files: \.(js|ts|vue)$
-        exclude: ^load_testing/.*|frontend/(nuxt-template-overrides/.*|.remake/.*)$ # ESLint raises warnings for ignored files.
+        exclude: ^utilities/load_testing/.*|frontend/(nuxt-template-overrides/.*|.remake/.*)$ # ESLint raises warnings for ignored files.
         "types": [file] # ESLint only accepts [javascript] by default.
args: - --ignore-path=.gitignore @@ -138,7 +134,7 @@ repos: - id: types name: types files: ^frontend/.*$ - entry: bash -c 'just frontend/run types' + entry: bash -c 'pnpm exec vue-tsc -p frontend --noEmit' language: system pass_filenames: false - id: test:unit @@ -167,3 +163,9 @@ repos: entry: bash -c 'just automations/js/render-github' language: system pass_filenames: false + - id: render-release-drafter + name: render-release-drafter + files: ^templates/.*$ + entry: bash -c 'just automations/js/render-release-drafter' + language: system + pass_filenames: false diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 00000000000..4000dd4db5c --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,5 @@ +[per-file-ignores] +"*test*" = ["E501"] +"*__init__*" = ["F401"] +"*wsgi.py" = ["E402"] +"*settings.py" = ["F401"] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index def137b5fc5..304d071837d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -22,20 +22,19 @@ ping us via any of our [communication channels](README.md#keep-in-touch). ### Get started Detailed help for contributing code can be found in the -[developer documentation](https://wordpress.github.io/openverse/), which also -includes the following subfolders. +[developer documentation](https://docs.openverse.org/), which also includes the +following subfolders. -- [Storybook](https://wordpress.github.io/openverse/storybook/) (UI components) -- [Tailwind Config Viewer](https://wordpress.github.io/openverse/tailwind/) - (design tokens) +- [Storybook](https://docs.openverse.org/storybook/) (UI components) +- [Tailwind Config Viewer](https://docs.openverse.org/tailwind/) (design tokens) The following resources are preferred reading for starting your Openverse contribution journey in the code and code-related space. -- [GitHub contribution practices](https://wordpress.github.io/openverse/reference/github_contribution_practices.html) -- [Dev flow](https://wordpress.github.io/openverse/reference/code_contribution_practices.html) -- [General setup guide](https://wordpress.github.io/openverse/guides/general_setup.html) -- [Quickstart guide](https://wordpress.github.io/openverse/guides/quickstart.html) +- [GitHub contribution practices](https://docs.openverse.org/reference/github_contribution_practices.html) +- [Dev flow](https://docs.openverse.org/reference/dev_flow.html) +- [General setup guide](https://docs.openverse.org/guides/general_setup.html) +- [Quickstart guide](https://docs.openverse.org/guides/quickstart.html) #### Friendly notes diff --git a/README.md b/README.md index 2e08f67039a..e869867bd17 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,8 @@ Eventually the catalog will be merged into this repository as well. [openverse.org](https://openverse.org), built with Vue and Nuxt - [Automations](automations/) | Scripts used for various workflows around Openverse repositories and processes +- [Utilities](utilities/) | Scripts or utilities which are useful across + multiple projects or don't necessarily fit into a specific project. This repository also contains the following directories. 
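The `api/Dockerfile` diff below inverts the old build order: `collectstatic` now runs inside the `api` stage itself (with placeholder environment values), and the `nginx` stage copies the result with `COPY --from=api /static /app/static` rather than relying on the CI host to run `just api/collectstatic` before building. The build-time invocation works because the Django settings are environment-driven, so dummy values satisfy startup checks; roughly along these lines (a sketch only, since the real settings module is not shown in this diff):

```python
# Illustrative sketch of env-driven Django settings; the names mirror the
# variables set in the Dockerfile's collectstatic step, but the real module differs.
import os

# Any non-empty placeholder (the Dockerfile uses "any string") passes Django's check.
SECRET_KEY = os.environ["DJANGO_SECRET_KEY"]

# "False" skips Elasticsearch setup, which is unreachable during `docker build`.
SETUP_ES = os.environ.get("SETUP_ES", "True") == "True"

# `collectstatic` writes its output here; the `nginx` stage copies this directory.
STATIC_ROOT = os.environ.get("STATIC_ROOT", "/static")
```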
diff --git a/api/Dockerfile b/api/Dockerfile index 34362738bf7..c8d64b31bd1 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -34,29 +34,6 @@ COPY Pipfile Pipfile.lock ./ # Install Python dependencies system-wide (uses the active virtualenv) RUN pipenv install --system --deploy --dev -######### -# Nginx # -######### - -# This target assumes that the build host has run `manage.py collectstatic` -# `just api/collectstatic` is provided as a convenient alias for running it the expected way - -FROM nginx:1.23.3-alpine as nginx - -LABEL org.opencontainers.image.source = "https://github.com/WordPress/openverse" - -WORKDIR /app - -COPY nginx.conf.template /etc/nginx/templates/openverse-api.conf.template -COPY /static /app/static - -# Only environment variables with this prefix will be available in the template -ENV NGINX_ENVSUBST_FILTER="DJANGO_NGINX_" -ENV DJANGO_NGINX_ENVIRONMENT="local" -# Add the release version to the docker container -ARG SEMANTIC_VERSION -ENV DJANGO_NGINX_GIT_REVISION=$SEMANTIC_VERSION - ####### # API # ####### @@ -90,13 +67,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && mkdir -p /var/log/openverse_api/openverse_api.log -# Create a non-root user -RUN useradd --create-home opener +# Create a folder for placing static files +RUN mkdir /static + +# Create a non-root user, and make it the owner of the static dir created above +RUN useradd --create-home opener \ + && chown -R opener /static USER opener # Copy code into the final image COPY --chown=opener . /api/ +# Collect static assets, these are used by the next stage, `nginx` +RUN env \ + SETUP_ES="False" \ + STATIC_ROOT="/static" \ + DJANGO_SECRET_KEY="any string" \ + python manage.py collectstatic + # Add the release version to the docker container ARG SEMANTIC_VERSION ENV SEMANTIC_VERSION=$SEMANTIC_VERSION @@ -111,3 +99,25 @@ ENTRYPOINT ["./run.sh"] # Run Django dev server, can be overridden from Docker Compose CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"] + +######### +# NGINX # +######### + +FROM nginx:1.23.3-alpine as nginx + +LABEL org.opencontainers.image.source = "https://github.com/WordPress/openverse" + +WORKDIR /app + +COPY nginx.conf.template /etc/nginx/templates/openverse-api.conf.template + +# Copy static files from `api` target +COPY --from=api /static /app/static + +# Only environment variables with this prefix will be available in the template +ENV NGINX_ENVSUBST_FILTER="DJANGO_NGINX_" +ENV DJANGO_NGINX_ENVIRONMENT="local" +# Add the release version to the docker container +ARG SEMANTIC_VERSION +ENV DJANGO_NGINX_GIT_REVISION=$SEMANTIC_VERSION diff --git a/api/Pipfile b/api/Pipfile index ce1edc24eaf..0167901cd7c 100644 --- a/api/Pipfile +++ b/api/Pipfile @@ -33,7 +33,6 @@ django-storages = "~=1.13" django-tqdm = "~=1.3" django-uuslug = "~=2.0" djangorestframework = "~=3.14" -djangorestframework-xml = "~=2.0" drf-yasg = "~=1.21" elasticsearch-dsl = "~=7.4" future = "~=0.18" @@ -42,6 +41,7 @@ gunicorn = "~=20.1" hvac = "~=1.0" ipaddress = "~=1.0" limit = "~=0.2" +orjson = "~=3.8.7" piexif = "~=1.1" Pillow = "~=9.3" psycopg2 = "~=2.9" diff --git a/api/Pipfile.lock b/api/Pipfile.lock index e47f6853c10..255366cca29 100644 --- a/api/Pipfile.lock +++ b/api/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "89c134c3bf22c1815734ec4102e65233e7ad8cff277752874bf3b567f48e1920" + "sha256": "bc5cffae5c3c028c9af0f9a09d94ceb6edcc4789fd28181ae999a8012db21518" }, "pipfile-spec": 6, "requires": { @@ -151,19 +151,19 @@ }, "boto3": { "hashes": [ - 
"sha256:7ab7bb335b726e2f472b5c050028198d16338560c83c40b2bd2bd4e4018ec802", - "sha256:d97176a7ffb37539bc53671cb0bf1c5b304f1c78bbd748553df549a9d4f92a9e" + "sha256:19762b6a1adbe1963e26b8280211ca148017c970a2e1386312a9fc8a0a17dbd5", + "sha256:367a73c1ff04517849d8c4177fd775da2e258a3912ff6a497be258c30f509046" ], "index": "pypi", - "version": "==1.26.84" + "version": "==1.26.97" }, "botocore": { "hashes": [ - "sha256:0f976427ad0a2602624ba784b5db328a865c2e9e0cc1bb6d8cffb6c0a2d177e1", - "sha256:a36f7f6f8eae5dbd4a1cc8cb6fc747f6315500541181eff2093ee0529fc8e4bc" + "sha256:0df677eb2bef3ba18ac69e007633559b4426df310eee99df9882437b5faf498a", + "sha256:176740221714c0f031c2cd773879df096dbc0f977c63b3e2ed6a956205f02e82" ], "markers": "python_version >= '3.7'", - "version": "==1.29.84" + "version": "==1.29.97" }, "certifi": { "hashes": [ @@ -244,96 +244,84 @@ }, "charset-normalizer": { "hashes": [ - "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b", - "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42", - "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d", - "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b", - "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a", - "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59", - "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154", - "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1", - "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c", - "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a", - "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d", - "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6", - "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b", - "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b", - "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783", - "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5", - "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918", - "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555", - "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639", - "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786", - "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e", - "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed", - "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820", - "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8", - "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3", - "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541", - "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14", - "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be", - "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e", - "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76", - "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b", - "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c", - "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b", - "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3", - 
"sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc", - "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6", - "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59", - "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4", - "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d", - "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d", - "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3", - "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a", - "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea", - "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6", - "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e", - "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603", - "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24", - "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a", - "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58", - "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678", - "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a", - "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c", - "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6", - "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18", - "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174", - "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317", - "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f", - "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc", - "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837", - "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41", - "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c", - "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579", - "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753", - "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8", - "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291", - "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087", - "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866", - "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3", - "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d", - "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1", - "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca", - "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e", - "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db", - "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72", - "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d", - "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc", - "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539", - "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d", - "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af", - "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b", - 
"sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602", - "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f", - "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478", - "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c", - "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e", - "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479", - "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7", - "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8" - ], - "version": "==3.0.1" + "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6", + "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1", + "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e", + "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373", + "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62", + "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230", + "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be", + "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c", + "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0", + "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448", + "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f", + "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649", + "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d", + "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0", + "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706", + "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a", + "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59", + "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23", + "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5", + "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb", + "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e", + "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e", + "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c", + "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28", + "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d", + "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41", + "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974", + "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce", + "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f", + "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1", + "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d", + "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8", + "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017", + "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31", + "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7", + "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8", + "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e", + 
"sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14", + "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd", + "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d", + "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795", + "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b", + "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b", + "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b", + "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203", + "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f", + "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19", + "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1", + "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a", + "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac", + "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9", + "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0", + "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137", + "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f", + "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6", + "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5", + "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909", + "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f", + "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0", + "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324", + "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755", + "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb", + "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854", + "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c", + "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60", + "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84", + "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0", + "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b", + "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1", + "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531", + "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1", + "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11", + "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326", + "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", + "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==3.1.0" }, "coreapi": { "hashes": [ @@ -380,11 +368,11 @@ }, "deepdiff": { "hashes": [ - "sha256:a02aaa8171351eba675cff5f795ec7a90987f86ad5449553308d4e18df57dc3d", - "sha256:d83b06e043447d6770860a635abecb46e849b0494c43ced2ecafda7628c7ce72" + "sha256:15838bd1cbd046ce15ed0c41e837cd04aff6b3e169c5e06fca69d7aa11615ceb", + "sha256:6a3bf1e7228ac5c71ca2ec43505ca0a743ff54ec77aa08d7db22de6bc7b2b644" ], "index": "pypi", - "version": "==6.2.3" + "version": "==6.3.0" }, "defusedxml": { "hashes": [ @@ -496,14 +484,6 @@ "index": "pypi", "version": "==3.14.0" }, - 
"djangorestframework-xml": { - "hashes": [ - "sha256:35f6c811d0ab8c8466b26db234e16a2ed32d76381715257aebf4c7be2c202ca1", - "sha256:975955fbb0d49ac44a90bdeb33b7923d95b79884d283f983e116c80a936ef4d0" - ], - "index": "pypi", - "version": "==2.0.0" - }, "drf-yasg": { "hashes": [ "sha256:ba9cf4bf79f259290daee9b400fa4fcdb0e78d2f043fa5e9f6589c939fd06d05", @@ -517,7 +497,7 @@ "sha256:0e2454645dc00517dee4c6de3863411a9c5f1955d013c5fefa29123dadc92f98", "sha256:66c4ece2adfe7cc120e2b6a6798a1fd5c777aecf82eec39bb95cef7cfc7ea2b3" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_full_version < '4.0.0'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' and python_version < '4'", "version": "==7.17.9" }, "elasticsearch-dsl": { @@ -749,11 +729,11 @@ }, "hvac": { "hashes": [ - "sha256:e4028c5c0ecc7b7fcf6a54d290f99240e5abcdb9ffe442d1c14f061310d4c61c", - "sha256:e8256343de2576b18bc8d49f09a04c728f2a8f3a866825bb413aa4f9ebab1fea" + "sha256:079dca58856dee6646ed5a2f2283809c16d2deedde1e9e9615b2910324a4b969", + "sha256:12368860f117c7e886acc348bf6a3f58d01c3fad17d4604c24ee9df3ee8a3dee" ], "index": "pypi", - "version": "==1.0.2" + "version": "==1.1.0" }, "idna": { "hashes": [ @@ -970,53 +950,58 @@ }, "orjson": { "hashes": [ - "sha256:010e2970ec9e826c332819e0da4b14b29b19641da0f1a6af4cec91629ef9b988", - "sha256:0110970aed35dec293f30ed1e09f8604afd5d15c5ef83de7f6c427619b3ba47b", - "sha256:0295a7bfd713fa89231fd0822c995c31fc2343c59a1d13aa1b8b6651335654f5", - "sha256:06180014afcfdc167ca984b312218aa62ce20093965c437c5f9166764cb65ef7", - "sha256:109b539ce5bf60a121454d008fa67c3b67e5a3249e47d277012645922cf74bd0", - "sha256:188ed9f9a781333ad802af54c55d5a48991e292239aef41bd663b6e314377eb8", - "sha256:1a1a8f4980059f48483782c608145b0f74538c266e01c183d9bcd9f8b71dbada", - "sha256:1c19f47b35b9966a3abadf341b18ee4a860431bf2b00fd8d58906d51cf78aa70", - "sha256:1dee503c6c1a0659c5b46f5f39d9ca9d3657b11ca8bb4af8506086df416887d9", - "sha256:226bfc1da2f21ee74918cee2873ea9a0fec1a8830e533cb287d192d593e99d02", - "sha256:2e8c430d82b532c5ab95634e034bbf6ca7432ffe175a3e63eadd493e00b3a555", - "sha256:366cc75f7e09106f9dac95a675aef413367b284f25507d21e55bd7f45f445e80", - "sha256:3ffaabb380cd0ee187b4fc362516df6bf739808130b1339445c7d8878fca36e7", - "sha256:403c8c84ac8a02c40613b0493b74d5256379e65196d39399edbf2ed3169cbeb5", - "sha256:41244431ba13f2e6ef22b52c5cf0202d17954489f4a3c0505bd28d0e805c3546", - "sha256:4f733062d84389c32c0492e5a4929056fac217034a94523debe0430bcc602cda", - "sha256:51b275475d4e36118b65ad56f9764056a09d985c5d72e64579bf8816f1356a5e", - "sha256:5bb32259ea22cc9dd47a6fdc4b8f9f1e2f798fcf56c7c1122a7df0f4c5d33bf3", - "sha256:5d88837002c5a8af970745b8e0ca1b0fdb06aafbe7f1279e110d338ea19f3d23", - "sha256:63144d27735f3b60f079f247ac9a289d80dfe49a7f03880dfa0c0ba64d6491d5", - "sha256:697abde7350fb8076d44bcb6b4ab3ce415ae2b5a9bb91efc460e5ab0d96bb5d3", - "sha256:78604d3acfd7cd502f6381eea0c42281fe2b74755b334074ab3ebc0224100be1", - "sha256:7a3ab1a473894e609b6f1d763838c6689ba2b97620c256a32c4d9f10595ac179", - "sha256:7bd4fd37adb03b1f2a1012d43c9f95973a02164e131dfe3ff804d7e180af5653", - "sha256:7d6ac5f8a2a17095cd927c4d52abbb38af45918e0d3abd60fb50cfd49d71ae24", - "sha256:8460c8810652dba59c38c80d27c325b5092d189308d8d4f3e688dbd8d4f3b2dc", - "sha256:84d154d07e8b17d97e990d5d710b719a031738eb1687d8a05b9089f0564ff3e0", - "sha256:89dc786419e1ce2588345f58dd6a434e6728bce66b94989644234bcdbe39b603", - "sha256:9e432c6c9c8b97ad825276d5795286f7cc9689f377a97e3b7ecf14918413303f", - 
"sha256:a16273d77db746bb1789a2bbfded81148a60743fd6f9d5185e02d92e3732fa18", - "sha256:ad02e9102d4ba67db30a136e631e32aeebd1dce26c9f5942a457b02df131c5d0", - "sha256:ad4d441fbde4133af6fee37f67dbf23181b9c537ecc317346ec8c3b4c8ec7705", - "sha256:b20f29fa8371b8023f1791df035a2c3ccbd98baa429ac3114fc104768f7db6f8", - "sha256:cc4fa83831f42ce5c938f8cefc2e175fa1df6f661fdeaba3badf26d2b8cfcf73", - "sha256:cc52f58c688cb10afd810280e450f56fbcb27f52c053463e625c8335c95db0dc", - "sha256:d60304172a33705ce4bd25a6261ab84bed2dab0b3d3b79672ea16c7648af4832", - "sha256:dbcfcec2b7ac52deb7be3685b551addc28ee8fa454ef41f8b714df6ba0e32a27", - "sha256:e1a0e5504a5fc86083cc210c6946e8d61e13fe9f1d7a7bf81b42f7050a49d4fb", - "sha256:e7129a6847f0494aa1427167486ef6aea2e835ba05f6c627df522692ee228f65", - "sha256:e75c11023ac29e29fd3e75038d0e8dd93f9ea24d7b9a5e871967a8921a88df24", - "sha256:ee519964a5a0efb9633f38b1129fd242807c5c57162844efeeaab1c8de080051", - "sha256:f98c82850b7b4b7e27785ca43706fa86c893cdb88d54576bbb9b0d9c1070e421", - "sha256:feb32aaaa34cf2f891eb793ad320d4bb6731328496ae59b6c9eb1b620c42b529", - "sha256:ff60187d1b7e0bfab376b6002b08c560b7de06c87cf3a8ac639ecf58f84c5f3b" + "sha256:0204bc414bc6f7a595211569840b422d96649fd8686efa1fbbcb12eed5dd9521", + "sha256:022347dad2253081eaa25366834bb8b06a5aceb0e83b39c6b0aa865759e49d69", + "sha256:02f5b5db1e424706eb9f70f1c25699ff4cef16fadfc64af5b70f8628eafe4771", + "sha256:0dc4a52f1087baeec6b58248fd6b01f17c124fb99f6f770596851ea434a7be0b", + "sha256:18fcdea75d8b571dc9b185652b81397b62878ae7934fd62e6a0103a5b8448e34", + "sha256:1af1cfad5d90b68e15fd625c889c4f9f91d7a88f49512cdb89f01c3881e0c9d9", + "sha256:1b2abf93b727a6af7c5ec8816168cbdff39c716af18ced425dd50ae46d69765c", + "sha256:2006d9c046bbf335c951f61e016a27bd4f17323dd116f601e4a8a11739cd0a62", + "sha256:23447d38375a19d57975d4e32d9ce9f533803c197fd4292e10d3234c052037a8", + "sha256:24ad122d8dd057acf2a9965a2ffc1bc12fb310ae1cfe2912db930cbb9ef7eaba", + "sha256:28075c4b502d792fb6703e983d456b2a30d5d6f332d26092eb312dc782e64c64", + "sha256:28dfe774c345130f1117c4d023644ec52d9d50e3eaadb9bd1c668d91dc109bb5", + "sha256:2c2c5f3d3bbd61dba646e2b9c54a0dd7941b03fba49726bd31c1c23fedf0b9aa", + "sha256:306618884929b596e2e083f82b5617da812df25b0c467542371f1d51f0c5a6f5", + "sha256:317164f7d4c0540a6eb8b0a0faeec84ef011d359da05188423db762b65f84e1d", + "sha256:343124f84da0a33c83ee106a98b3e3c42767c88323d4a2809683cbe83816e8be", + "sha256:449d8ed1e0e6b24e9df5a06b59fd66ea7f7293e141257069601ae8ff9fad705c", + "sha256:4553d85bad4cbd634a40b7b5d36daaa197a6025f9ce3e2165b371e528759093d", + "sha256:4c2e19d2b46cc93c7218bf8180807bf922ff61dc9883458a06edc66d22970fff", + "sha256:4d7c9f3b1598a1ccd806ef02257a76a00c7ede09662ddb54eec2b4bd92874254", + "sha256:52293a6097750c2d434737966fe6e2a1ed489ac70cc8e584f5944af83de0b787", + "sha256:56bb6eb7a254eec3b15feba9b20f4172ccbe6ea50a54cf66cbc8e1e4a19585c2", + "sha256:57ee45d2cc6c11c50afb5a0c09d7cd559aea76c77250dbe996be6a03464d4a50", + "sha256:5e7e39357371d4ae5649f33c01886508a4c8e5fa5c7344554af041dc0f004c01", + "sha256:60fefd4bbd796b4296f478e705fe2c2c7defd28da98d3017743eb87c3238a380", + "sha256:66045850f286090800a18662d81d44f88c3fcb60ea3a9947d5caeab5d1efc92e", + "sha256:68d59e3ae84a9b6f14b45a89f7fde4a08a87ea5eb76bfc854b354640de8156f5", + "sha256:747bd4e09d8aa61e1ff677a7dd1cffd28a5d13c22f3769123c58ec988bf1b83d", + "sha256:81d3c5b253eebfc4a61cea1f255a576cb2b889afa99f4510f30ec13201d4f457", + "sha256:88bf40e5468444c04374d1b8f1877cebbaef6bb7406cb6b4a34a570c5cbb87bc", + "sha256:8e0bff5656b99dd975cae2e5230b39e5909d06c0692fd1f6f06dc46f1fe705d0", + 
"sha256:8f84116fcc3714e7ba3cbeb1b11ac5e4549e7d2726c50142f8299fff9dea7d53", + "sha256:9322450f392dceb49810d2f820b1932af22d66f67f1d45c31f160067dd06359f", + "sha256:9c98dc791aa44268ba7f6e21124cf885c813b155316c6bf257560571d243fe15", + "sha256:9cb36d4a14f3a911369219d5abc19b907bc41ed2730f7bfe0847b0fd3e834c87", + "sha256:9eda4c37e48ff549763183a1549c10eec6ea40439520b17d09359cd74a425069", + "sha256:a3eac485a15493164867729f44e1e1247b3094ff19d37708e8cdc9c88a93c623", + "sha256:a6bcb449537a99f55c5f05187bac00b4549a795e89c10dcca0d7629548852357", + "sha256:b90d171932b6a9d50e79fa2762cb303e3556bbf25c08bb316fe346ec58af9c19", + "sha256:c096d7a523bae6ffb9c4a228ba4691d66113f0f2231579dc945523fbef09c6da", + "sha256:c2f28a92a9bcb4e8635524b20db1b539bda8613872f306b36cdfd9d3577d03ac", + "sha256:d5514dfe200356a1d5a6039e00dca78d87d063f3da1eb6a371253e5a8b7ab5b0", + "sha256:dd7d86c5f5f820ac9d4783477e86eb984b63bdb32359935609eb33cf65049c54", + "sha256:ddfcc54793e266056fe1c257d0804c336bca1c5c1ee7979d674e1fc19cfb0a6a", + "sha256:e480d74d7bf415e6548a364669404119a85dbe0e3c6cd5f7cb4c7003eac20164", + "sha256:e6a6d55e01bce74516dff15302627a13b1f4edcb1c3942dd660978dee423ccf2", + "sha256:e991a5c2c5f2f299c77e1d07ef2812ff5b68e1d97a2aab01aca29cf756473aa3", + "sha256:edc65ddb6ae6f8fbb2bbf78ac98f75b729c9eeb0776d5508dd76d3a948dda1dd", + "sha256:f989f8580db86166aaaa938ccd1597ba1817e3f5df14c047baafe783e3d24173" ], - "markers": "python_version >= '3.7'", - "version": "==3.8.7" + "index": "pypi", + "version": "==3.8.8" }, "packaging": { "hashes": [ @@ -1205,11 +1190,11 @@ }, "redis": { "hashes": [ - "sha256:1eec3741cda408d3a5f84b78d089c8b8d895f21b3b050988351e925faf202864", - "sha256:5deb072d26e67d2be1712603bfb7947ec3431fb0eec9c578994052e33035af6d" + "sha256:56732e156fe31801c4f43396bd3ca0c2a7f6f83d7936798531b9848d103381aa", + "sha256:7df17a0a2b72a4c8895b462dd07616c51b1dcb48fdd7ecb7b6f4bf39ecb2e94e" ], "markers": "python_version >= '3.7'", - "version": "==4.5.1" + "version": "==4.5.3" }, "redlock-py": { "hashes": [ @@ -1223,7 +1208,7 @@ "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" ], - "markers": "python_version >= '3.7' and python_full_version < '4.0.0'", + "markers": "python_version >= '3.7' and python_version < '4'", "version": "==2.28.2" }, "requests-oauthlib": { @@ -1294,19 +1279,19 @@ }, "sentry-sdk": { "hashes": [ - "sha256:633edefead34d976ff22e7edc367cdf57768e24bc714615ccae746d9d91795ae", - "sha256:a900845bd78c263d49695d48ce78a4bce1030bbd917e0b6cc021fc000c901113" + "sha256:3c4e898f7a3edf5a2042cd0dcab6ee124e2112189228c272c08ad15d3850c201", + "sha256:ad40860325c94d1a656da70fba5a7c4dbb2f6809d3cc2d00f74ca0b608330f14" ], "index": "pypi", - "version": "==1.16.0" + "version": "==1.17.0" }, "setuptools": { "hashes": [ - "sha256:e5fd0a713141a4a105412233c63dc4e17ba0090c8e8334594ac790ec97792330", - "sha256:f106dee1b506dee5102cc3f3e9e68137bbad6d47b616be7991714b0c62204251" + "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077", + "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2" ], "markers": "python_version >= '3.7'", - "version": "==67.4.0" + "version": "==67.6.0" }, "six": { "hashes": [ @@ -1349,11 +1334,11 @@ }, "urllib3": { "hashes": [ - "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72", - "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1" + "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", + 
"sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" ], "markers": "python_version >= '3.6'", - "version": "==1.26.14" + "version": "==1.26.15" }, "webob": { "hashes": [ @@ -1541,48 +1526,50 @@ }, "zope.interface": { "hashes": [ - "sha256:008b0b65c05993bb08912f644d140530e775cf1c62a072bf9340c2249e613c32", - "sha256:0217a9615531c83aeedb12e126611b1b1a3175013bbafe57c702ce40000eb9a0", - "sha256:0fb497c6b088818e3395e302e426850f8236d8d9f4ef5b2836feae812a8f699c", - "sha256:17ebf6e0b1d07ed009738016abf0d0a0f80388e009d0ac6e0ead26fc162b3b9c", - "sha256:311196634bb9333aa06f00fc94f59d3a9fddd2305c2c425d86e406ddc6f2260d", - "sha256:3218ab1a7748327e08ef83cca63eea7cf20ea7e2ebcb2522072896e5e2fceedf", - "sha256:404d1e284eda9e233c90128697c71acffd55e183d70628aa0bbb0e7a3084ed8b", - "sha256:4087e253bd3bbbc3e615ecd0b6dd03c4e6a1e46d152d3be6d2ad08fbad742dcc", - "sha256:40f4065745e2c2fa0dff0e7ccd7c166a8ac9748974f960cd39f63d2c19f9231f", - "sha256:5334e2ef60d3d9439c08baedaf8b84dc9bb9522d0dacbc10572ef5609ef8db6d", - "sha256:604cdba8f1983d0ab78edc29aa71c8df0ada06fb147cea436dc37093a0100a4e", - "sha256:6373d7eb813a143cb7795d3e42bd8ed857c82a90571567e681e1b3841a390d16", - "sha256:655796a906fa3ca67273011c9805c1e1baa047781fca80feeb710328cdbed87f", - "sha256:65c3c06afee96c654e590e046c4a24559e65b0a87dbff256cd4bd6f77e1a33f9", - "sha256:696f3d5493eae7359887da55c2afa05acc3db5fc625c49529e84bd9992313296", - "sha256:6e972493cdfe4ad0411fd9abfab7d4d800a7317a93928217f1a5de2bb0f0d87a", - "sha256:7579960be23d1fddecb53898035a0d112ac858c3554018ce615cefc03024e46d", - "sha256:765d703096ca47aa5d93044bf701b00bbce4d903a95b41fff7c3796e747b1f1d", - "sha256:7e66f60b0067a10dd289b29dceabd3d0e6d68be1504fc9d0bc209cf07f56d189", - "sha256:8a2ffadefd0e7206adc86e492ccc60395f7edb5680adedf17a7ee4205c530df4", - "sha256:959697ef2757406bff71467a09d940ca364e724c534efbf3786e86eee8591452", - "sha256:9d783213fab61832dbb10d385a319cb0e45451088abd45f95b5bb88ed0acca1a", - "sha256:a16025df73d24795a0bde05504911d306307c24a64187752685ff6ea23897cb0", - "sha256:a2ad597c8c9e038a5912ac3cf166f82926feff2f6e0dabdab956768de0a258f5", - "sha256:bfee1f3ff62143819499e348f5b8a7f3aa0259f9aca5e0ddae7391d059dce671", - "sha256:d169ccd0756c15bbb2f1acc012f5aab279dffc334d733ca0d9362c5beaebe88e", - "sha256:d514c269d1f9f5cd05ddfed15298d6c418129f3f064765295659798349c43e6f", - "sha256:d692374b578360d36568dd05efb8a5a67ab6d1878c29c582e37ddba80e66c396", - "sha256:dbaeb9cf0ea0b3bc4b36fae54a016933d64c6d52a94810a63c00f440ecb37dd7", - "sha256:dc26c8d44472e035d59d6f1177eb712888447f5799743da9c398b0339ed90b1b", - "sha256:e1574980b48c8c74f83578d1e77e701f8439a5d93f36a5a0af31337467c08fcf", - "sha256:e74a578172525c20d7223eac5f8ad187f10940dac06e40113d62f14f3adb1e8f", - "sha256:e945de62917acbf853ab968d8916290548df18dd62c739d862f359ecd25842a6", - "sha256:f0980d44b8aded808bec5059018d64692f0127f10510eca71f2f0ace8fb11188", - "sha256:f98d4bd7bbb15ca701d19b93263cc5edfd480c3475d163f137385f49e5b3a3a7", - "sha256:fb68d212efd057596dee9e6582daded9f8ef776538afdf5feceb3059df2d2e7b" + "sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373", + "sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb", + "sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446", + "sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8", + "sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c", + "sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8", + 
"sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2", + "sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f", + "sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f", + "sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5", + "sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85", + "sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc", + "sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788", + "sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518", + "sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410", + "sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464", + "sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5", + "sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d", + "sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52", + "sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca", + "sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8", + "sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2", + "sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f", + "sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58", + "sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a", + "sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d", + "sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28", + "sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990", + "sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995", + "sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==5.5.2" + "markers": "python_version >= '3.7'", + "version": "==6.0" } }, "develop": { + "appnope": { + "hashes": [ + "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24", + "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e" + ], + "markers": "sys_platform == 'darwin'", + "version": "==0.1.3" + }, "asttokens": { "hashes": [ "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3", @@ -1651,11 +1638,11 @@ }, "exceptiongroup": { "hashes": [ - "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e", - "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23" + "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", + "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" ], "markers": "python_version < '3.11'", - "version": "==1.1.0" + "version": "==1.1.1" }, "executing": { "hashes": [ @@ -1674,19 +1661,19 @@ }, "faker": { "hashes": [ - "sha256:51f37ff9df710159d6d736d0ba1c75e063430a8c806b91334d7794305b5a6114", - "sha256:5aaa16fa9cfde7d117eef70b6b293a705021e57158f3fa6b44ed1b70202d2065" + "sha256:2deeee8fed3d1b8ae5f87d172d4569ddc859aab8693f7cd68eddc5d20400563a", + "sha256:e7c058e1f360f245f265625b32d3189d7229398ad80a8b6bac459891745de052" ], "markers": "python_version >= '3.7'", - "version": "==17.6.0" + "version": "==18.3.0" }, "fakeredis": { "hashes": [ - "sha256:35962e0ded572302c4461ad1a40308259cc10b7c546b13228e6d7880bf7b74bb", - "sha256:3bcc2b5c10d5e03cc5b46697d77c651f48391b506ff931576d2869c7549e2e25" + 
"sha256:6377c27bc557be46089381d43fd670aece46672d091a494f73ab4c96c34022b3", + "sha256:e2a95fbda7b11188c117d68b0f9eecc00600cb449ccf3362a15fc03cf9e2477d" ], "index": "pypi", - "version": "==2.9.2" + "version": "==2.10.2" }, "freezegun": { "hashes": [ @@ -1737,11 +1724,11 @@ }, "jsonschema-spec": { "hashes": [ - "sha256:8d8db7c255e524fab1016a952a9143e5b6e3c074f4ed25d1878f8e97806caec0", - "sha256:b3cde007ad65c2e631e2f8653cf187124a2c714d02d9fafbab68ad64bf5745d6" + "sha256:34471d8b60e1f06d174236c4d3cf9590fbf3cff1cc733b28d15cd83672bcd062", + "sha256:824c743197bbe2104fcc6dce114a4082bf7f7efdebf16683510cb0ec6d8d53d0" ], - "markers": "python_version >= '3.7' and python_full_version < '4.0.0'", - "version": "==0.1.3" + "markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'", + "version": "==0.1.4" }, "lazy-object-proxy": { "hashes": [ @@ -1795,19 +1782,19 @@ }, "openapi-schema-validator": { "hashes": [ - "sha256:6940dba9f4906c97078fea6fd9d5a3a3384207db368c4e32f6af6abd7c5c560b", - "sha256:f1eff2a7936546a3ce62b88a17d09de93c9bd229cbc43cb696c988a61a382548" + "sha256:79f37f38ef9fd5206b924ed7a6f382cea7b649b3b56383c47f1906082b7b9015", + "sha256:c573e2be2c783abae56c5a1486ab716ca96e09d1c3eab56020d1dc680aa57bf8" ], - "markers": "python_version >= '3.7' and python_full_version < '4.0.0'", - "version": "==0.4.3" + "markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'", + "version": "==0.4.4" }, "openapi-spec-validator": { "hashes": [ - "sha256:3010df5237748e25d7fac2b2aaf13457c1afd02735b2bd6f008a10079c8f443a", - "sha256:93ba247f585e1447214b4207728a7cce3726d148238217be69e6b8725c118fbe" + "sha256:1189d0618ae0678ccf6c883cc1266d381454eece6f21fcf330cc7caea5fc25eb", + "sha256:4145478f26df16059c147406eaaa59b77ff60e3461ba6edb9aa84d481ed89aaf" ], "index": "pypi", - "version": "==0.5.5" + "version": "==0.5.6" }, "orderedmultidict": { "hashes": [ @@ -1837,7 +1824,7 @@ "sha256:5c869d315be50776cc8a993f3af43e0c60dc01506b399643f919034ebf4cdcab", "sha256:cdd7b1f9d7d5c8b8d3315dbf5a86b2596053ae845f056f57d97c0eefff84da14" ], - "markers": "python_version >= '3.7' and python_full_version < '4.0.0'", + "markers": "python_full_version >= '3.7.0' and python_full_version < '4.0.0'", "version": "==0.4.3" }, "pendulum": { @@ -1918,7 +1905,7 @@ "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b", "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f" ], - "markers": "python_version >= '3.7'", + "markers": "python_full_version >= '3.7.0'", "version": "==3.0.38" }, "psycopg": { @@ -2080,11 +2067,11 @@ }, "redis": { "hashes": [ - "sha256:1eec3741cda408d3a5f84b78d089c8b8d895f21b3b050988351e925faf202864", - "sha256:5deb072d26e67d2be1712603bfb7947ec3431fb0eec9c578994052e33035af6d" + "sha256:56732e156fe31801c4f43396bd3ca0c2a7f6f83d7936798531b9848d103381aa", + "sha256:7df17a0a2b72a4c8895b462dd07616c51b1dcb48fdd7ecb7b6f4bf39ecb2e94e" ], "markers": "python_version >= '3.7'", - "version": "==4.5.1" + "version": "==4.5.3" }, "remote-pdb": { "hashes": [ diff --git a/api/README.md b/api/README.md deleted file mode 100644 index ef19616ebe0..00000000000 --- a/api/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# Openverse API - -The API has two sets of documentation. 
- -- [Developer docs](https://wordpress.github.io/openverse-api/) - - - are focused towards developers who are building the Openverse API - - can be seen locally by - - running the following recipe: - ```bash - just api/sphinx-live - ``` - - visiting the `https://localhost:50230/` endpoint - - contain more details on how to contribute to the API project - -- [Consumer docs](https://api.openverse.engineering/) - - are focused towards consumers who are using the Openverse API - - can be seen locally by - - running the API service - ```bash - just up - ``` - - visiting the `https://localhost:8000/` endpoint - - contain more details about the API endpoints with usage examples diff --git a/api/catalog/api/admin/__init__.py b/api/catalog/api/admin/__init__.py index 7e62ee5d17e..a81ba23684b 100644 --- a/api/catalog/api/admin/__init__.py +++ b/api/catalog/api/admin/__init__.py @@ -16,12 +16,12 @@ admin.sites.site = openverse_admin -@admin.register(Image) +# @admin.register(Image) class ImageAdmin(admin.ModelAdmin): search_fields = ("identifier",) -@admin.register(Audio) +# @admin.register(Audio) class AudioAdmin(admin.ModelAdmin): search_fields = ("identifier",) @@ -58,12 +58,12 @@ def get_readonly_fields(self, request, obj=None): return readonly_fields -@admin.register(ImageReport) +# @admin.register(ImageReport) class ImageReportAdmin(MediaReportAdmin): media_specific_list_display = ("image_url",) -@admin.register(AudioReport) +# @admin.register(AudioReport) class AudioReportAdmin(MediaReportAdmin): media_specific_list_display = ("audio_url",) diff --git a/api/catalog/api/controllers/search_controller.py b/api/catalog/api/controllers/search_controller.py index a5056601b97..b5461256939 100644 --- a/api/catalog/api/controllers/search_controller.py +++ b/api/catalog/api/controllers/search_controller.py @@ -20,8 +20,8 @@ from catalog.api.constants.sorting import INDEXED_ON from catalog.api.serializers import media_serializers from catalog.api.utils import tallies +from catalog.api.utils.check_dead_links import check_dead_links from catalog.api.utils.dead_link_mask import get_query_hash, get_query_mask -from catalog.api.utils.validate_images import validate_images ELASTICSEARCH_MAX_RESULT_WINDOW = 10000 @@ -33,6 +33,7 @@ PROVIDER = "provider" DEEP_PAGINATION_ERROR = "Deep pagination is not allowed." QUERY_SPECIAL_CHARACTER_ERROR = "Unescaped special characters are not allowed." +DEFAULT_BOOST = 10000 class RankFeature(Query): @@ -178,7 +179,7 @@ def _post_process_results( if filter_dead: query_hash = get_query_hash(s) - validate_images(query_hash, start, results, to_validate) + check_dead_links(query_hash, start, results, to_validate) if len(results) == 0: # first page is all dead links @@ -374,7 +375,12 @@ def search( s = s.query("simple_query_string", fields=["tags.name"], query=tags) if settings.USE_RANK_FEATURES: - feature_boost = {"standardized_popularity": 10000} + feature_boost = {"standardized_popularity": DEFAULT_BOOST} + if search_params.data["unstable__authority"]: + feature_boost["authority_boost"] = ( + search_params.data["unstable__authority_boost"] * DEFAULT_BOOST + ) + rank_queries = [] for field, boost in feature_boost.items(): rank_queries.append(Q("rank_feature", field=field, boost=boost)) diff --git a/api/catalog/api/docs/README.md b/api/catalog/api/docs/README.md index cf59ea2bd71..69d2351bc27 100644 --- a/api/catalog/api/docs/README.md +++ b/api/catalog/api/docs/README.md @@ -1,5 +1,6 @@ This documentation is focused towards consumers who are using the Openverse API. 
-The developer documentation for the Openverse API can be found [here](). +To contribute to the development of the Openverse API, please refer to the +[Openverse developer documentation](https://wordpress.github.io/openverse/). # Introduction @@ -9,12 +10,25 @@ openly-licensed works, including articles, songs, videos, photographs, paintings, and more. Using this API, developers will be able to access the digital commons in their own applications. -Please note that there is a rate limit of 100 requests per day and 5 requests -per hour rate limit in place for anonymous users. This is fine for introducing -yourself to the API, but we strongly recommend that you obtain an API key as -soon as possible. Authorized clients have a higher rate limit of 10000 requests -per day and 100 requests per minute. Additionally, Openverse can give your key -an even higher limit that fits your application's needs. See the +# Rate limits + +Please note that the API imposes rate limits to prevent abuse. In the +response for each request that is subject to rate limits, you can see the +`X-RateLimit-` headers for information about your permitted and available usage. +Exceeding the limit will result in '429 Too Many Requests' responses. + +## Anonymous usage + +Anonymous usage is subject to a rate limit of 100 requests per day and, +simultaneously, 5 requests per hour. This is fine for introducing yourself to +the API, but we strongly recommend that you obtain an API key as soon as +possible. + +## Authenticated usage + +Authorized clients have a higher rate limit of 10000 requests per day and 100 +requests per minute. Additionally, Openverse can give your key an even higher +limit that fits your application's needs. See the [Register and Authenticate section](#section/Register-and-Authenticate) for instructions on obtaining an API key. @@ -36,6 +50,7 @@ $ curl \ ``` If your request is successful, you will get a `client_id` and `client_secret`. +Save these keys securely because you will not be able to see them again. Example of successful request: diff --git a/api/catalog/api/docs/image_docs.py b/api/catalog/api/docs/image_docs.py index 2f637edebcd..933228a82f1 100644 --- a/api/catalog/api/docs/image_docs.py +++ b/api/catalog/api/docs/image_docs.py @@ -208,10 +208,14 @@ class ImageComplain(MediaComplain): class ImageOembed: desc = f""" -oembed_list is an API endpoint to retrieve embedded content from a specified image URL. - -By using this endpoint, you can retrieve embedded content such as `version`, `type`, -`width`, `height`, `title`, `author_name`, `author_url` and `license_url`. +oembed is an API endpoint to retrieve the structured data for a specified image URL in +the oEmbed format, which can be used to embed the image on the consumer's website. +Only JSON format is supported. The oEmbed specification is available +at https://oembed.com/. + +By using this endpoint, you can retrieve a JSON oEmbed object containing the following +properties: `version`, `type`, `width`, `height`, `title`, `author_name`, `author_url` +and `license_url`. {refer_sample}""" diff --git a/api/catalog/api/middleware/force_debug_cursor_middleware.py b/api/catalog/api/middleware/force_debug_cursor_middleware.py new file mode 100644 index 00000000000..142ecb63416 --- /dev/null +++ b/api/catalog/api/middleware/force_debug_cursor_middleware.py @@ -0,0 +1,14 @@ +from django.db import connection + + +# Makes DB query logging possible when debugging is disabled +# by forcing the connection to use a debug cursor.
+# +# WARNING: This can have performance implications and +# should only be used in production temporarily. +def force_debug_cursor_middleware(get_response): + def middleware(request): + connection.force_debug_cursor = True + return get_response(request) + + return middleware diff --git a/api/catalog/api/serializers/image_serializers.py b/api/catalog/api/serializers/image_serializers.py index 1e2dd32c0a8..4fb5ebefb04 100644 --- a/api/catalog/api/serializers/image_serializers.py +++ b/api/catalog/api/serializers/image_serializers.py @@ -148,11 +148,11 @@ class OembedSerializer(BaseModelSerializer): """ version = serializers.ReadOnlyField( - help_text="The oEmbed version number. This must be 1.0.", + help_text="The oEmbed version number, always set to 1.0.", default="1.0", ) type = serializers.ReadOnlyField( - help_text="The resource type. This must be 'photo' for images.", + help_text="The resource type, always set to 'photo' for images.", default="photo", ) width = serializers.SerializerMethodField( diff --git a/api/catalog/api/serializers/media_serializers.py b/api/catalog/api/serializers/media_serializers.py index b8c63cfff84..266717355c1 100644 --- a/api/catalog/api/serializers/media_serializers.py +++ b/api/catalog/api/serializers/media_serializers.py @@ -50,6 +50,8 @@ class MediaSearchRequestSerializer(serializers.Serializer): "qa", # "unstable__sort_by", # excluding unstable fields # "unstable__sort_dir", # excluding unstable fields + # "unstable__authority", # excluding unstable fields + # "unstable__authority_boost", # excluding unstable fields "page_size", "page", ] @@ -138,6 +140,22 @@ class MediaSearchRequestSerializer(serializers.Serializer): required=False, default=DESCENDING, ) + unstable__authority = serializers.BooleanField( + label="authority", + help_text="If enabled, the search will add a boost to results that are " + "from authoritative sources.", + required=False, + default=False, + ) + unstable__authority_boost = serializers.FloatField( + label="authority_boost", + help_text="The boost coefficient to apply to authoritative sources, " + "multiplied with the popularity boost.", + required=False, + default=1.0, + min_value=0.0, + max_value=10.0, + ) page_size = serializers.IntegerField( label="page_size", @@ -155,6 +173,10 @@ class MediaSearchRequestSerializer(serializers.Serializer): min_value=1, ) + def is_request_anonymous(self): + request = self.context.get("request") + return bool(request and request.user and request.user.is_anonymous) + @staticmethod def _truncate(value): max_length = 200 @@ -201,18 +223,16 @@ def validate_title(self, value): return self._truncate(value) def validate_unstable__sort_by(self, value): - request = self.context.get("request") - is_anonymous = bool(request and request.user and request.user.is_anonymous) - return RELEVANCE if is_anonymous else value + return RELEVANCE if self.is_request_anonymous() else value def validate_unstable__sort_dir(self, value): - request = self.context.get("request") - is_anonymous = bool(request and request.user and request.user.is_anonymous) - return DESCENDING if is_anonymous else value + return DESCENDING if self.is_request_anonymous() else value + + def validate_unstable__authority(self, value): + return False if self.is_request_anonymous() else value def validate_page_size(self, value): - request = self.context.get("request") - is_anonymous = bool(request and request.user and request.user.is_anonymous) + is_anonymous = self.is_request_anonymous() max_value = ( settings.MAX_ANONYMOUS_PAGE_SIZE if 
is_anonymous diff --git a/api/catalog/api/utils/validate_images.py b/api/catalog/api/utils/check_dead_links.py similarity index 92% rename from api/catalog/api/utils/validate_images.py rename to api/catalog/api/utils/check_dead_links.py index 1bfcab7ace7..7fa11db97fd 100644 --- a/api/catalog/api/utils/validate_images.py +++ b/api/catalog/api/utils/check_dead_links.py @@ -52,7 +52,7 @@ async def _make_head_requests(urls: list[str]) -> list[tuple[str, int]]: return responses.result() -def validate_images( +def check_dead_links( query_hash: str, start_slice: int, results: list[Hit], image_urls: list[str] ) -> None: """ @@ -64,7 +64,7 @@ def validate_images( Results are cached in redis and shared amongst all API servers in the cluster. """ - logger = parent_logger.getChild("validate_images") + logger = parent_logger.getChild("check_dead_links") if not image_urls: logger.info("no image urls to validate") return @@ -119,7 +119,14 @@ def validate_images( for idx, _ in enumerate(cached_statuses): del_idx = len(cached_statuses) - idx - 1 status = cached_statuses[del_idx] - if status == 429 or status == 403: + # thingiverse treated as failure despite the suspect status code + # due to issues described here: + # https://github.com/WordPress/openverse/issues/900 + if ( + status == 429 + or status == 403 + and results[del_idx]["provider"] != "thingiverse" + ): logger.warning( "Image validation failed due to rate limiting or blocking. " f"url={image_urls[idx]} " diff --git a/api/catalog/api/utils/drf_renderer.py b/api/catalog/api/utils/drf_renderer.py new file mode 100644 index 00000000000..b152f11d9f8 --- /dev/null +++ b/api/catalog/api/utils/drf_renderer.py @@ -0,0 +1,27 @@ +from rest_framework.renderers import BrowsableAPIRenderer + + +class BrowsableAPIRendererWithoutForms(BrowsableAPIRenderer): + """ + Renders the browsable api, but excludes the forms. + + See https://github.com/WordPress/openverse/issues/970 + + CC BY 3.0 Brad Montgomery + https://bradmontgomery.net/blog/disabling-forms-django-rest-frameworks-browsable-api/ + """ + + def get_context(self, *args, **kwargs): + ctx = super().get_context(*args, **kwargs) + ctx["display_edit_forms"] = False + return ctx + + def show_form_for_method(self, view, method, request, obj): + """We never want to do this! So just return False.""" + return False + + def get_rendered_html_form(self, data, view, method, request): + """Why render _any_ forms at all. This method should return + rendered HTML, so let's simply return an empty string. + """ + return "" diff --git a/api/catalog/api/utils/exceptions.py b/api/catalog/api/utils/exceptions.py index 1db366e73fe..51b93967052 100644 --- a/api/catalog/api/utils/exceptions.py +++ b/api/catalog/api/utils/exceptions.py @@ -7,12 +7,12 @@ def exception_handler(ex, context): Handle the exception raised in a DRF context. See `DRF docs`_. - .. _DRF docs: https://www.django-rest-framework.org/api-guide/exceptions/#custom-exception-handling # noqa: E501 + .. 
_DRF docs: https://www.django-rest-framework.org/api-guide/exceptions/#custom-exception-handling :param ex: the exception that has occurred :param context: additional data about the context of the exception :return: the response to show for the exception - """ + """ # noqa: E501 res = drf_exception_handler(ex, context) if isinstance(ex, ValidationError): diff --git a/api/catalog/api/utils/throttle.py b/api/catalog/api/utils/throttle.py index 5ff5bd6ddbe..d57f9362ab4 100644 --- a/api/catalog/api/utils/throttle.py +++ b/api/catalog/api/utils/throttle.py @@ -11,7 +11,36 @@ parent_logger = logging.getLogger(__name__) -class AbstractAnonRateThrottle(SimpleRateThrottle, metaclass=abc.ABCMeta): +class SimpleRateThrottleHeader(SimpleRateThrottle, metaclass=abc.ABCMeta): + """ + Extends the ``SimpleRateThrottle`` class to provide additional functionality such as + rate-limit headers in the response. + """ + + def allow_request(self, request, view): + is_allowed = super().allow_request(request, view) + view.headers |= self.headers() + return is_allowed + + def headers(self): + """ + Get `X-RateLimit-` headers for this particular throttle. Each pair of headers + contains the limit and the number of requests left in the limit. Since multiple + rate limits can apply concurrently, the suffix identifies each pair uniquely. + """ + + prefix = "X-RateLimit" + suffix = self.scope or self.__class__.__name__.lower() + if hasattr(self, "history"): + return { + f"{prefix}-Limit-{suffix}": self.rate, + f"{prefix}-Available-{suffix}": self.num_requests - len(self.history), + } + else: + return {} + + +class AbstractAnonRateThrottle(SimpleRateThrottleHeader, metaclass=abc.ABCMeta): """ Limits the rate of API calls that may be made by anonymous users. @@ -61,15 +90,11 @@ class TenPerDay(AbstractAnonRateThrottle): rate = "10/day" -class OneThousandPerMinute(AbstractAnonRateThrottle): - rate = "1000/min" - - class OnePerSecond(AbstractAnonRateThrottle): rate = "1/second" -class AbstractOAuth2IdRateThrottle(SimpleRateThrottle, metaclass=abc.ABCMeta): +class AbstractOAuth2IdRateThrottle(SimpleRateThrottleHeader, metaclass=abc.ABCMeta): """ Ties a particular throttling scope from ``settings.py`` to a rate limit model.
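The `SimpleRateThrottleHeader` change above is what emits the `X-RateLimit-Limit-<scope>` and `X-RateLimit-Available-<scope>` header pairs described in the consumer docs earlier in this diff. As a minimal sketch of how an API consumer might read those headers (the endpoint URL matches the public API referenced elsewhere in this changeset, and the `anon_burst` suffix mirrors the scope used in the throttle tests later in the diff; treat both as illustrative assumptions rather than a fixed contract):

```python
import time

import requests

# Public Openverse search endpoint; substitute your own deployment if needed.
API_URL = "https://api.openverse.engineering/v1/images/"


def search_with_budget(query: str) -> dict:
    """Run one search, reporting rate-limit headers and retrying once on 429."""
    response = requests.get(API_URL, params={"q": query})

    # Every throttle applied to the request contributes one Limit/Available
    # pair, e.g. X-RateLimit-Limit-anon_burst / X-RateLimit-Available-anon_burst,
    # so scan by prefix instead of hard-coding a single header name.
    for name, value in response.headers.items():
        if name.startswith("X-RateLimit"):
            print(f"{name}: {value}")

    if response.status_code == 429:
        # At least one scope is exhausted; back off before a single retry.
        time.sleep(60)
        response = requests.get(API_URL, params={"q": query})

    response.raise_for_status()
    return response.json()
```

Because `headers()` computes `num_requests - len(self.history)` after the current request has been recorded, the `Available` value reflects what remains once the request carrying it has been counted, bottoming out at zero when the limit is hit, which is what the throttle tests below assert.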
diff --git a/api/catalog/api/views/audio_views.py b/api/catalog/api/views/audio_views.py index fb65cd78a31..3a2f9e5c561 100644 --- a/api/catalog/api/views/audio_views.py +++ b/api/catalog/api/views/audio_views.py @@ -23,7 +23,10 @@ AudioWaveformSerializer, ) from catalog.api.serializers.media_serializers import MediaThumbnailRequestSerializer -from catalog.api.utils.throttle import OneThousandPerMinute +from catalog.api.utils.throttle import ( + AnonThumbnailRateThrottle, + OAuth2IdThumbnailRateThrottle, +) from catalog.api.views.media_views import MediaViewSet @@ -54,7 +57,7 @@ def get_queryset(self): url_path="thumb", url_name="thumb", serializer_class=MediaThumbnailRequestSerializer, - throttle_classes=[OneThousandPerMinute], + throttle_classes=[AnonThumbnailRateThrottle, OAuth2IdThumbnailRateThrottle], ) def thumbnail(self, request, *_, **__): audio = self.get_object() @@ -72,7 +75,7 @@ def thumbnail(self, request, *_, **__): @action( detail=True, serializer_class=AudioWaveformSerializer, - throttle_classes=[OneThousandPerMinute], + throttle_classes=[AnonThumbnailRateThrottle, OAuth2IdThumbnailRateThrottle], ) def waveform(self, *_, **__): audio = self.get_object() diff --git a/api/catalog/api/views/image_views.py b/api/catalog/api/views/image_views.py index 05389b53270..eeeacabbd8a 100644 --- a/api/catalog/api/views/image_views.py +++ b/api/catalog/api/views/image_views.py @@ -1,5 +1,4 @@ import io -import re import struct from django.conf import settings @@ -107,17 +106,7 @@ def oembed(self, request, *_, **__): ) def thumbnail(self, request, *_, **__): image = self.get_object() - - image_url = image.url - if not image_url: - raise NotFound("Could not find image.", 404) - - # Hotfix to use scaled down version of the image from SMK - # TODO Remove when this issue is addressed: - # TODO https://github.com/WordPress/openverse-catalog/issues/698 - if "iip.smk.dk" in image_url: - width = settings.THUMBNAIL_WIDTH_PX - image_url = re.sub(r"!\d+,", f"!{width},", image_url) + image_url = image.thumbnail or image.url return super().thumbnail(image_url, request) diff --git a/api/catalog/configuration/elasticsearch.py b/api/catalog/configuration/elasticsearch.py index af0df41ada3..53a8e5997f3 100644 --- a/api/catalog/configuration/elasticsearch.py +++ b/api/catalog/configuration/elasticsearch.py @@ -43,10 +43,14 @@ def _elasticsearch_connect(): return _es -ES = _elasticsearch_connect() -#: Elasticsearch client, also aliased to connection 'default' - -connections.add_connection("default", ES) +SETUP_ES = config("SETUP_ES", default=True, cast=bool) +if SETUP_ES: + ES = _elasticsearch_connect() + #: Elasticsearch client, also aliased to connection 'default' + + connections.add_connection("default", ES) +else: + ES = None MEDIA_INDEX_MAPPING = { media_type: config(f"{media_type.upper()}_INDEX_NAME", default=media_type) diff --git a/api/catalog/configuration/logging.py b/api/catalog/configuration/logging.py index 188638ca972..62cf0ac4032 100644 --- a/api/catalog/configuration/logging.py +++ b/api/catalog/configuration/logging.py @@ -9,7 +9,7 @@ def health_check_filter(record: LogRecord) -> bool: LOG_LEVEL = config("LOG_LEVEL", default="INFO").upper() - +DJANGO_DB_LOGGING = config("DJANGO_DB_LOGGING", cast=bool, default=False) # Logging configuration LOGGING = { @@ -45,7 +45,7 @@ def health_check_filter(record: LogRecord) -> bool: }, # Add a clause to log error messages to the console in production "console_prod": { - "level": "WARNING", + "level": LOG_LEVEL, "filters": ["require_debug_false", 
"request_id"], "class": "logging.StreamHandler", "formatter": "console", @@ -94,3 +94,12 @@ def health_check_filter(record: LogRecord) -> bool: }, }, } + +if DJANGO_DB_LOGGING: + # Behind a separate flag as it's a very noisy debug logger + # and it's nice to be able to enable it conditionally within that context + LOGGING["loggers"]["django.db.backends"] = { + "level": "DEBUG", + "handlers": ["console", "console_prod"], + "propagate": False, + } diff --git a/api/catalog/settings.py b/api/catalog/settings.py index f04fd2d8fb0..5feab7cca50 100644 --- a/api/catalog/settings.py +++ b/api/catalog/settings.py @@ -24,7 +24,7 @@ from catalog.configuration.link_validation_cache import ( LinkValidationCacheExpiryConfiguration, ) -from catalog.configuration.logging import LOGGING +from catalog.configuration.logging import DJANGO_DB_LOGGING, LOGGING # Build paths inside the project like this: BASE_DIR.join('dir', 'subdir'...) @@ -48,7 +48,7 @@ ENVIRONMENT = config("ENVIRONMENT", default="local") -ALLOWED_HOSTS = config("ALLOWED_HOSTS").split(",") + [ +ALLOWED_HOSTS = config("ALLOWED_HOSTS", default="").split(",") + [ gethostname(), gethostbyname(gethostname()), ] @@ -109,6 +109,13 @@ "oauth2_provider.middleware.OAuth2TokenMiddleware", ] +# WARNING: This should not be run in production long-term as it can impact performance +if not DEBUG and DJANGO_DB_LOGGING: + MIDDLEWARE.append( + "catalog.api.middleware.force_debug_cursor_middleware.force_debug_cursor_middleware" # noqa: E501 + ) + + SWAGGER_SETTINGS = { "DEFAULT_INFO": "catalog.urls.swagger.open_api_info", "SECURITY_DEFINITIONS": {}, @@ -138,8 +145,7 @@ "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.URLPathVersioning", "DEFAULT_RENDERER_CLASSES": ( "rest_framework.renderers.JSONRenderer", - "rest_framework.renderers.BrowsableAPIRenderer", - "rest_framework_xml.renderers.XMLRenderer", + "catalog.api.utils.drf_renderer.BrowsableAPIRendererWithoutForms", ), "DEFAULT_THROTTLE_CLASSES": ( "catalog.api.utils.throttle.BurstRateThrottle", @@ -260,6 +266,11 @@ def _make_cache_config(dbnum: int, **overrides) -> dict: "USER": config("DJANGO_DATABASE_USER", default="deploy"), "PASSWORD": config("DJANGO_DATABASE_PASSWORD", default="deploy"), "NAME": config("DJANGO_DATABASE_NAME", default="openledger"), + "OPTIONS": { + "application_name": config( + "DJANGO_DATABASE_APPLICATION_NAME", default="openverse-api" + ), + }, }, } diff --git a/api/catalog/urls/swagger.py b/api/catalog/urls/swagger.py index 49cac6fdffd..7a82ced1443 100644 --- a/api/catalog/urls/swagger.py +++ b/api/catalog/urls/swagger.py @@ -15,8 +15,8 @@ with open(description_path) as description_file: description = description_file.read() -tos_url = "https://wordpress.github.io/openverse-api/terms_of_service.html" -license_url = "https://github.com/WordPress/openverse-api/blob/HEAD/LICENSE" +tos_url = "https://docs.openverse.org/terms_of_service.html" +license_url = "https://github.com/WordPress/openverse/blob/HEAD/LICENSE" logo_url = "https://raw.githubusercontent.com/WordPress/openverse/HEAD/brand/logo.svg" open_api_info = openapi.Info( title="Openverse API consumer docs", diff --git a/api/env.template b/api/env.template index d9a043c0e85..e7d00266b4f 100644 --- a/api/env.template +++ b/api/env.template @@ -7,7 +7,7 @@ DJANGO_DEBUG_ENABLED=True BASE_URL=http://localhost:50280/ ENVIRONMENT=development # List of comma-separated hosts/domain names, e.g., 127.17.0.1,local.app -ALLOWED_HOSTS=172.17.0.1,host.docker.internal +ALLOWED_HOSTS=localhost,172.17.0.1,host.docker.internal 
#LOAD_BALANCER_URL= @@ -31,11 +31,13 @@ ALLOWED_HOSTS=172.17.0.1,host.docker.internal #DJANGO_DATABASE_USER=deploy #DJANGO_DATABASE_PASSWORD=deploy #DJANGO_DATABASE_NAME=openledger +#DJANGO_DATABASE_APPLICATION_NAME=openverse-api SEMANTIC_VERSION=1.0.0 #WATERMARK_ENABLED=False +#SETUP_ES=True #ELASTICSEARCH_URL=es #ELASTICSEARCH_PORT=9200 #ELASTICSEARCH_AWS_REGION=us-east-1 diff --git a/api/justfile b/api/justfile index c7f132703ac..3563bea6c20 100644 --- a/api/justfile +++ b/api/justfile @@ -10,15 +10,32 @@ NO_COLOR := "\\033[0m" just --list --unsorted -# Aliases _all-up from the parent directory -# https://github.com/casey/just/issues/1550 -_all-up: - just ../_all-up - # Install dependencies install *args="--dev": pipenv install {{ args }} +###### +# Up # +###### + +# Bring up services specific to the API profile +up *flags: + env COMPOSE_PROFILES="api" just ../up {{ flags }} + +# Wait for all profile services to be up +wait-up: up + just ../ingestion_server/wait # API profile includes ingestion server + just wait # API waits for ES in entrypoint + +# Load sample data into API via ingestion server +init: wait-up + cd .. && ./load_sample_data.sh + +recreate: + just ../down -v + just up "--force-recreate --build" + just init + ########## # Health # ########## @@ -60,7 +77,7 @@ dj-local +args="": pipenv run python manage.py {{ args }} # Run Django administrative commands inside the Docker container -dj +args="": _all-up +dj +args="": wait-up just ../exec web python manage.py {{ args }} # Get IPython shell inside the Docker container @@ -71,19 +88,12 @@ ipython: dbshell: just dj dbshell -# Run `collectstatic` to prepare for building the `nginx` Dockerfile target. -collectstatic: - # The STATIC_ROOT environment variable is relative to the Django container's - # default working directory i.e. the `/api/` directory. The resulting output - # will be `/api/static/`, which is ignored by Git for convenience. - STATIC_ROOT="./static" just dj collectstatic --noinput - ######### # Tests # ######### # Run API tests inside the Docker container -test *args: _all-up +test *args: wait-up just ../exec web ./test/run_test.sh {{ args }} # Run API tests locally @@ -91,5 +101,20 @@ test-local *args: pipenv run ./test/run_test.sh {{ args }} # Run smoke test for the API docs -doc-test: _all-up +doc-test: wait-up curl --fail 'http://localhost:50280/v1/?format=openapi' + +######### +# NGINX # +######### + +# Build and run the NGINX image locally +nginx upstream_url='api.openverse.engineering': + # upstream_url can also be set to 172.17.0.1:50280 for local testing + docker build --target nginx . 
-t openverse-api_nginx:latest + @echo "--> NGINX server will be run at http://localhost:9090, upstream at {{ upstream_url }}" + @echo "--> Try a static URL like http://localhost:9090/static/admin/css/base.css to test" + docker run --rm -p 9090:8080 -it \ + -e DJANGO_NGINX_UPSTREAM_URL="{{ upstream_url }}" \ + -e DJANGO_NGINX_GIT_REVISION="$(git rev-parse HEAD)" \ + openverse-api_nginx:latest diff --git a/api/test/audio_integration_test.py b/api/test/audio_integration_test.py index 6a90b146ed1..d785b0f8881 100644 --- a/api/test/audio_integration_test.py +++ b/api/test/audio_integration_test.py @@ -28,7 +28,7 @@ import requests from django_redis import get_redis_connection -from catalog.api.utils.validate_images import CACHE_PREFIX +from catalog.api.utils.check_dead_links import CACHE_PREFIX @pytest.fixture diff --git a/api/test/auth_test.py b/api/test/auth_test.py index b5022570d84..daa3973061e 100644 --- a/api/test/auth_test.py +++ b/api/test/auth_test.py @@ -121,6 +121,35 @@ def test_sorting_authed( assert indexed_on == exp_indexed_on +@pytest.mark.django_db +@pytest.mark.parametrize( + "authority_boost, exp_source", + [ + ("1.0", "stocksnap"), + ("0.0", "flickr"), # Authority boost is disabled + ], +) +def test_authority_authed( + client, monkeypatch, test_auth_token_exchange, authority_boost, exp_source +): + # Prevent DB lookup for ES results because DB is empty. + monkeypatch.setattr("catalog.api.views.image_views.ImageSerializer.needs_db", False) + + time.sleep(1) + token = test_auth_token_exchange["access_token"] + query_params = { + "q": "cat", + "unstable__authority": "true", + "unstable__authority_boost": authority_boost, + } + res = client.get("/v1/images/", query_params, HTTP_AUTHORIZATION=f"Bearer {token}") + assert res.status_code == 200 + + res_data = res.json() + source = res_data["results"][0]["source"] + assert source == exp_source + + @pytest.mark.django_db def test_page_size_limit_unauthed(client): query_params = {"page_size": 20} diff --git a/api/test/dead_link_filter_test.py b/api/test/dead_link_filter_test.py index dbca58e6547..27fb59a6ae8 100644 --- a/api/test/dead_link_filter_test.py +++ b/api/test/dead_link_filter_test.py @@ -57,13 +57,13 @@ def get_empty_cached_statuses(_, image_urls): return [None] * len(image_urls) monkeypatch.setattr( - "catalog.api.utils.validate_images._get_cached_statuses", + "catalog.api.utils.check_dead_links._get_cached_statuses", get_empty_cached_statuses, ) _MAKE_HEAD_REQUESTS_MODULE_PATH = ( - "catalog.api.utils.validate_images._make_head_requests" + "catalog.api.utils.check_dead_links._make_head_requests" ) diff --git a/api/test/image_integration_test.py b/api/test/image_integration_test.py index f9e8709e1d5..e0c26d43502 100644 --- a/api/test/image_integration_test.py +++ b/api/test/image_integration_test.py @@ -6,7 +6,6 @@ """ import json -import xml.etree.ElementTree as ET from test.constants import API_URL from test.media_integration import ( detail, @@ -125,27 +124,6 @@ def test_oembed_endpoint_for_json(): assert parsed["license_url"] == "https://creativecommons.org/licenses/by/2.0/" -def test_oembed_endpoint_for_xml(): - params = { - "url": f"https://any.domain/any/path/{identifier}", - "format": "xml", - } - response = requests.get( - f"{API_URL}/v1/images/oembed?{urlencode(params)}", verify=False - ) - assert response.status_code == 200 - assert response.headers["Content-Type"] == "application/xml; charset=utf-8" - - response_body_as_xml = ET.fromstring(response.content) - xml_tree = ET.ElementTree(response_body_as_xml) - 
assert xml_tree.find("width").text == "1024" - assert xml_tree.find("height").text == "683" - assert ( - xml_tree.find("license_url").text - == "https://creativecommons.org/licenses/by/2.0/" - ) - - def test_image_license_filter_case_insensitivity(): license_filter_case_insensitivity("images") diff --git a/api/test/unit/utils/validate_images_test.py b/api/test/unit/utils/check_dead_links_test.py similarity index 60% rename from api/test/unit/utils/validate_images_test.py rename to api/test/unit/utils/check_dead_links_test.py index f0af832b120..fd9d229cf00 100644 --- a/api/test/unit/utils/validate_images_test.py +++ b/api/test/unit/utils/check_dead_links_test.py @@ -4,7 +4,7 @@ import aiohttp import pook -from catalog.api.utils.validate_images import HEADERS, validate_images +from catalog.api.utils.check_dead_links import HEADERS, check_dead_links @mock.patch.object(aiohttp, "ClientSession", wraps=aiohttp.ClientSession) @@ -22,7 +22,7 @@ def test_sends_user_agent(wrapped_client_session: mock.AsyncMock): .mock ) - validate_images(query_hash, start_slice, results, image_urls) + check_dead_links(query_hash, start_slice, results, image_urls) assert head_mock.calls == len(results) requested_urls = [req.rawurl for req in head_mock.matches] @@ -50,9 +50,34 @@ def raise_timeout_error(*args, **kwargs): with mock.patch( "aiohttp.client.ClientSession._request", side_effect=raise_timeout_error ): - validate_images(query_hash, start_slice, results, image_urls) + check_dead_links(query_hash, start_slice, results, image_urls) - # `validate_images` directly modifies the results list + # `check_dead_links` directly modifies the results list # if the results are timing out then they're considered dead and discarded # so should not appear in the final list of results. assert len(results) == 0 + + +def test_thingiverse_403_considered_dead(): + query_hash = "test_thingiverse_403_considered_dead" + results = [ + {"identifier": i, "provider": "thingiverse" if i % 2 else "flickr"} + for i in range(4) + ] + image_urls = [f"https://example.org/{i}" for i in range(len(results))] + start_slice = 0 + + head_mock = ( + pook.head(pook.regex(r"https://example.org/\d")) + .times(len(results)) + .reply(403) + .mock + ) + + check_dead_links(query_hash, start_slice, results, image_urls) + + assert head_mock.calls == len(image_urls) + + # All the "thingiverse" results should have been filtered out + # whereas the flickr results should be left + assert all([r["provider"] == "flickr" for r in results]) diff --git a/api/test/unit/utils/drf_renderer_test.py b/api/test/unit/utils/drf_renderer_test.py new file mode 100644 index 00000000000..d5a4e0f083b --- /dev/null +++ b/api/test/unit/utils/drf_renderer_test.py @@ -0,0 +1,64 @@ +from rest_framework.request import Request +from rest_framework.views import APIView + +import pytest + +from catalog.api.utils.drf_renderer import BrowsableAPIRendererWithoutForms + + +@pytest.fixture +def api_request(request_factory): + return request_factory.get("/") + + +@pytest.fixture +def view(): + return APIView.as_view() + + +@pytest.fixture +def response(view, api_request): + return view(api_request) + + +def test_without_forms_renderer_context_should_not_show_edit_forms( + api_request, response +): + """ + See https://github.com/encode/django-rest-framework/blob/master/tests/test_renderers.py + for an example of test setup.
+ """ + cls = BrowsableAPIRendererWithoutForms() + + parent_context = { + "view": APIView(), + "request": Request(api_request), + "response": response, + } + + ctx = cls.get_context({}, "text/html", parent_context) + + assert ctx["display_edit_forms"] is False + + +parametrize_methods = pytest.mark.parametrize( + "method", ("GET", "PUT", "POST", "DELETE", "OPTIONS") +) + + +@parametrize_methods +def test_without_forms_renderer_show_form_for_method_returns_false( + method, api_request, view +): + cls = BrowsableAPIRendererWithoutForms() + + assert cls.show_form_for_method(view, method, api_request, None) is False + + +@parametrize_methods +def test_without_forms_renderer_get_rendered_html_form_empty(method, api_request, view): + cls = BrowsableAPIRendererWithoutForms() + + data = {} + + assert cls.get_rendered_html_form(data, view, method, api_request) == "" diff --git a/api/test/unit/utils/throttle_test.py b/api/test/unit/utils/throttle_test.py index f3cd08e576e..7d47c4daa6c 100644 --- a/api/test/unit/utils/throttle_test.py +++ b/api/test/unit/utils/throttle_test.py @@ -1,5 +1,6 @@ from test.factory.models.oauth2 import AccessTokenFactory +from django.core.cache import cache from rest_framework.test import force_authenticate from rest_framework.views import APIView @@ -10,6 +11,8 @@ from catalog.api.utils.throttle import ( AbstractAnonRateThrottle, AbstractOAuth2IdRateThrottle, + BurstRateThrottle, + TenPerDay, ) @@ -124,3 +127,50 @@ def test_abstract_oauth2_id_rate_throttle_does_not_apply_if_token_app_rate_limit ) access_token.application.save() assert throttle.get_cache_key(view.initialize_request(authed_request), view) is None + + +@pytest.mark.django_db +def test_rate_limit_headers(request_factory): + cache.delete_pattern("throttle_*") + limit = 2 + + class DummyThrottle(BurstRateThrottle): + THROTTLE_RATES = {"anon_burst": f"{limit}/hour"} + + class ThrottledView(APIView): + throttle_classes = [DummyThrottle] + + view = ThrottledView().as_view() + request = request_factory.get("/") + + # Send three requests. The third one should be throttled. + for idx in range(1, limit + 2): + response = view(request) + headers = [h for h in response.headers.items() if "X-RateLimit" in h[0]] + + # Assert that request returns 429 response if limit has been exceeded. + assert response.status_code == 429 if idx == limit + 1 else 200 + + # Assert that the 'Available' header constantly decrements, but not below zero. 
+ assert [ + ("X-RateLimit-Limit-anon_burst", f"{limit}/hour"), + ("X-RateLimit-Available-anon_burst", str(max(0, limit - idx))), + ] == headers + + +@pytest.mark.django_db +def test_rate_limit_headers_when_no_scope(request_factory): + cache.delete_pattern("throttle_*") + + class ThrottledView(APIView): + throttle_classes = [TenPerDay] + + view = ThrottledView().as_view() + request = request_factory.get("/") + + response = view(request) + headers = [h for h in response.headers.items() if "X-RateLimit" in h[0]] + assert [ + ("X-RateLimit-Limit-tenperday", "10/day"), + ("X-RateLimit-Available-tenperday", "9"), + ] == headers diff --git a/api/test/unit/utils/watermark_test.py b/api/test/unit/utils/watermark_test.py index c649140dd1c..6c5198e2cd0 100644 --- a/api/test/unit/utils/watermark_test.py +++ b/api/test/unit/utils/watermark_test.py @@ -21,7 +21,7 @@ @dataclass class RequestsFixture: requests: list[Request] - response_factory: Callable[ + response_factory: Callable[ # noqa: E731 [Request], Response ] = lambda x: RequestsFixture._default_response_factory(x) diff --git a/api/test/unit/utils/waveform_test.py b/api/test/unit/utils/waveform_test.py index d0b70f15249..0308dee4448 100644 --- a/api/test/unit/utils/waveform_test.py +++ b/api/test/unit/utils/waveform_test.py @@ -19,7 +19,7 @@ @dataclass class RequestsFixture: requests: list[Request] - response_factory: Callable[ + response_factory: Callable[ # noqa: E731 [Request], Response ] = lambda x: RequestsFixture._default_response_factory(x) diff --git a/api/test/unit/views/image_views_test.py b/api/test/unit/views/image_views_test.py index a9580f01c9e..b6795475ac6 100644 --- a/api/test/unit/views/image_views_test.py +++ b/api/test/unit/views/image_views_test.py @@ -3,6 +3,9 @@ from dataclasses import dataclass from pathlib import Path from test.factory.models.image import ImageFactory +from unittest.mock import ANY, patch + +from django.http import HttpResponse import pytest from requests import Request, Response @@ -18,7 +21,7 @@ @dataclass class RequestsFixture: requests: list[Request] - response_factory: Callable[ + response_factory: Callable[ # noqa: E731 [Request], Response ] = lambda x: RequestsFixture._default_response_factory(x) @@ -56,3 +59,21 @@ def test_oembed_sends_ua_header(api_client, requests): assert len(requests.requests) > 0 for r in requests.requests: assert r.headers == ImageViewSet.OEMBED_HEADERS + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "has_thumbnail, expected_thumb_url", + [(True, "http://example.com/thumb.jpg"), (False, "http://example.com/image.jpg")], +) +def test_thumbnail_uses_upstream_thumb(api_client, has_thumbnail, expected_thumb_url): + thumb_url = "http://example.com/thumb.jpg" if has_thumbnail else None + image = ImageFactory.create( + url="http://example.com/image.jpg", + thumbnail=thumb_url, + ) + with patch("catalog.api.views.media_views.MediaViewSet.thumbnail") as thumb_call: + mock_response = HttpResponse("mock_response") + thumb_call.return_value = mock_response + api_client.get(f"/v1/images/{image.identifier}/thumb/") + thumb_call.assert_called_once_with(expected_thumb_url, ANY) diff --git a/automations/js/justfile b/automations/js/justfile index 3b97a4887a3..56f71915b6f 100644 --- a/automations/js/justfile +++ b/automations/js/justfile @@ -24,7 +24,7 @@ report: # Run `render-jinja.js` with given input file, output file and context render in_file out_file ctx="{}": - node src/render-jinja.js {{ in_file }} {{ out_file }} {{ ctx }} + node src/render-jinja.js {{ in_file }} {{ out_file 
}} '{{ ctx }}' # Render `.pre-commit-config.yaml` render-precommit: @@ -38,8 +38,14 @@ render-prettier: render-github: just render templates/pull_request_template.md.jinja .github/PULL_REQUEST_TEMPLATE/pull_request_template.md +render-release-drafter: + just render templates/release-drafter.yml.jinja .github/release-drafter-api.yml '{ "app": "API" }' + just render templates/release-drafter.yml.jinja .github/release-drafter-ingestion_server.yml '{ "app": "Ingestion server" }' + just render templates/release-drafter.yml.jinja .github/release-drafter-frontend.yml '{ "app": "Frontend" }' + # Render all templates (shortcut for easy iteration) render-templates: just render-precommit just render-prettier just render-github + just render-release-drafter diff --git a/automations/js/src/last_week_tonight.mjs b/automations/js/src/last_week_tonight.mjs index b3cedee2a11..52f0187b7aa 100644 --- a/automations/js/src/last_week_tonight.mjs +++ b/automations/js/src/last_week_tonight.mjs @@ -122,7 +122,7 @@ const postActivities = (activities) => { slug: `last-week-openverse-${startDate}-${endDate}`, excerpt: `The developments in Openverse between ${startDate} and ${endDate}`, content: report, - status: 'draft', + status: 'publish', tags: [ 3, // openverse 5, // week-in-openverse diff --git a/automations/python/label_pr.py b/automations/python/label_pr.py index 6bea6d33f1b..af1bfc04c0d 100644 --- a/automations/python/label_pr.py +++ b/automations/python/label_pr.py @@ -23,7 +23,7 @@ log = logging.getLogger(__name__) REQUIRED_LABEL_CATEGORIES = ["aspect", "priority", "goal", "stack"] -# Categories where all labels should be retrievd rather than first only +# Categories where all labels should be retrieved rather than first only GET_ALL_LABEL_CATEGORIES = {"stack"} # region argparse @@ -150,7 +150,7 @@ def get_linked_issues(url: str) -> list[str]: def get_all_labels_of_cat(cat: str, labels: list[Label]) -> list[Label]: """ - Get all of the available labels from a category from a given list of + Get all the available labels from a category from a given list of labels. :param cat: the category to which the label should belong @@ -222,7 +222,8 @@ def main(): break else: log.info("Could not find properly labelled issue") - pr.set_labels("🚦 status: awaiting triage") + # Only add the triage label onto existing labels, don't replace them + pr.set_labels("🚦 status: awaiting triage", *pr.get_labels()) if __name__ == "__main__": diff --git a/automations/python/shared/github.py b/automations/python/shared/github.py index 6a243d5b71b..d3dcfb3a4d5 100644 --- a/automations/python/shared/github.py +++ b/automations/python/shared/github.py @@ -24,7 +24,7 @@ def get_client(is_authenticated: bool = True) -> Github: """ Get a PyGithub client to access the GitHub API. - The client can optionally be authenticated using the GITHUB_ACCESS_TOKEN + The client can optionally be authenticated using the ACCESS_TOKEN from the environment variables. 
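For context on the docstring being corrected here, a `get_client` body consistent with it might look like the sketch below. This is hypothetical (the real `shared/github.py` body may differ); it assumes only PyGithub's standard `Github(token)` constructor and the `ACCESS_TOKEN` environment variable named above.

```python
import os

from github import Github


def get_client(is_authenticated: bool = True) -> Github:
    """Get a PyGithub client, optionally authenticated via ``ACCESS_TOKEN``."""
    if is_authenticated:
        # A missing token should fail loudly here rather than silently
        # degrade to an unauthenticated client.
        return Github(os.environ["ACCESS_TOKEN"])
    return Github()  # unauthenticated; subject to much lower rate limits
```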
:param is_authenticated: whether to authenticate the client diff --git a/docker-compose.yml b/docker-compose.yml index 81655b78778..b1b4121c6fc 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,51 +1,64 @@ version: "2.4" services: db: + profiles: + - ingestion_server + - api image: postgres:13.2-alpine ports: - "50254:5432" volumes: - api-postgres:/var/lib/postgresql/data env_file: - - postgres/env.docker + - docker/db/env.docker healthcheck: test: "pg_isready -U deploy -d openledger" upstream_db: - image: postgres:13.2-alpine + profiles: + - ingestion_server + - api + build: ./docker/upstream_db/ + image: openverse-upstream_db expose: - "5432" volumes: - catalog-postgres:/var/lib/postgresql/data - ./sample_data:/sample_data env_file: - - postgres/env.docker + - docker/upstream_db/env.docker healthcheck: test: "pg_isready -U deploy -d openledger" plausible_db: + profiles: + - frontend image: postgres:13.2-alpine expose: - "5432" volumes: - plausible-postgres:/var/lib/postgresql/data env_file: - - postgres/plausible.env.docker + - ./docker/plausible_db/env.docker healthcheck: test: "pg_isready -U deploy -d plausible" plausible_ch: + profiles: + - frontend image: clickhouse/clickhouse-server:22.6-alpine volumes: - plausible-clickhouse:/var/lib/clickhouse - - ./clickhouse/clickhouse-config.xml:/etc/clickhouse-server/config.d/logging.xml:ro - - ./clickhouse/clickhouse-user-config.xml:/etc/clickhouse-server/users.d/logging.xml:ro + - ./docker/clickhouse/clickhouse-config.xml:/etc/clickhouse-server/config.d/logging.xml:ro + - ./docker/clickhouse/clickhouse-user-config.xml:/etc/clickhouse-server/users.d/logging.xml:ro ulimits: nofile: soft: 262144 hard: 262144 plausible: + profiles: + - frontend image: plausible/analytics:latest ports: - "50288:8000" @@ -54,24 +67,24 @@ services: - plausible_db - plausible_ch env_file: - - plausible/env.docker + - docker/plausible/env.docker cache: + profiles: + - api image: redis:4.0.10 ports: - "50263:6379" es: + profiles: + - ingestion_server + - api image: docker.elastic.co/elasticsearch/elasticsearch:7.12.0 ports: - "50292:9200" - environment: - # disable XPack - # https://www.elastic.co/guide/en/elasticsearch/reference/5.3/docker.html#_security_note - - xpack.security.enabled=false - - discovery.type=single-node - - http.cors.enabled=true - - http.cors.allow-origin=/.*/ + env_file: + - docker/es/env.docker healthcheck: test: [ @@ -93,8 +106,11 @@ services: - es-data:/usr/share/elasticsearch/data web: + profiles: + - api build: context: ./api/ + target: api args: SEMANTIC_VERSION: ${SEMANTIC_VERSION:-v1.0.0} image: openverse-api @@ -117,6 +133,9 @@ services: tty: true ingestion_server: + profiles: + - ingestion_server + - api build: ./ingestion_server/ image: openverse-ingestion_server command: gunicorn -c ./gunicorn.conf.py @@ -135,6 +154,9 @@ services: tty: true indexer_worker: + profiles: + - ingestion_server + - api build: ./ingestion_server/ image: openverse-ingestion_server command: gunicorn -c ./gunicorn_worker.conf.py @@ -152,6 +174,8 @@ services: tty: true proxy: + profiles: + - api image: nginx:alpine ports: - "50200:9080" @@ -161,8 +185,8 @@ services: depends_on: - web volumes: - - ./nginx/templates:/etc/nginx/templates - - ./nginx/certs:/etc/nginx/certs + - ./docker/nginx/templates:/etc/nginx/templates + - ./docker/nginx/certs:/etc/nginx/certs volumes: api-postgres: diff --git a/clickhouse/clickhouse-config.xml b/docker/clickhouse/clickhouse-config.xml similarity index 100% rename from clickhouse/clickhouse-config.xml rename to 
docker/clickhouse/clickhouse-config.xml diff --git a/clickhouse/clickhouse-user-config.xml b/docker/clickhouse/clickhouse-user-config.xml similarity index 100% rename from clickhouse/clickhouse-user-config.xml rename to docker/clickhouse/clickhouse-user-config.xml diff --git a/postgres/env.docker b/docker/db/env.docker similarity index 70% rename from postgres/env.docker rename to docker/db/env.docker index 0f0f16d4097..b2356fc7c55 100644 --- a/postgres/env.docker +++ b/docker/db/env.docker @@ -2,3 +2,6 @@ POSTGRES_DB="openledger" POSTGRES_USER="deploy" POSTGRES_PASSWORD="deploy" POSTGRES_HOST="0.0.0.0" + +PGDATABASE="openledger" +PGUSER="deploy" diff --git a/docker/es/env.docker b/docker/es/env.docker new file mode 100644 index 00000000000..cd80e5a6f57 --- /dev/null +++ b/docker/es/env.docker @@ -0,0 +1,8 @@ +# Disable XPack +# https://www.elastic.co/guide/en/elasticsearch/reference/5.3/docker.html#_security_note +xpack.security.enabled="false" + +discovery.type="single-node" + +http.cors.enabled="true" +http.cors.allow-origin="/.*/" diff --git a/docker/es/justfile b/docker/es/justfile new file mode 100644 index 00000000000..156fab5916c --- /dev/null +++ b/docker/es/justfile @@ -0,0 +1,44 @@ +set dotenv-load := false + +COLOR := "\\033[0;32m" +NO_COLOR := "\\033[0m" + +# Show all available recipes +@_default: + printf "\n{{ COLOR }}# ES (path: \`docker/es/\`)\n" + printf "========================={{ NO_COLOR }}\n" + just --list --unsorted + + +########## +# Health # +########## + +# Check the health of the service +@health host: + -curl -s -o /dev/null -w '%{http_code}' 'http://{{ host }}/_cluster/health' + +# Wait for the service to be healthy +@wait host="localhost:50292": + # The just command on the second line is executed in the context of the + # root directory and so must be prefixed with `docker/es/`. + just ../../_loop \ + '"$(just docker/es/health {{ host }})" != "200"' \ + "Waiting for Elasticsearch to be healthy..." + +@check-index index="image": + -curl \ + -s \ + -H 'Accept: application/json' \ + 'http://localhost:50292/_cat/indices/{{ index }}' \ + | grep -o "{{ index }}" \ + | wc -l \ + | xargs + +# Wait for the media to be indexed in Elasticsearch +@wait-for-index index="image": + # The just command on the second line is executed in the context of the + # root directory and so must be prefixed with `docker/es/`. + just ../../_loop \ + '"$(just docker/es/check-index {{ index }})" != "1"' \ + "Waiting for index '{{ index }}' to be ready..." 
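The `health` and `wait` recipes above poll Elasticsearch's `_cluster/health` endpoint through a root-level `_loop` helper defined outside this diff. The same wait-until-healthy logic as a standalone Python sketch, assuming only the `50292:9200` port mapping from `docker-compose.yml` above:

```python
import time

import requests


def wait_for_es(host: str = "localhost:50292", timeout: float = 120.0) -> None:
    """Poll the cluster health endpoint until it responds with HTTP 200."""
    url = f"http://{host}/_cluster/health"
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            if requests.get(url, timeout=5).status_code == 200:
                return
        except requests.ConnectionError:
            pass  # Elasticsearch is not accepting connections yet.
        print("Waiting for Elasticsearch to be healthy...")
        time.sleep(2)
    raise TimeoutError(f"Elasticsearch at {host} did not become healthy in time")
```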
diff --git a/nginx/.gitignore b/docker/nginx/.gitignore similarity index 100% rename from nginx/.gitignore rename to docker/nginx/.gitignore diff --git a/docker/nginx/justfile b/docker/nginx/justfile new file mode 100644 index 00000000000..7392c3bad6e --- /dev/null +++ b/docker/nginx/justfile @@ -0,0 +1,19 @@ +set dotenv-load := false + +COLOR := "\\033[0;35m" +NO_COLOR := "\\033[0m" + +# Show all available recipes +@_default: + printf "\n{{ COLOR }}# NGINX (path: \`docker/nginx/\`)\n" + printf "==============================={{ NO_COLOR }}\n" + just --list --unsorted + + +# Make locally trusted certificates (requires mkcert installed) cert: + mkdir -p certs/ + mkcert \ + -cert-file certs/openverse.crt \ + -key-file certs/openverse.key \ + dev.openverse.test localhost 127.0.0.1 ::1 diff --git a/nginx/templates/web.conf.template b/docker/nginx/templates/web.conf.template similarity index 100% rename from nginx/templates/web.conf.template rename to docker/nginx/templates/web.conf.template diff --git a/plausible/env.docker b/docker/plausible/env.docker similarity index 100% rename from plausible/env.docker rename to docker/plausible/env.docker diff --git a/postgres/plausible.env.docker b/docker/plausible_db/env.docker similarity index 71% rename from postgres/plausible.env.docker rename to docker/plausible_db/env.docker index 2520ff22514..79a6d17f671 100644 --- a/postgres/plausible.env.docker +++ b/docker/plausible_db/env.docker @@ -2,3 +2,6 @@ POSTGRES_DB="plausible" POSTGRES_USER="deploy" POSTGRES_PASSWORD="deploy" POSTGRES_HOST="0.0.0.0" + +PGDATABASE="plausible" +PGUSER="deploy" diff --git a/docker/upstream_db/0001_airflow_user_db.sql b/docker/upstream_db/0001_airflow_user_db.sql new file mode 100644 index 00000000000..06c1c4486fb --- /dev/null +++ b/docker/upstream_db/0001_airflow_user_db.sql @@ -0,0 +1,2 @@ +CREATE ROLE airflow WITH LOGIN PASSWORD 'airflow'; +CREATE DATABASE airflow OWNER airflow; diff --git a/docker/upstream_db/0002_aws_s3_mock.sql b/docker/upstream_db/0002_aws_s3_mock.sql new file mode 100644 index 00000000000..16ae8d5b0c6 --- /dev/null +++ b/docker/upstream_db/0002_aws_s3_mock.sql @@ -0,0 +1,42 @@ +CREATE SCHEMA IF NOT EXISTS aws_s3; +CREATE EXTENSION plpython3u; + +CREATE OR REPLACE FUNCTION aws_s3.table_import_from_s3 ( + table_name text, + column_list text, + options text, + bucket text, + file_path text, + region text +) RETURNS int +LANGUAGE plpython3u +AS $$ + import os + import boto3 + from datetime import datetime as dt + s3_obj = boto3.resource( + 's3', + aws_access_key_id=os.getenv('AWS_ACCESS_KEY', 'test_key'), + aws_secret_access_key=os.getenv('AWS_SECRET_KEY', 'test_secret'), + region_name=region, + endpoint_url=os.getenv('S3_LOCAL_ENDPOINT', 'http://s3:5000') + ).Object(bucket, file_path) + temp_location = f"/tmp/pg_load_{dt.now().timestamp()}_{file_path.split('/')[-1]}" + s3_obj.download_file(temp_location) + if file_path[-3:]=='.gz': + copy_from = f"PROGRAM 'gzip -dc {temp_location}'" + else: + copy_from = plpy.quote_literal(temp_location) + with open(temp_location) as f: + columns = '({})'.format(column_list) if column_list else '' + res = plpy.execute( + 'COPY {} {} FROM {} {};'.format( + table_name, + columns, + copy_from, + options + ) + ) + os.remove(temp_location) + return res.nrows() +$$; diff --git a/docker/upstream_db/0003_openledger_image_schema.sql b/docker/upstream_db/0003_openledger_image_schema.sql new file mode 100644 index 00000000000..69c3275d8a9 --- /dev/null +++
b/docker/upstream_db/0003_openledger_image_schema.sql @@ -0,0 +1,54 @@ +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET client_min_messages = warning; +SET row_security = off; +SET default_tablespace = ''; +SET default_with_oids = false; + +CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public; +COMMENT ON EXTENSION "uuid-ossp" IS 'generate universally unique identifiers (UUIDs)'; + +CREATE TABLE public.image ( + identifier uuid PRIMARY KEY DEFAULT public.uuid_generate_v4(), + created_on timestamp with time zone NOT NULL, + updated_on timestamp with time zone NOT NULL, + ingestion_type character varying(80), + provider character varying(80), + source character varying(80), + foreign_identifier character varying(3000), + foreign_landing_url character varying(1000), + url character varying(3000) NOT NULL, + thumbnail character varying(3000), + width integer, + height integer, + filesize integer, + license character varying(50) NOT NULL, + license_version character varying(25), + creator character varying(2000), + creator_url character varying(2000), + title character varying(5000), + meta_data jsonb, + tags jsonb, + watermarked boolean, + last_synced_with_source timestamp with time zone, + removed_from_source boolean NOT NULL, + filetype character varying(5), + category character varying(80) +); + + +ALTER TABLE public.image OWNER TO deploy; +CREATE UNIQUE INDEX image_provider_fid_idx + ON public.image + USING btree (provider, md5(foreign_identifier)); +CREATE UNIQUE INDEX image_identifier_key + ON public.image + USING btree (identifier); +CREATE UNIQUE INDEX image_url_key + ON public.image + USING btree (url); diff --git a/docker/upstream_db/0004_openledger_image_view.sql b/docker/upstream_db/0004_openledger_image_view.sql new file mode 100644 index 00000000000..20dc28f4901 --- /dev/null +++ b/docker/upstream_db/0004_openledger_image_view.sql @@ -0,0 +1,90 @@ +CREATE TABLE public.image_popularity_metrics ( + provider character varying(80) PRIMARY KEY, + metric character varying(80), + percentile float +); + + +-- For more information on these values see: +-- https://github.com/cc-archive/cccatalog/issues/405#issuecomment-629233047 +-- https://github.com/cc-archive/cccatalog/pull/477 +INSERT INTO public.image_popularity_metrics ( + provider, metric, percentile +) VALUES + ('flickr', 'views', 0.85), + ('nappy', 'downloads', 0.85), + ('rawpixel', 'download_count', 0.85), + ('stocksnap', 'downloads_raw', 0.85), + ('wikimedia', 'global_usage_count', 0.85) +; + + +CREATE FUNCTION image_popularity_percentile( + provider text, pop_field text, percentile float +) RETURNS FLOAT AS $$ + SELECT percentile_disc($3) WITHIN GROUP ( + ORDER BY (meta_data->>$2)::float + ) + FROM image WHERE provider=$1; +$$ +LANGUAGE SQL +STABLE +RETURNS NULL ON NULL INPUT; + + +CREATE MATERIALIZED VIEW public.image_popularity_constants AS + WITH popularity_metric_values AS ( + SELECT + *, + image_popularity_percentile(provider, metric, percentile) AS val + FROM image_popularity_metrics + ) + SELECT *, ((1 - percentile) / percentile) * val AS constant + FROM popularity_metric_values; + +CREATE UNIQUE INDEX ON image_popularity_constants (provider); + + +CREATE FUNCTION standardized_image_popularity(provider text, meta_data jsonb) +RETURNS FLOAT AS $$ + SELECT ($2->>metric)::FLOAT / (($2->>metric)::FLOAT + 
constant) + FROM image_popularity_constants WHERE provider=$1; +$$ +LANGUAGE SQL +STABLE +RETURNS NULL ON NULL INPUT; + + +CREATE MATERIALIZED VIEW image_view AS + SELECT + identifier, + created_on, + updated_on, + ingestion_type, + provider, + source, + foreign_identifier, + foreign_landing_url, + url, + thumbnail, + width, + height, + filesize, + license, + license_version, + creator, + creator_url, + title, + meta_data, + tags, + watermarked, + last_synced_with_source, + removed_from_source, + filetype, + category, + standardized_image_popularity( + image.provider, image.meta_data + ) AS standardized_popularity + FROM image; + +CREATE UNIQUE INDEX ON image_view (identifier); diff --git a/docker/upstream_db/0005_openledger_old_image_schema.sql b/docker/upstream_db/0005_openledger_old_image_schema.sql new file mode 100644 index 00000000000..75c1c6b7e6c --- /dev/null +++ b/docker/upstream_db/0005_openledger_old_image_schema.sql @@ -0,0 +1,133 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 10.9 +-- Dumped by pg_dump version 10.4 + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Name: uuid-ossp; Type: EXTENSION; Schema: -; Owner: +-- + +CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public; + + +-- +-- Name: EXTENSION "uuid-ossp"; Type: COMMENT; Schema: -; Owner: +-- + +COMMENT ON EXTENSION "uuid-ossp" IS 'generate universally unique identifiers (UUIDs)'; + + +SET default_tablespace = ''; + +SET default_with_oids = false; + +-- +-- Name: old_image; Type: TABLE; Schema: public; Owner: deploy +-- + +CREATE TABLE public.old_image ( + id integer NOT NULL, + created_on timestamp with time zone NOT NULL, + updated_on timestamp with time zone NOT NULL, + identifier uuid DEFAULT public.uuid_generate_v4(), + perceptual_hash character varying(255), + provider character varying(80), + source character varying(80), + foreign_identifier character varying(3000), + foreign_landing_url character varying(1000), + url character varying(3000) NOT NULL, + thumbnail character varying(3000), + width integer, + height integer, + filesize integer, + license character varying(50) NOT NULL, + license_version character varying(25), + creator character varying(2000), + creator_url character varying(2000), + title character varying(5000), + tags_list character varying(255)[], + last_synced_with_source timestamp with time zone, + removed_from_source boolean NOT NULL, + meta_data jsonb, + tags jsonb, + watermarked boolean, + view_count integer DEFAULT 0 NOT NULL +); + + +ALTER TABLE public.old_image OWNER TO deploy; + +-- +-- Name: old_image_id_seq; Type: SEQUENCE; Schema: public; Owner: deploy +-- + +CREATE SEQUENCE public.old_image_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.old_image_id_seq OWNER TO deploy; + +-- +-- Name: old_image_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: deploy +-- + +ALTER SEQUENCE public.old_image_id_seq OWNED BY public.old_image.id; + + +-- +-- Name: old_image id; Type: DEFAULT; Schema: public; Owner: deploy +-- + +ALTER TABLE ONLY public.old_image ALTER COLUMN id SET DEFAULT nextval('public.old_image_id_seq'::regclass); + + +-- +-- Name: old_image old_image_pkey; Type: CONSTRAINT; Schema: public; Owner: deploy 
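An aside on the popularity functions defined in `0004_openledger_image_view.sql` above, since the purpose of the `constant` column is easy to miss. Writing p for the configured percentile and x_p for the raw metric value at that percentile, the materialized constant and the standardized score are:

```latex
c = \frac{1 - p}{p} \, x_p ,
\qquad
\mathrm{score}(x) = \frac{x}{x + c} ,
\qquad
\mathrm{score}(x_p) = \frac{x_p}{x_p + \frac{1 - p}{p} \, x_p} = p .
```

So with every provider configured at percentile 0.85, a record whose raw metric (views, downloads, global usage and so on) sits exactly at its provider's 85th percentile gets a standardized popularity of exactly 0.85, and scores approach 1 asymptotically from there; this is what makes metrics with wildly different scales comparable across providers.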
+-- + +ALTER TABLE ONLY public.old_image + ADD CONSTRAINT old_image_pkey PRIMARY KEY (id); + + +-- +-- Name: old_image_9e9f3d70; Type: INDEX; Schema: public; Owner: deploy +-- + +CREATE INDEX old_image_9e9f3d70 ON public.old_image USING btree (provider); + +CREATE UNIQUE INDEX old_image_provider_fid_key ON public.old_image USING btree (provider, md5((foreign_identifier)::text)); + + +-- +-- Name: old_image_url_key; Type: INDEX; Schema: public; Owner: deploy +-- + +CREATE INDEX old_image_url_key ON public.old_image USING btree (provider, md5((url)::text)); + + +-- +-- Name: uuid_index; Type: INDEX; Schema: public; Owner: deploy +-- + +CREATE INDEX uuid_index ON public.old_image USING btree (identifier); + + +-- +-- PostgreSQL database dump complete +-- diff --git a/docker/upstream_db/0006_openledger_audio_schema.sql b/docker/upstream_db/0006_openledger_audio_schema.sql new file mode 100644 index 00000000000..b547b1d3060 --- /dev/null +++ b/docker/upstream_db/0006_openledger_audio_schema.sql @@ -0,0 +1,60 @@ +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET client_min_messages = warning; +SET row_security = off; +SET default_tablespace = ''; +SET default_with_oids = false; + +CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public; +COMMENT ON EXTENSION "uuid-ossp" IS 'generate universally unique identifiers (UUIDs)'; + + +CREATE TABLE public.audio ( + identifier uuid PRIMARY KEY DEFAULT public.uuid_generate_v4(), + created_on timestamp with time zone NOT NULL, + updated_on timestamp with time zone NOT NULL, + ingestion_type character varying(80), + provider character varying(80), + source character varying(80), + foreign_identifier character varying(3000), + foreign_landing_url character varying(1000), + url character varying(3000) NOT NULL, + thumbnail character varying(3000), + filetype character varying(5), + duration integer, + bit_rate integer, + sample_rate integer, + category character varying(80), + genres character varying(80)[], + audio_set jsonb, + set_position integer, + alt_files jsonb, + filesize integer, + license character varying(50) NOT NULL, + license_version character varying(25), + creator character varying(2000), + creator_url character varying(2000), + title character varying(5000), + meta_data jsonb, + tags jsonb, + watermarked boolean, + last_synced_with_source timestamp with time zone, + removed_from_source boolean NOT NULL +); + + +ALTER TABLE public.audio OWNER TO deploy; +CREATE UNIQUE INDEX audio_provider_fid_idx + ON public.audio + USING btree (provider, md5(foreign_identifier)); +CREATE UNIQUE INDEX audio_identifier_key + ON public.audio + USING btree (identifier); +CREATE UNIQUE INDEX audio_url_key + ON public.audio + USING btree (url); diff --git a/docker/upstream_db/0007_openledger_audio_view.sql b/docker/upstream_db/0007_openledger_audio_view.sql new file mode 100644 index 00000000000..559698258e7 --- /dev/null +++ b/docker/upstream_db/0007_openledger_audio_view.sql @@ -0,0 +1,116 @@ +CREATE TABLE public.audio_popularity_metrics ( + provider character varying(80) PRIMARY KEY, + metric character varying(80), + percentile float +); + + +INSERT INTO public.audio_popularity_metrics ( + provider, metric, percentile +) VALUES + ('wikimedia_audio', 'global_usage_count', 0.85), + ('jamendo', 'listens', 0.85), + ('freesound', 'num_downloads', 
0.85); + + +CREATE FUNCTION audio_popularity_percentile( + provider text, pop_field text, percentile float +) RETURNS FLOAT AS $$ + SELECT percentile_disc($3) WITHIN GROUP ( + ORDER BY (meta_data->>$2)::float + ) + FROM audio WHERE provider=$1; +$$ +LANGUAGE SQL +STABLE +RETURNS NULL ON NULL INPUT; + + +CREATE MATERIALIZED VIEW public.audio_popularity_constants AS + WITH popularity_metric_values AS ( + SELECT + *, + audio_popularity_percentile(provider, metric, percentile) AS val + FROM audio_popularity_metrics + ) + SELECT *, ((1 - percentile) / percentile) * val AS constant + FROM popularity_metric_values; + +CREATE UNIQUE INDEX ON audio_popularity_constants (provider); + + +CREATE FUNCTION standardized_audio_popularity(provider text, meta_data jsonb) +RETURNS FLOAT AS $$ + SELECT ($2->>metric)::FLOAT / (($2->>metric)::FLOAT + constant) + FROM audio_popularity_constants WHERE provider=$1; +$$ +LANGUAGE SQL +STABLE +RETURNS NULL ON NULL INPUT; + + +CREATE MATERIALIZED VIEW audio_view AS + SELECT + identifier, + created_on, + updated_on, + ingestion_type, + provider, + source, + foreign_identifier, + foreign_landing_url, + url, + thumbnail, + filetype, + duration, + bit_rate, + sample_rate, + category, + genres, + audio_set, + alt_files, + filesize, + license, + license_version, + creator, + creator_url, + title, + meta_data, + tags, + watermarked, + last_synced_with_source, + removed_from_source, + audio_set ->> 'foreign_identifier' AS audio_set_foreign_identifier, + standardized_audio_popularity( + audio.provider, audio.meta_data + ) AS standardized_popularity + FROM audio; + +CREATE UNIQUE INDEX ON audio_view (identifier); + + + +CREATE VIEW audioset_view AS + -- DISTINCT clause exists to ensure that only one record is present for a given + -- foreign identifier/provider pair. This exists as a hard constraint in the API table + -- downstream, so we must enforce it here. The audio_set data is chosen by which audio + -- record was most recently updated (see the final section of the ORDER BY clause + -- below). 
More info here: + -- https://github.com/WordPress/openverse-catalog/issues/658 + SELECT DISTINCT ON (audio_view.audio_set ->> 'foreign_identifier', audio_view.provider) + (audio_view.audio_set ->> 'foreign_identifier'::text) ::character varying(1000) AS foreign_identifier, + (audio_view.audio_set ->> 'title'::text) ::character varying(2000) AS title, + (audio_view.audio_set ->> 'foreign_landing_url'::text) ::character varying(1000) AS foreign_landing_url, + (audio_view.audio_set ->> 'creator'::text) ::character varying(2000) AS creator, + (audio_view.audio_set ->> 'creator_url'::text) ::character varying(2000) AS creator_url, + (audio_view.audio_set ->> 'url'::text) ::character varying(1000) AS url, + (audio_view.audio_set ->> 'filesize'::text) ::integer AS filesize, + (audio_view.audio_set ->> 'filetype'::text) ::character varying(80) AS filetype, + (audio_view.audio_set ->> 'thumbnail'::text) ::character varying(1000) AS thumbnail, + audio_view.provider +FROM audio_view +WHERE (audio_view.audio_set IS NOT NULL) +ORDER BY + audio_view.audio_set ->> 'foreign_identifier', + audio_view.provider, + audio_view.updated_on DESC; diff --git a/docker/upstream_db/Dockerfile b/docker/upstream_db/Dockerfile new file mode 100644 index 00000000000..4dd89f91bc1 --- /dev/null +++ b/docker/upstream_db/Dockerfile @@ -0,0 +1,20 @@ +FROM postgres:13.6 + +ARG PGCLI_VERSION=3.2.0 + +# Container optimizations +ENV PYTHONUNBUFFERED=1 +ENV PIP_NO_CACHE_DIR=1 +ENV PIP_NO_COLOR=1 + +RUN apt-get update \ + && apt-get -yqq install \ + python3-boto3 \ + postgresql-plpython3-13 \ + python3-pip \ + libpq-dev \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && pip3 install -U pip pgcli==${PGCLI_VERSION} + +COPY *.sql /docker-entrypoint-initdb.d/ diff --git a/docker/upstream_db/env.docker b/docker/upstream_db/env.docker new file mode 100644 index 00000000000..b2356fc7c55 --- /dev/null +++ b/docker/upstream_db/env.docker @@ -0,0 +1,7 @@ +POSTGRES_DB="openledger" +POSTGRES_USER="deploy" +POSTGRES_PASSWORD="deploy" +POSTGRES_HOST="0.0.0.0" + +PGDATABASE="openledger" +PGUSER="deploy" diff --git a/docs/projects/templates/project-proposal.md b/docs/projects/templates/project-proposal.md new file mode 100644 index 00000000000..7eab113f8bf --- /dev/null +++ b/docs/projects/templates/project-proposal.md @@ -0,0 +1,44 @@ +# Project Proposal: + +## Reviewers + + + +- [ ] TBD +- [ ] TBD + +## Project summary + + + +## Goals + + + +## Requirements + + + +## Success + + + +## Participants and stakeholders + + + +## Infrastructure + + + +## Accessibility + + + +## Marketing + + + +## Required implementation plans + + diff --git a/documentation/_ext/link_issues.py b/documentation/_ext/link_issues.py new file mode 100644 index 00000000000..b2e7958bc1e --- /dev/null +++ b/documentation/_ext/link_issues.py @@ -0,0 +1,370 @@ +"""Issue linking w/ plain-text autolinking, e.g. 
#42 + +Credit: https://github.com/ignatenkobrain/sphinxcontrib-issuetracker +License: BSD + +Changes by Tony Narlock (2022-08-21): +- Type annotations + + mypy --strict, requires types-requests, types-docutils + + Python < 3.10 require typing-extensions +- TrackerConfig: Use dataclasses instead of typing.NamedTuple and hacking __new__ +- app.warn (removed in 5.0) -> Use Sphinx Logging API + + https://www.sphinx-doc.org/en/master/extdev/logging.html#logging-api +- Add PendingIssueXRef + + Typing for tracker_config and precision +- Add IssueTrackerBuildEnvironment + + Subclassed / typed BuildEnvironment with .tracker_config +- Just GitHub (for demonstration) +""" +import dataclasses +import re +import time +import typing as t + +import requests +from docutils import nodes +from sphinx.addnodes import pending_xref +from sphinx.application import Sphinx +from sphinx.config import Config +from sphinx.environment import BuildEnvironment +from sphinx.transforms import SphinxTransform +from sphinx.util import logging + + +if t.TYPE_CHECKING: + from typing import TypeGuard + +logger = logging.getLogger(__name__) + +GITHUB_API_URL = "https://api.github.com/repos/{0.project}/issues/{1}" + + +class IssueTrackerBuildEnvironment(BuildEnvironment): + tracker_config: "TrackerConfig" + issuetracker_cache: "IssueTrackerCache" + github_rate_limit: t.Tuple[float, bool] + + +class Issue(t.NamedTuple): + id: str + title: str + url: str + closed: bool + + +IssueTrackerCache = t.Dict[str, Issue] + + +@dataclasses.dataclass +class TrackerConfig: + project: str + url: str + + """ + Issue tracker configuration. + This class provides configuration for trackers, and is passed as + ``tracker_config`` arguments to callbacks of + :event:`issuetracker-lookup-issue`. + """ + + def __post_init__(self) -> None: + if self.url is not None: + self.url = self.url.rstrip("/") + + @classmethod + def from_sphinx_config(cls, config: Config) -> "TrackerConfig": + """Get tracker configuration from ``config``.""" + project = config.issuetracker_project or config.project + url = config.issuetracker_url + return cls(project=project, url=url) + + +class PendingIssueXRef(pending_xref): + tracker_config: TrackerConfig + + +class IssueReferences(SphinxTransform): + + default_priority = 999 + + def apply(self) -> None: + config = self.document.settings.env.config + tracker_config = TrackerConfig.from_sphinx_config(config) + issue_pattern = config.issuetracker_issue_pattern + title_template = None + if isinstance(issue_pattern, str): + issue_pattern = re.compile(issue_pattern) + for node in self.document.traverse(nodes.Text): + parent = node.parent + if isinstance(parent, (nodes.literal, nodes.FixedTextElement)): + # ignore inline and block literal text + continue + if isinstance(parent, nodes.reference): + continue + text = str(node) + new_nodes = [] + last_issue_ref_end = 0 + for match in issue_pattern.finditer(text): + # catch invalid pattern with too many groups + if len(match.groups()) != 1: + raise ValueError( + "issuetracker_issue_pattern must have " + "exactly one group: {!r}".format(match.groups()) + ) + # extract the text between the last issue reference and the + # current issue reference and put it into a new text node + head = text[last_issue_ref_end : match.start()] + if head: + new_nodes.append(nodes.Text(head)) + # adjust the position of the last issue reference in the + # text + last_issue_ref_end = match.end() + # extract the issue text (including the leading dash) + issuetext = match.group(0) + # extract the issue number 
(excluding the leading dash) + issue_id = match.group(1) + # turn the issue reference into a reference node + refnode = PendingIssueXRef() + + refnode["refdomain"] = None + refnode["reftarget"] = issue_id + refnode["reftype"] = "issue" + refnode["trackerconfig"] = tracker_config + reftitle = title_template or issuetext + refnode.append( + nodes.inline(issuetext, reftitle, classes=["xref", "issue"]) + ) + new_nodes.append(refnode) + if not new_nodes: + # no issue references were found, move on to the next node + continue + # extract the remaining text after the last issue reference, and + # put it into a text node + tail = text[last_issue_ref_end:] + if tail: + new_nodes.append(nodes.Text(tail)) + # find and remove the original node, and insert all new nodes + # instead + parent.replace(node, new_nodes) + + +def is_issuetracker_env( + env: t.Any, +) -> "TypeGuard['IssueTrackerBuildEnvironment']": + return hasattr(env, "issuetracker_cache") and env.issuetracker_cache is not None + + +def lookup_issue( + app: Sphinx, tracker_config: TrackerConfig, issue_id: str +) -> t.Optional[Issue]: + """ + Lookup the given issue. + The issue is first looked up in an internal cache. If it is not found, the + event ``issuetracker-lookup-issue`` is emitted. The result of this + invocation is then cached and returned. + ``app`` is the sphinx application object. ``tracker_config`` is the + :class:`TrackerConfig` object representing the issue tracker configuration. + ``issue_id`` is a string containing the issue id. + Return a :class:`Issue` object for the issue with the given ``issue_id``, + or ``None`` if the issue wasn't found. + """ + env = app.env + if is_issuetracker_env(env): + cache: IssueTrackerCache = env.issuetracker_cache + if issue_id not in cache: + issue = app.emit_firstresult( + "issuetracker-lookup-issue", tracker_config, issue_id + ) + cache[issue_id] = issue + return cache[issue_id] + return None + + +def lookup_issues(app: Sphinx, doctree: nodes.document) -> None: + """ + Lookup issues found in the given ``doctree``. + Each issue reference in the given ``doctree`` is looked up. Each lookup + result is cached by mapping the referenced issue id to the looked up + :class:`Issue` object (an existing issue) or ``None`` (a missing issue). + The cache is available at ``app.env.issuetracker_cache`` and is pickled + along with the environment. + """ + for node in doctree.traverse(PendingIssueXRef): + if node["reftype"] == "issue": + lookup_issue(app, node["trackerconfig"], node["reftarget"]) + + +def make_issue_reference(issue: Issue, content_node: nodes.inline) -> nodes.reference: + """ + Create a reference node for the given issue. + ``content_node`` is a docutils node which is supposed to be added as + content of the created reference. ``issue`` is the :class:`Issue` which + the reference shall point to. + Return a :class:`docutils.nodes.reference` for the issue. + """ + reference = nodes.reference() + reference["refuri"] = issue.url + if issue.title: + reference["reftitle"] = issue.title + if issue.closed: + content_node["classes"].append("closed") + reference.append(content_node) + return reference + + +def resolve_issue_reference( + app: Sphinx, env: BuildEnvironment, node: PendingIssueXRef, contnode: nodes.inline +) -> t.Optional[nodes.reference]: + """ + Resolve an issue reference and turn it into a real reference to the + corresponding issue. + ``app`` and ``env`` are the Sphinx application and environment + respectively. 
``node`` is a ``pending_xref`` node representing the missing + reference. It is expected to have the following attributes: + - ``reftype``: The reference type + - ``trackerconfig``: The :class:`TrackerConfig`` to use for this node + - ``reftarget``: The issue id + - ``classes``: The node classes + References with a ``reftype`` other than ``'issue'`` are skipped by + returning ``None``. Otherwise the new node is returned. + If the referenced issue was found, a real reference to this issue is + returned. The text of this reference is formatted with the :class:`Issue` + object available in the ``issue`` key. The reference title is set to the + issue title. If the issue is closed, the class ``closed`` is added to the + new content node. + Otherwise, if the issue was not found, the content node is returned. + """ + if node["reftype"] != "issue": + return None + + issue = lookup_issue(app, node["trackerconfig"], node["reftarget"]) + if issue is None: + return contnode + else: + classes = contnode["classes"] + conttext = str(contnode[0]) + formatted_conttext = nodes.Text(conttext.format(issue=issue)) + formatted_contnode = nodes.inline(conttext, formatted_conttext, classes=classes) + assert issue is not None + return make_issue_reference(issue, formatted_contnode) + return None + + +def init_cache(app: Sphinx) -> None: + if not hasattr(app.env, "issuetracker_cache"): + app.env.issuetracker_cache: "IssueTrackerCache" = {} # type: ignore + return None + + +def check_project_with_username(tracker_config: TrackerConfig) -> None: + if "/" not in tracker_config.project: + raise ValueError(f"username missing in project name: {tracker_config.project}") + + +HEADERS = {"User-Agent": "sphinxcontrib-issuetracker v{}".format("1.0")} + + +def get(app: Sphinx, url: str) -> t.Optional[requests.Response]: + """ + Get a response from the given ``url``. + ``url`` is a string containing the URL to request via GET. ``app`` is the + Sphinx application object. + Return the :class:`~requests.Response` object on status code 200, or + ``None`` otherwise. If the status code is not 200 or 404, a warning is + emitted via ``app``. 
+ """ + response = requests.get(url, headers=HEADERS) + if response.status_code == requests.codes.ok: + return response + elif response.status_code != requests.codes.not_found: + msg = "GET {0.url} failed with code {0.status_code}" + logger.warning(msg.format(response)) + + return None + + +def lookup_github_issue( + app: Sphinx, tracker_config: TrackerConfig, issue_id: str +) -> t.Optional[Issue]: + check_project_with_username(tracker_config) + + env = app.env + if is_issuetracker_env(env): + # Get rate limit information from the environment + timestamp, limit_hit = getattr(env, "github_rate_limit", (0, False)) + + if limit_hit and time.time() - timestamp > 3600: + # Github limits applications hourly + limit_hit = False + + if not limit_hit: + url = GITHUB_API_URL.format(tracker_config, issue_id) + response = get(app, url) + if response: + rate_remaining = response.headers.get("X-RateLimit-Remaining") + assert rate_remaining is not None + if rate_remaining.isdigit() and int(rate_remaining) == 0: + logger.warning("Github rate limit hit") + env.github_rate_limit = (time.time(), True) + issue = response.json() + closed = issue["state"] == "closed" + return Issue( + id=issue_id, + title=issue["title"], + closed=closed, + url=issue["html_url"], + ) + else: + logger.warning( + f"Github rate limit exceeded, not resolving issue {issue_id}" + ) + return None + + +BUILTIN_ISSUE_TRACKERS: t.Dict[str, t.Any] = { + "github": lookup_github_issue, +} + + +def init_transformer(app: Sphinx) -> None: + if app.config.issuetracker_plaintext_issues: + app.add_transform(IssueReferences) + + +def connect_builtin_tracker(app: Sphinx) -> None: + if app.config.issuetracker: + tracker = BUILTIN_ISSUE_TRACKERS[app.config.issuetracker.lower()] + app.connect("issuetracker-lookup-issue", tracker) + + +def setup(app: Sphinx) -> t.Dict[str, t.Any]: + app.add_config_value("mybase", "https://github.com/cihai/unihan-etl", "env") + app.add_event("issuetracker-lookup-issue") + app.connect("builder-inited", connect_builtin_tracker) + app.add_config_value("issuetracker", None, "env") + app.add_config_value("issuetracker_project", None, "env") + app.add_config_value("issuetracker_url", None, "env") + # configuration specific to plaintext issue references + app.add_config_value("issuetracker_plaintext_issues", True, "env") + app.add_config_value( + "issuetracker_issue_pattern", + re.compile( + r"#(\d+)", + ), + "env", + ) + app.add_config_value("issuetracker_title_template", None, "env") + app.connect("builder-inited", init_cache) + app.connect("builder-inited", init_transformer) + app.connect("doctree-read", lookup_issues) + app.connect("missing-reference", resolve_issue_reference) + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/documentation/changelogs/api/index.md b/documentation/changelogs/api/index.md new file mode 100644 index 00000000000..cb43a9b9192 --- /dev/null +++ b/documentation/changelogs/api/index.md @@ -0,0 +1,10 @@ +# API changelogs + +Changelogs for . + +```{toctree} +:glob: +:reverse: +:titlesonly: +* +``` diff --git a/documentation/changelogs/frontend/index.md b/documentation/changelogs/frontend/index.md new file mode 100644 index 00000000000..620ca243132 --- /dev/null +++ b/documentation/changelogs/frontend/index.md @@ -0,0 +1,11 @@ +# Frontend changelogs + +Changelogs for . 
+ +```{toctree} +:glob: +:reverse: +:titlesonly: + +* +``` diff --git a/documentation/changelogs/index.md b/documentation/changelogs/index.md new file mode 100644 index 00000000000..061d54d341a --- /dev/null +++ b/documentation/changelogs/index.md @@ -0,0 +1,18 @@ +# Changelogs + +These changelog files are automatically generated when maintainers run the +"Release app" workflow. + +The lists of changes are derived using the "stack" labels and are organised by +type of change. The documents are generated using the +[`release-drafter`](https://github.com/release-drafter/release-drafter) +workflow. + +```{toctree} +:maxdepth: 1 +:glob: + +api/index +frontend/index +ingestion_server/index +``` diff --git a/documentation/changelogs/ingestion_server/index.md b/documentation/changelogs/ingestion_server/index.md new file mode 100644 index 00000000000..35e3ac69c1e --- /dev/null +++ b/documentation/changelogs/ingestion_server/index.md @@ -0,0 +1,11 @@ +# Ingestion server changelogs + +Changelogs for the internal ingestion server service. + +```{toctree} +:glob: +:reverse: +:titlesonly: + +* +``` diff --git a/documentation/conf.py b/documentation/conf.py index 6fa093d70bc..6dfa216b051 100644 --- a/documentation/conf.py +++ b/documentation/conf.py @@ -1,3 +1,18 @@ +import sys +from pathlib import Path + + +def add_ext_to_path(): + """Add the ``_ext`` directory to the module path""" + cwd = Path(__file__).parent + project_root = cwd.parent + + sys.path.insert(0, str(project_root)) + sys.path.insert(0, str(cwd / "_ext")) + + +add_ext_to_path() + # Configuration file for the Sphinx documentation builder. # # For the full list of built-in configuration values, see the documentation: @@ -12,7 +27,7 @@ # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration -extensions = ["myst_parser"] +extensions = ["myst_parser", "link_issues"] myst_heading_anchors = 6 # Add anchors to all headers, this is disabled by default. source_suffix = {".rst": "restructuredtext", ".md": "markdown"} @@ -37,3 +52,6 @@ html_static_path = ["_static"] html_show_copyright = False + +issuetracker = "github" +issuetracker_project = "WordPress/openverse" diff --git a/documentation/guides/api/index.md b/documentation/guides/api/index.md index e961259a558..68f2bce31ac 100644 --- a/documentation/guides/api/index.md +++ b/documentation/guides/api/index.md @@ -3,5 +3,6 @@ ```{toctree} :maxdepth: 1 +quickstart test ``` diff --git a/documentation/guides/api/quickstart.md b/documentation/guides/api/quickstart.md new file mode 100644 index 00000000000..2295d01d058 --- /dev/null +++ b/documentation/guides/api/quickstart.md @@ -0,0 +1,104 @@ +# Quickstart guide + +This is the quick start guide for setting up and running the API locally. + +## Prerequisites + +Refer to the [general setup guide](../general_setup.md) for setting up the +prerequisites. Refer to the 'API' column in the +[requirement matrix](../general_setup.md#requirement-matrix) to know what you +need to run this. + +## Starting up + +1. Ensure you download, install and set up all prerequisites. Ensure that the + Docker daemon is running. + +2. Clone the repository to your computer. Then switch to the cloned directory. + If you're planning to contribute, fork the repo and clone your fork instead. 
+ + ```console + $ git clone https://github.com/WordPress/openverse.git # or your fork + $ cd openverse/ + ``` + + If you followed the general setup guide and installed + [GitHub CLI](../general_setup.md#github-cli), you can clone more simply using + the `gh` command. + + ```console + $ gh repo clone WordPress/openverse # or your fork + $ cd openverse/ + ``` + +3. Bring the ingestion server and API up, along with all their dependent + services. + + ```console + $ just api/up + ``` + + The `api/up` recipe orchestrates the following services: `cache`, `db`, + `upstream_db`, `es`, `indexer_worker`, `ingestion_server`, `web` and `proxy`. + + Now you should be able to access the following endpoints: + + - the list of ingestion jobs on + [http://localhost:50281/task](http://localhost:50281/task) + - the API documentation on [http://localhost:50280](http://localhost:50280) + +4. Load the sample data. This step can take a few minutes to complete. + + ```console + $ just api/init + ``` + + ````{admonition} Troubleshooting + If this step fails, cleaning up and restarting usually fixes it. + + ```console + $ just down -v + $ just api/init + ``` + ```` + +5. With the data loaded, the API can now return JSON responses to your HTTP + requests. + + ```console + $ just api/stats + just _curl-get "images/stats/" localhost:50280 + curl "http://localhost:50280/v1/images/stats/" + [{"source_name":"flickr","display_name":"Flickr","source_url":"https://www.flickr.com","logo_url":null,"media_count":2500},{"source_name":"stocksnap","display_name":"StockSnap","source_url":"https://stocksnap.io","logo_url":null,"media_count":2500}]% + ``` + + ````{tip} + [`jq`](https://stedolan.github.io/jq/) is a tool for parsing and manipulating + JSON data. If you have `jq` installed, you can pipe the response to it and + transform it. + + ```console + $ just api/stats | jq '.[0]' + { + "source_name": "flickr", + "display_name": "Flickr", + "source_url": "https://www.flickr.com", + "logo_url": null, + "media_count": 2500 + } + + $ just api/stats 'audio' | jq '[.[] | .source_name]' + [ + "freesound", + "jamendo", + "wikimedia_audio" + ] + ``` + + `jq` is great; we recommend you + [download](https://stedolan.github.io/jq/download/) it. + ```` + +## Shutting down + +Refer to the [common instructions](../quickstart.md#shutting-down). diff --git a/documentation/guides/document.md b/documentation/guides/documentation/guidelines.md similarity index 100% rename from documentation/guides/document.md rename to documentation/guides/documentation/guidelines.md diff --git a/documentation/guides/documentation/index.md b/documentation/guides/documentation/index.md new file mode 100644 index 00000000000..9a34154322f --- /dev/null +++ b/documentation/guides/documentation/index.md @@ -0,0 +1,8 @@ +# Documentation + +```{toctree} +:maxdepth: 1 + +quickstart +guidelines +``` diff --git a/documentation/guides/documentation/quickstart.md b/documentation/guides/documentation/quickstart.md new file mode 100644 index 00000000000..e96ca87b99c --- /dev/null +++ b/documentation/guides/documentation/quickstart.md @@ -0,0 +1,62 @@ +# Quickstart guide + +This is the quick start guide for setting up and running the documentation +locally. + +## Prerequisites + +Refer to the [general setup guide](../general_setup.md) for setting up the +prerequisites. Refer to the 'Docs' column in the +[requirement matrix](../general_setup.md#requirement-matrix) to know what you +need to run this. + +## Starting up + +1. Ensure you download, install and set up all prerequisites.
+ +2. Clone the repository to your computer. Then switch to the cloned directory. + If you're planning to contribute, fork the repo and clone your fork instead. + + ```console + $ git clone https://github.com/WordPress/openverse.git # or your fork + $ cd openverse/ + ``` + + If you followed the general setup guide and installed + [GitHub CLI](../general_setup.md#github-cli), you can clone more simply using + the `gh` command. + + ```console + $ gh repo clone WordPress/openverse # or your fork + $ cd openverse/ + ``` + +3. Install only the Python dependencies. You do not need to install any Node.js + dependencies to run the documentation. + + ```console + $ just documentation/install + ``` + +4. Run the documentation live server. Once this is done, you should be able to + see the documentation on [http://127.0.0.1:50230](http://127.0.0.1:50230). + + ```console + $ just documentation/live + ``` + + ````{admonition} Troubleshooting + Sometimes, the documentation does not refresh to reflect changes in the table + of contents or changes to the file system. In those cases, you can clean the + caches and restart the live server. + + ```console + $ just documentation/clean + $ just documentation/live + ``` + ```` + +## Shutting down + +You can press Ctrl + C to terminate the documentation live +server. diff --git a/documentation/guides/frontend/analytics.md b/documentation/guides/frontend/analytics.md new file mode 100644 index 00000000000..93e37a3427d --- /dev/null +++ b/documentation/guides/frontend/analytics.md @@ -0,0 +1,23 @@ +# Analytics + +Analytics on the frontend requires the Plausible setup to be up and running. + +## Starting up + +Bring up the Docker services needed by the frontend. This includes Plausible and +the PostgreSQL and Clickhouse databases it needs. + +```console +$ just frontend/up +``` + +The `frontend/up` recipe orchestrates the following services: `plausible_ch`, +`plausible_db` and `plausible`. + +Now you should be able to access the following endpoints: + +- the Plausible UI on [http://localhost:50288](http://localhost:50288) + +## Shutting down + +Refer to the [common instructions](../quickstart.md#shutting-down). diff --git a/documentation/guides/frontend/index.md b/documentation/guides/frontend/index.md index 1fe2e42cef2..80e89ab1419 100644 --- a/documentation/guides/frontend/index.md +++ b/documentation/guides/frontend/index.md @@ -3,5 +3,6 @@ ```{toctree} :maxdepth: 1 +quickstart test ``` diff --git a/documentation/guides/frontend/quickstart.md b/documentation/guides/frontend/quickstart.md new file mode 100644 index 00000000000..021abfacabf --- /dev/null +++ b/documentation/guides/frontend/quickstart.md @@ -0,0 +1,64 @@ +# Quickstart guide + +This is the quick start guide for setting up and running the frontend locally. + +## Prerequisites + +Refer to the [general setup guide](../general_setup.md) for setting up the +prerequisites. Refer to the 'Frontend' column in the +[requirement matrix](../general_setup.md#requirement-matrix) to know what you +need to run this. + +## Starting up + +1. Ensure you download, install and set up all prerequisites. Ensure that the + Docker daemon is running. + +2. Clone the repository to your computer. Then switch to the cloned directory. + If you're planning to contribute, fork the repo and clone your fork instead.
+ + ```console + $ git clone https://github.com/WordPress/openverse.git # or your fork + $ cd openverse/ + ``` + + If you followed the general setup guide and installed + [GitHub CLI](../general_setup.md#github-cli), you can clone more simply using + the `gh` command. + + ```console + $ gh repo clone WordPress/openverse # or your fork + $ cd openverse/ + ``` + +3. Install only the Node.js dependencies. You do not need to install any Python + dependencies to run the frontend. + + ```console + $ just node-install + ``` + +4. To bring up the frontend, we have another `just` recipe. We have `just` + recipes for almost everything. + + ```console + $ just frontend/run dev + ``` + + If you want your frontend to use a different API instance, you can set the + `API_URL` environment variable to point to that instance. If you had the + [API running locally](../api/quickstart.md), you can do the following to use + the local API with the frontend. + + ```console + $ env API_URL="http://localhost:50280" just frontend/run dev + ``` + + Now you should be able to access the following endpoints: + + - the Openverse search engine frontend on + [http://localhost:8443](http://localhost:8443) + +## Shutting down + +You can press Ctrl + C to terminate the frontend process. diff --git a/documentation/guides/general_setup.md b/documentation/guides/general_setup.md index 7d98ea91ef2..5652021a7fc 100644 --- a/documentation/guides/general_setup.md +++ b/documentation/guides/general_setup.md @@ -27,6 +27,25 @@ Installation instructions for WSL on Windows 10 and 11 can be found in Microsoft's [official documentation](https://docs.microsoft.com/en-us/windows/wsl/install). +## Requirement matrix + +Based on which part of the Openverse stack you are contributing to, you might +not need everything mentioned on this page. Refer to this chart to see which +prerequisites are required to get started with your contributions. + +| Requirement | Docs | Ingestion server | API | Frontend | Management | +| ------------------ | ---- | ---------------- | --- | -------------- | ---------- | +| [Git](#git) | ✅ | ✅ | ✅ | ✅ | ✅ | +| [`just`](#just) | ✅ | ✅ | ✅ | ✅ | ✅ | +| [Python](#python) | ✅ | ➖ | ➖ | ➖ | ✅ | +| [Node.js](#nodejs) | ➖ | ➖ | ➖ | ✅ | ✅ | +| [Docker](#docker) | ➖ | ✅ | ✅ | ❔[^analytics] | ➖ | + +Here ✅ means required, ➖ means not required and ❔ means conditionally +required. + +[^analytics]: This is required to run analytics, not required otherwise. + ## Required setup The following setup steps are needed to set up a local copy of Openverse and do @@ -47,11 +66,48 @@ If you see `git version x.y.z`, you have Git installed. If you see an error, you need to install it by following the [official instructions](https://git-scm.com/downloads). +### `just` + +We use `just` as our command runner. It makes it easier to run cumbersome +commands that are generally needed a lot during development, like bringing up +our backend services or linting the codebase. + +`just` can be [installed](https://github.com/casey/just#installation) for a host +of operating systems via their respective +[package managers](https://github.com/casey/just#packages) or using +[pre-built binaries](https://github.com/casey/just#pre-built-binaries) available +for some operating systems. + +````{tip} +If you run `just` inside the Openverse root repo without a recipe name, you can +see a huge list of all the different recipes present in the project.
+
+## Conditional setup
+
+Depending on the extent of your contribution to the project, you may only need
+a subset of the following requirements. To see which of these you need, refer
+to the [requirement matrix](#requirement-matrix) above.
+
 ### Python
 
 ```{note}
-This is only needed for working with the Python stack outside of Docker and for
-Python automations.
+This is only needed if you are working with the following:
+
+- documentation
+- Python automations
+- API (outside Docker, for debugging purposes)
+- ingestion server (outside Docker, for debugging purposes)
 ```
 
 We use Python 3 in the backend of our stack. So to work with that part of the
@@ -75,12 +131,15 @@
 set up a virtualenv, provided you have the version of Python mentioned in the
 `Pipfile` installed and accessible locally.
 
 You can install Pipenv by following the
-[official instructions](https://pipenv.pypa.io/en/latest/install/#installing-pipenv).
+[official instructions](https://pipenv.pypa.io/en/latest/installation/#installing-pipenv).
 
 ### Node.js
 
 ```{note}
-This is only needed for working with the frontend and for Node.js automations.
+This is only needed if you are working with the following:
+
+- frontend
+- Node.js automations
 ```
 
 We use Node.js in the frontend of our stack. So to work with that part of the
@@ -108,8 +167,11 @@
 onwards. So no installation is needed.
 
 ### Docker
 
 ```{note}
-This is only needed for working with the Python stack. The Node.js stack runs
-on the host.
+This is only needed if you are working with the following:
+
+- API
+- ingestion server
+- frontend
 ```
 
 Our Python packages are published as Docker images to make it easier to work
@@ -139,31 +201,12 @@
 If you see `Docker Compose version vx.y.z`, you have Docker Compose installed.
 If you see an error, you need to install it by following the
 [official instructions](https://docs.docker.com/compose/install/).
 
-### `just`
-
-We use `just` as our command runner. It makes it easier to run cumbersome
-commands that are generally needed a lot during development, like bringing up
-our backend services or linting the codebase.
-
-`just` can be [installed](https://github.com/casey/just#installation) for a host
-of operating systems via their respective pacakge managers.
-
-````{tip}
-If you run `just` inside the Openverse root repo without a recipe name, you can
-see a huge list of all the different recipes present in the project.
+### GitHub
 
-```console
-$ cd openverse/
-$ just
+```{note}
+This is only needed if you want to contribute code to Openverse. The codebase
+can be read, accessed and downloaded without an account.
 ```
-````
-
-## Development dependencies
-
-The following setup steps are needed to not just setup Openverse but to also
-contribute code to the project.
-
-### GitHub
 
 The source code for Openverse is hosted on GitHub. To contribute to Openverse,
 you will also need to [sign up](https://github.com/signup) for a GitHub
 account.
@@ -200,6 +243,17 @@
 different editor if you have a preference.
 
 The following setup steps are only needed in very specific scenarios.
 
+### coreutils
+
+```{note}
+This is only needed on macOS.
+```
+
+`coreutils` adds the GNU core utilities to macOS; we specifically need the
+`timeout` command from this package. You can install the
+[`coreutils` formula](https://formulae.brew.sh/formula/coreutils) using
+[Homebrew](https://brew.sh), which is a package manager for macOS.
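+
+With Homebrew already set up, the installation is a single command:
+
+```console
+$ brew install coreutils
+```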
+
 ### mkcert
 
 ```{note}
@@ -220,6 +274,7 @@
 found for -lssl` error when running the project outside Docker.
 ```
 
 This `psycopg2` package can fail to install on Apple Silicon Macs with the
-`ld: library not found for -lssl` error. To rectify this, install `openssl` via
-Homebrew and set `LDFLAGS` and `CPPFLAGS` as per the instructions in the output
-of the Homebrew installation.
+`ld: library not found for -lssl` error. To rectify this, install the
+[`openssl` formula](https://formulae.brew.sh/formula/openssl@3) using
+[Homebrew](https://brew.sh/) and set `LDFLAGS` and `CPPFLAGS` as per the
+instructions in the output of the Homebrew installation.
diff --git a/documentation/guides/index.md b/documentation/guides/index.md
index f374a12d768..bad3aff9b53 100644
--- a/documentation/guides/index.md
+++ b/documentation/guides/index.md
@@ -2,20 +2,29 @@
 
 Here you will find guides to get you started with Openverse development.
 
+## General guides
+
 ```{toctree}
 :maxdepth: 1
 
 general_setup
 quickstart
+# run
+# test
+# https
+# publish
+# deploy
+logging
+zero-downtime-database-management
+```
+
+## Stack-specific guides
+
+```{toctree}
+:maxdepth: 2
+
 api/index
 ingestion_server/index
 frontend/index
-run
-test
-https
-document
-publish
-deploy
-logging
-zero-downtime-database-management
+documentation/index
 ```
diff --git a/documentation/guides/ingestion_server/index.md b/documentation/guides/ingestion_server/index.md
index 573fb671e72..3e323c36586 100644
--- a/documentation/guides/ingestion_server/index.md
+++ b/documentation/guides/ingestion_server/index.md
@@ -3,5 +3,6 @@
 ```{toctree}
 :maxdepth: 1
 
+quickstart
 test
 ```
diff --git a/documentation/guides/ingestion_server/quickstart.md b/documentation/guides/ingestion_server/quickstart.md
new file mode 100644
index 00000000000..5e4ac89e9e1
--- /dev/null
+++ b/documentation/guides/ingestion_server/quickstart.md
@@ -0,0 +1,51 @@
+# Quickstart guide
+
+This is the quick start guide for setting up and running the ingestion server
+locally.
+
+## Prerequisites
+
+Refer to the [general setup guide](../general_setup.md) for setting up the
+prerequisites. Refer to the 'Ingestion server' column in the
+[requirement matrix](../general_setup.md#requirement-matrix) to know what you
+need to run this.
+
+## Starting up
+
+1. Ensure you download, install and set up all prerequisites. Ensure that the
+   Docker daemon is running.
+
+2. Clone the repository to your computer. Then switch to the cloned directory.
+   If you're planning to contribute, fork the repo and clone your fork instead.
+
+   ```console
+   $ git clone https://github.com/WordPress/openverse.git # or your fork
+   $ cd openverse/
+   ```
+
+   If you followed the general setup guide and installed
+   [GitHub CLI](../general_setup.md#github-cli), you can clone more simply
+   using the `gh` command.
+
+   ```console
+   $ gh repo clone WordPress/openverse # or your fork
+   $ cd openverse/
+   ```
+
+3. Bring the ingestion server up, along with all its dependent services.
+
+   ```console
+   $ just ingestion_server/up
+   ```
+
+   The `ingestion_server/up` recipe orchestrates the following services: `db`,
+   `upstream_db`, `es`, `indexer_worker` and `ingestion_server`.
+
+   Now you should be able to access the following endpoints:
+
+   - the list of ingestion jobs on
+     [http://localhost:50281/task](http://localhost:50281/task)
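+
+   To check the same thing from the command line, you can request that
+   endpoint with `curl` (assuming `curl` is installed; the port is the
+   default one shown above):
+
+   ```console
+   $ curl http://localhost:50281/task
+   ```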
+
+## Shutting down
+
+Refer to the [common instructions](../quickstart.md#shutting-down).
diff --git a/documentation/guides/quickstart.md b/documentation/guides/quickstart.md
index 3efda9553ee..e767ec9e62c 100644
--- a/documentation/guides/quickstart.md
+++ b/documentation/guides/quickstart.md
@@ -1,14 +1,32 @@
 # Quickstart guide
 
+
 This guide covers the steps to get the Openverse stack running locally on your
-computer.
+computer. This guide is for setting up the full stack, which includes the API,
+the ingestion server and the frontend.
+
+## Stack-specific quickstarts
+
+It is very unlikely that you want to contribute to everything, everywhere, all
+at once. In all likelihood, you intend to contribute to a narrower slice of
+the stack. In such cases, you might find it more beneficial to go through one
+of these stack-specific quickstart guides.
+
+- [API](./api/quickstart.md)
+- [Frontend](./frontend/quickstart.md)
+- [Ingestion server](./ingestion_server/quickstart.md)
+- [Documentation](./documentation/quickstart.md)
+
+That said, there is something very appealing about running the full stack
+locally, which is what this guide is all about.
 
 ## Prerequisites
 
 Refer to the [general setup guide](./general_setup.md) for setting up the
 prerequisites.
 
-## Steps
+## Starting up
 
 1. Ensure you download, install and set up all prerequisites. Ensure that the
    Docker daemon is running.
@@ -30,30 +48,53 @@
    $ cd openverse/
    ```
 
-3. Install all dependencies. This is generally not advisable unless you plan to
-   work on everything! This step won't install API or ingestion server
-   dependencies because they are meant to run using Docker containers.
+3. Install all dependencies. This step installs dependencies for the frontend,
+   the documentation and the automations (both Node.js and Python) but won't
+   install API or ingestion server dependencies because they are meant to run
+   using Docker containers.
 
    ```console
   $ just install
   ```
 
+   To be more specific with your install, you can run either of the following.
+
+   ```console
+   $ just node-install # only frontend and Node.js automations
+   $ just py-install # only documentation and Python automations
+   ```
+
-4. Bring the ingestion server and API up, along with all their dependent
-   services. Once this is done, you should be able to see the API documentation
-   on [http://localhost:50280](http://localhost:50280).
+4. Spin up and orchestrate all Docker services.
 
   ```console
   $ just up
   ```
 
+   The `up` recipe orchestrates the following services: `cache`, `db`,
+   `upstream_db`, `es`, `indexer_worker`, `ingestion_server`, `web`, `proxy`,
+   `plausible_ch`, `plausible_db` and `plausible`.
+
+   Now you should be able to access the following endpoints:
+
+   - the list of ingestion jobs on
+     [http://localhost:50281/task](http://localhost:50281/task)
+   - the API documentation on [http://localhost:50280](http://localhost:50280)
+   - the Plausible UI on [http://localhost:50288](http://localhost:50288)
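+
+   As a quick smoke test, you can also confirm from a terminal that the API
+   is responding before moving on (assuming `curl` is installed):
+
+   ```console
+   $ curl -I http://localhost:50280
+   ```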
+
+5. Load the sample data. This step can take a few minutes to complete.
 
   ```console
   $ just init
   ```
 
-   The ingestion server is working fine.
+   ````{admonition} Troubleshooting
+   If this step fails, cleaning up and restarting usually fixes it.
+
+   ```console
+   $ just down -v
+   $ just api/init
+   ```
+   ````
 
 6. With the data loaded, the API can now return JSON responses to your HTTP
    requests.
@@ -65,8 +106,10 @@
    [{"source_name":"flickr","display_name":"Flickr","source_url":"https://www.flickr.com","logo_url":null,"media_count":2500},{"source_name":"stocksnap","display_name":"StockSnap","source_url":"https://stocksnap.io","logo_url":null,"media_count":2500}]%
 
-   If you don't have [`jq`](https://stedolan.github.io/jq/) installed, you
-   should, it's great. If you do, you can pipe the response through that.
+   ````{tip}
+   [`jq`](https://stedolan.github.io/jq/) is a tool for parsing and
+   manipulating JSON data. If you have `jq` installed, you can pipe the
+   response to it and transform it.
 
   ```console
   $ just api/stats | jq '.[0]'
   {
     "source_name": "flickr",
     "display_name": "Flickr",
     "source_url": "https://www.flickr.com",
     "logo_url": null,
     "media_count": 2500
   }
+
+   $ just api/stats 'audio' | jq '[.[] | .source_name]'
+   [
+     "freesound",
+     "jamendo",
+     "wikimedia_audio"
+   ]
   ```
 
-   The API is working fine.
+   `jq` is great; we recommend you
+   [download](https://stedolan.github.io/jq/download/) it.
+   ````
 
 7. To bring up the frontend, we have another `just` recipe. We have `just`
-   recipes for almost everything. You can open
-   [http://localhost:8443](http://localhost:8443) in a browser to see your very
-   own copy of Openverse.
+   recipes for almost everything.
 
   ```console
   $ env API_URL="http://localhost:50280" just frontend/run dev
   ```
 
-   The frontend is working fine.
+   Now you should be able to access the following endpoints:
+
+   - the Openverse search engine frontend on
+     [http://localhost:8443](http://localhost:8443)
+
+## Shutting down
+
+1. You can press Ctrl + C to terminate the frontend process.
 
-8. You can Ctrl + C to terminate the frontend process.
-   Then use another `just` recipe to bring down all the services. If you include
-   the `-v` flag, it'll remove all volumes too.
+2. For services running inside Docker, like the API, ingestion server and
+   Plausible, use another `just` recipe to bring them down.
 
   ```console
   $ just down
-   $ just down -v # delete Docker volumes
   ```
 
-9. To see the logs for all services, you can use the `logs` recipe. To see the
-   logs for a particular service, pass the service name as an argument.
+   ````{tip}
+   If you include the `-v` flag, all Docker volumes (including their data)
+   will be deleted too, which is useful in case you want a fresh start.
 
   ```console
-   $ just logs
-   $ just logs web # only see logs for web
+   $ just down -v
   ```
+   ````
diff --git a/documentation/index.md b/documentation/index.md
index dd1c57427df..5fc56893b22 100644
--- a/documentation/index.md
+++ b/documentation/index.md
@@ -1,8 +1,19 @@
-# Openverse documentation
+# Openverse
+
+Openverse is a search engine for openly licensed media.
+
+## Documentation
+
+To set up your computer and install prerequisites for developing Openverse,
+refer to the [general setup guide](./guides/general_setup.md). To run the
+Openverse stack locally on your computer, refer to the
+[quickstart guide](./guides/quickstart.md).
+
+```{note}
 This documentation is for developers who are building Openverse. If you want to
 use the Openverse API, you should instead refer to the
 [API consumer documentation](https://api.openverse.engineering/).
+```
 
 ## Related documentation
 
@@ -18,6 +29,7 @@
 
 guides/index
 reference/index
+changelogs/index
 ```
 
 ```{toctree}
diff --git a/frontend/Dockerfile b/frontend/Dockerfile
index c1c69190f53..62feb586b9f 100644
--- a/frontend/Dockerfile
+++ b/frontend/Dockerfile
@@ -1,3 +1,7 @@
+###################
+# Node.js builder #
+###################
+
 FROM node:16-alpine as builder
 
 # Install system packages needed to build on macOS
@@ -42,9 +46,9 @@
 RUN echo "{\"release\":\"${RELEASE}\"}" > /home/node/frontend/src/static/version.json
 
 RUN pnpm build:only
 
-###################
-# Nuxt app
-###################
+############
+# Nuxt app #
+############
 
 FROM node:16-alpine as app
diff --git a/frontend/justfile b/frontend/justfile
index 0b197c9fd2c..d6a790f6eb4 100644
--- a/frontend/justfile
+++ b/frontend/justfile
@@ -10,6 +10,25 @@
 NO_COLOR := "\\033[0m"
 
     just --list --unsorted
 
+######
+# Up #
+######
+
+# Bring up services specific to the frontend profile
+up *flags:
+    env COMPOSE_PROFILES="frontend" just ../up {{ flags }}
+
+# Wait for all profile services to be up
+wait-up: up
+    echo "🚧 TODO"
+
+# Set up user and test site in Plausible
+init: wait-up
+    cd .. && ./setup_plausible.sh
+
 # Run a package.json script via pnpm
 run *args:
     pnpm run {{ args }}
+
+# Typecheck the frontend code without emitting compiled output
+types:
+    cd .. && pnpm exec vue-tsc -p frontend --noEmit
diff --git a/frontend/nuxt.config.ts b/frontend/nuxt.config.ts
index f5e888acd9c..8e5c55c9dd3 100644
--- a/frontend/nuxt.config.ts
+++ b/frontend/nuxt.config.ts
@@ -147,6 +147,8 @@
 const openverseLocales = [
   ...(locales ?? []),
 ].filter((l) => Boolean(l.iso)) as LocaleObject[]
 
+const port = process.env.PORT || 8443
+
 const config: NuxtConfig = {
   // eslint-disable-next-line no-undef
   version: pkg.version, // used to purge cache :)
@@ -161,7 +163,7 @@
   srcDir: "src/",
   modern: "client",
   server: {
-    port: process.env.PORT || 8443,
+    port,
     https: process.env.LOCAL_SSL
       ? {
           key: fs.readFileSync(path.resolve(__dirname, "localhost+1-key.pem")),
@@ -194,7 +196,6 @@
   buildModules: [
     "@nuxt/typescript-build",
     "@nuxtjs/composition-api/module",
-    "@nuxt/postcss8",
     "@nuxtjs/style-resources",
     "@nuxtjs/svg",
     "@nuxtjs/eslint-module",
@@ -203,9 +204,11 @@
   modules: [
     "portal-vue/nuxt",
     "@nuxtjs/i18n",
+    "@nuxtjs/proxy",
     "@nuxtjs/redirect-module",
     "@nuxtjs/sentry",
     "cookie-universal-nuxt",
+    "vue-plausible",
     "~/modules/prometheus.ts",
     // Sitemap must be last to ensure that even routes created by other modules are added
     "@nuxtjs/sitemap",
@@ -277,24 +280,26 @@
     filenames,
     friendlyErrors: false,
     postcss: {
-      plugins: {
-        tailwindcss: {
-          config: path.resolve(__dirname, "tailwind.config.js"),
+      postcssOptions: {
+        preset: {
+          features: {
+            // Disable conversion of logical properties to physical properties
+            // e.g.: `margin-inline-start` is NOT converted to `margin-left`
+            // Necessary for RTL support.
+            "logical-properties-and-values": false,
+          },
+        },
+        plugins: {
+          tailwindcss: {
+            config: path.resolve(__dirname, "tailwind.config.js"),
+          },
+          "postcss-focus-visible": {},
         },
-        autoprefixer: {},
-        "postcss-focus-visible": {},
       },
     },
     extend(config, ctx) {
       // Enables use of IDE debuggers
       config.devtool = ctx.isClient ? "source-map" : "inline-source-map"
-
-      // Mitigates import errors for Pinia
-      config.module?.rules.push({
-        test: /\.mjs$/,
-        include: /node_modules/,
-        type: "javascript/auto",
-      })
     },
   },
   storybook: {
@@ -328,6 +333,26 @@
     },
   },
+  proxy: {
+    // The key is appended to the address in the value.
+    // Parentheses matter here: `??` binds before the ternary, so without
+    // them a set `PLAUSIBLE_ORIGIN` would be silently ignored.
+    "/api/event":
+      process.env.PLAUSIBLE_ORIGIN ??
+      (isProd ? "https://plausible.io" : "http://localhost:50288"),
+  },
+  plausible: {
+    trackLocalhost: !isProd,
+  },
+  publicRuntimeConfig: {
+    plausible: {
+      // This is the current domain of the site.
+      domain:
+        process.env.SITE_DOMAIN ?? (isProd ? "openverse.org" : "localhost"),
+      apiHost:
+        process.env.SITE_DOMAIN ??
+        (isProd ? "https://openverse.org" : `http://localhost:${port}`),
+    },
+  },
 }
 
 export default config
diff --git a/frontend/package.json b/frontend/package.json
index b3808ee4891..7b62322153b 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -59,11 +59,11 @@
     "create:component": "./bin/create-component.sh"
   },
   "dependencies": {
-    "@nuxt/components": "^2.1.6",
-    "@nuxt/postcss8": "^1.1.3",
-    "@nuxt/vue-app": "^2.15.8",
+    "@nuxt/components": "^2.2.1",
+    "@nuxt/vue-app": "^2.16.3",
     "@nuxtjs/composition-api": "^0.33.1",
     "@nuxtjs/i18n": "^7.0.3",
+    "@nuxtjs/proxy": "^2.1.0",
     "@nuxtjs/redirect-module": "^0.3.1",
     "@nuxtjs/sentry": "^7.1.4",
     "@nuxtjs/sitemap": "^2.4.0",
@@ -89,8 +89,8 @@
     "focus-visible": "^5.2.0",
     "glob": "^8.0.1",
     "node-html-parser": "^5.3.3",
-    "nuxt": "^2.15.4",
-    "pinia": "^2.0.16",
+    "nuxt": "^2.16.3",
+    "pinia": "^2.0.33",
     "portal-vue": "^2.1.7",
     "postcss-focus-visible": "^6.0.4",
     "prom-client": "^14.0.1",
@@ -98,8 +98,9 @@
     "seeded-rand": "^2.0.1",
     "throttle-debounce": "^5.0.0",
     "uuid": "^8.3.2",
-    "vue": "^2.7.10",
-    "vue-i18n": "^8.26.7"
+    "vue": "^2.7.14",
+    "vue-i18n": "^8.26.7",
+    "vue-plausible": "^1.3.2"
   },
   "devDependencies": {
     "@babel/core": "^7.20.12",
@@ -110,13 +111,13 @@
     "@babel/runtime-corejs3": "^7.21.0",
     "@intlify/eslint-plugin-vue-i18n": "^1.4.0",
     "@itsjonq/remake": "^2.0.0",
-    "@nuxt/types": "^2.15.8",
-    "@nuxt/typescript-build": "^2.1.0",
+    "@nuxt/types": "^2.16.3",
+    "@nuxt/typescript-build": "^3.0.0",
     "@nuxtjs/eslint-module": "^3.1.0",
     "@nuxtjs/storybook": "^4.3.2",
     "@nuxtjs/style-resources": "^1.0.0",
-    "@pinia/testing": "^0.0.12",
-    "@playwright/test": "1.29.1",
+    "@pinia/testing": "^0.0.15",
+    "@playwright/test": "1.30.0",
     "@testing-library/dom": "^8.13.0",
     "@testing-library/jest-dom": "^5.16.1",
     "@testing-library/user-event": "^13.5.0",
@@ -130,7 +131,6 @@
     "@types/uuid": "^8.3.4",
     "@typescript-eslint/eslint-plugin": "^5.54.0",
     "@typescript-eslint/parser": "^5.54.0",
-    "@vue/runtime-dom": "^3.2.37",
     "@vue/test-utils": "^1.1.3",
     "adm-zip": "^0.5.10",
     "autoprefixer": "^10.4.0",
@@ -170,24 +170,11 @@
     "vue-i18n-extract": "^2.0.7",
     "vue-jest": "^3.0.7",
     "vue-loader": "^15.10.0",
-    "vue-server-renderer": "^2.7.10",
-    "vue-template-compiler": "^2.7.10",
+    "vue-server-renderer": "^2.7.14",
+    "vue-template-compiler": "^2.7.14",
     "vue-tsc": "1.2.0",
     "webpack": "^4.46.0"
   },
-  "pnpm": {
-    "peerDependencyRules": {
-      "ignoreMissing": [
-        "react",
-        "react-dom",
-        "consola"
-      ],
-      "allowedVersions": {
-        "babel-core": "7.0.0-bridge.0",
-        "postcss": "5.2.18"
-      }
-    }
-  },
   "browserslist": [
     "> 1%",
     "last 2 versions",
diff --git a/frontend/src/components/LoadingIcon.vue b/frontend/src/components/LoadingIcon.vue
index 18515c43e09..480cd2e1328 100644
--- a/frontend/src/components/LoadingIcon.vue
+++ b/frontend/src/components/LoadingIcon.vue
@@ -7,7 +7,7 @@
diff --git a/frontend/src/components/VHeader/VFilterButton.vue b/frontend/src/components/VHeader/VFilterButton.vue
index 333eb8d480f..4185b54c4b2 100644
--- a/frontend/src/components/VHeader/VFilterButton.vue
+++ b/frontend/src/components/VHeader/VFilterButton.vue
@@ -1,25 +1,25 @@
@@ -31,14 +31,12 @@
 import { defineEvent } from "~/types/emits"
 import { useI18n } from "~/composables/use-i18n"
 
 import VButton from "~/components/VButton.vue"
-import VIcon from "~/components/VIcon/VIcon.vue"
-
-import filterIcon from "~/assets/icons/filter.svg"
+import VFilterIconOrCounter from "~/components/VHeader/VFilterIconOrCounter.vue"
 
 export default defineComponent({
   name: "VFilterButton",
   components: {
-    VIcon,
+    VFilterIconOrCounter,
     VButton,
   },
   props: {
@@ -52,7 +50,6 @@
     },
   },
   emits: {
-    tab: defineEvent<[KeyboardEvent]>(),
     toggle: defineEvent(),
   },
   setup() {
@@ -61,26 +58,16 @@
     const filterCount = computed(() => searchStore.appliedFilterCount)
     const filtersAreApplied = computed(() => filterCount.value > 0)
 
-    /**
-     * This label's verbosity makes it useful for the aria-label
-     * where it is also used, especially on mobile where the
-     * label would just be the number of applied filters, and therefore
-     * basically useless as far as a label is concerned!
-     */
-    const xlMinLabel = computed(() =>
-      filtersAreApplied.value
-        ? i18n.tc("header.filter-button.with-count", filterCount.value)
-        : i18n.t("header.filter-button.simple")
-    )
-    const lgMaxLabel = computed(() =>
-      filtersAreApplied.value ? filterCount.value : ""
+    const textLabel = computed(() => i18n.t("header.filter-button.simple"))
+    const ariaLabel = computed(() =>
+      i18n.tc("header.filter-button.with-count", filterCount.value)
     )
 
     return {
-      filterIcon,
-      xlMinLabel,
-      lgMaxLabel,
+      ariaLabel,
+      textLabel,
       filtersAreApplied,
+      filterCount,
     }
   },
 })
diff --git a/frontend/src/components/VHeader/VFilterIconOrCounter.vue b/frontend/src/components/VHeader/VFilterIconOrCounter.vue
new file mode 100644
index 00000000000..bd38e0058cd
--- /dev/null
+++ b/frontend/src/components/VHeader/VFilterIconOrCounter.vue
@@ -0,0 +1,40 @@
diff --git a/frontend/src/components/VHeader/VHeaderMobile/VContentSettingsModalContent.vue b/frontend/src/components/VHeader/VHeaderMobile/VContentSettingsModalContent.vue
index 5a3f8a7b6ac..8bc270b01aa 100644
--- a/frontend/src/components/VHeader/VHeaderMobile/VContentSettingsModalContent.vue
+++ b/frontend/src/components/VHeader/VHeaderMobile/VContentSettingsModalContent.vue
@@ -21,9 +21,8 @@
         id="content-settings"
         size="medium"
         class="gap-x-2 me-4"
-        >{{
-          $t("search-type.heading")
-        }}
+        >{{ $t("search-type.heading") }}
- - {{ $t("filters.title") }} + :applied-filter-count="appliedFilterCount" + /> {{ clearFiltersLabel }} + >{{ $t("filter-list.clear") }} @@ -81,10 +77,10 @@ import { computed, defineComponent, ref } from "vue" import { useSearchStore } from "~/stores/search" -import { useI18n } from "~/composables/use-i18n" import useSearchType from "~/composables/use-search-type" import VButton from "~/components/VButton.vue" +import VFilterTab from "~/components/VHeader/VHeaderMobile/VFilterTab.vue" import VIcon from "~/components/VIcon/VIcon.vue" import VIconButton from "~/components/VIconButton/VIconButton.vue" import VModalContent from "~/components/VModal/VModalContent.vue" @@ -96,7 +92,6 @@ import VTabPanel from "~/components/VTabs/VTabPanel.vue" import VTabs from "~/components/VTabs/VTabs.vue" import closeIcon from "~/assets/icons/close-small.svg" -import filtersIcon from "~/assets/icons/filter.svg" export default defineComponent({ name: "VContentSettingsModalContent", @@ -104,6 +99,7 @@ export default defineComponent({ VIcon, VModalContent, VButton, + VFilterTab, VIconButton, VSearchGridFilter, VSearchTypes, @@ -135,8 +131,6 @@ export default defineComponent({ }, }, setup(props) { - const i18n = useI18n() - const searchStore = useSearchStore() const content = useSearchType() const selectedTab = ref<"content-settings" | "filters">("content-settings") @@ -152,13 +146,8 @@ export default defineComponent({ const isClearButtonDisabled = computed( () => !searchStore.isAnyFilterApplied ) - const appliedFilterCount = computed(() => searchStore.appliedFilterCount) - const clearFiltersLabel = computed(() => - searchStore.isAnyFilterApplied - ? i18n.t("filter-list.clear-numbered", { - number: appliedFilterCount.value, - }) - : i18n.t("filter-list.clear") + const appliedFilterCount = computed( + () => searchStore.appliedFilterCount ) const searchType = computed(() => content.getSearchTypeProps()) @@ -169,7 +158,6 @@ export default defineComponent({ return { closeIcon, - filtersIcon, searchType, selectedTab, @@ -179,7 +167,6 @@ export default defineComponent({ appliedFilterCount, showClearFiltersButton, isClearButtonDisabled, - clearFiltersLabel, clearFilters, } }, diff --git a/frontend/src/components/VHeader/VHeaderMobile/VFilterTab.vue b/frontend/src/components/VHeader/VHeaderMobile/VFilterTab.vue new file mode 100644 index 00000000000..4f3f55ce3e8 --- /dev/null +++ b/frontend/src/components/VHeader/VHeaderMobile/VFilterTab.vue @@ -0,0 +1,21 @@ + + diff --git a/frontend/src/components/VHeader/VHeaderMobile/meta/VFilterTab.stories.mdx b/frontend/src/components/VHeader/VHeaderMobile/meta/VFilterTab.stories.mdx new file mode 100644 index 00000000000..09f45a0777c --- /dev/null +++ b/frontend/src/components/VHeader/VHeaderMobile/meta/VFilterTab.stories.mdx @@ -0,0 +1,58 @@ +import { + ArgsTable, + Canvas, + Description, + Meta, + Story, +} from "@storybook/addon-docs" +import VFilterTab from "~/components/VHeader/VHeaderMobile/VFilterTab.vue" +import VTab from "~/components/VTabs/VTab.vue" +import VTabs from "~/components/VTabs/VTabs.vue" + + + +export const Template = (args, { argTypes }) => ({ + template: ` +
`, + components: { VFilterTab, VTabs, VTab }, + props: Object.keys(argTypes), + setup() { + args["selected"] = args.isSelected ? "filters" : "tab1" + return { args } + }, +}) + +# Filter tab + + + + + +The tab button on the mobile modal that opens the filters tab. It shows how many +filters are applied, or a filters icon. + + + {Template.bind({})} + diff --git a/frontend/src/components/VHeader/meta/VFilterButton.stories.mdx b/frontend/src/components/VHeader/meta/VFilterButton.stories.mdx index 302f249a64d..b3ac0464980 100644 --- a/frontend/src/components/VHeader/meta/VFilterButton.stories.mdx +++ b/frontend/src/components/VHeader/meta/VFilterButton.stories.mdx @@ -20,17 +20,19 @@ import { IMAGE } from "~/constants/media" appliedFilters: { type: "number", }, + disabled: { + type: "boolean", + }, toggle: { action: "toggle", }, - tab: { - action: "tab", - }, }} /> export const Template = (args, { argTypes }) => ({ - template: ``, + template: `
`,
  components: { VFilterButton },
  props: Object.keys(argTypes),
  setup() {
@@ -67,8 +69,7 @@ export const Template = (args, { argTypes }) => ({
 
 The button opens and closes the filters sidebar. It also shows how many filters
-are applied in the mobile view. the field receives an input. It also emits the
-`search` event when the search button is clicked.
+are applied. It also emits the `toggle` event when clicked.
diff --git a/frontend/src/components/VHomeGallery.vue b/frontend/src/components/VHomeGallery.vue
--- a/frontend/src/components/VHomeGallery.vue
+++ b/frontend/src/components/VHomeGallery.vue
     const el = ref<HTMLElement | null>(null) // template ref
     const { dimens: gridDimens } = useResizeObserver(el)
+    const imageSet = computed(() =>
+      props.set === "random"
+        ? imageInfo.sets[Math.floor(Math.random() * imageInfo.sets.length)]
+        : // `===`: compare the key, don't assign it
+          imageInfo.sets.find((item) => item.key === props.set) ??
+          imageInfo.sets[0]
+    )
     const imageList = computed(() => {
-      const imageSet =
-        props.set === "random"
-          ? imageInfo.sets[Math.floor(Math.random() * imageInfo.sets.length)]
-          : imageInfo.sets.find((item) => (item.key = props.set))
-      return imageSet?.images.map((image, idx) => ({
+      return imageSet.value.images.map((image, idx) => ({
         ...image,
-        src: require(`~/assets/homepage_images/${imageSet.key}/${idx + 1}.png`),
+        src: require(`~/assets/homepage_images/${imageSet.value.key}/${
+          idx + 1
+        }.png`),
         url: router.resolve(
           app.localePath({
             name: "image-id",
@@ -119,6 +125,14 @@
     })
     const imageCount = computed(() => columnCount.value * rowCount)
 
+    const { sendCustomEvent } = useAnalytics()
+    const handleClick = (identifier: string) => {
+      sendCustomEvent("CLICK_HOME_GALLERY_IMAGE", {
+        set: imageSet.value.key,
+        identifier,
+      })
+    }
+
     return {
       el,
 
@@ -130,6 +144,8 @@
       imageList,
       prefersReducedMotion,
+
+      handleClick,
     }
   },
 })
diff --git a/frontend/src/components/VPill.vue b/frontend/src/components/VPill.vue
index 91b1adb01c8..61bb1cd2a00 100644
--- a/frontend/src/components/VPill.vue
+++ b/frontend/src/components/VPill.vue
@@ -6,7 +6,7 @@