diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index a373d685e0e3f9..00000000000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,225 +0,0 @@
-version: 2.1
-
-orbs:
- ruby: circleci/ruby@2.0.0
- node: circleci/node@5.0.3
-
-executors:
- default:
- parameters:
- ruby-version:
- type: string
- docker:
- - image: cimg/ruby:<< parameters.ruby-version >>
- environment:
- BUNDLE_JOBS: 3
- BUNDLE_RETRY: 3
- CONTINUOUS_INTEGRATION: true
- DB_HOST: localhost
- DB_USER: root
- DISABLE_SIMPLECOV: true
- RAILS_ENV: test
- - image: cimg/postgres:14.5
- environment:
- POSTGRES_USER: root
- POSTGRES_HOST_AUTH_METHOD: trust
- - image: cimg/redis:7.0
-
-commands:
- install-system-dependencies:
- steps:
- - run:
- name: Install system dependencies
- command: |
- sudo apt-get update
- sudo apt-get install -y libicu-dev libidn11-dev
- install-ruby-dependencies:
- parameters:
- ruby-version:
- type: string
- steps:
- - run:
- command: |
- bundle config clean 'true'
- bundle config frozen 'true'
- bundle config without 'development production'
- name: Set bundler settings
- - ruby/install-deps:
- bundler-version: '2.3.26'
- key: ruby<< parameters.ruby-version >>-gems-v1
- wait-db:
- steps:
- - run:
- command: dockerize -wait tcp://localhost:5432 -wait tcp://localhost:6379 -timeout 1m
- name: Wait for PostgreSQL and Redis
-
-jobs:
- build:
- docker:
- - image: cimg/ruby:3.0-node
- environment:
- RAILS_ENV: test
- steps:
- - checkout
- - install-system-dependencies
- - install-ruby-dependencies:
- ruby-version: '3.0'
- - node/install-packages:
- cache-version: v1
- pkg-manager: yarn
- - run:
- command: |
- export NODE_OPTIONS=--openssl-legacy-provider
- ./bin/rails assets:precompile
- name: Precompile assets
- - persist_to_workspace:
- paths:
- - public/assets
- - public/packs-test
- root: .
-
- test:
- parameters:
- ruby-version:
- type: string
- executor:
- name: default
- ruby-version: << parameters.ruby-version >>
- environment:
- ALLOW_NOPAM: true
- PAM_ENABLED: true
- PAM_DEFAULT_SERVICE: pam_test
- PAM_CONTROLLED_SERVICE: pam_test_controlled
- parallelism: 4
- steps:
- - checkout
- - install-system-dependencies
- - run:
- command: sudo apt-get install -y ffmpeg imagemagick libpam-dev
- name: Install additional system dependencies
- - run:
- command: bundle config with 'pam_authentication'
- name: Enable PAM authentication
- - install-ruby-dependencies:
- ruby-version: << parameters.ruby-version >>
- - attach_workspace:
- at: .
- - wait-db
- - run:
- command: ./bin/rails db:create db:schema:load db:seed
- name: Load database schema
- - ruby/rspec-test
-
- test-migrations:
- executor:
- name: default
- ruby-version: '3.0'
- steps:
- - checkout
- - install-system-dependencies
- - install-ruby-dependencies:
- ruby-version: '3.0'
- - wait-db
- - run:
- command: ./bin/rails db:create
- name: Create database
- - run:
- command: ./bin/rails db:migrate VERSION=20171010025614
- name: Run migrations up to v2.0.0
- - run:
- command: ./bin/rails tests:migrations:populate_v2
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate VERSION=20180514140000
- name: Run migrations up to v2.4.0
- - run:
- command: ./bin/rails tests:migrations:populate_v2_4
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate VERSION=20180707154237
- name: Run migrations up to v2.4.3
- - run:
- command: ./bin/rails tests:migrations:populate_v2_4_3
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate
- name: Run all remaining migrations
- - run:
- command: ./bin/rails tests:migrations:check_database
- name: Check migration result
-
- test-two-step-migrations:
- executor:
- name: default
- ruby-version: '3.0'
- steps:
- - checkout
- - install-system-dependencies
- - install-ruby-dependencies:
- ruby-version: '3.0'
- - wait-db
- - run:
- command: ./bin/rails db:create
- name: Create database
- - run:
- command: ./bin/rails db:migrate VERSION=20171010025614
- name: Run migrations up to v2.0.0
- - run:
- command: ./bin/rails tests:migrations:populate_v2
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate VERSION=20180514140000
- name: Run pre-deployment migrations up to v2.4.0
- environment:
- SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- - run:
- command: ./bin/rails tests:migrations:populate_v2_4
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate VERSION=20180707154237
- name: Run migrations up to v2.4.3
- environment:
- SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- - run:
- command: ./bin/rails tests:migrations:populate_v2_4_3
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate
- name: Run all remaining pre-deployment migrations
- environment:
- SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- - run:
- command: ./bin/rails db:migrate
- name: Run all post-deployment migrations
- - run:
- command: ./bin/rails tests:migrations:check_database
- name: Check migration result
-
-workflows:
- version: 2
- build-and-test:
- jobs:
- - build
- - test:
- matrix:
- parameters:
- ruby-version:
- - '2.7'
- - '3.0'
- name: test-ruby<< matrix.ruby-version >>
- requires:
- - build
- - test-migrations:
- requires:
- - build
- - test-two-step-migrations:
- requires:
- - build
- - node/run:
- cache-version: v1
- name: test-webui
- pkg-manager: yarn
- requires:
- - build
- version: '16.18'
- yarn-run: test:jest
diff --git a/.github/workflows/build-container-image.yml b/.github/workflows/build-container-image.yml
new file mode 100644
index 00000000000000..b9aebcc46c60d3
--- /dev/null
+++ b/.github/workflows/build-container-image.yml
@@ -0,0 +1,92 @@
+on:
+ workflow_call:
+ inputs:
+ platforms:
+ required: true
+ type: string
+ cache:
+ type: boolean
+ default: true
+ use_native_arm64_builder:
+ type: boolean
+ push_to_images:
+ type: string
+ flavor:
+ type: string
+ tags:
+ type: string
+ labels:
+ type: string
+
+jobs:
+ build-image:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - uses: docker/setup-qemu-action@v2
+ if: contains(inputs.platforms, 'linux/arm64') && !inputs.use_native_arm64_builder
+
+ - uses: docker/setup-buildx-action@v2
+ id: buildx
+ if: ${{ !(inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')) }}
+
+ - name: Start a local Docker Builder
+ if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
+ run: |
+ docker run --rm -d --name buildkitd -p 1234:1234 --privileged moby/buildkit:latest --addr tcp://0.0.0.0:1234
+
+ - uses: docker/setup-buildx-action@v2
+ id: buildx-native
+ if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
+ with:
+ driver: remote
+ endpoint: tcp://localhost:1234
+ platforms: linux/amd64
+ append: |
+ - endpoint: tcp://${{ vars.DOCKER_BUILDER_HETZNER_ARM64_01_HOST }}:13865
+ platforms: linux/arm64
+ name: mastodon-docker-builder-arm64-01
+ driver-opts:
+ - servername=mastodon-docker-builder-arm64-01
+ env:
+ BUILDER_NODE_1_AUTH_TLS_CACERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CACERT }}
+ BUILDER_NODE_1_AUTH_TLS_CERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CERT }}
+ BUILDER_NODE_1_AUTH_TLS_KEY: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_KEY }}
+
+ - name: Log in to Docker Hub
+ if: contains(inputs.push_to_images, 'tootsuite')
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Log in to the GitHub Container Registry
+ if: contains(inputs.push_to_images, 'ghcr.io')
+ uses: docker/login-action@v2
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - uses: docker/metadata-action@v4
+ id: meta
+ if: ${{ inputs.push_to_images != '' }}
+ with:
+ images: ${{ inputs.push_to_images }}
+ flavor: ${{ inputs.flavor }}
+ tags: ${{ inputs.tags }}
+ labels: ${{ inputs.labels }}
+
+ - uses: docker/build-push-action@v4
+ with:
+ context: .
+ platforms: ${{ inputs.platforms }}
+ provenance: false
+ builder: ${{ steps.buildx.outputs.name || steps.buildx-native.outputs.name }}
+ push: ${{ inputs.push_to_images != '' }}
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
+ cache-from: ${{ inputs.cache && 'type=gha' || '' }}
+ cache-to: ${{ inputs.cache && 'type=gha,mode=max' || '' }}
diff --git a/.github/workflows/build-image.yml b/.github/workflows/build-image.yml
deleted file mode 100644
index 76c1751622367f..00000000000000
--- a/.github/workflows/build-image.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-name: Build container image
-on:
- workflow_dispatch:
-jobs:
- build-image:
- runs-on: ubuntu-latest
-
- concurrency:
- group: ${{ github.ref }}
- cancel-in-progress: true
-
- steps:
- - uses: actions/checkout@v3
- - uses: hadolint/hadolint-action@v3.1.0
- - uses: docker/setup-qemu-action@v2
- - uses: docker/setup-buildx-action@v2
- - uses: docker/login-action@v2
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- if: github.event_name != 'pull_request'
- - uses: docker/metadata-action@v4
- id: meta
- with:
- images: tootsuite/mastodon
- flavor: |
- latest=auto
- tags: |
- type=edge,branch=main
- type=pep440,pattern={{raw}}
- type=pep440,pattern=v{{major}}.{{minor}}
- type=ref,event=pr
- - uses: docker/build-push-action@v4
- with:
- context: .
- platforms: linux/amd64,linux/arm64
- provenance: false
- builder: ${{ steps.buildx.outputs.name }}
- push: ${{ github.event_name != 'pull_request' }}
- tags: ${{ steps.meta.outputs.tags }}
- cache-from: type=gha
- cache-to: type=gha,mode=max
diff --git a/.github/workflows/build-releases.yml b/.github/workflows/build-releases.yml
new file mode 100644
index 00000000000000..c19766b1862ff6
--- /dev/null
+++ b/.github/workflows/build-releases.yml
@@ -0,0 +1,27 @@
+name: Build container release images
+on:
+ push:
+ tags:
+ - '*'
+
+permissions:
+ contents: read
+ packages: write
+
+jobs:
+ build-image:
+ uses: ./.github/workflows/build-container-image.yml
+ with:
+ platforms: linux/amd64,linux/arm64
+ use_native_arm64_builder: true
+ push_to_images: |
+ tootsuite/mastodon
+ ghcr.io/mastodon/mastodon
+ # Do not use cache when building releases, so apt update is always run and the release always contains the latest packages
+ cache: false
+ flavor: |
+ latest=false
+ tags: |
+ type=pep440,pattern={{raw}}
+ type=pep440,pattern=v{{major}}.{{minor}}
+ secrets: inherit
diff --git a/.github/workflows/lint-ruby.yml b/.github/workflows/lint-ruby.yml
deleted file mode 100644
index b834e3053f1195..00000000000000
--- a/.github/workflows/lint-ruby.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-name: Ruby Linting
-on:
- push:
- branches-ignore:
- - 'dependabot/**'
- paths:
- - 'Gemfile*'
- - '.rubocop.yml'
- - '**/*.rb'
- - '**/*.rake'
- - '.github/workflows/lint-ruby.yml'
-
- pull_request:
- paths:
- - 'Gemfile*'
- - '.rubocop.yml'
- - '**/*.rb'
- - '**/*.rake'
- - '.github/workflows/lint-ruby.yml'
-
-jobs:
- lint:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout Code
- uses: actions/checkout@v3
- with:
- fetch-depth: 0
-
- name: Set up RuboCop Problem Matcher
- uses: r7kamura/rubocop-problem-matchers-action@v1
-
- - name: Run rubocop
- uses: github/super-linter@v4
- env:
- DEFAULT_BRANCH: main
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- LINTER_RULES_PATH: .
- RUBY_CONFIG_FILE: .rubocop.yml
- VALIDATE_ALL_CODEBASE: false
- VALIDATE_RUBY: true
diff --git a/.github/workflows/test-image-build.yml b/.github/workflows/test-image-build.yml
new file mode 100644
index 00000000000000..71344c0046aa01
--- /dev/null
+++ b/.github/workflows/test-image-build.yml
@@ -0,0 +1,15 @@
+name: Test container image build
+on:
+ pull_request:
+permissions:
+ contents: read
+
+jobs:
+ build-image:
+ concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+ uses: ./.github/workflows/build-container-image.yml
+ with:
+ platforms: linux/amd64 # Testing only on native platform so it is performant
diff --git a/.ruby-version b/.ruby-version
index b0f2dcb32fc28c..818bd47abfc912 100644
--- a/.ruby-version
+++ b/.ruby-version
@@ -1 +1 @@
-3.0.4
+3.0.6
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7a6998e8c14ca7..57fabea9f21913 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,275 @@ Changelog
All notable changes to this project will be documented in this file.
+## [4.1.15] - 2024-02-16
+
+### Fixed
+
+- Fix OmniAuth tests and edge cases in error handling ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/29201), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/29207))
+
+### Security
+
+- Fix insufficient checking of remote posts ([GHSA-jhrq-qvrm-qr36](https://github.com/mastodon/mastodon/security/advisories/GHSA-jhrq-qvrm-qr36))
+
+## [4.1.14] - 2024-02-14
+
+### Security
+
+- Update the `sidekiq-unique-jobs` dependency (see [GHSA-cmh9-rx85-xj38](https://github.com/mhenrixon/sidekiq-unique-jobs/security/advisories/GHSA-cmh9-rx85-xj38))
+ In addition, we have disabled the web interface for `sidekiq-unique-jobs` out of caution.
+ If you need it, you can re-enable it by setting `ENABLE_SIDEKIQ_UNIQUE_JOBS_UI=true`.
+ If you only need to clear all locks, you can now use `bundle exec rake sidekiq_unique_jobs:delete_all_locks`.
+- Update the `nokogiri` dependency (see [GHSA-xc9x-jj77-9p9j](https://github.com/sparklemotion/nokogiri/security/advisories/GHSA-xc9x-jj77-9p9j))
+- Disable administrative Doorkeeper routes ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/29187))
+- Fix ongoing streaming sessions not being invalidated when applications get deleted in some cases ([GHSA-7w3c-p9j8-mq3x](https://github.com/mastodon/mastodon/security/advisories/GHSA-7w3c-p9j8-mq3x))
+ In some rare cases, the streaming server was not notified of access token revocation on application deletion.
+- Change external authentication behavior to never reattach a new identity to an existing user by default ([GHSA-vm39-j3vx-pch3](https://github.com/mastodon/mastodon/security/advisories/GHSA-vm39-j3vx-pch3))
+ Up until now, Mastodon has allowed new identities from external authentication providers to attach to an existing local user based on their verified e-mail address.
+ This allowed upgrading users from a database-stored password to an external authentication provider, or moving from one authentication provider to another.
+ However, this behavior may be unexpected, and means that when multiple authentication providers are configured, the overall security would be that of the least secure authentication provider.
+ For these reasons, this behavior is now locked under the `ALLOW_UNSAFE_AUTH_PROVIDER_REATTACH` environment variable.
+ In addition, regardless of this environment variable, Mastodon will refuse to attach two identities from the same authentication provider to the same account.
+
+## [4.1.13] - 2024-02-01
+
+### Security
+
+- Fix insufficient origin validation (CVE-2024-23832, [GHSA-3fjr-858r-92rw](https://github.com/mastodon/mastodon/security/advisories/GHSA-3fjr-858r-92rw))
+
+## [4.1.12] - 2024-01-24
+
+### Fixed
+
+- Fix error when processing remote files with unusually long names ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28823))
+- Fix processing of compacted single-item JSON-LD collections ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28816))
+- Retry 401 errors on replies fetching ([ShadowJonathan](https://github.com/mastodon/mastodon/pull/28788))
+- Fix `RecordNotUnique` errors in LinkCrawlWorker ([tribela](https://github.com/mastodon/mastodon/pull/28748))
+- Fix Mastodon not correctly processing HTTP Signatures with query strings ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28443), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/28476))
+- Fix potential redirection loop of streaming endpoint ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28665))
+- Fix streaming API redirection ignoring the port of `streaming_api_base_url` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28558))
+- Fix `Undo Announce` activity not being sent to non-follower authors ([MitarashiDango](https://github.com/mastodon/mastodon/pull/18482))
+- Fix `LinkCrawlWorker` error when encountering empty OEmbed response ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28268))
+
+### Security
+
+- Add rate-limit of TOTP authentication attempts at controller level ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/28801))
+
+## [4.1.11] - 2023-12-04
+
+### Changed
+
+- Change GIF max matrix size error to explicitly mention GIF files ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27927))
+- Change `Follow` activities delivery to bypass availability check ([ShadowJonathan](https://github.com/mastodon/mastodon/pull/27586))
+- Change Content-Security-Policy to be tighter on media paths ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26889))
+
+### Fixed
+
+- Fix incoming status creation date not being restricted to standard ISO8601 ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27655), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/28081))
+- Fix posts from force-sensitized accounts being able to trend ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27620))
+- Fix processing LDSigned activities from actors with unknown public keys ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27474))
+- Fix error and incorrect URLs in `/api/v1/accounts/:id/featured_tags` for remote accounts ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27459))
+- Fix report processing notice not mentioning the report number when performing a custom action ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27442))
+- Fix some link anchors being recognized as hashtags ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27271), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/27584))
+
+## [4.1.10] - 2023-10-10
+
+### Changed
+
+- Change some worker lock TTLs to be shorter-lived ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27246))
+- Change user archive export allowed period from 7 days to 6 days ([suddjian](https://github.com/mastodon/mastodon/pull/27200))
+
+### Fixed
+
+- Fix mentions being matched in some URL query strings ([mjankowski](https://github.com/mastodon/mastodon/pull/25656))
+- Fix multiple instances of the trend refresh scheduler sometimes running at once ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27253))
+- Fix importer returning negative row estimates ([jgillich](https://github.com/mastodon/mastodon/pull/27258))
+- Fix filtering audit log for entries about disabling 2FA ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27186))
+- Fix tIME chunk not being properly removed from PNG uploads ([TheEssem](https://github.com/mastodon/mastodon/pull/27111))
+- Fix inefficient queries in “Follows and followers” as well as several admin pages ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/27116), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/27306))
+
+## [4.1.9] - 2023-09-20
+
+### Fixed
+
+- Fix post translation erroring out ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26990))
+
+## [4.1.8] - 2023-09-19
+
+### Fixed
+
+- Fix post edits not being forwarded as expected ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26936))
+- Fix moderator rights inconsistencies ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26729))
+- Fix crash when encountering invalid URL ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26814))
+- Fix cached posts including stale stats ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26409))
+- Fix uploading of video files for which `ffprobe` reports `0/0` average framerate ([NicolaiSoeborg](https://github.com/mastodon/mastodon/pull/26500))
+- Fix unexpected audio stream transcoding when uploaded video is eligible to passthrough ([yufushiro](https://github.com/mastodon/mastodon/pull/26608))
+
+### Security
+
+- Fix missing HTML sanitization in translation API (CVE-2023-42452)
+- Fix incorrect domain name normalization (CVE-2023-42451)
+
+## [4.1.7] - 2023-09-05
+
+### Changed
+
+- Change remote report processing to accept reports with long comments, but truncate them ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25028))
+
+### Fixed
+
+- **Fix blocking subdomains of an already-blocked domain** ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26392))
+- Fix `/api/v1/timelines/tag/:hashtag` allowing for unauthenticated access when public preview is disabled ([danielmbrasil](https://github.com/mastodon/mastodon/pull/26237))
+- Fix inefficiencies in `PlainTextFormatter` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26727))
+
+## [4.1.6] - 2023-07-31
+
+### Fixed
+
+- Fix memory leak in streaming server ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26228))
+- Fix wrong filters sometimes applying in streaming ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26159), [ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26213), [renchap](https://github.com/mastodon/mastodon/pull/26233))
+- Fix incorrect connect timeout in outgoing requests ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26116))
+
+## [4.1.5] - 2023-07-21
+
+### Added
+
+- Add check preventing Sidekiq workers from running with Makara configured ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25850))
+
+### Changed
+
+- Change request timeout handling to use a longer deadline ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26055))
+
+### Fixed
+
+- Fix moderation interface for remote instances with a .zip TLD ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25886))
+- Fix remote accounts being possibly persisted to database with incomplete protocol values ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25886))
+- Fix trending publishers table not rendering correctly on narrow screens ([vmstan](https://github.com/mastodon/mastodon/pull/25945))
+
+### Security
+
+- Fix CSP headers being unintentionally wide ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26105))
+
+## [4.1.4] - 2023-07-07
+
+### Fixed
+
+- Fix branding:generate_app_icons failing because of disallowed ICO coder ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25794))
+- Fix crash in admin interface when viewing a remote user with verified links ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25796))
+- Fix processing of media files with unusual names ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25788))
+
+## [4.1.3] - 2023-07-06
+
+### Added
+
+- Add fallback redirection when getting a webfinger query `LOCAL_DOMAIN@LOCAL_DOMAIN` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23600))
+
+### Changed
+
+- Change OpenGraph-based embeds to allow fullscreen ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25058))
+- Change AccessTokensVacuum to also delete expired tokens ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24868))
+- Change profile updates to be sent to recently-mentioned servers ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24852))
+- Change automatic post deletion thresholds and load detection ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24614))
+- Change `/api/v1/statuses/:id/history` to always return at least one item ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25510))
+- Change auto-linking to allow carets in URL query params ([renchap](https://github.com/mastodon/mastodon/pull/25216))
+
+### Removed
+
+- Remove invalid `X-Frame-Options: ALLOWALL` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25070))
+
+### Fixed
+
+- Fix wrong view being displayed when a webhook fails validation ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25464))
+- Fix soft-deleted post cleanup scheduler overwhelming the streaming server ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25519))
+- Fix incorrect pagination headers in `/api/v2/admin/accounts` ([danielmbrasil](https://github.com/mastodon/mastodon/pull/25477))
+- Fix multiple inefficiencies in automatic post cleanup worker ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24607), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/24785), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/24840))
+- Fix performance of streaming by parsing message JSON once ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25278), [ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25361))
+- Fix CSP headers when `S3_ALIAS_HOST` includes a path component ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25273))
+- Fix `tootctl accounts approve --number N` not approving N earliest registrations ([danielmbrasil](https://github.com/mastodon/mastodon/pull/24605))
+- Fix reports not being closed when performing batch suspensions ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24988))
+- Fix being able to vote on your own polls ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25015))
+- Fix race condition when reblogging a status ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25016))
+- Fix “Authorized applications” inefficiently and incorrectly getting last use date ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25060))
+- Fix “Authorized applications” crashing when listing apps with certain admin API scopes ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25713))
+- Fix multiple N+1s in ConversationsController ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25134), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/25399), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/25499))
+- Fix user archive takeouts when using OpenStack Swift ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24431))
+- Fix searching for remote content by URL not working under certain conditions ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25637))
+- Fix inefficiencies in indexing content for search ([VyrCossont](https://github.com/mastodon/mastodon/pull/24285), [VyrCossont](https://github.com/mastodon/mastodon/pull/24342))
+
+### Security
+
+- Add finer permission requirements for managing webhooks ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25463))
+- Update dependencies
+- Add hardening headers for user-uploaded files ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25756))
+- Fix verified links possibly hiding important parts of the URL (CVE-2023-36462)
+- Fix timeout handling of outbound HTTP requests (CVE-2023-36461)
+- Fix arbitrary file creation through media processing (CVE-2023-36460)
+- Fix possible XSS in preview cards (CVE-2023-36459)
+
+## [4.1.2] - 2023-04-04
+
+### Fixed
+
+- Fix crash in `tootctl` commands making use of parallelization when Elasticsearch is enabled ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24182), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/24377))
+- Fix crash in `db:setup` when Elasticsearch is enabled ([rrgeorge](https://github.com/mastodon/mastodon/pull/24302))
+- Fix user archive takeout when using OpenStack Swift or S3 providers with no ACL support ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24200))
+- Fix invalid/expired invites being processed on sign-up ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24337))
+
+### Security
+
+- Update Ruby to 3.0.6 due to ReDoS vulnerabilities ([saizai](https://github.com/mastodon/mastodon/pull/24334))
+- Fix unescaped user input in LDAP query ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24379))
+
+## [4.1.1] - 2023-03-16
+
+### Added
+
+- Add redirection from paths with url-encoded `@` to their decoded form ([thijskh](https://github.com/mastodon/mastodon/pull/23593))
+- Add `lang` attribute to native language names in language picker in Web UI ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23749))
+- Add headers to outgoing mails to avoid auto-replies ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23597))
+- Add support for refreshing many accounts at once with `tootctl accounts refresh` ([9p4](https://github.com/mastodon/mastodon/pull/23304))
+- Add confirmation modal when clicking to edit a post with a non-empty compose form ([PauloVilarinho](https://github.com/mastodon/mastodon/pull/23936))
+- Add support for the HAproxy PROXY protocol through the `PROXY_PROTO_V1` environment variable ([CSDUMMI](https://github.com/mastodon/mastodon/pull/24064))
+- Add `SENDFILE_HEADER` environment variable ([Gargron](https://github.com/mastodon/mastodon/pull/24123))
+- Add cache headers to static files served through Rails ([Gargron](https://github.com/mastodon/mastodon/pull/24120))
+
+### Changed
+
+- Increase contrast of upload progress bar background ([toolmantim](https://github.com/mastodon/mastodon/pull/23836))
+- Change post auto-deletion throttling constants to better scale with server size ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23320))
+- Change order of bookmark and favourite sidebar entries in single-column UI for consistency ([TerryGarcia](https://github.com/mastodon/mastodon/pull/23701))
+- Change `ActivityPub::DeliveryWorker` retries to be spread out more ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/21956))
+
+### Fixed
+
+- Fix “Remove all followers from the selected domains” also removing follows and notifications ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23805))
+- Fix streaming metrics format ([emilweth](https://github.com/mastodon/mastodon/pull/23519), [emilweth](https://github.com/mastodon/mastodon/pull/23520))
+- Fix case-sensitive check for previously used hashtags in hashtag autocompletion ([deanveloper](https://github.com/mastodon/mastodon/pull/23526))
+- Fix focus point of already-attached media not saving after edit ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23566))
+- Fix sidebar behavior in settings/admin UI on mobile ([wxt2005](https://github.com/mastodon/mastodon/pull/23764))
+- Fix inefficiency when searching accounts per username in admin interface ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23801))
+- Fix duplicate “Publish” button on mobile ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23804))
+- Fix server error when failing to follow back followers from `/relationships` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23787))
+- Fix server error when attempting to display the edit history of a trendable post in the admin interface ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23574))
+- Fix `tootctl accounts migrate` crashing because of a typo ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23567))
+- Fix original account being unfollowed on migration before the follow request to the new account could be sent ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/21957))
+- Fix the “Back” button in column headers sometimes leaving Mastodon ([c960657](https://github.com/mastodon/mastodon/pull/23953))
+- Fix pgBouncer resetting application name on every transaction ([Gargron](https://github.com/mastodon/mastodon/pull/23958))
+- Fix unconfirmed accounts being counted as active users ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23803))
+- Fix `/api/v1/streaming` sub-paths not being redirected ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23988))
+- Fix drag'n'drop upload area text that spans multiple lines not being centered ([vintprox](https://github.com/mastodon/mastodon/pull/24029))
+- Fix sidekiq jobs not triggering Elasticsearch index updates ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24046))
+- Fix tags being unnecessarily stripped from plain-text short site description ([c960657](https://github.com/mastodon/mastodon/pull/23975))
+- Fix HTML entities not being un-escaped in extracted plain-text from remote posts ([c960657](https://github.com/mastodon/mastodon/pull/24019))
+- Fix dashboard crash on ElasticSearch server error ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23751))
+- Fix incorrect post links in strikes when the account is remote ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23611))
+- Fix misleading error code when receiving invalid WebAuthn credentials ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23568))
+- Fix duplicate mails being sent when the SMTP server is too slow to close the connection ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23750))
+
+### Security
+
+- Change user backups to use expiring URLs for download when possible ([Gargron](https://github.com/mastodon/mastodon/pull/24136))
+- Add warning for object storage misconfiguration ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24137))
+
## [4.1.0] - 2023-02-10
### Added
diff --git a/Dockerfile b/Dockerfile
index ce7f4d7186d34b..c0f584dc4f9ebf 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,7 +2,7 @@
# This needs to be bullseye-slim because the Ruby image is built on bullseye-slim
ARG NODE_VERSION="16.18.1-bullseye-slim"
-FROM ghcr.io/moritzheiber/ruby-jemalloc:3.0.4-slim as ruby
+FROM ghcr.io/moritzheiber/ruby-jemalloc:3.0.6-slim as ruby
FROM node:${NODE_VERSION} as build
COPY --link --from=ruby /opt/ruby /opt/ruby
@@ -17,6 +17,7 @@ COPY Gemfile* package.json yarn.lock /opt/mastodon/
# hadolint ignore=DL3008
RUN apt-get update && \
+ apt-get -yq dist-upgrade && \
apt-get install -y --no-install-recommends build-essential \
ca-certificates \
git \
diff --git a/Gemfile.lock b/Gemfile.lock
index 0b0b8859ccefed..ef1a19ae129bc8 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -10,40 +10,40 @@ GIT
GEM
remote: https://rubygems.org/
specs:
- actioncable (6.1.7.2)
- actionpack (= 6.1.7.2)
- activesupport (= 6.1.7.2)
+ actioncable (6.1.7.6)
+ actionpack (= 6.1.7.6)
+ activesupport (= 6.1.7.6)
nio4r (~> 2.0)
websocket-driver (>= 0.6.1)
- actionmailbox (6.1.7.2)
- actionpack (= 6.1.7.2)
- activejob (= 6.1.7.2)
- activerecord (= 6.1.7.2)
- activestorage (= 6.1.7.2)
- activesupport (= 6.1.7.2)
+ actionmailbox (6.1.7.6)
+ actionpack (= 6.1.7.6)
+ activejob (= 6.1.7.6)
+ activerecord (= 6.1.7.6)
+ activestorage (= 6.1.7.6)
+ activesupport (= 6.1.7.6)
mail (>= 2.7.1)
- actionmailer (6.1.7.2)
- actionpack (= 6.1.7.2)
- actionview (= 6.1.7.2)
- activejob (= 6.1.7.2)
- activesupport (= 6.1.7.2)
+ actionmailer (6.1.7.6)
+ actionpack (= 6.1.7.6)
+ actionview (= 6.1.7.6)
+ activejob (= 6.1.7.6)
+ activesupport (= 6.1.7.6)
mail (~> 2.5, >= 2.5.4)
rails-dom-testing (~> 2.0)
- actionpack (6.1.7.2)
- actionview (= 6.1.7.2)
- activesupport (= 6.1.7.2)
+ actionpack (6.1.7.6)
+ actionview (= 6.1.7.6)
+ activesupport (= 6.1.7.6)
rack (~> 2.0, >= 2.0.9)
rack-test (>= 0.6.3)
rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.2.0)
- actiontext (6.1.7.2)
- actionpack (= 6.1.7.2)
- activerecord (= 6.1.7.2)
- activestorage (= 6.1.7.2)
- activesupport (= 6.1.7.2)
+ actiontext (6.1.7.6)
+ actionpack (= 6.1.7.6)
+ activerecord (= 6.1.7.6)
+ activestorage (= 6.1.7.6)
+ activesupport (= 6.1.7.6)
nokogiri (>= 1.8.5)
- actionview (6.1.7.2)
- activesupport (= 6.1.7.2)
+ actionview (6.1.7.6)
+ activesupport (= 6.1.7.6)
builder (~> 3.1)
erubi (~> 1.4)
rails-dom-testing (~> 2.0)
@@ -54,22 +54,22 @@ GEM
case_transform (>= 0.2)
jsonapi-renderer (>= 0.1.1.beta1, < 0.3)
active_record_query_trace (1.8)
- activejob (6.1.7.2)
- activesupport (= 6.1.7.2)
+ activejob (6.1.7.6)
+ activesupport (= 6.1.7.6)
globalid (>= 0.3.6)
- activemodel (6.1.7.2)
- activesupport (= 6.1.7.2)
- activerecord (6.1.7.2)
- activemodel (= 6.1.7.2)
- activesupport (= 6.1.7.2)
- activestorage (6.1.7.2)
- actionpack (= 6.1.7.2)
- activejob (= 6.1.7.2)
- activerecord (= 6.1.7.2)
- activesupport (= 6.1.7.2)
+ activemodel (6.1.7.6)
+ activesupport (= 6.1.7.6)
+ activerecord (6.1.7.6)
+ activemodel (= 6.1.7.6)
+ activesupport (= 6.1.7.6)
+ activestorage (6.1.7.6)
+ actionpack (= 6.1.7.6)
+ activejob (= 6.1.7.6)
+ activerecord (= 6.1.7.6)
+ activesupport (= 6.1.7.6)
marcel (~> 1.0)
mini_mime (>= 1.1.0)
- activesupport (6.1.7.2)
+ activesupport (6.1.7.6)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 1.6, < 2)
minitest (>= 5.1)
@@ -120,8 +120,7 @@ GEM
bindata (2.4.14)
binding_of_caller (1.0.0)
debug_inspector (>= 0.0.1)
- blurhash (0.1.6)
- ffi (~> 1.14)
+ blurhash (0.1.7)
bootsnap (1.16.0)
msgpack (~> 1.2)
brakeman (5.4.0)
@@ -174,7 +173,7 @@ GEM
cocoon (1.2.15)
coderay (1.1.3)
color_diff (0.1)
- concurrent-ruby (1.2.0)
+ concurrent-ruby (1.2.2)
connection_pool (2.3.0)
cose (1.2.1)
cbor (~> 0.5.9)
@@ -207,7 +206,7 @@ GEM
docile (1.4.0)
domain_name (0.5.20190701)
unf (>= 0.0.5, < 1.0.0)
- doorkeeper (5.6.4)
+ doorkeeper (5.6.6)
railties (>= 5)
dotenv (2.8.1)
dotenv-rails (2.8.1)
@@ -389,7 +388,7 @@ GEM
loofah (2.19.1)
crass (~> 1.0.2)
nokogiri (>= 1.5.9)
- mail (2.8.0.1)
+ mail (2.8.1)
mini_mime (>= 0.1.1)
net-imap
net-pop
@@ -405,13 +404,13 @@ GEM
mime-types (3.4.1)
mime-types-data (~> 3.2015)
mime-types-data (3.2022.0105)
- mini_mime (1.1.2)
- mini_portile2 (2.8.1)
+ mini_mime (1.1.5)
+ mini_portile2 (2.8.5)
minitest (5.17.0)
msgpack (1.6.0)
multi_json (1.15.0)
multipart-post (2.1.1)
- net-imap (0.3.4)
+ net-imap (0.3.7)
date
net-protocol
net-ldap (0.17.1)
@@ -424,9 +423,9 @@ GEM
net-smtp (0.3.3)
net-protocol
net-ssh (7.0.1)
- nio4r (2.5.8)
- nokogiri (1.14.1)
- mini_portile2 (~> 2.8.0)
+ nio4r (2.5.9)
+ nokogiri (1.16.2)
+ mini_portile2 (~> 2.8.2)
racc (~> 1.4)
nsa (0.2.8)
activesupport (>= 4.2, < 7)
@@ -469,7 +468,7 @@ GEM
parslet (2.0.0)
pastel (0.8.0)
tty-color (~> 0.5)
- pg (1.4.5)
+ pg (1.4.6)
pghero (3.1.0)
activerecord (>= 6)
pkg-config (1.5.1)
@@ -492,13 +491,13 @@ GEM
pry-rails (0.3.9)
pry (>= 0.10.4)
public_suffix (5.0.1)
- puma (5.6.5)
+ puma (5.6.7)
nio4r (~> 2.0)
pundit (2.3.0)
activesupport (>= 3.0.0)
raabro (1.4.0)
- racc (1.6.2)
- rack (2.2.6.2)
+ racc (1.7.3)
+ rack (2.2.8)
rack-attack (6.6.1)
rack (>= 1.0, < 3)
rack-cors (1.1.1)
@@ -513,20 +512,20 @@ GEM
rack
rack-test (2.0.2)
rack (>= 1.3)
- rails (6.1.7.2)
- actioncable (= 6.1.7.2)
- actionmailbox (= 6.1.7.2)
- actionmailer (= 6.1.7.2)
- actionpack (= 6.1.7.2)
- actiontext (= 6.1.7.2)
- actionview (= 6.1.7.2)
- activejob (= 6.1.7.2)
- activemodel (= 6.1.7.2)
- activerecord (= 6.1.7.2)
- activestorage (= 6.1.7.2)
- activesupport (= 6.1.7.2)
+ rails (6.1.7.6)
+ actioncable (= 6.1.7.6)
+ actionmailbox (= 6.1.7.6)
+ actionmailer (= 6.1.7.6)
+ actionpack (= 6.1.7.6)
+ actiontext (= 6.1.7.6)
+ actionview (= 6.1.7.6)
+ activejob (= 6.1.7.6)
+ activemodel (= 6.1.7.6)
+ activerecord (= 6.1.7.6)
+ activestorage (= 6.1.7.6)
+ activesupport (= 6.1.7.6)
bundler (>= 1.15.0)
- railties (= 6.1.7.2)
+ railties (= 6.1.7.6)
sprockets-rails (>= 2.0.0)
rails-controller-testing (1.0.5)
actionpack (>= 5.0.1.rc1)
@@ -542,9 +541,9 @@ GEM
railties (>= 6.0.0, < 7)
rails-settings-cached (0.6.6)
rails (>= 4.2.0)
- railties (6.1.7.2)
- actionpack (= 6.1.7.2)
- activesupport (= 6.1.7.2)
+ railties (6.1.7.6)
+ actionpack (= 6.1.7.6)
+ activesupport (= 6.1.7.6)
method_source
rake (>= 12.2)
thor (~> 1.0)
@@ -628,14 +627,14 @@ GEM
fugit (~> 1.1, >= 1.1.6)
safety_net_attestation (0.4.0)
jwt (~> 2.0)
- sanitize (6.0.1)
+ sanitize (6.0.2)
crass (~> 1.0.2)
nokogiri (>= 1.12.0)
scenic (1.7.0)
activerecord (>= 4.0.0)
railties (>= 4.0.0)
semantic_range (3.0.0)
- sidekiq (6.5.8)
+ sidekiq (6.5.12)
connection_pool (>= 2.2.5, < 3)
rack (~> 2.0)
redis (>= 4.5.0, < 5)
@@ -646,7 +645,7 @@ GEM
rufus-scheduler (~> 3.2)
sidekiq (>= 4, < 7)
tilt (>= 1.4.0)
- sidekiq-unique-jobs (7.1.29)
+ sidekiq-unique-jobs (7.1.33)
brpoplpush-redis_script (> 0.1.1, <= 2.0.0)
concurrent-ruby (~> 1.0, >= 1.0.5)
redis (< 5.0)
@@ -689,9 +688,9 @@ GEM
unicode-display_width (>= 1.1.1, < 3)
terrapin (0.6.0)
climate_control (>= 0.0.3, < 1.0)
- thor (1.2.1)
+ thor (1.2.2)
tilt (2.0.11)
- timeout (0.3.1)
+ timeout (0.3.2)
tpm-key_attestation (0.11.0)
bindata (~> 2.4)
openssl (> 2.0, < 3.1)
@@ -747,14 +746,14 @@ GEM
rack-proxy (>= 0.6.1)
railties (>= 5.2)
semantic_range (>= 2.3.0)
- websocket-driver (0.7.5)
+ websocket-driver (0.7.6)
websocket-extensions (>= 0.1.0)
websocket-extensions (0.1.5)
wisper (2.0.1)
xorcist (1.1.3)
xpath (3.2.0)
nokogiri (~> 1.8)
- zeitwerk (2.6.6)
+ zeitwerk (2.6.12)
PLATFORMS
ruby
diff --git a/README.md b/README.md
index 43062f6b6888d7..61f732f2af5c2b 100644
--- a/README.md
+++ b/README.md
@@ -36,13 +36,11 @@ DB_PASS=mastodon
[![Build Status](https://img.shields.io/circleci/project/github/mastodon/mastodon.svg)][circleci]
[![Code Climate](https://img.shields.io/codeclimate/maintainability/mastodon/mastodon.svg)][code_climate]
[![Crowdin](https://d322cqt584bo4o.cloudfront.net/mastodon/localized.svg)][crowdin]
-[![Docker Pulls](https://img.shields.io/docker/pulls/tootsuite/mastodon.svg)][docker]
[releases]: https://github.com/mastodon/mastodon/releases
[circleci]: https://circleci.com/gh/mastodon/mastodon
[code_climate]: https://codeclimate.com/github/mastodon/mastodon
[crowdin]: https://crowdin.com/project/mastodon
-[docker]: https://hub.docker.com/r/tootsuite/mastodon/
Mastodon is a **free, open-source social network server** based on ActivityPub where users can follow friends and discover new ones. On Mastodon, users can publish anything they want: links, pictures, text, video. All Mastodon servers are interoperable as a federated network (users on one server can seamlessly communicate with users from another one, including non-Mastodon software that implements ActivityPub!)
@@ -59,6 +57,7 @@ Click below to **learn more** in a video:
- [View sponsors](https://joinmastodon.org/sponsors)
- [Blog](https://blog.joinmastodon.org)
- [Documentation](https://docs.joinmastodon.org)
+- [Official Docker image](https://github.com/mastodon/mastodon/pkgs/container/mastodon)
- [Browse Mastodon servers](https://joinmastodon.org/communities)
- [Browse Mastodon apps](https://joinmastodon.org/apps)
diff --git a/SECURITY.md b/SECURITY.md
index ccc7c1034624e2..b14dc45bf724aa 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -10,8 +10,8 @@ A "vulnerability in Mastodon" is a vulnerability in the code distributed through
## Supported Versions
-| Version | Supported |
-| ------- | ----------|
-| 4.0.x | Yes |
-| 3.5.x | Yes |
-| < 3.5 | No |
+| Version | Supported |
+| ------- | ---------------- |
+| 4.2.x | Yes |
+| 4.1.x | Yes |
+| < 4.1 | No |
diff --git a/app/chewy/accounts_index.rb b/app/chewy/accounts_index.rb
index e38e14a10699ad..9b78af85d6f623 100644
--- a/app/chewy/accounts_index.rb
+++ b/app/chewy/accounts_index.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class AccountsIndex < Chewy::Index
+ include DatetimeClampingConcern
+
settings index: { refresh_interval: '30s' }, analysis: {
analyzer: {
content: {
@@ -38,6 +40,6 @@ class AccountsIndex < Chewy::Index
field :following_count, type: 'long', value: ->(account) { account.following_count }
field :followers_count, type: 'long', value: ->(account) { account.followers_count }
- field :last_status_at, type: 'date', value: ->(account) { account.last_status_at || account.created_at }
+ field :last_status_at, type: 'date', value: ->(account) { clamp_date(account.last_status_at || account.created_at) }
end
end
diff --git a/app/chewy/concerns/datetime_clamping_concern.rb b/app/chewy/concerns/datetime_clamping_concern.rb
new file mode 100644
index 00000000000000..7f176b6e5489f4
--- /dev/null
+++ b/app/chewy/concerns/datetime_clamping_concern.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module DatetimeClampingConcern
+ extend ActiveSupport::Concern
+
+ MIN_ISO8601_DATETIME = '0000-01-01T00:00:00Z'.to_datetime.freeze
+ MAX_ISO8601_DATETIME = '9999-12-31T23:59:59Z'.to_datetime.freeze
+
+ class_methods do
+ def clamp_date(datetime)
+ datetime.clamp(MIN_ISO8601_DATETIME, MAX_ISO8601_DATETIME)
+ end
+ end
+end
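A minimal sketch of what the new concern does, for reference (not part of the diff): clamp_date pins any timestamp into the ISO 8601 range declared above before it reaches Elasticsearch. The example dates are made up; only the bounds and the clamp call come from the code above.

require 'active_support/all' # for String#to_datetime, as used by the concern

MIN_ISO8601_DATETIME = '0000-01-01T00:00:00Z'.to_datetime.freeze
MAX_ISO8601_DATETIME = '9999-12-31T23:59:59Z'.to_datetime.freeze

def clamp_date(datetime)
  datetime.clamp(MIN_ISO8601_DATETIME, MAX_ISO8601_DATETIME)
end

clamp_date(DateTime.new(2023, 7, 6))   # => unchanged, already within range
clamp_date(DateTime.new(10_000, 1, 1)) # => 9999-12-31T23:59:59+00:00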
diff --git a/app/chewy/tags_index.rb b/app/chewy/tags_index.rb
index df3d9e4cce2920..8c778dc65d09d6 100644
--- a/app/chewy/tags_index.rb
+++ b/app/chewy/tags_index.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class TagsIndex < Chewy::Index
+ include DatetimeClampingConcern
+
settings index: { refresh_interval: '30s' }, analysis: {
analyzer: {
content: {
@@ -36,6 +38,6 @@ class TagsIndex < Chewy::Index
field :reviewed, type: 'boolean', value: ->(tag) { tag.reviewed? }
field :usage, type: 'long', value: ->(tag, crutches) { tag.history.aggregate(crutches.time_period).accounts }
- field :last_status_at, type: 'date', value: ->(tag) { tag.last_status_at || tag.created_at }
+ field :last_status_at, type: 'date', value: ->(tag) { clamp_date(tag.last_status_at || tag.created_at) }
end
end
diff --git a/app/controllers/admin/account_actions_controller.rb b/app/controllers/admin/account_actions_controller.rb
index e89404b6098e0f..e674bf55a028b0 100644
--- a/app/controllers/admin/account_actions_controller.rb
+++ b/app/controllers/admin/account_actions_controller.rb
@@ -21,7 +21,7 @@ def create
account_action.save!
if account_action.with_report?
- redirect_to admin_reports_path, notice: I18n.t('admin.reports.processed_msg', id: params[:report_id])
+ redirect_to admin_reports_path, notice: I18n.t('admin.reports.processed_msg', id: resource_params[:report_id])
else
redirect_to admin_account_path(@account.id)
end
diff --git a/app/controllers/admin/domain_blocks_controller.rb b/app/controllers/admin/domain_blocks_controller.rb
index 74764640b8f8b6..746623a06c372b 100644
--- a/app/controllers/admin/domain_blocks_controller.rb
+++ b/app/controllers/admin/domain_blocks_controller.rb
@@ -37,7 +37,7 @@ def create
@domain_block.errors.delete(:domain)
render :new
else
- if existing_domain_block.present?
+ if existing_domain_block.present? && existing_domain_block.domain == TagManager.instance.normalize_domain(@domain_block.domain.strip)
@domain_block = existing_domain_block
@domain_block.update(resource_params)
end
diff --git a/app/controllers/admin/webhooks_controller.rb b/app/controllers/admin/webhooks_controller.rb
index d6fb1a4eaf3009..76062ddf7f53b9 100644
--- a/app/controllers/admin/webhooks_controller.rb
+++ b/app/controllers/admin/webhooks_controller.rb
@@ -20,6 +20,7 @@ def create
authorize :webhook, :create?
@webhook = Webhook.new(resource_params)
+ @webhook.current_account = current_account
if @webhook.save
redirect_to admin_webhook_path(@webhook)
@@ -39,10 +40,12 @@ def edit
def update
authorize @webhook, :update?
+ @webhook.current_account = current_account
+
if @webhook.update(resource_params)
redirect_to admin_webhook_path(@webhook)
else
- render :show
+ render :edit
end
end
diff --git a/app/controllers/api/v1/conversations_controller.rb b/app/controllers/api/v1/conversations_controller.rb
index 6c7583403758e2..818ba6ebba2ebb 100644
--- a/app/controllers/api/v1/conversations_controller.rb
+++ b/app/controllers/api/v1/conversations_controller.rb
@@ -11,7 +11,7 @@ class Api::V1::ConversationsController < Api::BaseController
def index
@conversations = paginated_conversations
- render json: @conversations, each_serializer: REST::ConversationSerializer
+ render json: @conversations, each_serializer: REST::ConversationSerializer, relationships: StatusRelationshipsPresenter.new(@conversations.map(&:last_status), current_user&.account_id)
end
def read
@@ -32,6 +32,19 @@ def set_conversation
def paginated_conversations
AccountConversation.where(account: current_account)
+ .includes(
+ account: :account_stat,
+ last_status: [
+ :media_attachments,
+ :preview_cards,
+ :status_stat,
+ :tags,
+ {
+ active_mentions: [account: :account_stat],
+ account: :account_stat,
+ },
+ ]
+ )
.to_a_paginated_by_id(limit_param(LIMIT), params_slice(:max_id, :since_id, :min_id))
end
diff --git a/app/controllers/api/v1/statuses/histories_controller.rb b/app/controllers/api/v1/statuses/histories_controller.rb
index 7fe73a6f5493cc..b1c19987a434a2 100644
--- a/app/controllers/api/v1/statuses/histories_controller.rb
+++ b/app/controllers/api/v1/statuses/histories_controller.rb
@@ -7,11 +7,15 @@ class Api::V1::Statuses::HistoriesController < Api::BaseController
before_action :set_status
def show
- render json: @status.edits.includes(:account, status: [:account]), each_serializer: REST::StatusEditSerializer
+ render json: status_edits, each_serializer: REST::StatusEditSerializer
end
private
+ def status_edits
+ @status.edits.includes(:account, status: [:account]).to_a.presence || [@status.build_snapshot(at_time: @status.edited_at || @status.created_at)]
+ end
+
def set_status
@status = Status.find(params[:status_id])
authorize @status, :show?
diff --git a/app/controllers/api/v1/statuses/reblogs_controller.rb b/app/controllers/api/v1/statuses/reblogs_controller.rb
index 1be15a5a439604..a4079a16db09b4 100644
--- a/app/controllers/api/v1/statuses/reblogs_controller.rb
+++ b/app/controllers/api/v1/statuses/reblogs_controller.rb
@@ -2,6 +2,8 @@
class Api::V1::Statuses::ReblogsController < Api::BaseController
include Authorization
+ include Redisable
+ include Lockable
before_action -> { doorkeeper_authorize! :write, :'write:statuses' }
before_action :require_user!
@@ -10,7 +12,9 @@ class Api::V1::Statuses::ReblogsController < Api::BaseController
override_rate_limit_headers :create, family: :statuses
def create
- @status = ReblogService.new.call(current_account, @reblog, reblog_params)
+ with_lock("reblog:#{current_account.id}:#{@reblog.id}") do
+ @status = ReblogService.new.call(current_account, @reblog, reblog_params)
+ end
render json: @status, serializer: REST::StatusSerializer
end
diff --git a/app/controllers/api/v1/streaming_controller.rb b/app/controllers/api/v1/streaming_controller.rb
index b23a60170c7780..843065adba05b0 100644
--- a/app/controllers/api/v1/streaming_controller.rb
+++ b/app/controllers/api/v1/streaming_controller.rb
@@ -2,7 +2,7 @@
class Api::V1::StreamingController < Api::BaseController
def index
- if Rails.configuration.x.streaming_api_base_url == request.host
+ if same_host?
not_found
else
redirect_to streaming_api_url, status: 301
@@ -11,9 +11,16 @@ def index
private
+ def same_host?
+ base_url = Addressable::URI.parse(Rails.configuration.x.streaming_api_base_url)
+ request.host == base_url.host && request.port == (base_url.port || 80)
+ end
+
def streaming_api_url
Addressable::URI.parse(request.url).tap do |uri|
- uri.host = Addressable::URI.parse(Rails.configuration.x.streaming_api_base_url).host
+ base_url = Addressable::URI.parse(Rails.configuration.x.streaming_api_base_url)
+ uri.host = base_url.host
+ uri.port = base_url.port
end.to_s
end
end
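For reference (not part of the diff), this is the Addressable::URI#port behaviour the new same_host? check relies on: the port is only non-nil when the configured streaming_api_base_url spells it out, hence the fallback default. The hostnames below are made up.

require 'addressable/uri'

Addressable::URI.parse('wss://streaming.example.com:4000').port # => 4000
Addressable::URI.parse('wss://streaming.example.com').port      # => nil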
diff --git a/app/controllers/api/v1/timelines/tag_controller.rb b/app/controllers/api/v1/timelines/tag_controller.rb
index 64a1db58df3ae7..3f41eb6887c750 100644
--- a/app/controllers/api/v1/timelines/tag_controller.rb
+++ b/app/controllers/api/v1/timelines/tag_controller.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
class Api::V1::Timelines::TagController < Api::BaseController
+ before_action -> { doorkeeper_authorize! :read, :'read:statuses' }, only: :show, if: :require_auth?
before_action :load_tag
after_action :insert_pagination_headers, unless: -> { @statuses.empty? }
@@ -11,6 +12,10 @@ def show
private
+ def require_auth?
+ !Setting.timeline_preview
+ end
+
def load_tag
@tag = Tag.find_normalized(params[:id])
end
diff --git a/app/controllers/api/v2/admin/accounts_controller.rb b/app/controllers/api/v2/admin/accounts_controller.rb
index b25831aa09e9ec..bc8f8b6f34c02d 100644
--- a/app/controllers/api/v2/admin/accounts_controller.rb
+++ b/app/controllers/api/v2/admin/accounts_controller.rb
@@ -18,6 +18,14 @@ class Api::V2::Admin::AccountsController < Api::V1::Admin::AccountsController
private
+ def next_path
+ api_v2_admin_accounts_url(pagination_params(max_id: pagination_max_id)) if records_continue?
+ end
+
+ def prev_path
+ api_v2_admin_accounts_url(pagination_params(min_id: pagination_since_id)) unless @accounts.empty?
+ end
+
def filtered_accounts
AccountFilter.new(translated_filter_params).results
end
diff --git a/app/controllers/auth/omniauth_callbacks_controller.rb b/app/controllers/auth/omniauth_callbacks_controller.rb
index 3d7962de56cb50..3968537ad38def 100644
--- a/app/controllers/auth/omniauth_callbacks_controller.rb
+++ b/app/controllers/auth/omniauth_callbacks_controller.rb
@@ -5,7 +5,7 @@ class Auth::OmniauthCallbacksController < Devise::OmniauthCallbacksController
def self.provides_callback_for(provider)
define_method provider do
- @user = User.find_for_oauth(request.env['omniauth.auth'], current_user)
+ @user = User.find_for_omniauth(request.env['omniauth.auth'], current_user)
if @user.persisted?
LoginActivity.create(
@@ -24,6 +24,9 @@ def self.provides_callback_for(provider)
session["devise.#{provider}_data"] = request.env['omniauth.auth']
redirect_to new_user_registration_url
end
+ rescue ActiveRecord::RecordInvalid
+ flash[:alert] = I18n.t('devise.failure.omniauth_user_creation_failure') if is_navigational_format?
+ redirect_to new_user_session_url
end
end
diff --git a/app/controllers/auth/registrations_controller.rb b/app/controllers/auth/registrations_controller.rb
index 71c0cd8271b1b6..540b04a6cc13c5 100644
--- a/app/controllers/auth/registrations_controller.rb
+++ b/app/controllers/auth/registrations_controller.rb
@@ -48,7 +48,7 @@ def build_resource(hash = nil)
super(hash)
resource.locale = I18n.locale
- resource.invite_code = params[:invite_code] if resource.invite_code.blank?
+ resource.invite_code = @invite&.code if resource.invite_code.blank?
resource.registration_form_time = session[:registration_form_time]
resource.sign_up_ip = request.remote_ip
diff --git a/app/controllers/auth/sessions_controller.rb b/app/controllers/auth/sessions_controller.rb
index afcf8b24b89a60..17d75e1bbf8561 100644
--- a/app/controllers/auth/sessions_controller.rb
+++ b/app/controllers/auth/sessions_controller.rb
@@ -1,6 +1,10 @@
# frozen_string_literal: true
class Auth::SessionsController < Devise::SessionsController
+ include Redisable
+
+ MAX_2FA_ATTEMPTS_PER_HOUR = 10
+
layout 'auth'
skip_before_action :require_no_authentication, only: [:create]
@@ -136,9 +140,23 @@ def clear_attempt_from_session
session.delete(:attempt_user_updated_at)
end
+ def clear_2fa_attempt_from_user(user)
+ redis.del(second_factor_attempts_key(user))
+ end
+
+ def check_second_factor_rate_limits(user)
+ attempts, = redis.multi do |multi|
+ multi.incr(second_factor_attempts_key(user))
+ multi.expire(second_factor_attempts_key(user), 1.hour)
+ end
+
+ attempts >= MAX_2FA_ATTEMPTS_PER_HOUR
+ end
+
def on_authentication_success(user, security_measure)
@on_authentication_success_called = true
+ clear_2fa_attempt_from_user(user)
clear_attempt_from_session
user.update_sign_in!(new_sign_in: true)
@@ -170,4 +188,8 @@ def on_authentication_failure(user, security_measure, failure_reason)
user_agent: request.user_agent
)
end
+
+ def second_factor_attempts_key(user)
+ "2fa_auth_attempts:#{user.id}:#{Time.now.utc.hour}"
+ end
end
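A self-contained sketch of the attempt counter added above (not part of the diff): INCR and EXPIRE run atomically inside a MULTI, and the key embeds the current UTC hour so the window rolls over on its own. It assumes the redis gem and a reachable Redis server; 3600 stands in for ActiveSupport's 1.hour.

require 'redis'

MAX_2FA_ATTEMPTS_PER_HOUR = 10

def second_factor_rate_limited?(redis, user_id)
  key = "2fa_auth_attempts:#{user_id}:#{Time.now.utc.hour}"

  attempts, = redis.multi do |multi|
    multi.incr(key)          # count this attempt
    multi.expire(key, 3600)  # counter expires after an hour regardless
  end

  attempts >= MAX_2FA_ATTEMPTS_PER_HOUR
end

redis = Redis.new
second_factor_rate_limited?(redis, 42) # => false until the 10th attempt this hour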
diff --git a/app/controllers/backups_controller.rb b/app/controllers/backups_controller.rb
new file mode 100644
index 00000000000000..5891da6f6d62a2
--- /dev/null
+++ b/app/controllers/backups_controller.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class BackupsController < ApplicationController
+ include RoutingHelper
+
+ skip_before_action :require_functional!
+
+ before_action :authenticate_user!
+ before_action :set_backup
+
+ def download
+ case Paperclip::Attachment.default_options[:storage]
+ when :s3
+ redirect_to @backup.dump.expiring_url(10)
+ when :fog
+ if Paperclip::Attachment.default_options.dig(:fog_credentials, :openstack_temp_url_key).present?
+ redirect_to @backup.dump.expiring_url(Time.now.utc + 10)
+ else
+ redirect_to full_asset_url(@backup.dump.url)
+ end
+ when :filesystem
+ redirect_to full_asset_url(@backup.dump.url)
+ end
+ end
+
+ private
+
+ def set_backup
+ @backup = current_user.backups.find(params[:id])
+ end
+end
diff --git a/app/controllers/concerns/signature_verification.rb b/app/controllers/concerns/signature_verification.rb
index 9c04ab4ca6b704..c4f7960d43215b 100644
--- a/app/controllers/concerns/signature_verification.rb
+++ b/app/controllers/concerns/signature_verification.rb
@@ -91,14 +91,23 @@ def signed_request_actor
raise SignatureVerificationError, "Public key not found for key #{signature_params['keyId']}" if actor.nil?
signature = Base64.decode64(signature_params['signature'])
- compare_signed_string = build_signed_string
+ compare_signed_string = build_signed_string(include_query_string: true)
return actor unless verify_signature(actor, signature, compare_signed_string).nil?
+ # Compatibility quirk with older Mastodon versions
+ compare_signed_string = build_signed_string(include_query_string: false)
+ return actor unless verify_signature(actor, signature, compare_signed_string).nil?
+
actor = stoplight_wrap_request { actor_refresh_key!(actor) }
raise SignatureVerificationError, "Could not refresh public key #{signature_params['keyId']}" if actor.nil?
+ compare_signed_string = build_signed_string(include_query_string: true)
+ return actor unless verify_signature(actor, signature, compare_signed_string).nil?
+
+ # Compatibility quirk with older Mastodon versions
+ compare_signed_string = build_signed_string(include_query_string: false)
return actor unless verify_signature(actor, signature, compare_signed_string).nil?
fail_with! "Verification failed for #{actor.to_log_human_identifier} #{actor.uri} using rsa-sha256 (RSASSA-PKCS1-v1_5 with SHA-256)", signed_string: compare_signed_string, signature: signature_params['signature']
@@ -177,16 +186,24 @@ def verify_signature(actor, signature, compare_signed_string)
nil
end
- def build_signed_string
+ def build_signed_string(include_query_string: true)
signed_headers.map do |signed_header|
- if signed_header == Request::REQUEST_TARGET
- "#{Request::REQUEST_TARGET}: #{request.method.downcase} #{request.path}"
- elsif signed_header == '(created)'
+ case signed_header
+ when Request::REQUEST_TARGET
+ if include_query_string
+ "#{Request::REQUEST_TARGET}: #{request.method.downcase} #{request.original_fullpath}"
+ else
+ # Current versions of Mastodon incorrectly omit the query string from the (request-target) pseudo-header.
+ # Therefore, temporarily support such incorrect signatures for compatibility.
+ # TODO: remove eventually some time after release of the fixed version
+ "#{Request::REQUEST_TARGET}: #{request.method.downcase} #{request.path}"
+ end
+ when '(created)'
raise SignatureVerificationError, 'Invalid pseudo-header (created) for rsa-sha256' unless signature_algorithm == 'hs2019'
raise SignatureVerificationError, 'Pseudo-header (created) used but corresponding argument missing' if signature_params['created'].blank?
"(created): #{signature_params['created']}"
- elsif signed_header == '(expires)'
+ when '(expires)'
raise SignatureVerificationError, 'Invalid pseudo-header (expires) for rsa-sha256' unless signature_algorithm == 'hs2019'
raise SignatureVerificationError, 'Pseudo-header (expires) used but corresponding argument missing' if signature_params['expires'].blank?
@@ -246,7 +263,7 @@ def actor_from_key_id(key_id)
stoplight_wrap_request { ResolveAccountService.new.call(key_id.gsub(/\Aacct:/, ''), suppress_errors: false) }
elsif !ActivityPub::TagManager.instance.local_uri?(key_id)
account = ActivityPub::TagManager.instance.uri_to_actor(key_id)
- account ||= stoplight_wrap_request { ActivityPub::FetchRemoteKeyService.new.call(key_id, id: false, suppress_errors: false) }
+ account ||= stoplight_wrap_request { ActivityPub::FetchRemoteKeyService.new.call(key_id, suppress_errors: false) }
account
end
rescue Mastodon::PrivateNetworkAddressError => e
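
For illustration, the two `(request-target)` strings the verifier now tries, sketched with hypothetical request values:

```ruby
method       = 'get'
path         = '/inbox'
query_string = 'page=true'

with_query    = "(request-target): #{method} #{path}?#{query_string}"
without_query = "(request-target): #{method} #{path}"

# The verifier first tries the spec-compliant form (including the query string)
# and then falls back to the older form that previous Mastodon versions signed.
puts with_query
puts without_query
```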
diff --git a/app/controllers/concerns/two_factor_authentication_concern.rb b/app/controllers/concerns/two_factor_authentication_concern.rb
index 27f2367a8ec931..ade02f04ba0a43 100644
--- a/app/controllers/concerns/two_factor_authentication_concern.rb
+++ b/app/controllers/concerns/two_factor_authentication_concern.rb
@@ -65,6 +65,11 @@ def authenticate_with_two_factor_via_webauthn(user)
end
def authenticate_with_two_factor_via_otp(user)
+ if check_second_factor_rate_limits(user)
+ flash.now[:alert] = I18n.t('users.rate_limited')
+ return prompt_for_two_factor(user)
+ end
+
if valid_otp_attempt?(user)
on_authentication_success(user, :otp)
else
diff --git a/app/controllers/media_controller.rb b/app/controllers/media_controller.rb
index 3cdd97f067a48a..a90c585acafc0a 100644
--- a/app/controllers/media_controller.rb
+++ b/app/controllers/media_controller.rb
@@ -46,6 +46,6 @@ def check_playable
end
def allow_iframing
- response.headers['X-Frame-Options'] = 'ALLOWALL'
+ response.headers.delete('X-Frame-Options')
end
end
diff --git a/app/controllers/oauth/authorized_applications_controller.rb b/app/controllers/oauth/authorized_applications_controller.rb
index 45151cdd7754d0..63afc4c068f134 100644
--- a/app/controllers/oauth/authorized_applications_controller.rb
+++ b/app/controllers/oauth/authorized_applications_controller.rb
@@ -8,6 +8,8 @@ class Oauth::AuthorizedApplicationsController < Doorkeeper::AuthorizedApplicatio
before_action :require_not_suspended!, only: :destroy
before_action :set_body_classes
+ before_action :set_last_used_at_by_app, only: :index, unless: -> { request.format == :json }
+
skip_before_action :require_functional!
include Localized
@@ -30,4 +32,14 @@ def store_current_location
def require_not_suspended!
forbidden if current_account.suspended?
end
+
+ def set_last_used_at_by_app
+ @last_used_at_by_app = Doorkeeper::AccessToken
+ .select('DISTINCT ON (application_id) application_id, last_used_at')
+ .where(resource_owner_id: current_resource_owner.id)
+ .where.not(last_used_at: nil)
+ .order(application_id: :desc, last_used_at: :desc)
+ .pluck(:application_id, :last_used_at)
+ .to_h
+ end
end
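
A sketch of the SQL this `DISTINCT ON` scope is expected to produce, assuming Doorkeeper's `oauth_access_tokens` table; one row is kept per application, namely the one with the most recent `last_used_at`:

```ruby
# DISTINCT ON keeps the first row per application_id; the ORDER BY makes that
# row the most recently used token for that application.
sql = <<~SQL
  SELECT DISTINCT ON (application_id) application_id, last_used_at
  FROM oauth_access_tokens
  WHERE resource_owner_id = $1 AND last_used_at IS NOT NULL
  ORDER BY application_id DESC, last_used_at DESC
SQL

puts sql
```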
diff --git a/app/controllers/relationships_controller.rb b/app/controllers/relationships_controller.rb
index 96cce55e9e4f6e..de5dc5879280d6 100644
--- a/app/controllers/relationships_controller.rb
+++ b/app/controllers/relationships_controller.rb
@@ -19,6 +19,8 @@ def update
@form.save
rescue ActionController::ParameterMissing
# Do nothing
+ rescue Mastodon::NotPermittedError, ActiveRecord::RecordNotFound
+ flash[:alert] = I18n.t('relationships.follow_failure') if action_from_button == 'follow'
ensure
redirect_to relationships_path(filter_params)
end
@@ -60,8 +62,8 @@ def action_from_button
'unfollow'
elsif params[:remove_from_followers]
'remove_from_followers'
- elsif params[:block_domains]
- 'block_domains'
+ elsif params[:block_domains] || params[:remove_domains_from_followers]
+ 'remove_domains_from_followers'
end
end
diff --git a/app/controllers/settings/two_factor_authentication/webauthn_credentials_controller.rb b/app/controllers/settings/two_factor_authentication/webauthn_credentials_controller.rb
index a50d30f06f32c6..8435155ddcefe7 100644
--- a/app/controllers/settings/two_factor_authentication/webauthn_credentials_controller.rb
+++ b/app/controllers/settings/two_factor_authentication/webauthn_credentials_controller.rb
@@ -52,7 +52,7 @@ def create
end
else
flash[:error] = I18n.t('webauthn_credentials.create.error')
- status = :internal_server_error
+ status = :unprocessable_entity
end
else
flash[:error] = t('webauthn_credentials.create.error')
diff --git a/app/controllers/statuses_controller.rb b/app/controllers/statuses_controller.rb
index 0e0783b4b26f5e..33defaa1c52681 100644
--- a/app/controllers/statuses_controller.rb
+++ b/app/controllers/statuses_controller.rb
@@ -43,7 +43,7 @@ def embed
return not_found if @status.hidden? || @status.reblog?
expires_in 180, public: true
- response.headers['X-Frame-Options'] = 'ALLOWALL'
+ response.headers.delete('X-Frame-Options')
render layout: 'embedded'
end
diff --git a/app/controllers/well_known/webfinger_controller.rb b/app/controllers/well_known/webfinger_controller.rb
index 2b296ea3be46ad..f83a62a1f4d703 100644
--- a/app/controllers/well_known/webfinger_controller.rb
+++ b/app/controllers/well_known/webfinger_controller.rb
@@ -18,7 +18,14 @@ def show
private
def set_account
- @account = Account.find_local!(username_from_resource)
+ username = username_from_resource
+ @account = begin
+ if username == Rails.configuration.x.local_domain
+ Account.representative
+ else
+ Account.find_local!(username)
+ end
+ end
end
def username_from_resource
diff --git a/app/helpers/formatting_helper.rb b/app/helpers/formatting_helper.rb
index c7093148979939..ce87c35243d4e1 100644
--- a/app/helpers/formatting_helper.rb
+++ b/app/helpers/formatting_helper.rb
@@ -58,6 +58,10 @@ def account_bio_format(account)
end
def account_field_value_format(field, with_rel_me: true)
- html_aware_format(field.value, field.account.local?, with_rel_me: with_rel_me, with_domains: true, multiline: false)
+ if field.verified? && !field.account.local?
+ TextFormatter.shortened_link(field.value_for_verification)
+ else
+ html_aware_format(field.value, field.account.local?, with_rel_me: with_rel_me, with_domains: true, multiline: false)
+ end
end
end
diff --git a/app/helpers/jsonld_helper.rb b/app/helpers/jsonld_helper.rb
index e5787fd471b1c9..28ca7fd82ac27f 100644
--- a/app/helpers/jsonld_helper.rb
+++ b/app/helpers/jsonld_helper.rb
@@ -157,8 +157,8 @@ def safe_for_forwarding?(original, compacted)
end
end
- def fetch_resource(uri, id, on_behalf_of = nil)
- unless id
+ def fetch_resource(uri, id_is_known, on_behalf_of = nil, request_options: {})
+ unless id_is_known
json = fetch_resource_without_id_validation(uri, on_behalf_of)
return if !json.is_a?(Hash) || unsupported_uri_scheme?(json['id'])
@@ -166,17 +166,29 @@ def fetch_resource(uri, id, on_behalf_of = nil)
uri = json['id']
end
- json = fetch_resource_without_id_validation(uri, on_behalf_of)
+ json = fetch_resource_without_id_validation(uri, on_behalf_of, request_options: request_options)
json.present? && json['id'] == uri ? json : nil
end
- def fetch_resource_without_id_validation(uri, on_behalf_of = nil, raise_on_temporary_error = false)
+ def fetch_resource_without_id_validation(uri, on_behalf_of = nil, raise_on_temporary_error = false, request_options: {})
on_behalf_of ||= Account.representative
- build_request(uri, on_behalf_of).perform do |response|
+ build_request(uri, on_behalf_of, options: request_options).perform do |response|
raise Mastodon::UnexpectedResponseError, response unless response_successful?(response) || response_error_unsalvageable?(response) || !raise_on_temporary_error
- body_to_json(response.body_with_limit) if response.code == 200
+ body_to_json(response.body_with_limit) if response.code == 200 && valid_activitypub_content_type?(response)
+ end
+ end
+
+ def valid_activitypub_content_type?(response)
+ return true if response.mime_type == 'application/activity+json'
+
+ # When the mime type is `application/ld+json`, we need to check the profile,
+ # but `http.rb` does not parse it for us.
+ return false unless response.mime_type == 'application/ld+json'
+
+ response.headers[HTTP::Headers::CONTENT_TYPE]&.split(';')&.map(&:strip)&.any? do |str|
+ str.start_with?('profile="') && str[9...-1].split.include?('https://www.w3.org/ns/activitystreams')
end
end
@@ -206,8 +218,8 @@ def response_error_unsalvageable?(response)
response.code == 501 || ((400...500).cover?(response.code) && ![401, 408, 429].include?(response.code))
end
- def build_request(uri, on_behalf_of = nil)
- Request.new(:get, uri).tap do |request|
+ def build_request(uri, on_behalf_of = nil, options: {})
+ Request.new(:get, uri, **options).tap do |request|
request.on_behalf_of(on_behalf_of) if on_behalf_of
request.add_headers('Accept' => 'application/activity+json, application/ld+json')
end
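
A standalone sketch of the new `application/ld+json` profile check, using a hypothetical Content-Type value:

```ruby
content_type = 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"'

valid = content_type.split(';').map(&:strip).any? do |str|
  # The profile parameter may list several space-separated URIs
  str.start_with?('profile="') && str[9...-1].split.include?('https://www.w3.org/ns/activitystreams')
end

puts valid # => true
```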
diff --git a/app/javascript/mastodon/actions/compose.js b/app/javascript/mastodon/actions/compose.js
index 72e59293586d1c..3756a975b79b7c 100644
--- a/app/javascript/mastodon/actions/compose.js
+++ b/app/javascript/mastodon/actions/compose.js
@@ -165,11 +165,19 @@ export function submitCompose(routerHistory) {
// API call.
let media_attributes;
if (statusId !== null) {
- media_attributes = media.map(item => ({
- id: item.get('id'),
- description: item.get('description'),
- focus: item.get('focus'),
- }));
+ media_attributes = media.map(item => {
+ let focus;
+
+ if (item.getIn(['meta', 'focus'])) {
+ focus = `${item.getIn(['meta', 'focus', 'x']).toFixed(2)},${item.getIn(['meta', 'focus', 'y']).toFixed(2)}`;
+ }
+
+ return {
+ id: item.get('id'),
+ description: item.get('description'),
+ focus,
+ };
+ });
}
api(getState).request({
diff --git a/app/javascript/mastodon/components/column_back_button.js b/app/javascript/mastodon/components/column_back_button.js
index 5bbf11652b8d4b..5c5226b7ead8ee 100644
--- a/app/javascript/mastodon/components/column_back_button.js
+++ b/app/javascript/mastodon/components/column_back_button.js
@@ -15,10 +15,10 @@ export default class ColumnBackButton extends React.PureComponent {
};
handleClick = () => {
- if (window.history && window.history.length === 1) {
- this.context.router.history.push('/');
- } else {
+ if (window.history && window.history.state) {
this.context.router.history.goBack();
+ } else {
+ this.context.router.history.push('/');
}
};
diff --git a/app/javascript/mastodon/components/column_header.js b/app/javascript/mastodon/components/column_header.js
index 38f6ad60fffc20..9ba783d903c666 100644
--- a/app/javascript/mastodon/components/column_header.js
+++ b/app/javascript/mastodon/components/column_header.js
@@ -43,14 +43,6 @@ class ColumnHeader extends React.PureComponent {
animating: false,
};
- historyBack = () => {
- if (window.history && window.history.length === 1) {
- this.context.router.history.push('/');
- } else {
- this.context.router.history.goBack();
- }
- };
-
handleToggleClick = (e) => {
e.stopPropagation();
this.setState({ collapsed: !this.state.collapsed, animating: true });
@@ -69,7 +61,11 @@ class ColumnHeader extends React.PureComponent {
};
handleBackClick = () => {
- this.historyBack();
+ if (window.history && window.history.state) {
+ this.context.router.history.goBack();
+ } else {
+ this.context.router.history.push('/');
+ }
};
handleTransitionEnd = () => {
diff --git a/app/javascript/mastodon/containers/status_container.js b/app/javascript/mastodon/containers/status_container.js
index 294105f259128f..580f409e94e729 100644
--- a/app/javascript/mastodon/containers/status_container.js
+++ b/app/javascript/mastodon/containers/status_container.js
@@ -56,6 +56,8 @@ const messages = defineMessages({
redraftMessage: { id: 'confirmations.redraft.message', defaultMessage: 'Are you sure you want to delete this status and re-draft it? Favourites and boosts will be lost, and replies to the original post will be orphaned.' },
replyConfirm: { id: 'confirmations.reply.confirm', defaultMessage: 'Reply' },
replyMessage: { id: 'confirmations.reply.message', defaultMessage: 'Replying now will overwrite the message you are currently composing. Are you sure you want to proceed?' },
+ editConfirm: { id: 'confirmations.edit.confirm', defaultMessage: 'Edit' },
+ editMessage: { id: 'confirmations.edit.message', defaultMessage: 'Editing now will overwrite the message you are currently composing. Are you sure you want to proceed?' },
blockDomainConfirm: { id: 'confirmations.domain_block.confirm', defaultMessage: 'Hide entire domain' },
});
@@ -149,7 +151,18 @@ const mapDispatchToProps = (dispatch, { intl, contextType }) => ({
},
onEdit (status, history) {
- dispatch(editStatus(status.get('id'), history));
+ dispatch((_, getState) => {
+ let state = getState();
+ if (state.getIn(['compose', 'text']).trim().length !== 0) {
+ dispatch(openModal('CONFIRM', {
+ message: intl.formatMessage(messages.editMessage),
+ confirm: intl.formatMessage(messages.editConfirm),
+ onConfirm: () => dispatch(editStatus(status.get('id'), history)),
+ }));
+ } else {
+ dispatch(editStatus(status.get('id'), history));
+ }
+ });
},
onTranslate (status) {
diff --git a/app/javascript/mastodon/features/compose/components/language_dropdown.js b/app/javascript/mastodon/features/compose/components/language_dropdown.js
index d96d39f23db62e..82547e0793592d 100644
--- a/app/javascript/mastodon/features/compose/components/language_dropdown.js
+++ b/app/javascript/mastodon/features/compose/components/language_dropdown.js
@@ -210,7 +210,7 @@ class LanguageDropdownMenu extends React.PureComponent {
return (
- {lang[2]}({lang[1]})
+ {lang[2]}({lang[1]})
);
};
diff --git a/app/javascript/mastodon/features/ui/components/header.js b/app/javascript/mastodon/features/ui/components/header.js
index 1384bebda0e1a8..92adc47a9c584e 100644
--- a/app/javascript/mastodon/features/ui/components/header.js
+++ b/app/javascript/mastodon/features/ui/components/header.js
@@ -22,8 +22,8 @@ const mapDispatchToProps = (dispatch) => ({
},
});
-export default @connect(null, mapDispatchToProps)
-@withRouter
+export default @withRouter
+@connect(null, mapDispatchToProps)
class Header extends React.PureComponent {
static contextTypes = {
diff --git a/app/javascript/mastodon/features/ui/components/navigation_panel.js b/app/javascript/mastodon/features/ui/components/navigation_panel.js
index f3b8f8e3b9a5a8..c828ed155d0d42 100644
--- a/app/javascript/mastodon/features/ui/components/navigation_panel.js
+++ b/app/javascript/mastodon/features/ui/components/navigation_panel.js
@@ -84,8 +84,8 @@ class NavigationPanel extends React.Component {
{signedIn && (
-
+
diff --git a/app/javascript/mastodon/features/ui/index.js b/app/javascript/mastodon/features/ui/index.js
index 3ac4c0f2c9f798..e1dfa48ed84709 100644
--- a/app/javascript/mastodon/features/ui/index.js
+++ b/app/javascript/mastodon/features/ui/index.js
@@ -474,10 +474,10 @@ class UI extends React.PureComponent {
};
handleHotkeyBack = () => {
- if (window.history && window.history.length === 1) {
- this.context.router.history.push('/');
- } else {
+ if (window.history && window.history.state) {
this.context.router.history.goBack();
+ } else {
+ this.context.router.history.push('/');
}
};
diff --git a/app/javascript/mastodon/locales/en.json b/app/javascript/mastodon/locales/en.json
index a3c57b1a00505d..530b53a78b9097 100644
--- a/app/javascript/mastodon/locales/en.json
+++ b/app/javascript/mastodon/locales/en.json
@@ -164,6 +164,8 @@
"confirmations.discard_edit_media.message": "You have unsaved changes to the media description or preview, discard them anyway?",
"confirmations.domain_block.confirm": "Block entire domain",
"confirmations.domain_block.message": "Are you really, really sure you want to block the entire {domain}? In most cases a few targeted blocks or mutes are sufficient and preferable. You will not see content from that domain in any public timelines or your notifications. Your followers from that domain will be removed.",
+ "confirmations.edit.confirm": "Edit",
+ "confirmations.edit.message": "Editing now will overwrite the message you are currently composing. Are you sure you want to proceed?",
"confirmations.logout.confirm": "Log out",
"confirmations.logout.message": "Are you sure you want to log out?",
"confirmations.mute.confirm": "Mute",
diff --git a/app/javascript/mastodon/reducers/compose.js b/app/javascript/mastodon/reducers/compose.js
index afc3cd31a8c01c..29dbc650eed224 100644
--- a/app/javascript/mastodon/reducers/compose.js
+++ b/app/javascript/mastodon/reducers/compose.js
@@ -195,11 +195,12 @@ const ignoreSuggestion = (state, position, token, completion, path) => {
};
const sortHashtagsByUse = (state, tags) => {
- const personalHistory = state.get('tagHistory');
+ const personalHistory = state.get('tagHistory').map(tag => tag.toLowerCase());
- return tags.sort((a, b) => {
- const usedA = personalHistory.includes(a.name);
- const usedB = personalHistory.includes(b.name);
+ const tagsWithLowercase = tags.map(t => ({ ...t, lowerName: t.name.toLowerCase() }));
+ const sorted = tagsWithLowercase.sort((a, b) => {
+ const usedA = personalHistory.includes(a.lowerName);
+ const usedB = personalHistory.includes(b.lowerName);
if (usedA === usedB) {
return 0;
@@ -209,6 +210,8 @@ const sortHashtagsByUse = (state, tags) => {
return 1;
}
});
+ sorted.forEach(tag => delete tag.lowerName);
+ return sorted;
};
const insertEmoji = (state, position, emojiData, needsSpace) => {
diff --git a/app/javascript/styles/mastodon-light/diff.scss b/app/javascript/styles/mastodon-light/diff.scss
index c37100a28fe61a..db564b227935cf 100644
--- a/app/javascript/styles/mastodon-light/diff.scss
+++ b/app/javascript/styles/mastodon-light/diff.scss
@@ -254,6 +254,10 @@ html {
border-color: $ui-base-color;
}
+.upload-progress__backdrop {
+ background: $ui-base-color;
+}
+
// Change the background colors of statuses
.focusable:focus {
background: $ui-base-color;
diff --git a/app/javascript/styles/mastodon/admin.scss b/app/javascript/styles/mastodon/admin.scss
index 674fafbe955a75..505f59a4654e76 100644
--- a/app/javascript/styles/mastodon/admin.scss
+++ b/app/javascript/styles/mastodon/admin.scss
@@ -384,7 +384,7 @@ $content-width: 840px;
position: fixed;
z-index: 10;
width: 100%;
- height: calc(100vh - 56px);
+ height: calc(100% - 56px);
left: 0;
bottom: 0;
overflow-y: auto;
diff --git a/app/javascript/styles/mastodon/components.scss b/app/javascript/styles/mastodon/components.scss
index 493efea3052661..e4616c1100dee7 100644
--- a/app/javascript/styles/mastodon/components.scss
+++ b/app/javascript/styles/mastodon/components.scss
@@ -4482,6 +4482,7 @@ a.status-card.compact:hover {
display: flex;
align-items: center;
justify-content: center;
+ text-align: center;
color: $secondary-text-color;
font-size: 18px;
font-weight: 500;
@@ -4516,7 +4517,7 @@ a.status-card.compact:hover {
width: 100%;
height: 6px;
border-radius: 6px;
- background: $ui-base-lighter-color;
+ background: darken($simple-background-color, 8%);
position: relative;
margin-top: 5px;
}
diff --git a/app/lib/account_reach_finder.rb b/app/lib/account_reach_finder.rb
index 706ce8c1fbbbf7..481e254396e453 100644
--- a/app/lib/account_reach_finder.rb
+++ b/app/lib/account_reach_finder.rb
@@ -6,7 +6,7 @@ def initialize(account)
end
def inboxes
- (followers_inboxes + reporters_inboxes + relay_inboxes).uniq
+ (followers_inboxes + reporters_inboxes + recently_mentioned_inboxes + relay_inboxes).uniq
end
private
@@ -19,6 +19,13 @@ def reporters_inboxes
Account.where(id: @account.targeted_reports.select(:account_id)).inboxes
end
+ def recently_mentioned_inboxes
+ cutoff_id = Mastodon::Snowflake.id_at(2.days.ago, with_random: false)
+ recent_statuses = @account.statuses.recent.where(id: cutoff_id...).limit(200)
+
+ Account.joins(:mentions).where(mentions: { status: recent_statuses }).inboxes.take(2000)
+ end
+
def relay_inboxes
Relay.enabled.pluck(:inbox_url)
end
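
A rough sketch of how the two-day cutoff id is derived; the bit layout shown is a simplified assumption about Mastodon's snowflake ids (millisecond timestamp in the high bits):

```ruby
# Simplified assumption: shifting the millisecond timestamp left yields a lower
# bound for ids of statuses created after that point in time.
cutoff_time = Time.now.utc - (2 * 24 * 60 * 60) # two days ago
cutoff_id   = (cutoff_time.to_f * 1000).to_i << 16

# Statuses with id >= cutoff_id were created within the last two days, which is
# the window used to pick recently mentioned accounts.
puts cutoff_id
```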
diff --git a/app/models/account_statuses_filter.rb b/app/lib/account_statuses_filter.rb
similarity index 100%
rename from app/models/account_statuses_filter.rb
rename to app/lib/account_statuses_filter.rb
diff --git a/app/lib/activitypub/activity.rb b/app/lib/activitypub/activity.rb
index 900428e9204736..1738b8fe930767 100644
--- a/app/lib/activitypub/activity.rb
+++ b/app/lib/activitypub/activity.rb
@@ -153,7 +153,8 @@ def follow_from_object
def fetch_remote_original_status
if object_uri.start_with?('http')
return if ActivityPub::TagManager.instance.local_uri?(object_uri)
- ActivityPub::FetchRemoteStatusService.new.call(object_uri, id: true, on_behalf_of: @account.followers.local.first, request_id: @options[:request_id])
+
+ ActivityPub::FetchRemoteStatusService.new.call(object_uri, on_behalf_of: @account.followers.local.first, request_id: @options[:request_id])
elsif @object['url'].present?
::FetchRemoteStatusService.new.call(@object['url'], request_id: @options[:request_id])
end
diff --git a/app/lib/activitypub/activity/flag.rb b/app/lib/activitypub/activity/flag.rb
index b0443849a6b8b5..7539bda422ff38 100644
--- a/app/lib/activitypub/activity/flag.rb
+++ b/app/lib/activitypub/activity/flag.rb
@@ -16,7 +16,7 @@ def perform
@account,
target_account,
status_ids: target_statuses.nil? ? [] : target_statuses.map(&:id),
- comment: @json['content'] || '',
+ comment: report_comment,
uri: report_uri
)
end
@@ -35,4 +35,8 @@ def object_uris
def report_uri
@json['id'] unless @json['id'].nil? || invalid_origin?(@json['id'])
end
+
+ def report_comment
+ (@json['content'] || '')[0...5000]
+ end
end
diff --git a/app/lib/activitypub/activity/update.rb b/app/lib/activitypub/activity/update.rb
index e7c3bc9bf83dec..91ebd3732a828b 100644
--- a/app/lib/activitypub/activity/update.rb
+++ b/app/lib/activitypub/activity/update.rb
@@ -28,6 +28,6 @@ def update_status
return if @status.nil?
- ActivityPub::ProcessStatusUpdateService.new.call(@status, @object, request_id: @options[:request_id])
+ ActivityPub::ProcessStatusUpdateService.new.call(@status, @json, @object, request_id: @options[:request_id])
end
end
diff --git a/app/lib/activitypub/linked_data_signature.rb b/app/lib/activitypub/linked_data_signature.rb
index f90adaf6c5fd3b..dff052ffaadca8 100644
--- a/app/lib/activitypub/linked_data_signature.rb
+++ b/app/lib/activitypub/linked_data_signature.rb
@@ -18,8 +18,8 @@ def verify_actor!
return unless type == 'RsaSignature2017'
- creator = ActivityPub::TagManager.instance.uri_to_actor(creator_uri)
- creator ||= ActivityPub::FetchRemoteKeyService.new.call(creator_uri, id: false)
+ creator = ActivityPub::TagManager.instance.uri_to_actor(creator_uri)
+ creator = ActivityPub::FetchRemoteKeyService.new.call(creator_uri) if creator&.public_key.blank?
return if creator.nil?
@@ -27,9 +27,9 @@ def verify_actor!
document_hash = hash(@json.without('signature'))
to_be_verified = options_hash + document_hash
- if creator.keypair.public_key.verify(OpenSSL::Digest.new('SHA256'), Base64.decode64(signature), to_be_verified)
- creator
- end
+ creator if creator.keypair.public_key.verify(OpenSSL::Digest.new('SHA256'), Base64.decode64(signature), to_be_verified)
+ rescue OpenSSL::PKey::RSAError
+ false
end
def sign!(creator, sign_with: nil)
diff --git a/app/lib/activitypub/parser/status_parser.rb b/app/lib/activitypub/parser/status_parser.rb
index 3ba154d01551fa..45f5fc5bf2d549 100644
--- a/app/lib/activitypub/parser/status_parser.rb
+++ b/app/lib/activitypub/parser/status_parser.rb
@@ -53,7 +53,8 @@ def title
end
def created_at
- @object['published']&.to_datetime
+ datetime = @object['published']&.to_datetime
+ datetime if datetime.present? && (0..9999).cover?(datetime.year)
rescue ArgumentError
nil
end
diff --git a/app/lib/activitypub/tag_manager.rb b/app/lib/activitypub/tag_manager.rb
index 3d6b28ef5814d6..e05c0652268f7c 100644
--- a/app/lib/activitypub/tag_manager.rb
+++ b/app/lib/activitypub/tag_manager.rb
@@ -27,6 +27,8 @@ def url_for(target)
when :note, :comment, :activity
return activity_account_status_url(target.account, target) if target.reblog?
short_account_status_url(target.account, target)
+ when :flag
+ target.uri
end
end
@@ -41,6 +43,8 @@ def uri_for(target)
account_status_url(target.account, target)
when :emoji
emoji_url(target)
+ when :flag
+ target.uri
end
end
diff --git a/app/lib/admin/account_statuses_filter.rb b/app/lib/admin/account_statuses_filter.rb
new file mode 100644
index 00000000000000..94927e4b6806c9
--- /dev/null
+++ b/app/lib/admin/account_statuses_filter.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class Admin::AccountStatusesFilter < AccountStatusesFilter
+ private
+
+ def blocked?
+ false
+ end
+end
diff --git a/app/lib/admin/system_check.rb b/app/lib/admin/system_check.rb
index f512635abb0a8f..89dfcef9f1dcac 100644
--- a/app/lib/admin/system_check.rb
+++ b/app/lib/admin/system_check.rb
@@ -2,6 +2,7 @@
class Admin::SystemCheck
ACTIVE_CHECKS = [
+ Admin::SystemCheck::MediaPrivacyCheck,
Admin::SystemCheck::DatabaseSchemaCheck,
Admin::SystemCheck::SidekiqProcessCheck,
Admin::SystemCheck::RulesCheck,
diff --git a/app/lib/admin/system_check/elasticsearch_check.rb b/app/lib/admin/system_check/elasticsearch_check.rb
index 5b4c12399b8878..0b55be35017a62 100644
--- a/app/lib/admin/system_check/elasticsearch_check.rb
+++ b/app/lib/admin/system_check/elasticsearch_check.rb
@@ -31,7 +31,7 @@ def message
def running_version
@running_version ||= begin
Chewy.client.info['version']['number']
- rescue Faraday::ConnectionFailed
+ rescue Faraday::ConnectionFailed, Elasticsearch::Transport::Transport::Error
nil
end
end
diff --git a/app/lib/admin/system_check/media_privacy_check.rb b/app/lib/admin/system_check/media_privacy_check.rb
new file mode 100644
index 00000000000000..1df05b120ea80a
--- /dev/null
+++ b/app/lib/admin/system_check/media_privacy_check.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+class Admin::SystemCheck::MediaPrivacyCheck < Admin::SystemCheck::BaseCheck
+ include RoutingHelper
+
+ def skip?
+ !current_user.can?(:view_devops)
+ end
+
+ def pass?
+ check_media_uploads!
+ @failure_message.nil?
+ end
+
+ def message
+ Admin::SystemCheck::Message.new(@failure_message, @failure_value, @failure_action, true)
+ end
+
+ private
+
+ def check_media_uploads!
+ if Rails.configuration.x.use_s3
+ check_media_listing_inaccessible_s3!
+ else
+ check_media_listing_inaccessible!
+ end
+ end
+
+ def check_media_listing_inaccessible!
+ full_url = full_asset_url(media_attachment.file.url(:original, false))
+
+ # Check if we can list the uploaded file. If true, that's an error
+ directory_url = Addressable::URI.parse(full_url)
+ directory_url.query = nil
+ filename = directory_url.path.gsub(%r{.*/}, '')
+ directory_url.path = directory_url.path.gsub(%r{/[^/]+\Z}, '/')
+ Request.new(:get, directory_url, allow_local: true).perform do |res|
+ if res.truncated_body&.include?(filename)
+ @failure_message = use_storage? ? :upload_check_privacy_error_object_storage : :upload_check_privacy_error
+ @failure_action = 'https://docs.joinmastodon.org/admin/optional/object-storage/#FS'
+ end
+ end
+ rescue
+ nil
+ end
+
+ def check_media_listing_inaccessible_s3!
+ urls_to_check = []
+ paperclip_options = Paperclip::Attachment.default_options
+ s3_protocol = paperclip_options[:s3_protocol]
+ s3_host_alias = paperclip_options[:s3_host_alias]
+ s3_host_name = paperclip_options[:s3_host_name]
+ bucket_name = paperclip_options.dig(:s3_credentials, :bucket)
+
+ urls_to_check << "#{s3_protocol}://#{s3_host_alias}/" if s3_host_alias.present?
+ urls_to_check << "#{s3_protocol}://#{s3_host_name}/#{bucket_name}/"
+ urls_to_check.uniq.each do |full_url|
+ check_s3_listing!(full_url)
+ break if @failure_message.present?
+ end
+ rescue
+ nil
+ end
+
+ def check_s3_listing!(full_url)
+ bucket_url = Addressable::URI.parse(full_url)
+ bucket_url.path = bucket_url.path.delete_suffix(media_attachment.file.path(:original))
+ bucket_url.query = "max-keys=1&x-random=#{SecureRandom.hex(10)}"
+ Request.new(:get, bucket_url, allow_local: true).perform do |res|
+ if res.truncated_body&.include?('ListBucketResult')
+ @failure_message = :upload_check_privacy_error_object_storage
+ @failure_action = 'https://docs.joinmastodon.org/admin/optional/object-storage/#S3'
+ end
+ end
+ end
+
+ def media_attachment
+ @media_attachment ||= begin
+ attachment = Account.representative.media_attachments.first
+ if attachment.present?
+ attachment.touch # rubocop:disable Rails/SkipsModelValidations
+ attachment
+ else
+ create_test_attachment!
+ end
+ end
+ end
+
+ def create_test_attachment!
+ Tempfile.create(%w(test-upload .jpg), binmode: true) do |tmp_file|
+ tmp_file.write(
+ Base64.decode64(
+ '/9j/4QAiRXhpZgAATU0AKgAAAAgAAQESAAMAAAABAAYAAAA' \
+ 'AAAD/2wCEAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBA' \
+ 'QEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE' \
+ 'BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAf/AABEIAAEAAgMBEQACEQEDEQH/x' \
+ 'ABKAAEAAAAAAAAAAAAAAAAAAAALEAEAAAAAAAAAAAAAAAAAAAAAAQEAAAAAAAAAAAAAAAA' \
+ 'AAAAAEQEAAAAAAAAAAAAAAAAAAAAA/9oADAMBAAIRAxEAPwA/8H//2Q=='
+ )
+ )
+ tmp_file.flush
+ Account.representative.media_attachments.create!(file: tmp_file)
+ end
+ end
+end
diff --git a/app/lib/admin/system_check/message.rb b/app/lib/admin/system_check/message.rb
index bfcad3bf3d00a6..ad8d4b6073872a 100644
--- a/app/lib/admin/system_check/message.rb
+++ b/app/lib/admin/system_check/message.rb
@@ -1,11 +1,12 @@
# frozen_string_literal: true
class Admin::SystemCheck::Message
- attr_reader :key, :value, :action
+ attr_reader :key, :value, :action, :critical
- def initialize(key, value = nil, action = nil)
- @key = key
- @value = value
- @action = action
+ def initialize(key, value = nil, action = nil, critical = false)
+ @key = key
+ @value = value
+ @action = action
+ @critical = critical
end
end
diff --git a/app/lib/application_extension.rb b/app/lib/application_extension.rb
index d61ec0e6e7f121..d1222656b75e18 100644
--- a/app/lib/application_extension.rb
+++ b/app/lib/application_extension.rb
@@ -4,16 +4,32 @@ module ApplicationExtension
extend ActiveSupport::Concern
included do
+ include Redisable
+
validates :name, length: { maximum: 60 }
validates :website, url: true, length: { maximum: 2_000 }, if: :website?
validates :redirect_uri, length: { maximum: 2_000 }
- end
- def most_recently_used_access_token
- @most_recently_used_access_token ||= access_tokens.where.not(last_used_at: nil).order(last_used_at: :desc).first
+ # The relationship between Applications and AccessTokens uses
+ # dependent: delete_all, which means the ActiveRecord callback in
+ # AccessTokenExtension is not run, so we manually announce to streaming
+ # that these tokens are being deleted.
+ before_destroy :push_to_streaming_api, prepend: true
end
def confirmation_redirect_uri
redirect_uri.lines.first.strip
end
+
+ def push_to_streaming_api
+ # TODO: #28793 Combine into a single topic
+ payload = Oj.dump(event: :kill)
+ access_tokens.in_batches do |tokens|
+ redis.pipelined do |pipeline|
+ tokens.ids.each do |id|
+ pipeline.publish("timeline:access_token:#{id}", payload)
+ end
+ end
+ end
+ end
end
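
A sketch of the streaming announcement, assuming the `redis` and `oj` gems and hypothetical token ids; `mode: :compat` stands in for the JSON-compatible Oj configuration the app relies on:

```ruby
require 'oj'
require 'redis'

redis     = Redis.new
payload   = Oj.dump({ event: :kill }, mode: :compat) # => '{"event":"kill"}'
token_ids = [1, 2, 3] # hypothetical access token ids

# One publish per token, batched into a single round trip.
redis.pipelined do |pipeline|
  token_ids.each do |id|
    pipeline.publish("timeline:access_token:#{id}", payload)
  end
end
```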
diff --git a/app/lib/importer/base_importer.rb b/app/lib/importer/base_importer.rb
index ea522c600cf2e4..7009db11f7bb26 100644
--- a/app/lib/importer/base_importer.rb
+++ b/app/lib/importer/base_importer.rb
@@ -34,7 +34,9 @@ def optimize_for_search!
# Estimate the amount of documents that would be indexed. Not exact!
# @returns [Integer]
def estimate!
- ActiveRecord::Base.connection_pool.with_connection { |connection| connection.select_one("SELECT reltuples AS estimate FROM pg_class WHERE relname = '#{index.adapter.target.table_name}'")['estimate'].to_i }
+ reltuples = ActiveRecord::Base.connection_pool.with_connection { |connection| connection.select_one("SELECT reltuples FROM pg_class WHERE relname = '#{index.adapter.target.table_name}'")['reltuples'].to_i }
+ # If the table has never yet been vacuumed or analyzed, reltuples contains -1
+ [reltuples, 0].max
end
# Import data from the database into the index
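
A sketch of the estimate query against `pg_class`, assuming the `pg` gem and an example table name; the clamp covers tables that have never been vacuumed or analyzed, for which `reltuples` is -1:

```ruby
require 'pg'

conn = PG.connect(dbname: 'mastodon_development') # example connection

# reltuples is a float estimate maintained by VACUUM/ANALYZE
reltuples = conn.exec_params('SELECT reltuples FROM pg_class WHERE relname = $1', ['statuses'])
                .getvalue(0, 0).to_f

estimate = [reltuples, 0].max.to_i
puts estimate
```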
diff --git a/app/lib/link_details_extractor.rb b/app/lib/link_details_extractor.rb
index 2e0672abe1689a..9d774842506d12 100644
--- a/app/lib/link_details_extractor.rb
+++ b/app/lib/link_details_extractor.rb
@@ -140,7 +140,7 @@ def link_type
end
def html
- player_url.present? ? content_tag(:iframe, nil, src: player_url, width: width, height: height, allowtransparency: 'true', scrolling: 'no', frameborder: '0') : nil
+ player_url.present? ? content_tag(:iframe, nil, src: player_url, width: width, height: height, allowfullscreen: 'true', allowtransparency: 'true', scrolling: 'no', frameborder: '0') : nil
end
def width
diff --git a/app/lib/plain_text_formatter.rb b/app/lib/plain_text_formatter.rb
index 08aa29696450a8..d1ff6808b2a995 100644
--- a/app/lib/plain_text_formatter.rb
+++ b/app/lib/plain_text_formatter.rb
@@ -1,9 +1,7 @@
# frozen_string_literal: true
class PlainTextFormatter
- include ActionView::Helpers::TextHelper
-
- NEWLINE_TAGS_RE = /(<br \/>|<br>|<\/p>)+/.freeze
+ NEWLINE_TAGS_RE = %r{(<br />|<br>|</p>)+}
attr_reader :text, :local
@@ -18,7 +16,10 @@ def to_s
if local?
text
else
- strip_tags(insert_newlines).chomp
+ node = Nokogiri::HTML.fragment(insert_newlines)
+ # Elements that are entirely removed with our Sanitize config
+ node.xpath('.//iframe|.//math|.//noembed|.//noframes|.//noscript|.//plaintext|.//script|.//style|.//svg|.//xmp').remove
+ node.text.chomp
end
end
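
A standalone sketch of the Nokogiri-based stripping, assuming the `nokogiri` gem; the removal list is shortened to `script`/`style` for brevity, and removed elements lose their text content entirely (unlike `strip_tags`):

```ruby
require 'nokogiri'

NEWLINE_TAGS_RE = %r{(<br />|<br>|</p>)+}

html = '<p>Hello<br>world</p><script>alert(1)</script>'

# Turn paragraph/line breaks into newlines first, then let Nokogiri drop the markup.
node = Nokogiri::HTML.fragment(html.gsub(NEWLINE_TAGS_RE, "\n"))
node.xpath('.//script|.//style').remove
puts node.text.chomp # => "Hello\nworld"
```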
diff --git a/app/lib/request.rb b/app/lib/request.rb
index 0508169dcba3c9..f45cacfcd73f3c 100644
--- a/app/lib/request.rb
+++ b/app/lib/request.rb
@@ -4,14 +4,60 @@
require 'socket'
require 'resolv'
-# Monkey-patch the HTTP.rb timeout class to avoid using a timeout block
+# Use our own timeout class to avoid using HTTP.rb's timeout block
# around the Socket#open method, since we use our own timeout blocks inside
# that method
-class HTTP::Timeout::PerOperation
+#
+# Also changes how the read timeout behaves so that it is cumulative (closer
+# to HTTP::Timeout::Global, but still having distinct timeouts for other
+# operation types)
+class PerOperationWithDeadline < HTTP::Timeout::PerOperation
+ READ_DEADLINE = 30
+
+ def initialize(*args)
+ super
+
+ @read_deadline = options.fetch(:read_deadline, READ_DEADLINE)
+ end
+
def connect(socket_class, host, port, nodelay = false)
@socket = socket_class.open(host, port)
@socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) if nodelay
end
+
+ # Reset deadline when the connection is re-used for different requests
+ def reset_counter
+ @deadline = nil
+ end
+
+ # Read data from the socket
+ def readpartial(size, buffer = nil)
+ @deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_deadline
+
+ timeout = false
+ loop do
+ result = @socket.read_nonblock(size, buffer, exception: false)
+
+ return :eof if result.nil?
+
+ remaining_time = @deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
+ raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout
+ raise HTTP::TimeoutError, "Read timed out after a total of #{@read_deadline} seconds" if remaining_time <= 0
+ return result if result != :wait_readable
+
+ # Mark the socket as timed out. Why is the timeout not raised immediately?
+ # There seems to be a race condition at the network level between calling
+ # #read_nonblock and #wait_readable, in which #read_nonblock signals that it is waiting
+ # for reads, but #wait_readable then returns nil before the full wait time has elapsed.
+ # Normally this would be a wait/read timeout, but it can also mean that the socket has
+ # been closed by the server. Therefore we "mark" the socket as timed out and try to read
+ # more bytes. If that returns :eof, all is well and there was no timeout; otherwise the
+ # first timeout was a genuine timeout.
+ # This hack is needed because io/wait#wait_readable does not report when the socket has
+ # been closed by the server, and HTTP::Parser does not provide the expected size of the chunks.
+ timeout = true unless @socket.to_io.wait_readable([remaining_time, @read_timeout].min)
+ end
+ end
end
class Request
@@ -20,7 +66,7 @@ class Request
# We enforce a 5s timeout on DNS resolving, 5s timeout on socket opening
# and 5s timeout on the TLS handshake, meaning the worst case should take
# about 15s in total
- TIMEOUT = { connect: 5, read: 10, write: 10 }.freeze
+ TIMEOUT = { connect_timeout: 5, read_timeout: 10, write_timeout: 10, read_deadline: 30 }.freeze
include RoutingHelper
@@ -31,7 +77,9 @@ def initialize(verb, url, **options)
@url = Addressable::URI.parse(url).normalize
@http_client = options.delete(:http_client)
@allow_local = options.delete(:allow_local)
+ @full_path = options.delete(:with_query_string)
@options = options.merge(socket_class: use_proxy? || @allow_local ? ProxySocket : Socket)
+ @options = @options.merge(timeout_class: PerOperationWithDeadline, timeout_options: TIMEOUT)
@options = @options.merge(proxy_url) if use_proxy?
@headers = {}
@@ -92,14 +140,14 @@ def valid_url?(url)
end
def http_client
- HTTP.use(:auto_inflate).timeout(TIMEOUT.dup).follow(max_hops: 3)
+ HTTP.use(:auto_inflate).follow(max_hops: 3)
end
end
private
def set_common_headers!
- @headers[REQUEST_TARGET] = "#{@verb} #{@url.path}"
+ @headers[REQUEST_TARGET] = request_target
@headers['User-Agent'] = Mastodon::Version.user_agent
@headers['Host'] = @url.host
@headers['Date'] = Time.now.utc.httpdate
@@ -110,6 +158,14 @@ def set_digest!
@headers['Digest'] = "SHA-256=#{Digest::SHA256.base64digest(@options[:body])}"
end
+ def request_target
+ if @url.query.nil? || !@full_path
+ "#{@verb} #{@url.path}"
+ else
+ "#{@verb} #{@url.path}?#{@url.query}"
+ end
+ end
+
def signature
algorithm = 'rsa-sha256'
signature = Base64.strict_encode64(@keypair.sign(OpenSSL::Digest.new('SHA256'), signed_string))
@@ -238,11 +294,11 @@ def open(host, *args)
end
until socks.empty?
- _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect])
+ _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect_timeout])
if available_socks.nil?
socks.each(&:close)
- raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect]} seconds"
+ raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect_timeout]} seconds"
end
available_socks.each do |sock|
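
A sketch of the cumulative read-deadline logic in isolation, using the same 10-second per-read timeout and 30-second overall deadline as the new TIMEOUT constant:

```ruby
read_timeout  = 10 # seconds allowed for a single read
read_deadline = 30 # seconds allowed for all reads of one request combined

deadline = Process.clock_gettime(Process::CLOCK_MONOTONIC) + read_deadline

# Before each read, wait at most the smaller of the per-read timeout and
# whatever remains of the overall deadline.
remaining_time = deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
raise 'read deadline exceeded' if remaining_time <= 0

wait_budget = [remaining_time, read_timeout].min
puts format('waiting up to %.1fs for more data', wait_budget)
```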
diff --git a/app/lib/scope_parser.rb b/app/lib/scope_parser.rb
index d268688c83edfd..45eb3c7b93831c 100644
--- a/app/lib/scope_parser.rb
+++ b/app/lib/scope_parser.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class ScopeParser < Parslet::Parser
- rule(:term) { match('[a-z]').repeat(1).as(:term) }
+ rule(:term) { match('[a-z_]').repeat(1).as(:term) }
rule(:colon) { str(':') }
rule(:access) { (str('write') | str('read')).as(:access) }
rule(:namespace) { str('admin').as(:namespace) }
diff --git a/app/lib/status_reach_finder.rb b/app/lib/status_reach_finder.rb
index 36fb0e80fb88c4..17e42e3ec38fe5 100644
--- a/app/lib/status_reach_finder.rb
+++ b/app/lib/status_reach_finder.rb
@@ -16,28 +16,28 @@ def inboxes
private
def reached_account_inboxes
+ Account.where(id: reached_account_ids).inboxes
+ end
+
+ def reached_account_ids
# When the status is a reblog, there are no interactions with it
# directly, we assume all interactions are with the original one
if @status.reblog?
- []
+ [reblog_of_account_id]
else
- Account.where(id: reached_account_ids).inboxes
- end
- end
-
- def reached_account_ids
- [
- replied_to_account_id,
- reblog_of_account_id,
- mentioned_account_ids,
- reblogs_account_ids,
- favourites_account_ids,
- replies_account_ids,
- ].tap do |arr|
- arr.flatten!
- arr.compact!
- arr.uniq!
+ [
+ replied_to_account_id,
+ reblog_of_account_id,
+ mentioned_account_ids,
+ reblogs_account_ids,
+ favourites_account_ids,
+ replies_account_ids,
+ ].tap do |arr|
+ arr.flatten!
+ arr.compact!
+ arr.uniq!
+ end
end
end
diff --git a/app/lib/tag_manager.rb b/app/lib/tag_manager.rb
index a1d12a654eb43a..2e929d6e3f3f36 100644
--- a/app/lib/tag_manager.rb
+++ b/app/lib/tag_manager.rb
@@ -7,18 +7,18 @@ class TagManager
include RoutingHelper
def web_domain?(domain)
- domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.web_domain).zero?
+ domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.web_domain).zero?
end
def local_domain?(domain)
- domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.local_domain).zero?
+ domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.local_domain).zero?
end
def normalize_domain(domain)
return if domain.nil?
uri = Addressable::URI.new
- uri.host = domain.gsub(/[\/]/, '')
+ uri.host = domain.delete_suffix('/')
uri.normalized_host
end
@@ -28,7 +28,7 @@ def local_url?(url)
domain = uri.host + (uri.port ? ":#{uri.port}" : '')
TagManager.instance.web_domain?(domain)
- rescue Addressable::URI::InvalidURIError
+ rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
false
end
end
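
A small sketch of why `gsub` was replaced with `delete_suffix`: the old call removed every slash in the string, not just a trailing one:

```ruby
domain = 'social.example/'

puts domain.gsub(%r{/}, '')    # => "social.example"
puts domain.delete_suffix('/') # => "social.example"

# The difference shows up when a slash appears anywhere else in the input:
input = 'social.example/users'

puts input.gsub(%r{/}, '')    # => "social.exampleusers" (mangled)
puts input.delete_suffix('/') # => "social.example/users" (left intact)
```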
diff --git a/app/lib/text_formatter.rb b/app/lib/text_formatter.rb
index 48e2fc2338dcc1..e51266a08ad4c1 100644
--- a/app/lib/text_formatter.rb
+++ b/app/lib/text_formatter.rb
@@ -48,6 +48,26 @@ def to_s
html.html_safe # rubocop:disable Rails/OutputSafety
end
+ class << self
+ include ERB::Util
+
+ def shortened_link(url, rel_me: false)
+ url = Addressable::URI.parse(url).to_s
+ rel = rel_me ? (DEFAULT_REL + %w(me)) : DEFAULT_REL
+
+ prefix = url.match(URL_PREFIX_REGEX).to_s
+ display_url = url[prefix.length, 30]
+ suffix = url[prefix.length + 30..-1]
+ cutoff = url[prefix.length..-1].length > 30
+
+ <<~HTML.squish.html_safe # rubocop:disable Rails/OutputSafety
+ #{h(prefix)}#{h(display_url)}#{h(suffix)}
+ HTML
+ rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
+ h(url)
+ end
+ end
+
private
def rewrite
@@ -70,19 +90,7 @@ def rewrite
end
def link_to_url(entity)
- url = Addressable::URI.parse(entity[:url]).to_s
- rel = with_rel_me? ? (DEFAULT_REL + %w(me)) : DEFAULT_REL
-
- prefix = url.match(URL_PREFIX_REGEX).to_s
- display_url = url[prefix.length, 30]
- suffix = url[prefix.length + 30..-1]
- cutoff = url[prefix.length..-1].length > 30
-
- <<~HTML.squish
- #{h(prefix)}#{h(display_url)}#{h(suffix)}
- HTML
- rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
- h(entity[:url])
+ TextFormatter.shortened_link(entity[:url], rel_me: with_rel_me?)
end
def link_to_hashtag(entity)
diff --git a/app/lib/translation_service/deepl.rb b/app/lib/translation_service/deepl.rb
index 537fd24c089662..2b4746a4d038be 100644
--- a/app/lib/translation_service/deepl.rb
+++ b/app/lib/translation_service/deepl.rb
@@ -46,7 +46,7 @@ def transform_response(str)
raise UnexpectedResponseError unless json.is_a?(Hash)
- Translation.new(text: json.dig('translations', 0, 'text'), detected_source_language: json.dig('translations', 0, 'detected_source_language')&.downcase, provider: 'DeepL.com')
+ Translation.new(text: Sanitize.fragment(json.dig('translations', 0, 'text'), Sanitize::Config::MASTODON_STRICT), detected_source_language: json.dig('translations', 0, 'detected_source_language')&.downcase, provider: 'DeepL.com')
rescue Oj::ParseError
raise UnexpectedResponseError
end
diff --git a/app/lib/translation_service/libre_translate.rb b/app/lib/translation_service/libre_translate.rb
index 4ebe21e4543d14..2a06cea59bddde 100644
--- a/app/lib/translation_service/libre_translate.rb
+++ b/app/lib/translation_service/libre_translate.rb
@@ -37,7 +37,7 @@ def transform_response(str, source_language)
raise UnexpectedResponseError unless json.is_a?(Hash)
- Translation.new(text: json['translatedText'], detected_source_language: source_language, provider: 'LibreTranslate')
+ Translation.new(text: Sanitize.fragment(json['translatedText'], Sanitize::Config::MASTODON_STRICT), detected_source_language: source_language, provider: 'LibreTranslate')
rescue Oj::ParseError
raise UnexpectedResponseError
end
diff --git a/app/lib/vacuum/access_tokens_vacuum.rb b/app/lib/vacuum/access_tokens_vacuum.rb
index 7b91f74a512a11..a224f6d6380e58 100644
--- a/app/lib/vacuum/access_tokens_vacuum.rb
+++ b/app/lib/vacuum/access_tokens_vacuum.rb
@@ -9,10 +9,12 @@ def perform
private
def vacuum_revoked_access_tokens!
- Doorkeeper::AccessToken.where.not(revoked_at: nil).where('revoked_at < NOW()').delete_all
+ Doorkeeper::AccessToken.where.not(expires_in: nil).where('created_at + make_interval(secs => expires_in) < NOW()').in_batches.delete_all
+ Doorkeeper::AccessToken.where.not(revoked_at: nil).where('revoked_at < NOW()').in_batches.delete_all
end
def vacuum_revoked_access_grants!
- Doorkeeper::AccessGrant.where.not(revoked_at: nil).where('revoked_at < NOW()').delete_all
+ Doorkeeper::AccessGrant.where.not(expires_in: nil).where('created_at + make_interval(secs => expires_in) < NOW()').in_batches.delete_all
+ Doorkeeper::AccessGrant.where.not(revoked_at: nil).where('revoked_at < NOW()').in_batches.delete_all
end
end
diff --git a/app/lib/video_metadata_extractor.rb b/app/lib/video_metadata_extractor.rb
index 2896620cb21b09..f27d34868a2798 100644
--- a/app/lib/video_metadata_extractor.rb
+++ b/app/lib/video_metadata_extractor.rb
@@ -43,6 +43,9 @@ def parse_metadata
@height = video_stream[:height]
@frame_rate = video_stream[:avg_frame_rate] == '0/0' ? nil : Rational(video_stream[:avg_frame_rate])
@r_frame_rate = video_stream[:r_frame_rate] == '0/0' ? nil : Rational(video_stream[:r_frame_rate])
+ # For some video streams the frame rate reported by `ffprobe` is 0/0, and for those streams
+ # we should use `r_frame_rate` instead. Screencasts generated by GNOME Screencast have this issue.
+ @frame_rate ||= @r_frame_rate
end
if (audio_stream = audio_streams.first)
diff --git a/app/mailers/application_mailer.rb b/app/mailers/application_mailer.rb
index a37682eca63ab6..4edcb75f31bef4 100644
--- a/app/mailers/application_mailer.rb
+++ b/app/mailers/application_mailer.rb
@@ -7,6 +7,8 @@ class ApplicationMailer < ActionMailer::Base
helper :instance
helper :formatting
+ after_action :set_autoreply_headers!
+
protected
def locale_for_account(account)
@@ -14,4 +16,10 @@ def locale_for_account(account)
yield
end
end
+
+ def set_autoreply_headers!
+ headers['Precedence'] = 'list'
+ headers['X-Auto-Response-Suppress'] = 'All'
+ headers['Auto-Submitted'] = 'auto-generated'
+ end
end
diff --git a/app/models/account.rb b/app/models/account.rb
index 9449b49fee26ff..1c74ff45107d2d 100644
--- a/app/models/account.rb
+++ b/app/models/account.rb
@@ -61,9 +61,9 @@ class Account < ApplicationRecord
trust_level
)
- USERNAME_RE = /[a-z0-9_]+([a-z0-9_\.-]+[a-z0-9_]+)?/i
- MENTION_RE = /(?<=^|[^\/[:word:]])@((#{USERNAME_RE})(?:@[[:word:]\.\-]+[[:word:]]+)?)/i
- URL_PREFIX_RE = /\Ahttp(s?):\/\/[^\/]+/
+ USERNAME_RE = /[a-z0-9_]+([a-z0-9_.-]+[a-z0-9_]+)?/i
+ MENTION_RE = %r{(?<=^|[^/[:word:]])@((#{USERNAME_RE})(?:@[[:word:].-]+[[:word:]]+)?)}i
+ URL_PREFIX_RE = %r{\Ahttp(s?)://[^/]+}
scope :bots, -> { where(actor_type: %w(Application Service)) }
scope :groups, -> { where(actor_type: 'Group') }
scope :alphabetic, -> { order(domain: :asc, username: :asc) }
- scope :matches_username, ->(value) { where(arel_table[:username].matches("#{value}%")) }
+ scope :matches_username, ->(value) { where('lower((username)::text) LIKE lower(?)', "#{value}%") }
scope :matches_display_name, ->(value) { where(arel_table[:display_name].matches("#{value}%")) }
scope :matches_domain, ->(value) { where(arel_table[:domain].matches("%#{value}%")) }
scope :without_unapproved, -> { left_outer_joins(:user).remote.or(left_outer_joins(:user).merge(User.approved.confirmed)) }
scope :searchable, -> { without_unapproved.without_suspended.where(moved_to_account_id: nil) }
- scope :discoverable, -> { searchable.without_silenced.where(discoverable: true).left_outer_joins(:account_stat) }
+ scope :discoverable, -> { searchable.without_silenced.where(discoverable: true).joins(:account_stat) }
scope :followable_by, ->(account) { joins(arel_table.join(Follow.arel_table, Arel::Nodes::OuterJoin).on(arel_table[:id].eq(Follow.arel_table[:target_account_id]).and(Follow.arel_table[:account_id].eq(account.id))).join_sources).where(Follow.arel_table[:id].eq(nil)).joins(arel_table.join(FollowRequest.arel_table, Arel::Nodes::OuterJoin).on(arel_table[:id].eq(FollowRequest.arel_table[:target_account_id]).and(FollowRequest.arel_table[:account_id].eq(account.id))).join_sources).where(FollowRequest.arel_table[:id].eq(nil)) }
- scope :by_recent_status, -> { order(Arel.sql('(case when account_stats.last_status_at is null then 1 else 0 end) asc, account_stats.last_status_at desc, accounts.id desc')) }
- scope :by_recent_sign_in, -> { order(Arel.sql('(case when users.current_sign_in_at is null then 1 else 0 end) asc, users.current_sign_in_at desc, accounts.id desc')) }
+ scope :by_recent_status, -> { includes(:account_stat).merge(AccountStat.order('last_status_at DESC NULLS LAST')).references(:account_stat) }
+ scope :by_recent_sign_in, -> { order(Arel.sql('users.current_sign_in_at DESC NULLS LAST')) }
scope :popular, -> { order('account_stats.followers_count desc') }
scope :by_domain_and_subdomains, ->(domain) { where(domain: domain).or(where(arel_table[:domain].matches("%.#{domain}"))) }
scope :not_excluded_by_account, ->(account) { where.not(id: account.excluded_from_timeline_account_ids) }
diff --git a/app/models/account_conversation.rb b/app/models/account_conversation.rb
index 45e74bbeb31920..38ee247cfdc206 100644
--- a/app/models/account_conversation.rb
+++ b/app/models/account_conversation.rb
@@ -16,34 +16,44 @@
class AccountConversation < ApplicationRecord
include Redisable
+ attr_writer :participant_accounts
+
+ before_validation :set_last_status
after_commit :push_to_streaming_api
belongs_to :account
belongs_to :conversation
belongs_to :last_status, class_name: 'Status'
- before_validation :set_last_status
-
def participant_account_ids=(arr)
self[:participant_account_ids] = arr.sort
+ @participant_accounts = nil
end
def participant_accounts
- if participant_account_ids.empty?
- [account]
- else
- participants = Account.where(id: participant_account_ids)
- participants.empty? ? [account] : participants
- end
+ @participant_accounts ||= Account.where(id: participant_account_ids).to_a
+ @participant_accounts.presence || [account]
end
class << self
def to_a_paginated_by_id(limit, options = {})
- if options[:min_id]
- paginate_by_min_id(limit, options[:min_id], options[:max_id]).reverse
- else
- paginate_by_max_id(limit, options[:max_id], options[:since_id]).to_a
+ array = begin
+ if options[:min_id]
+ paginate_by_min_id(limit, options[:min_id], options[:max_id]).reverse
+ else
+ paginate_by_max_id(limit, options[:max_id], options[:since_id]).to_a
+ end
end
+
+ # Preload participants
+ participant_ids = array.flat_map(&:participant_account_ids)
+ accounts_by_id = Account.where(id: participant_ids).index_by(&:id)
+
+ array.each do |conversation|
+ conversation.participant_accounts = conversation.participant_account_ids.filter_map { |id| accounts_by_id[id] }
+ end
+
+ array
end
def paginate_by_min_id(limit, min_id = nil, max_id = nil)
diff --git a/app/models/admin/action_log_filter.rb b/app/models/admin/action_log_filter.rb
index f89d452ef4f7bc..0117974628b7b7 100644
--- a/app/models/admin/action_log_filter.rb
+++ b/app/models/admin/action_log_filter.rb
@@ -38,7 +38,7 @@ class Admin::ActionLogFilter
destroy_status: { target_type: 'Status', action: 'destroy' }.freeze,
destroy_user_role: { target_type: 'UserRole', action: 'destroy' }.freeze,
destroy_canonical_email_block: { target_type: 'CanonicalEmailBlock', action: 'destroy' }.freeze,
- disable_2fa_user: { target_type: 'User', action: 'disable' }.freeze,
+ disable_2fa_user: { target_type: 'User', action: 'disable_2fa' }.freeze,
disable_custom_emoji: { target_type: 'CustomEmoji', action: 'disable' }.freeze,
disable_user: { target_type: 'User', action: 'disable' }.freeze,
enable_custom_emoji: { target_type: 'CustomEmoji', action: 'enable' }.freeze,
diff --git a/app/models/admin/status_batch_action.rb b/app/models/admin/status_batch_action.rb
index b8bdec7223fe36..6641688788847d 100644
--- a/app/models/admin/status_batch_action.rb
+++ b/app/models/admin/status_batch_action.rb
@@ -140,6 +140,6 @@ def report_params
end
def allowed_status_ids
- AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
+ Admin::AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
end
end
diff --git a/app/models/backup.rb b/app/models/backup.rb
index d242fd62c19d1d..8823e7cae56488 100644
--- a/app/models/backup.rb
+++ b/app/models/backup.rb
@@ -17,6 +17,6 @@
class Backup < ApplicationRecord
belongs_to :user, inverse_of: :backups
- has_attached_file :dump
+ has_attached_file :dump, s3_permissions: ->(*) { ENV['S3_PERMISSION'] == '' ? nil : 'private' }
do_not_validate_attachment_file_type :dump
end
diff --git a/app/models/concerns/account_avatar.rb b/app/models/concerns/account_avatar.rb
index e9b8b4adba23f2..b5919a9a23d585 100644
--- a/app/models/concerns/account_avatar.rb
+++ b/app/models/concerns/account_avatar.rb
@@ -18,7 +18,7 @@ def avatar_styles(file)
included do
# Avatar upload
- has_attached_file :avatar, styles: ->(f) { avatar_styles(f) }, convert_options: { all: '+profile "!icc,*" +set modify-date +set create-date' }, processors: [:lazy_thumbnail]
+ has_attached_file :avatar, styles: ->(f) { avatar_styles(f) }, convert_options: { all: '+profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, processors: [:lazy_thumbnail]
validates_attachment_content_type :avatar, content_type: IMAGE_MIME_TYPES
validates_attachment_size :avatar, less_than: LIMIT
remotable_attachment :avatar, LIMIT, suppress_errors: false
diff --git a/app/models/concerns/account_header.rb b/app/models/concerns/account_header.rb
index 0d197abfcd181d..e184880f93af39 100644
--- a/app/models/concerns/account_header.rb
+++ b/app/models/concerns/account_header.rb
@@ -19,7 +19,7 @@ def header_styles(file)
included do
# Header upload
- has_attached_file :header, styles: ->(f) { header_styles(f) }, convert_options: { all: '+profile "!icc,*" +set modify-date +set create-date' }, processors: [:lazy_thumbnail]
+ has_attached_file :header, styles: ->(f) { header_styles(f) }, convert_options: { all: '+profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, processors: [:lazy_thumbnail]
validates_attachment_content_type :header, content_type: IMAGE_MIME_TYPES
validates_attachment_size :header, less_than: LIMIT
remotable_attachment :header, LIMIT, suppress_errors: false
diff --git a/app/models/concerns/attachmentable.rb b/app/models/concerns/attachmentable.rb
index 01fae4236fea12..a61c78dda170ee 100644
--- a/app/models/concerns/attachmentable.rb
+++ b/app/models/concerns/attachmentable.rb
@@ -22,15 +22,14 @@ module Attachmentable
included do
def self.has_attached_file(name, options = {}) # rubocop:disable Naming/PredicateName
- options = { validate_media_type: false }.merge(options)
super(name, options)
- send(:"before_#{name}_post_process") do
+
+ send(:"before_#{name}_validate", prepend: true) do
attachment = send(name)
check_image_dimension(attachment)
set_file_content_type(attachment)
obfuscate_file_name(attachment)
set_file_extension(attachment)
- Paperclip::Validators::MediaTypeSpoofDetectionValidator.new(attributes: [name]).validate(self)
end
end
end
@@ -53,9 +52,13 @@ def check_image_dimension(attachment)
return if attachment.blank? || !/image.*/.match?(attachment.content_type) || attachment.queued_for_write[:original].blank?
width, height = FastImage.size(attachment.queued_for_write[:original].path)
- matrix_limit = attachment.content_type == 'image/gif' ? GIF_MATRIX_LIMIT : MAX_MATRIX_LIMIT
+ return unless width.present? && height.present?
- raise Mastodon::DimensionsValidationError, "#{width}x#{height} images are not supported" if width.present? && height.present? && (width * height > matrix_limit)
+ if attachment.content_type == 'image/gif' && width * height > GIF_MATRIX_LIMIT
+ raise Mastodon::DimensionsValidationError, "#{width}x#{height} GIF files are not supported"
+ elsif width * height > MAX_MATRIX_LIMIT
+ raise Mastodon::DimensionsValidationError, "#{width}x#{height} images are not supported"
+ end
end
def appropriate_extension(attachment)
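Splitting the check gives GIFs a much smaller pixel budget (and their own error message) than static images. A standalone sketch of the same branching, with the limits assumed to mirror the module's MAX_MATRIX_LIMIT and GIF_MATRIX_LIMIT constants:

# Values assumed to match Attachmentable's constants.
MAX_MATRIX_LIMIT = 33_177_600 # roughly 7680x4320
GIF_MATRIX_LIMIT = 921_600    # roughly 1280x720

def dimension_error_for(width, height, content_type)
  return if width.nil? || height.nil?

  if content_type == 'image/gif' && width * height > GIF_MATRIX_LIMIT
    "#{width}x#{height} GIF files are not supported"
  elsif width * height > MAX_MATRIX_LIMIT
    "#{width}x#{height} images are not supported"
  end
end

dimension_error_for(1920, 1080, 'image/gif') # => "1920x1080 GIF files are not supported"
dimension_error_for(1920, 1080, 'image/png') # => nil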
diff --git a/app/models/concerns/ldap_authenticable.rb b/app/models/concerns/ldap_authenticable.rb
index dc5abcd5accefe..775df081764d96 100644
--- a/app/models/concerns/ldap_authenticable.rb
+++ b/app/models/concerns/ldap_authenticable.rb
@@ -6,7 +6,7 @@ module LdapAuthenticable
class_methods do
def authenticate_with_ldap(params = {})
ldap = Net::LDAP.new(ldap_options)
- filter = format(Devise.ldap_search_filter, uid: Devise.ldap_uid, mail: Devise.ldap_mail, email: params[:email])
+ filter = format(Devise.ldap_search_filter, uid: Devise.ldap_uid, mail: Devise.ldap_mail, email: Net::LDAP::Filter.escape(params[:email]))
if (user_info = ldap.bind_as(base: Devise.ldap_base, filter: filter, password: params[:password]))
ldap_get_user(user_info.first)
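The only functional change here is escaping the user-supplied email before it is interpolated into the LDAP search filter, closing an LDAP filter injection vector. A sketch of the difference, assuming a typical filter template (the real one comes from `Devise.ldap_search_filter`):

require 'net/ldap'

template = '(|(%{uid}=%{email})(%{mail}=%{email}))' # assumed example template

# Without escaping, crafted input rewrites the filter structure:
format(template, uid: 'uid', mail: 'mail', email: '*)(uid=admin')
# => "(|(uid=*)(uid=admin)(mail=*)(uid=admin))"

# With escaping, LDAP metacharacters are encoded and matched literally:
format(template, uid: 'uid', mail: 'mail', email: Net::LDAP::Filter.escape('*)(uid=admin'))
# => "(|(uid=\2a\29\28uid=admin)(mail=\2a\29\28uid=admin))"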
diff --git a/app/models/concerns/omniauthable.rb b/app/models/concerns/omniauthable.rb
index 7d54e9d6de0984..df6b207607f706 100644
--- a/app/models/concerns/omniauthable.rb
+++ b/app/models/concerns/omniauthable.rb
@@ -19,17 +19,18 @@ def email_present?
end
class_methods do
- def find_for_oauth(auth, signed_in_resource = nil)
+ def find_for_omniauth(auth, signed_in_resource = nil)
# EOLE-SSO Patch
auth.uid = (auth.uid[0][:uid] || auth.uid[0][:user]) if auth.uid.is_a? Hashie::Array
- identity = Identity.find_for_oauth(auth)
+ identity = Identity.find_for_omniauth(auth)
# If a signed_in_resource is provided it always overrides the existing user
# to prevent the identity being locked with accidentally created accounts.
# Note that this may leave zombie accounts (with no associated identity) which
# can be cleaned up at a later date.
user = signed_in_resource || identity.user
- user ||= create_for_oauth(auth)
+ user ||= reattach_for_auth(auth)
+ user ||= create_for_auth(auth)
if identity.user.nil?
identity.user = user
@@ -39,19 +40,35 @@ def find_for_oauth(auth, signed_in_resource = nil)
user
end
- def create_for_oauth(auth)
- # Check if the user exists with provided email. If no email was provided,
- # we assign a temporary email and ask the user to verify it on
- # the next step via Auth::SetupController.show
+ private
- strategy = Devise.omniauth_configs[auth.provider.to_sym].strategy
- assume_verified = strategy&.security&.assume_email_is_verified
- email_is_verified = auth.info.verified || auth.info.verified_email || auth.info.email_verified || assume_verified
- email = auth.info.verified_email || auth.info.email
+ def reattach_for_auth(auth)
+ # If allowed, check if a user exists with the provided email address,
+ # and return it if they do not have an associated identity with the
+ # current authentication provider.
+
+ # This can be used to provide a choice of alternative auth providers
+ # or provide a smooth, gradual transition between multiple auth providers,
+ # but this is discouraged because any insecure provider will put *all*
+ # local users at risk, regardless of which provider they registered with.
+
+ return unless ENV['ALLOW_UNSAFE_AUTH_PROVIDER_REATTACH'] == 'true'
- user = User.find_by(email: email) if email_is_verified
+ email, email_is_verified = email_from_auth(auth)
+ return unless email_is_verified
- return user unless user.nil?
+ user = User.find_by(email: email)
+ return if user.nil? || Identity.exists?(provider: auth.provider, user_id: user.id)
+
+ user
+ end
+
+ def create_for_auth(auth)
+ # Create a user for the given auth params. If no email was provided,
+ # we assign a temporary email and ask the user to verify it on
+ # the next step via Auth::SetupController.show
+
+ email, email_is_verified = email_from_auth(auth)
user = User.new(user_params_from_auth(email, auth))
@@ -68,7 +85,14 @@ def create_for_oauth(auth)
user
end
- private
+ def email_from_auth(auth)
+ strategy = Devise.omniauth_configs[auth.provider.to_sym].strategy
+ assume_verified = strategy&.security&.assume_email_is_verified
+ email_is_verified = auth.info.verified || auth.info.verified_email || auth.info.email_verified || assume_verified
+ email = auth.info.verified_email || auth.info.email
+
+ [email, email_is_verified]
+ end
def user_params_from_auth(email, auth)
{
diff --git a/app/models/custom_emoji.rb b/app/models/custom_emoji.rb
index 3048056591fe29..4fae32c4869630 100644
--- a/app/models/custom_emoji.rb
+++ b/app/models/custom_emoji.rb
@@ -37,7 +37,7 @@ class CustomEmoji < ApplicationRecord
belongs_to :category, class_name: 'CustomEmojiCategory', optional: true
has_one :local_counterpart, -> { where(domain: nil) }, class_name: 'CustomEmoji', primary_key: :shortcode, foreign_key: :shortcode
- has_attached_file :image, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set modify-date +set create-date' } }, validate_media_type: false
+ has_attached_file :image, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' } }, validate_media_type: false
before_validation :downcase_domain
diff --git a/app/models/form/account_batch.rb b/app/models/form/account_batch.rb
index 473622edf4de00..4665a586798398 100644
--- a/app/models/form/account_batch.rb
+++ b/app/models/form/account_batch.rb
@@ -17,8 +17,8 @@ def save
unfollow!
when 'remove_from_followers'
remove_from_followers!
- when 'block_domains'
- block_domains!
+ when 'remove_domains_from_followers'
+ remove_domains_from_followers!
when 'approve'
approve!
when 'reject'
@@ -35,9 +35,15 @@ def save
private
def follow!
+ error = nil
+
accounts.each do |target_account|
FollowService.new.call(current_account, target_account)
+ rescue Mastodon::NotPermittedError, ActiveRecord::RecordNotFound => e
+ error ||= e
end
+
+ raise error if error.present?
end
def unfollow!
@@ -50,10 +56,8 @@ def remove_from_followers!
RemoveFromFollowersService.new.call(current_account, account_ids)
end
- def block_domains!
- AfterAccountDomainBlockWorker.push_bulk(account_domains) do |domain|
- [current_account.id, domain]
- end
+ def remove_domains_from_followers!
+ RemoveDomainsFromFollowersService.new.call(current_account, account_domains)
end
def account_domains
@@ -119,7 +123,18 @@ def suspend_account(account)
account: current_account,
action: :suspend
)
+
Admin::SuspensionWorker.perform_async(account.id)
+
+ # Suspending a single account closes its associated reports, so close
+ # them here as well to keep mass-suspension consistent.
+ Report.where(target_account: account).unresolved.find_each do |report|
+ authorize(report, :update?)
+ log_action(:resolve, report)
+ report.resolve!(current_account)
+ rescue Mastodon::NotPermittedError
+ # This should not happen, but just in case, do not fail early
+ end
end
def approve_account(account)
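In `follow!` above, each iteration now rescues its own failure, remembers the first error, keeps iterating, and re-raises after the loop so the batch still surfaces the failure. The pattern in isolation (with a hypothetical `process` step):

def process_all(items)
  error = nil

  items.each do |item|
    process(item) # hypothetical per-item operation that may raise
  rescue StandardError => e
    error ||= e   # remember only the first failure, keep iterating
  end

  raise error if error # report the failure once the whole batch has run
end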
diff --git a/app/models/identity.rb b/app/models/identity.rb
index 8cc65aef413d13..a396d76234369a 100644
--- a/app/models/identity.rb
+++ b/app/models/identity.rb
@@ -12,11 +12,11 @@
#
class Identity < ApplicationRecord
- belongs_to :user, dependent: :destroy
+ belongs_to :user
validates :uid, presence: true, uniqueness: { scope: :provider }
validates :provider, presence: true
- def self.find_for_oauth(auth)
+ def self.find_for_omniauth(auth)
find_or_create_by(uid: auth.uid, provider: auth.provider)
end
end
diff --git a/app/models/media_attachment.rb b/app/models/media_attachment.rb
index 617a26f7a4c4fc..93d8e732b931e0 100644
--- a/app/models/media_attachment.rb
+++ b/app/models/media_attachment.rb
@@ -169,7 +169,7 @@ class MediaAttachment < ApplicationRecord
}.freeze
GLOBAL_CONVERT_OPTIONS = {
- all: '-quality 90 +profile "!icc,*" +set modify-date +set create-date',
+ all: '-quality 90 +profile "!icc,*" +set date:modify +set date:create +set date:timestamp',
}.freeze
belongs_to :account, inverse_of: :media_attachments, optional: true
diff --git a/app/models/preview_card.rb b/app/models/preview_card.rb
index 56ca62d5ecd039..1fe71146900f8c 100644
--- a/app/models/preview_card.rb
+++ b/app/models/preview_card.rb
@@ -50,7 +50,7 @@ class PreviewCard < ApplicationRecord
has_and_belongs_to_many :statuses
has_one :trend, class_name: 'PreviewCardTrend', inverse_of: :preview_card, dependent: :destroy
- has_attached_file :image, processors: [:thumbnail, :blurhash_transcoder], styles: ->(f) { image_styles(f) }, convert_options: { all: '-quality 90 +profile "!icc,*" +set modify-date +set create-date' }, validate_media_type: false
+ has_attached_file :image, processors: [:thumbnail, :blurhash_transcoder], styles: ->(f) { image_styles(f) }, convert_options: { all: '-quality 90 +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, validate_media_type: false
validates :url, presence: true, uniqueness: true
validates_attachment_content_type :image, content_type: IMAGE_MIME_TYPES
diff --git a/app/models/preview_card_provider.rb b/app/models/preview_card_provider.rb
index d61fe60208b44f..69ff4784e07d3c 100644
--- a/app/models/preview_card_provider.rb
+++ b/app/models/preview_card_provider.rb
@@ -25,7 +25,7 @@ class PreviewCardProvider < ApplicationRecord
validates :domain, presence: true, uniqueness: true, domain: true
- has_attached_file :icon, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set modify-date +set create-date' } }, validate_media_type: false
+ has_attached_file :icon, styles: { static: { format: 'png', convert_options: '-coalesce +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' } }, validate_media_type: false
validates_attachment :icon, content_type: { content_type: ICON_MIME_TYPES }, size: { less_than: LIMIT }
remotable_attachment :icon, LIMIT
diff --git a/app/models/relationship_filter.rb b/app/models/relationship_filter.rb
index 249fe3df8e1dfa..8e069c80a7e1fb 100644
--- a/app/models/relationship_filter.rb
+++ b/app/models/relationship_filter.rb
@@ -60,13 +60,13 @@ def scope_for(key, value)
def relationship_scope(value)
case value
when 'following'
- account.following.eager_load(:account_stat).reorder(nil)
+ account.following.includes(:account_stat).reorder(nil)
when 'followed_by'
- account.followers.eager_load(:account_stat).reorder(nil)
+ account.followers.includes(:account_stat).reorder(nil)
when 'mutual'
- account.followers.eager_load(:account_stat).reorder(nil).merge(Account.where(id: account.following))
+ account.followers.includes(:account_stat).reorder(nil).merge(Account.where(id: account.following))
when 'invited'
- Account.joins(user: :invite).merge(Invite.where(user: account.user)).eager_load(:account_stat).reorder(nil)
+ Account.joins(user: :invite).merge(Invite.where(user: account.user)).includes(:account_stat).reorder(nil)
else
raise Mastodon::InvalidParameterError, "Unknown relationship: #{value}"
end
@@ -112,7 +112,7 @@ def order_scope(value)
def activity_scope(value)
case value
when 'dormant'
- AccountStat.where(last_status_at: nil).or(AccountStat.where(AccountStat.arel_table[:last_status_at].lt(1.month.ago)))
+ Account.joins(:account_stat).where(account_stat: { last_status_at: [nil, ...1.month.ago] })
else
raise Mastodon::InvalidParameterError, "Unknown activity: #{value}"
end
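The rewritten `dormant` scope combines two features: an array value ORs its members together, and `...1.month.ago` is a beginless, end-exclusive range, i.e. "strictly before one month ago". Roughly the query this produces (table and column names assumed from the join above):

Account.joins(:account_stat)
       .where(account_stat: { last_status_at: [nil, ...1.month.ago] })
# SELECT accounts.* FROM accounts
#   INNER JOIN account_stats ON account_stats.account_id = accounts.id
#   WHERE (account_stats.last_status_at IS NULL
#          OR account_stats.last_status_at < <one month ago>)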
diff --git a/app/models/report.rb b/app/models/report.rb
index 525d22ad5decd3..3ae5c10dd0bd12 100644
--- a/app/models/report.rb
+++ b/app/models/report.rb
@@ -39,7 +39,10 @@ class Report < ApplicationRecord
scope :resolved, -> { where.not(action_taken_at: nil) }
scope :with_accounts, -> { includes([:account, :target_account, :action_taken_by_account, :assigned_account].index_with({ user: [:invite_request, :invite] })) }
- validates :comment, length: { maximum: 1_000 }
+ # A report is considered local if the reporter is local
+ delegate :local?, to: :account
+
+ validates :comment, length: { maximum: 1_000 }, if: :local?
validates :rule_ids, absence: true, unless: :violation?
validate :validate_rule_ids
@@ -50,10 +53,6 @@ class Report < ApplicationRecord
violation: 2_000,
}
- def local?
- false # Force uri_for to use uri attribute
- end
-
before_validation :set_uri, only: :create
after_create_commit :trigger_webhooks
diff --git a/app/models/site_upload.rb b/app/models/site_upload.rb
index 167131fdd9ef00..c2167070694e10 100644
--- a/app/models/site_upload.rb
+++ b/app/models/site_upload.rb
@@ -40,7 +40,7 @@ class SiteUpload < ApplicationRecord
mascot: {}.freeze,
}.freeze
- has_attached_file :file, styles: ->(file) { STYLES[file.instance.var.to_sym] }, convert_options: { all: '-coalesce +profile "!icc,*" +set modify-date +set create-date' }, processors: [:lazy_thumbnail, :blurhash_transcoder, :type_corrector]
+ has_attached_file :file, styles: ->(file) { STYLES[file.instance.var.to_sym] }, convert_options: { all: '-coalesce +profile "!icc,*" +set date:modify +set date:create +set date:timestamp' }, processors: [:lazy_thumbnail, :blurhash_transcoder, :type_corrector]
validates_attachment_content_type :file, content_type: /\Aimage\/.*\z/
validates :file, presence: true
diff --git a/app/models/status.rb b/app/models/status.rb
index 1f983ffce01ff2..5b67817968f2e7 100644
--- a/app/models/status.rb
+++ b/app/models/status.rb
@@ -355,13 +355,25 @@ def reload_stale_associations!(cached_items)
account_ids.uniq!
+ status_ids = cached_items.map { |item| item.reblog? ? item.reblog_of_id : item.id }.uniq
+
return if account_ids.empty?
accounts = Account.where(id: account_ids).includes(:account_stat, :user).index_by(&:id)
+ status_stats = StatusStat.where(status_id: status_ids).index_by(&:status_id)
+
cached_items.each do |item|
item.account = accounts[item.account_id]
item.reblog.account = accounts[item.reblog.account_id] if item.reblog?
+
+ if item.reblog?
+ status_stat = status_stats[item.reblog.id]
+ item.reblog.status_stat = status_stat if status_stat.present?
+ else
+ status_stat = status_stats[item.id]
+ item.status_stat = status_stat if status_stat.present?
+ end
end
end
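The added lines batch-load `StatusStat` rows for every cached status (and reblogged status) in a single query and assign them back by id, so later counter reads don't issue one query per status. The core idiom is `index_by` (`status_ids` below stands for the ids collected above):

# One query, then O(1) lookups keyed by status_id.
status_stats = StatusStat.where(status_id: status_ids).index_by(&:status_id)

status_stats[some_status_id]&.replies_count # no additional query per status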
diff --git a/app/models/tag.rb b/app/models/tag.rb
index 47a05d00a160e1..dcf8f9c78e34a7 100644
--- a/app/models/tag.rb
+++ b/app/models/tag.rb
@@ -33,7 +33,7 @@ class Tag < ApplicationRecord
HASTAG_LAST_SEQUENCE = '([[:word:]_]*[[:alpha:]][[:word:]_]*)'
HASHTAG_NAME_PAT = "#{HASHTAG_FIRST_SEQUENCE}|#{HASTAG_LAST_SEQUENCE}"
- HASHTAG_RE = /(?:^|[^\/\)\w])#(#{HASHTAG_NAME_PAT})/i
+ HASHTAG_RE = %r{(?<![=/)\w])#(#{HASHTAG_NAME_PAT})}i
diff --git a/app/models/webhook.rb b/app/models/webhook.rb
--- a/app/models/webhook.rb
+++ b/app/models/webhook.rb
scope :enabled, -> { where(enabled: true) }
validates :url, presence: true, url: true
@@ -27,6 +29,7 @@ class Webhook < ApplicationRecord
validates :events, presence: true
validate :validate_events
+ validate :validate_permissions
before_validation :strip_events
before_validation :generate_secret
@@ -43,12 +46,29 @@ def disable!
update!(enabled: false)
end
+ def required_permissions
+ events.map { |event| Webhook.permission_for_event(event) }
+ end
+
+ def self.permission_for_event(event)
+ case event
+ when 'account.approved', 'account.created', 'account.updated'
+ :manage_users
+ when 'report.created'
+ :manage_reports
+ end
+ end
+
private
def validate_events
errors.add(:events, :invalid) if events.any? { |e| !EVENTS.include?(e) }
end
+ def validate_permissions
+ errors.add(:events, :invalid_permissions) if defined?(@current_account) && required_permissions.any? { |permission| !@current_account.user_role.can?(permission) }
+ end
+
def strip_events
self.events = events.map { |str| str.strip.presence }.compact if events.present?
end
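`Webhook.permission_for_event` maps each event onto the admin permission that governs it, and `required_permissions` collects those for a webhook's selected events; the policy changes further down then require the acting role to hold every one of them. A usage sketch, assuming a `role` object responding to `can?`:

webhook = Webhook.new(events: ['account.created', 'report.created'])

webhook.required_permissions
# => [:manage_users, :manage_reports]

# Roughly what WebhookPolicy#update? and #destroy? now check:
role.can?(:manage_webhooks) &&
  webhook.required_permissions.all? { |permission| role.can?(permission) }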
diff --git a/app/policies/admin/status_policy.rb b/app/policies/admin/status_policy.rb
index ffaa30f13de60a..e9379c25eca903 100644
--- a/app/policies/admin/status_policy.rb
+++ b/app/policies/admin/status_policy.rb
@@ -12,7 +12,7 @@ def index?
end
def show?
- role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported?)
+ role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported? || viewable_through_normal_policy?)
end
def destroy?
@@ -26,4 +26,10 @@ def update?
def review?
role.can?(:manage_taxonomies)
end
+
+ private
+
+ def viewable_through_normal_policy?
+ StatusPolicy.new(current_account, record, @preloaded_relations).show?
+ end
end
diff --git a/app/policies/backup_policy.rb b/app/policies/backup_policy.rb
index 0ef89a8d0c8fb9..86b8efbe96fa7d 100644
--- a/app/policies/backup_policy.rb
+++ b/app/policies/backup_policy.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class BackupPolicy < ApplicationPolicy
- MIN_AGE = 1.week
+ MIN_AGE = 6.days
def create?
user_signed_in? && current_user.backups.where('created_at >= ?', MIN_AGE.ago).count.zero?
diff --git a/app/policies/webhook_policy.rb b/app/policies/webhook_policy.rb
index a2199a333fcf29..577e891b6698b0 100644
--- a/app/policies/webhook_policy.rb
+++ b/app/policies/webhook_policy.rb
@@ -14,7 +14,7 @@ def show?
end
def update?
- role.can?(:manage_webhooks)
+ role.can?(:manage_webhooks) && record.required_permissions.all? { |permission| role.can?(permission) }
end
def enable?
@@ -30,6 +30,6 @@ def rotate_secret?
end
def destroy?
- role.can?(:manage_webhooks)
+ role.can?(:manage_webhooks) && record.required_permissions.all? { |permission| role.can?(permission) }
end
end
diff --git a/app/serializers/rest/featured_tag_serializer.rb b/app/serializers/rest/featured_tag_serializer.rb
index c4b35ab03ab96f..c1ff4602aa83f6 100644
--- a/app/serializers/rest/featured_tag_serializer.rb
+++ b/app/serializers/rest/featured_tag_serializer.rb
@@ -10,7 +10,9 @@ def id
end
def url
- short_account_tag_url(object.account, object.tag)
+ # The path is hardcoded because we have to deal with both local and
+ # remote users, which use different routes
+ account_with_domain_url(object.account, "tagged/#{object.tag.to_param}")
end
def name
diff --git a/app/serializers/rest/preview_card_serializer.rb b/app/serializers/rest/preview_card_serializer.rb
index 66ff47d22ea50e..e6d204fec3c076 100644
--- a/app/serializers/rest/preview_card_serializer.rb
+++ b/app/serializers/rest/preview_card_serializer.rb
@@ -11,4 +11,8 @@ class REST::PreviewCardSerializer < ActiveModel::Serializer
def image
object.image? ? full_asset_url(object.image.url(:original)) : nil
end
+
+ def html
+ Sanitize.fragment(object.html, Sanitize::Config::MASTODON_OEMBED)
+ end
end
diff --git a/app/services/activitypub/fetch_featured_collection_service.rb b/app/services/activitypub/fetch_featured_collection_service.rb
index 1208820df224d6..03c5448e4267c1 100644
--- a/app/services/activitypub/fetch_featured_collection_service.rb
+++ b/app/services/activitypub/fetch_featured_collection_service.rb
@@ -23,9 +23,9 @@ def collection_items(collection)
case collection['type']
when 'Collection', 'CollectionPage'
- collection['items']
+ as_array(collection['items'])
when 'OrderedCollection', 'OrderedCollectionPage'
- collection['orderedItems']
+ as_array(collection['orderedItems'])
end
end
diff --git a/app/services/activitypub/fetch_remote_account_service.rb b/app/services/activitypub/fetch_remote_account_service.rb
index 567dd8a14abc04..7b083d889b21fb 100644
--- a/app/services/activitypub/fetch_remote_account_service.rb
+++ b/app/services/activitypub/fetch_remote_account_service.rb
@@ -2,7 +2,7 @@
class ActivityPub::FetchRemoteAccountService < ActivityPub::FetchRemoteActorService
# Does a WebFinger roundtrip on each call, unless `only_key` is true
- def call(uri, id: true, prefetched_body: nil, break_on_redirect: false, only_key: false, suppress_errors: true, request_id: nil)
+ def call(uri, prefetched_body: nil, break_on_redirect: false, only_key: false, suppress_errors: true, request_id: nil)
actor = super
return actor if actor.nil? || actor.is_a?(Account)
diff --git a/app/services/activitypub/fetch_remote_actor_service.rb b/app/services/activitypub/fetch_remote_actor_service.rb
index 8908d21e2ee051..0054c2af78d021 100644
--- a/app/services/activitypub/fetch_remote_actor_service.rb
+++ b/app/services/activitypub/fetch_remote_actor_service.rb
@@ -10,15 +10,15 @@ class Error < StandardError; end
SUPPORTED_TYPES = %w(Application Group Organization Person Service).freeze
# Does a WebFinger roundtrip on each call, unless `only_key` is true
- def call(uri, id: true, prefetched_body: nil, break_on_redirect: false, only_key: false, suppress_errors: true, request_id: nil)
+ def call(uri, prefetched_body: nil, break_on_redirect: false, only_key: false, suppress_errors: true, request_id: nil)
return if domain_not_allowed?(uri)
return ActivityPub::TagManager.instance.uri_to_actor(uri) if ActivityPub::TagManager.instance.local_uri?(uri)
@json = begin
if prefetched_body.nil?
- fetch_resource(uri, id)
+ fetch_resource(uri, true)
else
- body_to_json(prefetched_body, compare_id: id ? uri : nil)
+ body_to_json(prefetched_body, compare_id: uri)
end
rescue Oj::ParseError
raise Error, "Error parsing JSON-LD document #{uri}"
diff --git a/app/services/activitypub/fetch_remote_key_service.rb b/app/services/activitypub/fetch_remote_key_service.rb
index 8eb97c1e66d188..e96b5ad3bb012c 100644
--- a/app/services/activitypub/fetch_remote_key_service.rb
+++ b/app/services/activitypub/fetch_remote_key_service.rb
@@ -6,23 +6,10 @@ class ActivityPub::FetchRemoteKeyService < BaseService
class Error < StandardError; end
# Returns actor that owns the key
- def call(uri, id: true, prefetched_body: nil, suppress_errors: true)
+ def call(uri, suppress_errors: true)
raise Error, 'No key URI given' if uri.blank?
- if prefetched_body.nil?
- if id
- @json = fetch_resource_without_id_validation(uri)
- if actor_type?
- @json = fetch_resource(@json['id'], true)
- elsif uri != @json['id']
- raise Error, "Fetched URI #{uri} has wrong id #{@json['id']}"
- end
- else
- @json = fetch_resource(uri, id)
- end
- else
- @json = body_to_json(prefetched_body, compare_id: id ? uri : nil)
- end
+ @json = fetch_resource(uri, false)
raise Error, "Unable to fetch key JSON at #{uri}" if @json.nil?
raise Error, "Unsupported JSON-LD context for document #{uri}" unless supported_context?(@json)
diff --git a/app/services/activitypub/fetch_remote_poll_service.rb b/app/services/activitypub/fetch_remote_poll_service.rb
index 1829e791ce6eba..41b9b2f0c9be97 100644
--- a/app/services/activitypub/fetch_remote_poll_service.rb
+++ b/app/services/activitypub/fetch_remote_poll_service.rb
@@ -8,6 +8,6 @@ def call(poll, on_behalf_of = nil)
return unless supported_context?(json)
- ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json)
+ ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json, json)
end
end
diff --git a/app/services/activitypub/fetch_remote_status_service.rb b/app/services/activitypub/fetch_remote_status_service.rb
index 936737bf6606f4..fa0884fdbae68d 100644
--- a/app/services/activitypub/fetch_remote_status_service.rb
+++ b/app/services/activitypub/fetch_remote_status_service.rb
@@ -7,13 +7,13 @@ class ActivityPub::FetchRemoteStatusService < BaseService
DISCOVERIES_PER_REQUEST = 1000
# Should be called when uri has already been checked for locality
- def call(uri, id: true, prefetched_body: nil, on_behalf_of: nil, expected_actor_uri: nil, request_id: nil)
+ def call(uri, prefetched_body: nil, on_behalf_of: nil, expected_actor_uri: nil, request_id: nil)
@request_id = request_id || "#{Time.now.utc.to_i}-status-#{uri}"
@json = begin
if prefetched_body.nil?
- fetch_resource(uri, id, on_behalf_of)
+ fetch_resource(uri, true, on_behalf_of)
else
- body_to_json(prefetched_body, compare_id: id ? uri : nil)
+ body_to_json(prefetched_body, compare_id: uri)
end
end
@@ -63,7 +63,7 @@ def trustworthy_attribution?(uri, attributed_to)
def account_from_uri(uri)
actor = ActivityPub::TagManager.instance.uri_to_resource(uri, Account)
- actor = ActivityPub::FetchRemoteAccountService.new.call(uri, id: true, request_id: @request_id) if actor.nil? || actor.possibly_stale?
+ actor = ActivityPub::FetchRemoteAccountService.new.call(uri, request_id: @request_id) if actor.nil? || actor.possibly_stale?
actor
end
diff --git a/app/services/activitypub/fetch_replies_service.rb b/app/services/activitypub/fetch_replies_service.rb
index 18a27e851d7bb2..d3f39f3bc89126 100644
--- a/app/services/activitypub/fetch_replies_service.rb
+++ b/app/services/activitypub/fetch_replies_service.rb
@@ -26,9 +26,9 @@ def collection_items(collection_or_uri)
case collection['type']
when 'Collection', 'CollectionPage'
- collection['items']
+ as_array(collection['items'])
when 'OrderedCollection', 'OrderedCollectionPage'
- collection['orderedItems']
+ as_array(collection['orderedItems'])
end
end
@@ -36,7 +36,21 @@ def fetch_collection(collection_or_uri)
return collection_or_uri if collection_or_uri.is_a?(Hash)
return unless @allow_synchronous_requests
return if invalid_origin?(collection_or_uri)
- fetch_resource_without_id_validation(collection_or_uri, nil, true)
+
+ # NOTE: For backward compatibility reasons, Mastodon signs outgoing
+ # queries incorrectly by default.
+ #
+ # While this is relevant for all URLs with query strings, this is
+ # the only code path where this happens in practice.
+ #
+ # Therefore, retry with correct signatures if this fails.
+ begin
+ fetch_resource_without_id_validation(collection_or_uri, nil, true)
+ rescue Mastodon::UnexpectedResponseError => e
+ raise unless e.response && e.response.code == 401 && Addressable::URI.parse(collection_or_uri).query.present?
+
+ fetch_resource_without_id_validation(collection_or_uri, nil, true, request_options: { with_query_string: true })
+ end
end
def filtered_replies
diff --git a/app/services/activitypub/process_account_service.rb b/app/services/activitypub/process_account_service.rb
index 2da9096c734d99..fef3781e1aaca1 100644
--- a/app/services/activitypub/process_account_service.rb
+++ b/app/services/activitypub/process_account_service.rb
@@ -76,6 +76,9 @@ def create_account
@account.suspended_at = domain_block.created_at if auto_suspend?
@account.suspension_origin = :local if auto_suspend?
@account.silenced_at = domain_block.created_at if auto_silence?
+
+ set_immediate_protocol_attributes!
+
@account.save
end
@@ -271,7 +274,7 @@ def collection_info(type)
def moved_account
account = ActivityPub::TagManager.instance.uri_to_resource(@json['movedTo'], Account)
- account ||= ActivityPub::FetchRemoteAccountService.new.call(@json['movedTo'], id: true, break_on_redirect: true, request_id: @options[:request_id])
+ account ||= ActivityPub::FetchRemoteAccountService.new.call(@json['movedTo'], break_on_redirect: true, request_id: @options[:request_id])
account
end
diff --git a/app/services/activitypub/process_status_update_service.rb b/app/services/activitypub/process_status_update_service.rb
index 1dc393e28e9a65..ecb058bf787125 100644
--- a/app/services/activitypub/process_status_update_service.rb
+++ b/app/services/activitypub/process_status_update_service.rb
@@ -5,10 +5,11 @@ class ActivityPub::ProcessStatusUpdateService < BaseService
include Redisable
include Lockable
- def call(status, json, request_id: nil)
+ def call(status, activity_json, object_json, request_id: nil)
raise ArgumentError, 'Status has unsaved changes' if status.changed?
- @json = json
+ @activity_json = activity_json
+ @json = object_json
@status_parser = ActivityPub::Parser::StatusParser.new(@json)
@uri = @status_parser.uri
@status = status
@@ -308,6 +309,6 @@ def forward_activity!
end
def forwarder
- @forwarder ||= ActivityPub::Forwarder.new(@account, @json, @status)
+ @forwarder ||= ActivityPub::Forwarder.new(@account, @activity_json, @status)
end
end
diff --git a/app/services/activitypub/synchronize_followers_service.rb b/app/services/activitypub/synchronize_followers_service.rb
index 93cd60253353cc..384fcc525f0485 100644
--- a/app/services/activitypub/synchronize_followers_service.rb
+++ b/app/services/activitypub/synchronize_followers_service.rb
@@ -59,9 +59,9 @@ def collection_items(collection_or_uri)
case collection['type']
when 'Collection', 'CollectionPage'
- collection['items']
+ as_array(collection['items'])
when 'OrderedCollection', 'OrderedCollectionPage'
- collection['orderedItems']
+ as_array(collection['orderedItems'])
end
end
diff --git a/app/services/fetch_oembed_service.rb b/app/services/fetch_oembed_service.rb
index 9851ac09826b3e..4c42c8da2097f0 100644
--- a/app/services/fetch_oembed_service.rb
+++ b/app/services/fetch_oembed_service.rb
@@ -100,7 +100,7 @@ def parse_for_format(body)
end
def validate(oembed)
- oembed if oembed[:version].to_s == '1.0' && oembed[:type].present?
+ oembed if oembed.present? && oembed[:version].to_s == '1.0' && oembed[:type].present?
end
def html
diff --git a/app/services/fetch_resource_service.rb b/app/services/fetch_resource_service.rb
index 4470fca010392b..01b602124b6705 100644
--- a/app/services/fetch_resource_service.rb
+++ b/app/services/fetch_resource_service.rb
@@ -43,11 +43,19 @@ def process_response(response, terminal = false)
@response_code = response.code
return nil if response.code != 200
- if ['application/activity+json', 'application/ld+json'].include?(response.mime_type)
+ if valid_activitypub_content_type?(response)
body = response.body_with_limit
json = body_to_json(body)
- [json['id'], { prefetched_body: body, id: true }] if supported_context?(json) && (equals_or_includes_any?(json['type'], ActivityPub::FetchRemoteActorService::SUPPORTED_TYPES) || expected_type?(json))
+ return unless supported_context?(json) && (equals_or_includes_any?(json['type'], ActivityPub::FetchRemoteActorService::SUPPORTED_TYPES) || expected_type?(json))
+
+ if json['id'] != @url
+ return if terminal
+
+ return process(json['id'], terminal: true)
+ end
+
+ [@url, { prefetched_body: body }]
elsif !terminal
link_header = response['Link'] && parse_link_header(response)
diff --git a/app/services/follow_migration_service.rb b/app/services/follow_migration_service.rb
new file mode 100644
index 00000000000000..cfe9093cbe5d92
--- /dev/null
+++ b/app/services/follow_migration_service.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+class FollowMigrationService < FollowService
+ # Follow an account with the same settings as another account, and unfollow the old account once the request is sent
+ # @param [Account] source_account From which to follow
+ # @param [Account] target_account Account to follow
+ # @param [Account] old_target_account Account to unfollow once the follow request has been sent to the new one
+ # @option [Boolean] bypass_locked Whether to immediately follow the new account even if it is locked
+ def call(source_account, target_account, old_target_account, bypass_locked: false)
+ @old_target_account = old_target_account
+
+ follow = source_account.active_relationships.find_by(target_account: old_target_account)
+ reblogs = follow&.show_reblogs?
+ notify = follow&.notify?
+ languages = follow&.languages
+
+ super(source_account, target_account, reblogs: reblogs, notify: notify, languages: languages, bypass_locked: bypass_locked, bypass_limit: true)
+ end
+
+ private
+
+ def request_follow!
+ follow_request = @source_account.request_follow!(@target_account, **follow_options.merge(rate_limit: @options[:with_rate_limit], bypass_limit: @options[:bypass_limit]))
+
+ if @target_account.local?
+ LocalNotificationWorker.perform_async(@target_account.id, follow_request.id, follow_request.class.name, 'follow_request')
+ UnfollowService.new.call(@source_account, @old_target_account, skip_unmerge: true)
+ elsif @target_account.activitypub?
+ ActivityPub::MigratedFollowDeliveryWorker.perform_async(build_json(follow_request), @source_account.id, @target_account.inbox_url, @old_target_account.id)
+ end
+
+ follow_request
+ end
+
+ def direct_follow!
+ follow = super
+ UnfollowService.new.call(@source_account, @old_target_account, skip_unmerge: true)
+ follow
+ end
+end
diff --git a/app/services/follow_service.rb b/app/services/follow_service.rb
index feea40e3c0a945..1aa0241fe62325 100644
--- a/app/services/follow_service.rb
+++ b/app/services/follow_service.rb
@@ -71,7 +71,7 @@ def request_follow!
if @target_account.local?
LocalNotificationWorker.perform_async(@target_account.id, follow_request.id, follow_request.class.name, 'follow_request')
elsif @target_account.activitypub?
- ActivityPub::DeliveryWorker.perform_async(build_json(follow_request), @source_account.id, @target_account.inbox_url)
+ ActivityPub::DeliveryWorker.perform_async(build_json(follow_request), @source_account.id, @target_account.inbox_url, { 'bypass_availability' => true })
end
follow_request
diff --git a/app/services/keys/query_service.rb b/app/services/keys/query_service.rb
index 404854c9fce9b0..8d727b094357c6 100644
--- a/app/services/keys/query_service.rb
+++ b/app/services/keys/query_service.rb
@@ -69,7 +69,7 @@ def query_remote_devices!
return if json['items'].blank?
- @devices = json['items'].map do |device|
+ @devices = as_array(json['items']).map do |device|
Device.new(device_id: device['id'], name: device['name'], identity_key: device.dig('identityKey', 'publicKeyBase64'), fingerprint_key: device.dig('fingerprintKey', 'publicKeyBase64'), claim_url: device['claim'])
end
rescue HTTP::Error, OpenSSL::SSL::SSLError, Mastodon::Error => e
diff --git a/app/services/reblog_service.rb b/app/services/reblog_service.rb
index 7d2981709b22e0..3f0594f392252a 100644
--- a/app/services/reblog_service.rb
+++ b/app/services/reblog_service.rb
@@ -45,11 +45,7 @@ def call(account, reblogged_status, options = {})
def create_notification(reblog)
reblogged_status = reblog.reblog
- if reblogged_status.account.local?
- LocalNotificationWorker.perform_async(reblogged_status.account_id, reblog.id, reblog.class.name, 'reblog')
- elsif reblogged_status.account.activitypub? && !reblogged_status.account.following?(reblog.account)
- ActivityPub::DeliveryWorker.perform_async(build_json(reblog), reblog.account_id, reblogged_status.account.inbox_url)
- end
+ LocalNotificationWorker.perform_async(reblogged_status.account_id, reblog.id, reblog.class.name, 'reblog') if reblogged_status.account.local?
end
def bump_potential_friendship(account, reblog)
diff --git a/app/services/remove_domains_from_followers_service.rb b/app/services/remove_domains_from_followers_service.rb
new file mode 100644
index 00000000000000..d76763409d3b0d
--- /dev/null
+++ b/app/services/remove_domains_from_followers_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+class RemoveDomainsFromFollowersService < BaseService
+ include Payloadable
+
+ def call(source_account, target_domains)
+ source_account.passive_relationships.where(account_id: Account.where(domain: target_domains)).find_each do |follow|
+ follow.destroy
+
+ create_notification(follow) if source_account.local? && !follow.account.local? && follow.account.activitypub?
+ end
+ end
+
+ private
+
+ def create_notification(follow)
+ ActivityPub::DeliveryWorker.perform_async(build_json(follow), follow.target_account_id, follow.account.inbox_url)
+ end
+
+ def build_json(follow)
+ Oj.dump(serialize_payload(follow, ActivityPub::RejectFollowSerializer))
+ end
+end
diff --git a/app/services/remove_status_service.rb b/app/services/remove_status_service.rb
index 45cfb75f47eead..c84f90726bc7e3 100644
--- a/app/services/remove_status_service.rb
+++ b/app/services/remove_status_service.rb
@@ -12,6 +12,7 @@ class RemoveStatusService < BaseService
# @option [Boolean] :immediate
# @option [Boolean] :preserve
# @option [Boolean] :original_removed
+ # @option [Boolean] :skip_streaming
def call(status, **options)
@payload = Oj.dump(event: :delete, payload: status.id.to_s)
@status = status
@@ -52,6 +53,9 @@ def call(status, **options)
private
+ # None of the following FeedManager calls result in redis publishes for
+ # streaming, as the `:update` option is false
+
def remove_from_self
FeedManager.instance.unpush_from_home(@account, @status)
end
@@ -75,6 +79,8 @@ def remove_from_mentions
# followers. Here we send a delete to actively mentioned accounts
# that may not follow the account
+ return if skip_streaming?
+
@status.active_mentions.find_each do |mention|
redis.publish("timeline:#{mention.account_id}", @payload)
end
@@ -103,7 +109,7 @@ def remove_reblogs
# without us being able to do all the fancy stuff
@status.reblogs.rewhere(deleted_at: [nil, @status.deleted_at]).includes(:account).reorder(nil).find_each do |reblog|
- RemoveStatusService.new.call(reblog, original_removed: true)
+ RemoveStatusService.new.call(reblog, original_removed: true, skip_streaming: skip_streaming?)
end
end
@@ -114,6 +120,8 @@ def remove_from_hashtags
return unless @status.public_visibility?
+ return if skip_streaming?
+
@status.tags.map(&:name).each do |hashtag|
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", @payload)
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", @payload) if @status.local?
@@ -123,6 +131,8 @@ def remove_from_hashtags
def remove_from_public
return unless @status.public_visibility?
+ return if skip_streaming?
+
redis.publish('timeline:public', @payload)
redis.publish(@status.local? ? 'timeline:public:local' : 'timeline:public:remote', @payload)
end
@@ -130,6 +140,8 @@ def remove_from_public
def remove_from_media
return unless @status.public_visibility?
+ return if skip_streaming?
+
redis.publish('timeline:public:media', @payload)
redis.publish(@status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', @payload)
end
@@ -143,4 +155,8 @@ def remove_media
def permanently?
@options[:immediate] || !(@options[:preserve] || @status.reported?)
end
+
+ def skip_streaming?
+ !!@options[:skip_streaming]
+ end
end
diff --git a/app/services/resolve_url_service.rb b/app/services/resolve_url_service.rb
index d8e795f3b042ab..d6e528654fda59 100644
--- a/app/services/resolve_url_service.rb
+++ b/app/services/resolve_url_service.rb
@@ -89,13 +89,28 @@ def local_url?
def process_local_url
recognized_params = Rails.application.routes.recognize_path(@url)
- return unless recognized_params[:action] == 'show'
+ case recognized_params[:controller]
+ when 'statuses'
+ return unless recognized_params[:action] == 'show'
- if recognized_params[:controller] == 'statuses'
status = Status.find_by(id: recognized_params[:id])
check_local_status(status)
- elsif recognized_params[:controller] == 'accounts'
+ when 'accounts'
+ return unless recognized_params[:action] == 'show'
+
Account.find_local(recognized_params[:username])
+ when 'home'
+ return unless recognized_params[:action] == 'index' && recognized_params[:username_with_domain].present?
+
+ if recognized_params[:any]&.match?(/\A[0-9]+\Z/)
+ status = Status.find_by(id: recognized_params[:any])
+ check_local_status(status)
+ elsif recognized_params[:any].blank?
+ username, domain = recognized_params[:username_with_domain].gsub(/\A@/, '').split('@')
+ return unless username.present? && domain.present?
+
+ Account.find_remote(username, domain)
+ end
end
end
diff --git a/app/services/translate_status_service.rb b/app/services/translate_status_service.rb
index 539a0d9db5fd93..6e6ed87b0a31da 100644
--- a/app/services/translate_status_service.rb
+++ b/app/services/translate_status_service.rb
@@ -12,7 +12,9 @@ def call(status, target_language)
@content = status_content_format(@status)
@target_language = target_language
- Rails.cache.fetch("translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) { translation_backend.translate(@content, @status.language, @target_language) }
+ Rails.cache.fetch("translations:v2/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) do
+ translation_backend.translate(@content, @status.language, @target_language)
+ end
end
private
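Renaming the cache namespace from `translations/` to `translations:v2/` invalidates every previously cached translation without touching the cache store directly: old keys are simply never read again and age out via their TTL. The general `fetch` idiom, with the TTL and backend call as placeholders:

# Returns the cached value when the key exists; otherwise runs the block,
# stores its result for the given TTL, and returns it. Bumping the key
# prefix (v2, v3, ...) orphans all entries written under the old prefix.
Rails.cache.fetch("translations:v2/#{source_lang}/#{target_lang}/#{content_hash}", expires_in: 1.day) do
  translation_backend.translate(content, source_lang, target_lang)
end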
diff --git a/app/validators/vote_validator.rb b/app/validators/vote_validator.rb
index b1692562d41598..4316c59ef09bb2 100644
--- a/app/validators/vote_validator.rb
+++ b/app/validators/vote_validator.rb
@@ -3,8 +3,8 @@
class VoteValidator < ActiveModel::Validator
def validate(vote)
vote.errors.add(:base, I18n.t('polls.errors.expired')) if vote.poll.expired?
-
vote.errors.add(:base, I18n.t('polls.errors.invalid_choice')) if invalid_choice?(vote)
+ vote.errors.add(:base, I18n.t('polls.errors.self_vote')) if self_vote?(vote)
if vote.poll.multiple? && vote.poll.votes.where(account: vote.account, choice: vote.choice).exists?
vote.errors.add(:base, I18n.t('polls.errors.already_voted'))
@@ -18,4 +18,8 @@ def validate(vote)
def invalid_choice?(vote)
vote.choice.negative? || vote.choice >= vote.poll.options.size
end
+
+ def self_vote?(vote)
+ vote.account_id == vote.poll.account_id
+ end
end
diff --git a/app/views/admin/dashboard/index.html.haml b/app/views/admin/dashboard/index.html.haml
index 8354f0b9f5a18e..425472abdecaf0 100644
--- a/app/views/admin/dashboard/index.html.haml
+++ b/app/views/admin/dashboard/index.html.haml
@@ -12,7 +12,7 @@
- unless @system_checks.empty?
.flash-message-stack
- @system_checks.each do |message|
- .flash-message.warning
+ .flash-message{ class: message.critical ? 'alert' : 'warning' }
= t("admin.system_checks.#{message.key}.message_html", value: message.value ? content_tag(:strong, message.value) : nil)
- if message.action
= link_to t("admin.system_checks.#{message.key}.action"), message.action
diff --git a/app/views/admin/reports/actions/preview.html.haml b/app/views/admin/reports/actions/preview.html.haml
index 58745319c804f2..70edb48d80af94 100644
--- a/app/views/admin/reports/actions/preview.html.haml
+++ b/app/views/admin/reports/actions/preview.html.haml
@@ -54,15 +54,15 @@
.strike-card__statuses-list__item
- if (status = status_map[status_id.to_i])
.one-liner
- = link_to short_account_status_url(@report.target_account, status_id), class: 'emojify' do
- = one_line_preview(status)
+ .emojify= one_line_preview(status)
- - status.ordered_media_attachments.each do |media_attachment|
- %abbr{ title: media_attachment.description }
- = fa_icon 'link'
- = media_attachment.file_file_name
+ - status.ordered_media_attachments.each do |media_attachment|
+ %abbr{ title: media_attachment.description }
+ = fa_icon 'link'
+ = media_attachment.file_file_name
.strike-card__statuses-list__item__meta
- %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
+ = link_to ActivityPub::TagManager.instance.url_for(status), target: '_blank' do
+ %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
- unless status.application.nil?
·
= status.application.name
diff --git a/app/views/admin/statuses/show.html.haml b/app/views/admin/statuses/show.html.haml
index 62b49de8c8b155..4631e97f161397 100644
--- a/app/views/admin/statuses/show.html.haml
+++ b/app/views/admin/statuses/show.html.haml
@@ -34,7 +34,7 @@
%td
- if @status.trend.allowed?
%abbr{ title: t('admin.trends.tags.current_score', score: @status.trend.score) }= t('admin.trends.tags.trending_rank', rank: @status.trend.rank)
- - elsif @status.trend.requires_review?
+ - elsif @status.requires_review?
= t('admin.trends.pending_review')
- else
= t('admin.trends.not_allowed_to_trend')
diff --git a/app/views/admin/trends/links/preview_card_providers/index.html.haml b/app/views/admin/trends/links/preview_card_providers/index.html.haml
index c3648c35e97bd3..025270c128fbc9 100644
--- a/app/views/admin/trends/links/preview_card_providers/index.html.haml
+++ b/app/views/admin/trends/links/preview_card_providers/index.html.haml
@@ -29,7 +29,7 @@
- Trends::PreviewCardProviderFilter::KEYS.each do |key|
= hidden_field_tag key, params[key] if params[key].present?
- .batch-table.optional
+ .batch-table
.batch-table__toolbar
%label.batch-table__toolbar__select.batch-checkbox-all
= check_box_tag :batch_checkbox_all, nil, false
diff --git a/app/views/admin/webhooks/_form.html.haml b/app/views/admin/webhooks/_form.html.haml
index c1e8f8979bdd43..6c5ff03dd510e9 100644
--- a/app/views/admin/webhooks/_form.html.haml
+++ b/app/views/admin/webhooks/_form.html.haml
@@ -5,7 +5,7 @@
= f.input :url, wrapper: :with_block_label, input_html: { placeholder: 'https://' }
.fields-group
- = f.input :events, collection: Webhook::EVENTS, wrapper: :with_block_label, include_blank: false, as: :check_boxes, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li'
+ = f.input :events, collection: Webhook::EVENTS, wrapper: :with_block_label, include_blank: false, as: :check_boxes, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li', disabled: Webhook::EVENTS.filter { |event| !current_user.role.can?(Webhook.permission_for_event(event)) }
.actions
= f.button :button, @webhook.new_record? ? t('admin.webhooks.add_new') : t('generic.save_changes'), type: :submit
diff --git a/app/views/application/_sidebar.html.haml b/app/views/application/_sidebar.html.haml
index 6d18668b08f262..9d0efa7e10b7e1 100644
--- a/app/views/application/_sidebar.html.haml
+++ b/app/views/application/_sidebar.html.haml
@@ -3,7 +3,7 @@
= image_tag @instance_presenter.thumbnail&.file&.url(:'@1x') || asset_pack_path('media/images/preview.png'), alt: @instance_presenter.title
.hero-widget__text
- %p= @instance_presenter.description.html_safe.presence || t('about.about_mastodon_html')
+ %p= @instance_presenter.description.presence || t('about.about_mastodon_html')
- if Setting.trends && !(user_signed_in? && !current_user.setting_trends)
- trends = Trends.tags.query.allowed.limit(3)
diff --git a/app/views/disputes/strikes/show.html.haml b/app/views/disputes/strikes/show.html.haml
index 7797348dd72756..ce52e470d9d070 100644
--- a/app/views/disputes/strikes/show.html.haml
+++ b/app/views/disputes/strikes/show.html.haml
@@ -50,15 +50,15 @@
.strike-card__statuses-list__item
- if (status = status_map[status_id.to_i])
.one-liner
- = link_to short_account_status_url(@strike.target_account, status_id), class: 'emojify' do
- = one_line_preview(status)
+ .emojify= one_line_preview(status)
- - status.ordered_media_attachments.each do |media_attachment|
- %abbr{ title: media_attachment.description }
- = fa_icon 'link'
- = media_attachment.file_file_name
+ - status.ordered_media_attachments.each do |media_attachment|
+ %abbr{ title: media_attachment.description }
+ = fa_icon 'link'
+ = media_attachment.file_file_name
.strike-card__statuses-list__item__meta
- %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
+ = link_to ActivityPub::TagManager.instance.url_for(status), target: '_blank' do
+ %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
- unless status.application.nil?
·
= status.application.name
diff --git a/app/views/oauth/authorized_applications/index.html.haml b/app/views/oauth/authorized_applications/index.html.haml
index 0280d8aef84a88..55d8524dbe91e8 100644
--- a/app/views/oauth/authorized_applications/index.html.haml
+++ b/app/views/oauth/authorized_applications/index.html.haml
@@ -18,8 +18,8 @@
.announcements-list__item__action-bar
.announcements-list__item__meta
- - if application.most_recently_used_access_token
- = t('doorkeeper.authorized_applications.index.last_used_at', date: l(application.most_recently_used_access_token.last_used_at.to_date))
+ - if @last_used_at_by_app[application.id]
+ = t('doorkeeper.authorized_applications.index.last_used_at', date: l(@last_used_at_by_app[application.id].to_date))
- else
= t('doorkeeper.authorized_applications.index.never_used')
diff --git a/app/views/relationships/show.html.haml b/app/views/relationships/show.html.haml
index 2899cd5140feda..f08e9c1df8efd5 100644
--- a/app/views/relationships/show.html.haml
+++ b/app/views/relationships/show.html.haml
@@ -48,7 +48,7 @@
= f.button safe_join([fa_icon('trash'), t('relationships.remove_selected_followers')]), name: :remove_from_followers, class: 'table-action-link', type: :submit, data: { confirm: t('relationships.confirm_remove_selected_followers') } unless following_relationship?
- = f.button safe_join([fa_icon('trash'), t('relationships.remove_selected_domains')]), name: :block_domains, class: 'table-action-link', type: :submit, data: { confirm: t('admin.reports.are_you_sure') } if followed_by_relationship?
+ = f.button safe_join([fa_icon('trash'), t('relationships.remove_selected_domains')]), name: :remove_domains_from_followers, class: 'table-action-link', type: :submit, data: { confirm: t('admin.reports.are_you_sure') } if followed_by_relationship?
.batch-table__body
- if @accounts.empty?
= nothing_here 'nothing-here--under-tabs'
diff --git a/app/views/settings/exports/show.html.haml b/app/views/settings/exports/show.html.haml
index c49613fdc0c61f..d7b59af270a43b 100644
--- a/app/views/settings/exports/show.html.haml
+++ b/app/views/settings/exports/show.html.haml
@@ -64,6 +64,6 @@
%td= l backup.created_at
- if backup.processed?
%td= number_to_human_size backup.dump_file_size
- %td= table_link_to 'download', t('exports.archive_takeout.download'), backup.dump.url
+ %td= table_link_to 'download', t('exports.archive_takeout.download'), download_backup_url(backup)
- else
%td{ colspan: 2 }= t('exports.archive_takeout.in_progress')
diff --git a/app/views/shared/_og.html.haml b/app/views/shared/_og.html.haml
index 2941b566e05ae5..a5d99ae33ac6f8 100644
--- a/app/views/shared/_og.html.haml
+++ b/app/views/shared/_og.html.haml
@@ -1,5 +1,5 @@
- thumbnail = @instance_presenter.thumbnail
-- description ||= strip_tags(@instance_presenter.description.presence || t('about.about_mastodon_html'))
+- description ||= @instance_presenter.description.presence || strip_tags(t('about.about_mastodon_html'))
%meta{ name: 'description', content: description }/
diff --git a/app/views/user_mailer/backup_ready.html.haml b/app/views/user_mailer/backup_ready.html.haml
index 85140b08be3b4e..465ead2c8bac23 100644
--- a/app/views/user_mailer/backup_ready.html.haml
+++ b/app/views/user_mailer/backup_ready.html.haml
@@ -55,5 +55,5 @@
%tbody
%tr
%td.button-primary
- = link_to full_asset_url(@backup.dump.url) do
+ = link_to download_backup_url(@backup) do
%span= t 'exports.archive_takeout.download'
diff --git a/app/views/user_mailer/backup_ready.text.erb b/app/views/user_mailer/backup_ready.text.erb
index eb89e7d743fba4..8ebbaae85a5b82 100644
--- a/app/views/user_mailer/backup_ready.text.erb
+++ b/app/views/user_mailer/backup_ready.text.erb
@@ -4,4 +4,4 @@
<%= t 'user_mailer.backup_ready.explanation' %>
-=> <%= full_asset_url(@backup.dump.url) %>
+=> <%= download_backup_url(@backup) %>
diff --git a/app/workers/account_deletion_worker.rb b/app/workers/account_deletion_worker.rb
index fdf013e01043b1..7b8a31f8c6c52f 100644
--- a/app/workers/account_deletion_worker.rb
+++ b/app/workers/account_deletion_worker.rb
@@ -3,7 +3,7 @@
class AccountDeletionWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.week.to_i
def perform(account_id, options = {})
reserve_username = options.with_indifferent_access.fetch(:reserve_username, true)
diff --git a/app/workers/activitypub/delivery_worker.rb b/app/workers/activitypub/delivery_worker.rb
index d9153132b3102b..376c237a98493a 100644
--- a/app/workers/activitypub/delivery_worker.rb
+++ b/app/workers/activitypub/delivery_worker.rb
@@ -10,12 +10,23 @@ class ActivityPub::DeliveryWorker
sidekiq_options queue: 'push', retry: 16, dead: false
+ # Unfortunately, we cannot control Sidekiq's jitter, so add our own
+ sidekiq_retry_in do |count|
+ # This is Sidekiq's default delay
+ delay = (count**4) + 15
+ # Our custom jitter, which is added on top of Sidekiq's built-in one.
+ # Sidekiq's built-in jitter is `rand(10) * (count + 1)`
+ jitter = rand(0.5 * (count**4))
+ delay + jitter
+ end
+
HEADERS = { 'Content-Type' => 'application/activity+json' }.freeze
def perform(json, source_account_id, inbox_url, options = {})
- return unless DeliveryFailureTracker.available?(inbox_url)
-
@options = options.with_indifferent_access
+
+ return unless @options[:bypass_availability] || DeliveryFailureTracker.available?(inbox_url)
+
@json = json
@source_account = Account.find(source_account_id)
@inbox_url = inbox_url
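The `sidekiq_retry_in` block keeps Sidekiq's default backoff (`count**4 + 15`) but adds a random component of up to half the polynomial term, spreading retries to a struggling inbox over a wider window. A quick feel for the resulting delays (ignoring Sidekiq's own small built-in jitter):

def retry_delay(count)
  delay  = (count**4) + 15        # Sidekiq's default backoff curve
  jitter = rand(0.5 * (count**4)) # extra spread added by the worker
  delay + jitter
end

retry_delay(1)  # somewhere between 16 and ~16.5 seconds
retry_delay(5)  # somewhere between 640 and ~952 seconds
retry_delay(10) # somewhere between 10_015 and ~15_015 seconds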
diff --git a/app/workers/activitypub/migrated_follow_delivery_worker.rb b/app/workers/activitypub/migrated_follow_delivery_worker.rb
new file mode 100644
index 00000000000000..17a9e515efad08
--- /dev/null
+++ b/app/workers/activitypub/migrated_follow_delivery_worker.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class ActivityPub::MigratedFollowDeliveryWorker < ActivityPub::DeliveryWorker
+ def perform(json, source_account_id, inbox_url, old_target_account_id, options = {})
+ super(json, source_account_id, inbox_url, options)
+ unfollow_old_account!(old_target_account_id)
+ end
+
+ private
+
+ def unfollow_old_account!(old_target_account_id)
+ old_target_account = Account.find(old_target_account_id)
+ UnfollowService.new.call(@source_account, old_target_account, skip_unmerge: true)
+ rescue StandardError
+ true
+ end
+end
diff --git a/app/workers/activitypub/synchronize_featured_collection_worker.rb b/app/workers/activitypub/synchronize_featured_collection_worker.rb
index f67d693cb3ab3c..7a187d7f53eede 100644
--- a/app/workers/activitypub/synchronize_featured_collection_worker.rb
+++ b/app/workers/activitypub/synchronize_featured_collection_worker.rb
@@ -3,7 +3,7 @@
class ActivityPub::SynchronizeFeaturedCollectionWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i
def perform(account_id, options = {})
options = { note: true, hashtag: false }.deep_merge(options.deep_symbolize_keys)
diff --git a/app/workers/activitypub/synchronize_featured_tags_collection_worker.rb b/app/workers/activitypub/synchronize_featured_tags_collection_worker.rb
index 14af4f725cdd62..570415c82149c5 100644
--- a/app/workers/activitypub/synchronize_featured_tags_collection_worker.rb
+++ b/app/workers/activitypub/synchronize_featured_tags_collection_worker.rb
@@ -3,7 +3,7 @@
class ActivityPub::SynchronizeFeaturedTagsCollectionWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i
def perform(account_id, url)
ActivityPub::FetchFeaturedTagsCollectionService.new.call(Account.find(account_id), url)
diff --git a/app/workers/activitypub/update_distribution_worker.rb b/app/workers/activitypub/update_distribution_worker.rb
index d0391bb6f61169..a04ac621f30e02 100644
--- a/app/workers/activitypub/update_distribution_worker.rb
+++ b/app/workers/activitypub/update_distribution_worker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class ActivityPub::UpdateDistributionWorker < ActivityPub::RawDistributionWorker
- sidekiq_options queue: 'push', lock: :until_executed
+ sidekiq_options queue: 'push', lock: :until_executed, lock_ttl: 1.day.to_i
# Distribute a profile update to servers that might have a copy
# of the account in question
diff --git a/app/workers/admin/account_deletion_worker.rb b/app/workers/admin/account_deletion_worker.rb
index 6e0eb331bef83d..5dfdfb6e73c14d 100644
--- a/app/workers/admin/account_deletion_worker.rb
+++ b/app/workers/admin/account_deletion_worker.rb
@@ -3,7 +3,7 @@
class Admin::AccountDeletionWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.week.to_i
def perform(account_id)
DeleteAccountService.new.call(Account.find(account_id), reserve_username: true, reserve_email: true)
diff --git a/app/workers/admin/domain_purge_worker.rb b/app/workers/admin/domain_purge_worker.rb
index 095232a6d74af1..6c5250b660c380 100644
--- a/app/workers/admin/domain_purge_worker.rb
+++ b/app/workers/admin/domain_purge_worker.rb
@@ -3,7 +3,7 @@
class Admin::DomainPurgeWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.week.to_i
def perform(domain)
PurgeDomainService.new.call(domain)
diff --git a/app/workers/link_crawl_worker.rb b/app/workers/link_crawl_worker.rb
index b3d8aa26467dfc..c63af1e43aa0a0 100644
--- a/app/workers/link_crawl_worker.rb
+++ b/app/workers/link_crawl_worker.rb
@@ -7,7 +7,7 @@ class LinkCrawlWorker
def perform(status_id)
FetchLinkCardService.new.call(Status.find(status_id))
- rescue ActiveRecord::RecordNotFound
+ rescue ActiveRecord::RecordNotFound, ActiveRecord::RecordNotUnique
true
end
end
diff --git a/app/workers/publish_scheduled_status_worker.rb b/app/workers/publish_scheduled_status_worker.rb
index ce42f7be7c6e21..aa5c4a834a0510 100644
--- a/app/workers/publish_scheduled_status_worker.rb
+++ b/app/workers/publish_scheduled_status_worker.rb
@@ -3,7 +3,7 @@
class PublishScheduledStatusWorker
include Sidekiq::Worker
- sidekiq_options lock: :until_executed
+ sidekiq_options lock: :until_executed, lock_ttl: 1.hour.to_i
def perform(scheduled_status_id)
scheduled_status = ScheduledStatus.find(scheduled_status_id)
diff --git a/app/workers/resolve_account_worker.rb b/app/workers/resolve_account_worker.rb
index 2b5be6d1b217da..4ae2442af52e02 100644
--- a/app/workers/resolve_account_worker.rb
+++ b/app/workers/resolve_account_worker.rb
@@ -3,7 +3,7 @@
class ResolveAccountWorker
include Sidekiq::Worker
- sidekiq_options queue: 'pull', lock: :until_executed
+ sidekiq_options queue: 'pull', lock: :until_executed, lock_ttl: 1.day.to_i
def perform(uri)
ResolveAccountService.new.call(uri)
diff --git a/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb b/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb
index bd92fe32c40c44..a2ab31cc5d49a9 100644
--- a/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb
+++ b/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb
@@ -7,52 +7,68 @@ class Scheduler::AccountsStatusesCleanupScheduler
# This limit is mostly to be nice to the fediverse at large and not
# generate too much traffic.
# This also helps limiting the running time of the scheduler itself.
- MAX_BUDGET = 50
+ MAX_BUDGET = 300
- # This is an attempt to spread the load across instances, as various
- # accounts are likely to have various followers.
+ # This is an attempt to spread the load across remote servers, as
+ # spreading deletions across diverse accounts is likely to spread them
+ # across diverse follower servers. It also helps each individual
+ # user see some effect sooner.
PER_ACCOUNT_BUDGET = 5
# This is an attempt to limit the workload generated by status removal
- # jobs to something the particular instance can handle.
+ # jobs to something the particular server can handle.
PER_THREAD_BUDGET = 5
- # Those avoid loading an instance that is already under load
- MAX_DEFAULT_SIZE = 2
- MAX_DEFAULT_LATENCY = 5
- MAX_PUSH_SIZE = 5
- MAX_PUSH_LATENCY = 10
- # 'pull' queue has lower priority jobs, and it's unlikely that pushing
- # deletes would cause much issues with this queue if it didn't cause issues
- # with default and push. Yet, do not enqueue deletes if the instance is
- # lagging behind too much.
- MAX_PULL_SIZE = 500
- MAX_PULL_LATENCY = 300
-
- # This is less of an issue in general, but deleting old statuses is likely
- # to cause delivery errors, and thus increase the number of jobs to be retried.
- # This doesn't directly translate to load, but connection errors and a high
- # number of dead instances may lead to this spiraling out of control if
- # unchecked.
- MAX_RETRY_SIZE = 50_000
-
- sidekiq_options retry: 0, lock: :until_executed
+ # These are latency limits on various queues above which a server is
+ # considered to be under load, causing the auto-deletion to be entirely
+ # skipped for that run.
+ LOAD_LATENCY_THRESHOLDS = {
+ default: 5,
+ push: 10,
+ # The `pull` queue has lower priority jobs, and it's unlikely that
+ # pushing deletes would cause many issues with this queue if it didn't
+ # cause issues with `default` and `push`. Yet, do not enqueue deletes
+ # if the server is lagging behind too much.
+ pull: 5.minutes.to_i,
+ }.freeze
+
+ sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
def perform
return if under_load?
budget = compute_budget
- first_policy_id = last_processed_id
+
+ # If the budget allows it, we want to consider all accounts with auto
+ # cleanup enabled at least once.
+ #
+ # We start from `first_policy_id` (the last processed id in the previous
+ # run) and process each policy until we loop to `first_policy_id`,
+ # recording into `affected_policies` any policy that caused posts to be
+ # deleted.
+ #
+ # After that, we set `full_iteration` to `false` and continue looping on
+ # policies from `affected_policies`.
+ first_policy_id = last_processed_id || 0
+ first_iteration = true
+ full_iteration = true
+ affected_policies = []
loop do
num_processed_accounts = 0
- scope = AccountStatusesCleanupPolicy.where(enabled: true)
- scope.where(Account.arel_table[:id].gt(first_policy_id)) if first_policy_id.present?
+ scope = cleanup_policies(first_policy_id, affected_policies, first_iteration, full_iteration)
scope.find_each(order: :asc) do |policy|
num_deleted = AccountStatusesCleanupService.new.call(policy, [budget, PER_ACCOUNT_BUDGET].min)
- num_processed_accounts += 1 unless num_deleted.zero?
budget -= num_deleted
+
+ unless num_deleted.zero?
+ num_processed_accounts += 1
+ affected_policies << policy.id if full_iteration
+ end
+
+ full_iteration = false if !first_iteration && policy.id >= first_policy_id
+
if budget.zero?
save_last_processed_id(policy.id)
break
@@ -61,37 +77,55 @@ def perform
# The idea here is to loop through all policies at least once until the budget is exhausted
# and start back after the last processed account otherwise
- break if budget.zero? || (num_processed_accounts.zero? && first_policy_id.nil?)
- first_policy_id = nil
+ break if budget.zero? || (num_processed_accounts.zero? && !full_iteration)
+
+ full_iteration = false unless first_iteration
+ first_iteration = false
end
end
def compute_budget
- threads = Sidekiq::ProcessSet.new.select { |x| x['queues'].include?('push') }.map { |x| x['concurrency'] }.sum
+ # Each post deletion is a `RemovalWorker` job (on the `default` queue), each
+ # potentially spawning many `ActivityPub::DeliveryWorker` jobs (on the `push` queue).
+ threads = Sidekiq::ProcessSet.new.select { |x| x['queues'].include?('push') }.pluck('concurrency').sum
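+ # For example, two Sidekiq processes serving the push queue at concurrency 25 each give threads = 50, so the budget is min(5 * 50, 300) = 250.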
[PER_THREAD_BUDGET * threads, MAX_BUDGET].min
end
def under_load?
- return true if Sidekiq::Stats.new.retry_size > MAX_RETRY_SIZE
- queue_under_load?('default', MAX_DEFAULT_SIZE, MAX_DEFAULT_LATENCY) || queue_under_load?('push', MAX_PUSH_SIZE, MAX_PUSH_LATENCY) || queue_under_load?('pull', MAX_PULL_SIZE, MAX_PULL_LATENCY)
+ LOAD_LATENCY_THRESHOLDS.any? { |queue, max_latency| queue_under_load?(queue, max_latency) }
end
private
- def queue_under_load?(name, max_size, max_latency)
- queue = Sidekiq::Queue.new(name)
- queue.size > max_size || queue.latency > max_latency
+ def cleanup_policies(first_policy_id, affected_policies, first_iteration, full_iteration)
+ scope = AccountStatusesCleanupPolicy.where(enabled: true)
+
+ if full_iteration
+ # If we are doing a full iteration, examine all policies we have not examined yet
+ if first_iteration
+ scope.where(id: first_policy_id...)
+ else
+ scope.where(id: ..first_policy_id).or(scope.where(id: affected_policies))
+ end
+ else
+ # Otherwise, examine only policies that previously yielded posts to delete
+ scope.where(id: affected_policies)
+ end
+ end
+
+ def queue_under_load?(name, max_latency)
+ Sidekiq::Queue.new(name).latency > max_latency
end
def last_processed_id
- redis.get('account_statuses_cleanup_scheduler:last_account_id')
+ redis.get('account_statuses_cleanup_scheduler:last_policy_id')&.to_i
end
def save_last_processed_id(id)
if id.nil?
- redis.del('account_statuses_cleanup_scheduler:last_account_id')
+ redis.del('account_statuses_cleanup_scheduler:last_policy_id')
else
- redis.set('account_statuses_cleanup_scheduler:last_account_id', id, ex: 1.hour.seconds)
+ redis.set('account_statuses_cleanup_scheduler:last_policy_id', id, ex: 1.hour.seconds)
end
end
end
diff --git a/app/workers/scheduler/indexing_scheduler.rb b/app/workers/scheduler/indexing_scheduler.rb
index c423966297c310..cde6210fbaf97c 100644
--- a/app/workers/scheduler/indexing_scheduler.rb
+++ b/app/workers/scheduler/indexing_scheduler.rb
@@ -4,19 +4,21 @@ class Scheduler::IndexingScheduler
include Sidekiq::Worker
include Redisable
- sidekiq_options retry: 0
+ sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 30.minutes.to_i
+
+ IMPORT_BATCH_SIZE = 1000
+ SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE
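+  # Scanning the Redis set in batches (and importing in smaller slices) bounds memory use instead of loading the whole queue at once.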
def perform
return unless Chewy.enabled?
indexes.each do |type|
with_redis do |redis|
- ids = redis.smembers("chewy:queue:#{type.name}")
-
- type.import!(ids)
-
- redis.pipelined do |pipeline|
- ids.each { |id| pipeline.srem("chewy:queue:#{type.name}", id) }
+ redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
+ type.import!(ids)
+ redis.pipelined do |pipeline|
+ pipeline.srem("chewy:queue:#{type.name}", ids)
+ end
end
end
end
diff --git a/app/workers/scheduler/scheduled_statuses_scheduler.rb b/app/workers/scheduler/scheduled_statuses_scheduler.rb
index 3bf6300b3c4b69..fe60d5524eaf20 100644
--- a/app/workers/scheduler/scheduled_statuses_scheduler.rb
+++ b/app/workers/scheduler/scheduled_statuses_scheduler.rb
@@ -3,7 +3,7 @@
class Scheduler::ScheduledStatusesScheduler
include Sidekiq::Worker
- sidekiq_options retry: 0
+ sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.hour.to_i
def perform
publish_scheduled_statuses!
diff --git a/app/workers/scheduler/trends/refresh_scheduler.rb b/app/workers/scheduler/trends/refresh_scheduler.rb
index b559ba46b4b521..85c000deea786d 100644
--- a/app/workers/scheduler/trends/refresh_scheduler.rb
+++ b/app/workers/scheduler/trends/refresh_scheduler.rb
@@ -3,7 +3,7 @@
class Scheduler::Trends::RefreshScheduler
include Sidekiq::Worker
- sidekiq_options retry: 0
+ sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 30.minutes.to_i
def perform
Trends.refresh!
diff --git a/app/workers/scheduler/user_cleanup_scheduler.rb b/app/workers/scheduler/user_cleanup_scheduler.rb
index 45cfbc62e61384..4aee7935a2f39d 100644
--- a/app/workers/scheduler/user_cleanup_scheduler.rb
+++ b/app/workers/scheduler/user_cleanup_scheduler.rb
@@ -24,7 +24,7 @@ def clean_unconfirmed_accounts!
def clean_discarded_statuses!
Status.unscoped.discarded.where('deleted_at <= ?', 30.days.ago).find_in_batches do |statuses|
RemovalWorker.push_bulk(statuses) do |status|
- [status.id, { 'immediate' => true }]
+ [status.id, { 'immediate' => true, 'skip_streaming' => true }]
end
end
end
diff --git a/app/workers/unfollow_follow_worker.rb b/app/workers/unfollow_follow_worker.rb
index 7203b4888f8c80..a4d57839de96c8 100644
--- a/app/workers/unfollow_follow_worker.rb
+++ b/app/workers/unfollow_follow_worker.rb
@@ -10,13 +10,7 @@ def perform(follower_account_id, old_target_account_id, new_target_account_id, b
old_target_account = Account.find(old_target_account_id)
new_target_account = Account.find(new_target_account_id)
- follow = follower_account.active_relationships.find_by(target_account: old_target_account)
- reblogs = follow&.show_reblogs?
- notify = follow&.notify?
- languages = follow&.languages
-
- FollowService.new.call(follower_account, new_target_account, reblogs: reblogs, notify: notify, languages: languages, bypass_locked: bypass_locked, bypass_limit: true)
- UnfollowService.new.call(follower_account, old_target_account, skip_unmerge: true)
+ FollowMigrationService.new.call(follower_account, new_target_account, old_target_account, bypass_locked: bypass_locked)
rescue ActiveRecord::RecordNotFound, Mastodon::NotPermittedError
true
end
diff --git a/app/workers/verify_account_links_worker.rb b/app/workers/verify_account_links_worker.rb
index f606e6c26fefdf..ad27f450b78996 100644
--- a/app/workers/verify_account_links_worker.rb
+++ b/app/workers/verify_account_links_worker.rb
@@ -3,7 +3,7 @@
class VerifyAccountLinksWorker
include Sidekiq::Worker
- sidekiq_options queue: 'default', retry: false, lock: :until_executed
+ sidekiq_options queue: 'default', retry: false, lock: :until_executed, lock_ttl: 1.hour.to_i
def perform(account_id)
account = Account.find(account_id)
diff --git a/bin/tootctl b/bin/tootctl
index a9ebb22c6dc5ac..9c7ae8b8712104 100755
--- a/bin/tootctl
+++ b/bin/tootctl
@@ -5,7 +5,9 @@ require_relative '../config/boot'
require_relative '../lib/cli'
begin
- Mastodon::CLI.start(ARGV)
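+  # Run the CLI inside the custom :mastodon Chewy strategy so records touched by CLI commands are queued for search indexing instead of being handled without a strategy.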
+ Chewy.strategy(:mastodon) do
+ Mastodon::CLI.start(ARGV)
+ end
rescue Interrupt
exit(130)
end
diff --git a/config/application.rb b/config/application.rb
index 68d613b342a682..513df0fe4b0908 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -28,6 +28,7 @@
require_relative '../lib/paperclip/attachment_extensions'
require_relative '../lib/paperclip/lazy_thumbnail'
require_relative '../lib/paperclip/gif_transcoder'
+require_relative '../lib/paperclip/media_type_spoof_detector_extensions'
require_relative '../lib/paperclip/transcoder'
require_relative '../lib/paperclip/type_corrector'
require_relative '../lib/paperclip/response_with_limit_adapter'
@@ -35,9 +36,11 @@
require_relative '../lib/mastodon/snowflake'
require_relative '../lib/mastodon/version'
require_relative '../lib/mastodon/rack_middleware'
+require_relative '../lib/public_file_server_middleware'
require_relative '../lib/devise/two_factor_ldap_authenticatable'
require_relative '../lib/devise/two_factor_pam_authenticatable'
require_relative '../lib/chewy/strategy/mastodon'
+require_relative '../lib/chewy/strategy/bypass_with_warning'
require_relative '../lib/webpacker/manifest_extensions'
require_relative '../lib/webpacker/helper_extensions'
require_relative '../lib/rails/engine_extensions'
@@ -184,6 +187,10 @@ class Application < Rails::Application
config.active_job.queue_adapter = :sidekiq
config.action_mailer.deliver_later_queue_name = 'mailers'
+ # We use our own middleware for this
+ config.public_file_server.enabled = false
+
+ config.middleware.use PublicFileServerMiddleware if Rails.env.development? || ENV['RAILS_SERVE_STATIC_FILES'] == 'true'
config.middleware.use Rack::Attack
config.middleware.use Mastodon::RackMiddleware
diff --git a/config/database.yml b/config/database.yml
index bfb53f21b4a512..34acf2f19aedfd 100644
--- a/config/database.yml
+++ b/config/database.yml
@@ -5,6 +5,7 @@ default: &default
connect_timeout: 15
encoding: unicode
sslmode: <%= ENV['DB_SSLMODE'] || "prefer" %>
+ application_name: ''
development:
<<: *default
diff --git a/config/environments/development.rb b/config/environments/development.rb
index de8762ff744902..c7b4a5d035e4b7 100644
--- a/config/environments/development.rb
+++ b/config/environments/development.rb
@@ -16,12 +16,7 @@
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true
-
config.cache_store = :redis_cache_store, REDIS_CACHE_PARAMS
-
- config.public_file_server.headers = {
- 'Cache-Control' => "public, max-age=#{2.days.to_i}",
- }
else
config.action_controller.perform_caching = false
diff --git a/config/environments/production.rb b/config/environments/production.rb
index 99c9bb40c542c2..00d783477596da 100644
--- a/config/environments/production.rb
+++ b/config/environments/production.rb
@@ -19,27 +19,16 @@
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
- # Disable serving static files from the `/public` folder by default since
- # Apache or NGINX already handles this.
- config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
-
ActiveSupport::Logger.new(STDOUT).tap do |logger|
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
- # Compress JavaScripts and CSS.
- # config.assets.js_compressor = Uglifier.new(mangle: false)
- # config.assets.css_compressor = :sass
-
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
- # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
-
# Specifies the header that your server uses for sending files.
- # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
- config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
+ config.action_dispatch.x_sendfile_header = ENV['SENDFILE_HEADER'] if ENV['SENDFILE_HEADER'].present?
# Allow specifying the public IP of the reverse proxy if needed
config.action_dispatch.trusted_proxies = ENV['TRUSTED_PROXY_IP'].split(/(?:\s*,\s*|\s+)/).map { |item| IPAddr.new(item) } if ENV['TRUSTED_PROXY_IP'].present?
@@ -67,7 +56,7 @@
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# English when a translation cannot be found).
- config.i18n.fallbacks = [:en]
+ config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
@@ -128,6 +117,7 @@
enable_starttls_auto: enable_starttls_auto,
tls: ENV['SMTP_TLS'].presence && ENV['SMTP_TLS'] == 'true',
ssl: ENV['SMTP_SSL'].presence && ENV['SMTP_SSL'] == 'true',
+ read_timeout: 20,
}
config.action_mailer.delivery_method = ENV.fetch('SMTP_DELIVERY_METHOD', 'smtp').to_sym
diff --git a/config/environments/test.rb b/config/environments/test.rb
index ef3cb2e4877039..44786962ae73ef 100644
--- a/config/environments/test.rb
+++ b/config/environments/test.rb
@@ -12,11 +12,6 @@
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
- # Configure public file server for tests with Cache-Control for performance.
- config.public_file_server.enabled = true
- config.public_file_server.headers = {
- 'Cache-Control' => "public, max-age=#{1.hour.to_i}"
- }
config.assets.digest = false
# Show full error reports and disable caching.
diff --git a/config/imagemagick/policy.xml b/config/imagemagick/policy.xml
new file mode 100644
index 00000000000000..1052476b319eb3
--- /dev/null
+++ b/config/imagemagick/policy.xml
@@ -0,0 +1,27 @@
+<!-- ImageMagick security policy (policymap) -->
diff --git a/config/initializers/chewy.rb b/config/initializers/chewy.rb
index 752fc3c6dfe551..daf4a5f3260cd6 100644
--- a/config/initializers/chewy.rb
+++ b/config/initializers/chewy.rb
@@ -19,7 +19,7 @@
# cycle, which takes care of checking if Elasticsearch is enabled
# or not. However, mind that for the Rails console, the :urgent
# strategy is set automatically with no way to override it.
-Chewy.root_strategy = :mastodon
+Chewy.root_strategy = :bypass_with_warning if Rails.env.production?
Chewy.request_strategy = :mastodon
Chewy.use_after_commit_callbacks = false
diff --git a/config/initializers/content_security_policy.rb b/config/initializers/content_security_policy.rb
index cb56293376d786..d863fa6d448765 100644
--- a/config/initializers/content_security_policy.rb
+++ b/config/initializers/content_security_policy.rb
@@ -3,7 +3,11 @@
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy
def host_to_url(str)
- "http#{Rails.configuration.x.use_https ? 's' : ''}://#{str}" unless str.blank?
+ return if str.blank?
+
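+  # A trailing slash makes the CSP source expression match everything under the path prefix, not just the exact path.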
+ uri = Addressable::URI.parse("http#{Rails.configuration.x.use_https ? 's' : ''}://#{str}")
+ uri.path += '/' unless uri.path.blank? || uri.path.end_with?('/')
+ uri.to_s
end
base_host = Rails.configuration.x.web_domain
diff --git a/config/initializers/doorkeeper.rb b/config/initializers/doorkeeper.rb
index 43aac5769f1cb1..043f053a0d7cec 100644
--- a/config/initializers/doorkeeper.rb
+++ b/config/initializers/doorkeeper.rb
@@ -19,9 +19,14 @@
user unless user&.otp_required_for_login?
end
- # If you want to restrict access to the web interface for adding oauth authorized applications, you need to declare the block below.
+ # Doorkeeper provides some administrative interfaces for managing OAuth
+ # Applications, allowing creation, edit, and deletion of applications from the
+ # server. At present, these administrative routes are not integrated into
+ # Mastodon, and as such, we've disabled them by always returning a 403 forbidden
+ # response for them. This does not affect the ability for users to manage
+ # their own OAuth Applications.
admin_authenticator do
- current_user&.admin? || redirect_to(new_user_session_url)
+ head 403
end
# Authorization Code expiration time (default 10 minutes).
diff --git a/config/initializers/paperclip.rb b/config/initializers/paperclip.rb
index a2285427c80eea..7ed244e0c0cb7a 100644
--- a/config/initializers/paperclip.rb
+++ b/config/initializers/paperclip.rb
@@ -124,6 +124,7 @@ def copy_to_local_file(style, local_dest_path)
openstack_domain_name: ENV.fetch('SWIFT_DOMAIN_NAME') { 'default' },
openstack_region: ENV['SWIFT_REGION'],
openstack_cache_ttl: ENV.fetch('SWIFT_CACHE_TTL') { 60 },
+ openstack_temp_url_key: ENV['SWIFT_TEMP_URL_KEY'],
},
fog_file: { 'Cache-Control' => 'public, max-age=315576000, immutable' },
@@ -154,3 +155,10 @@ class NetworkingError < StandardError; end
end
end
end
+
+# Set our ImageMagick security policy, but allow admins to override it
+ENV['MAGICK_CONFIGURE_PATH'] = begin
+ imagemagick_config_paths = ENV.fetch('MAGICK_CONFIGURE_PATH', '').split(File::PATH_SEPARATOR)
+ imagemagick_config_paths << Rails.root.join('config', 'imagemagick').expand_path.to_s
+ imagemagick_config_paths.join(File::PATH_SEPARATOR)
+end
diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb
index 9d2abf0745eca9..b847e654692d16 100644
--- a/config/initializers/sidekiq.rb
+++ b/config/initializers/sidekiq.rb
@@ -3,6 +3,11 @@
require_relative '../../lib/mastodon/sidekiq_middleware'
Sidekiq.configure_server do |config|
+ if Rails.configuration.database_configuration.dig('production', 'adapter') == 'postgresql_makara'
+ STDERR.puts 'ERROR: Database replication is not currently supported in Sidekiq workers. Check your configuration.'
+ exit 1
+ end
+
config.redis = REDIS_SIDEKIQ_PARAMS
config.server_middleware do |chain|
diff --git a/config/initializers/twitter_regex.rb b/config/initializers/twitter_regex.rb
index 6a7723fd213c77..e65b05dfdea083 100644
--- a/config/initializers/twitter_regex.rb
+++ b/config/initializers/twitter_regex.rb
@@ -25,7 +25,7 @@ class Regex
\)
/iox
UCHARS = '\u{A0}-\u{D7FF}\u{F900}-\u{FDCF}\u{FDF0}-\u{FFEF}\u{10000}-\u{1FFFD}\u{20000}-\u{2FFFD}\u{30000}-\u{3FFFD}\u{40000}-\u{4FFFD}\u{50000}-\u{5FFFD}\u{60000}-\u{6FFFD}\u{70000}-\u{7FFFD}\u{80000}-\u{8FFFD}\u{90000}-\u{9FFFD}\u{A0000}-\u{AFFFD}\u{B0000}-\u{BFFFD}\u{C0000}-\u{CFFFD}\u{D0000}-\u{DFFFD}\u{E1000}-\u{EFFFD}\u{E000}-\u{F8FF}\u{F0000}-\u{FFFFD}\u{100000}-\u{10FFFD}'
- REGEXEN[:valid_url_query_chars] = /[a-z0-9!?\*'\(\);:&=\+\$\/%#\[\]\-_\.,~|@#{UCHARS}]/iou
+ REGEXEN[:valid_url_query_chars] = /[a-z0-9!?\*'\(\);:&=\+\$\/%#\[\]\-_\.,~|@\^#{UCHARS}]/iou
REGEXEN[:valid_url_query_ending_chars] = /[a-z0-9_&=#\/\-#{UCHARS}]/iou
REGEXEN[:valid_url_path] = /(?:
(?:
diff --git a/config/locales/activerecord.en.yml b/config/locales/activerecord.en.yml
index 8aee15659f27e2..a53c7c6e9e3939 100644
--- a/config/locales/activerecord.en.yml
+++ b/config/locales/activerecord.en.yml
@@ -53,3 +53,7 @@ en:
position:
elevated: cannot be higher than your current role
own_role: cannot be changed with your current role
+ webhook:
+ attributes:
+ events:
+ invalid_permissions: cannot include events you don't have the rights to
diff --git a/config/locales/devise.en.yml b/config/locales/devise.en.yml
index 458fa6d7596e54..d47b38321b6d6a 100644
--- a/config/locales/devise.en.yml
+++ b/config/locales/devise.en.yml
@@ -12,6 +12,7 @@ en:
last_attempt: You have one more attempt before your account is locked.
locked: Your account is locked.
not_found_in_database: Invalid %{authentication_keys} or password.
+ omniauth_user_creation_failure: Error creating an account for this identity.
pending: Your account is still under review.
timeout: Your session expired. Please sign in again to continue.
unauthenticated: You need to sign in or sign up before continuing.
diff --git a/config/locales/en.yml b/config/locales/en.yml
index fc24604b208d87..14bbd2897e7462 100644
--- a/config/locales/en.yml
+++ b/config/locales/en.yml
@@ -809,6 +809,12 @@ en:
message_html: You haven't defined any server rules.
sidekiq_process_check:
message_html: No Sidekiq process running for the %{value} queue(s). Please review your Sidekiq configuration
+ upload_check_privacy_error:
+ action: Check here for more information
+ message_html: "Your web server is misconfigured. The privacy of your users is at risk."
+ upload_check_privacy_error_object_storage:
+ action: Check here for more information
+ message_html: "Your object storage is misconfigured. The privacy of your users is at risk."
tags:
review: Review status
updated_msg: Hashtag settings updated successfully
@@ -1397,6 +1403,7 @@ en:
expired: The poll has already ended
invalid_choice: The chosen vote option does not exist
over_character_limit: cannot be longer than %{max} characters each
+ self_vote: You cannot vote in your own polls
too_few_options: must have more than one item
too_many_options: can't contain more than %{max} items
preferences:
@@ -1415,6 +1422,7 @@ en:
confirm_remove_selected_followers: Are you sure you want to remove selected followers?
confirm_remove_selected_follows: Are you sure you want to remove selected follows?
dormant: Dormant
+ follow_failure: Could not follow some of the selected accounts.
follow_selected_followers: Follow selected followers
followers: Followers
following: Following
@@ -1693,6 +1701,7 @@ en:
follow_limit_reached: You cannot follow more than %{limit} people
invalid_otp_token: Invalid two-factor code
otp_lost_help_html: If you lost access to both, you may get in touch with %{email}
+ rate_limited: Too many authentication attempts, try again later.
seamless_external_login: You are logged in via an external service, so password and e-mail settings are not available.
signed_in_as: 'Signed in as:'
verification:
diff --git a/config/puma.rb b/config/puma.rb
index e5929544583a30..c4e2b0b85c5c42 100644
--- a/config/puma.rb
+++ b/config/puma.rb
@@ -22,3 +22,5 @@
end
plugin :tmp_restart
+
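+# Derive the client address from the PROXY protocol (v1) header when PROXY_PROTO_V1 is enabled.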
+set_remote_address(proxy_protocol: :v1) if ENV['PROXY_PROTO_V1'] == 'true'
diff --git a/config/routes.rb b/config/routes.rb
index 52ce1d726b19d7..0652b1b80a30fe 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'sidekiq_unique_jobs/web'
+require 'sidekiq_unique_jobs/web' if ENV['ENABLE_SIDEKIQ_UNIQUE_JOBS_UI'] == true
require 'sidekiq-scheduler/web'
Rails.application.routes.draw do
@@ -110,6 +110,8 @@
resource :inbox, only: [:create], module: :activitypub
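+  # Redirect profile URLs where the leading "@" arrived percent-encoded as %40 to the canonical /@username form.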
+ get '/:encoded_at(*path)', to: redirect("/@%{path}"), constraints: { encoded_at: /%40/ }
+
constraints(username: /[^@\/.]+/) do
get '/@:username', to: 'accounts#show', as: :short_account
get '/@:username/with_replies', to: 'accounts#show', as: :short_account_with_replies
@@ -124,7 +126,7 @@
get '/@:account_username/:id/embed', to: 'statuses#embed', as: :embed_short_account_status
end
- get '/@:username_with_domain/(*any)', to: 'home#index', constraints: { username_with_domain: /([^\/])+?/ }, format: false
+ get '/@:username_with_domain/(*any)', to: 'home#index', constraints: { username_with_domain: %r{([^/])+?} }, as: :account_with_domain, format: false
get '/settings', to: redirect('/settings/profile')
namespace :settings do
@@ -219,6 +221,7 @@
resource :statuses_cleanup, controller: :statuses_cleanup, only: [:show, :update]
get '/media_proxy/:id/(*any)', to: 'media_proxy#show', as: :media_proxy, format: false
+ get '/backups/:id/download', to: 'backups#download', as: :download_backup, format: false
resource :authorize_interaction, only: [:show, :create]
resource :share, only: [:show, :create]
@@ -290,7 +293,7 @@
end
end
- resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ } do
+ resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ }, format: 'html' do
member do
post :clear_delivery_errors
post :restart_delivery
@@ -471,7 +474,9 @@
resources :list, only: :show
end
- resources :streaming, only: [:index]
+ get '/streaming', to: 'streaming#index'
+ get '/streaming/(*any)', to: 'streaming#index'
+
resources :custom_emojis, only: [:index]
resources :suggestions, only: [:index, :destroy]
resources :scheduled_statuses, only: [:index, :show, :update, :destroy]
diff --git a/db/seeds.rb b/db/seeds.rb
index 1ca300de734bec..c01e83f1d39020 100644
--- a/db/seeds.rb
+++ b/db/seeds.rb
@@ -1,5 +1,7 @@
# frozen_string_literal: true
-Dir[Rails.root.join('db', 'seeds', '*.rb')].sort.each do |seed|
- load seed
+Chewy.strategy(:mastodon) do
+ Dir[Rails.root.join('db', 'seeds', '*.rb')].sort.each do |seed|
+ load seed
+ end
end
diff --git a/dist/nginx.conf b/dist/nginx.conf
index 5bc960e2568c36..b59446403b0b4f 100644
--- a/dist/nginx.conf
+++ b/dist/nginx.conf
@@ -109,6 +109,8 @@ server {
location ~ ^/system/ {
add_header Cache-Control "public, max-age=2419200, immutable";
add_header Strict-Transport-Security "max-age=63072000; includeSubDomains";
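+    # Serve user uploads with nosniff and a restrictive CSP so uploaded files cannot execute active content.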
+ add_header X-Content-Type-Options nosniff;
+ add_header Content-Security-Policy "default-src 'none'; form-action 'none'";
try_files $uri =404;
}
diff --git a/docker-compose.yml b/docker-compose.yml
index c534286c76d315..3e4a14413ab1ec 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -56,7 +56,7 @@ services:
web:
build: .
- image: tootsuite/mastodon
+ image: ghcr.io/mastodon/mastodon:v4.1.15
restart: always
env_file: .env.production
command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
@@ -77,7 +77,7 @@ services:
streaming:
build: .
- image: tootsuite/mastodon
+ image: ghcr.io/mastodon/mastodon:v4.1.15
restart: always
env_file: .env.production
command: node ./streaming
@@ -95,7 +95,7 @@ services:
sidekiq:
build: .
- image: tootsuite/mastodon
+ image: ghcr.io/mastodon/mastodon:v4.1.15
restart: always
env_file: .env.production
command: bundle exec sidekiq
diff --git a/lib/chewy/strategy/bypass_with_warning.rb b/lib/chewy/strategy/bypass_with_warning.rb
new file mode 100644
index 00000000000000..eb6fbaab167603
--- /dev/null
+++ b/lib/chewy/strategy/bypass_with_warning.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module Chewy
+ class Strategy
+ class BypassWithWarning < Base
+ def update(...)
+ Rails.logger.warn 'Chewy update without a root strategy' unless @warning_issued
+ @warning_issued = true
+ end
+ end
+ end
+end
diff --git a/lib/mastodon/accounts_cli.rb b/lib/mastodon/accounts_cli.rb
index 34afbc699dd89b..d4a6bbc1276608 100644
--- a/lib/mastodon/accounts_cli.rb
+++ b/lib/mastodon/accounts_cli.rb
@@ -372,16 +372,16 @@ def cull(*domains)
option :concurrency, type: :numeric, default: 5, aliases: [:c]
option :verbose, type: :boolean, aliases: [:v]
option :dry_run, type: :boolean
- desc 'refresh [USERNAME]', 'Fetch remote user data and files'
+ desc 'refresh [USERNAMES]', 'Fetch remote user data and files'
long_desc <<-LONG_DESC
Fetch remote user data and files for one or multiple accounts.
With the --all option, all remote accounts will be processed.
Through the --domain option, this can be narrowed down to a
- specific domain only. Otherwise, a single remote account must
- be specified with USERNAME.
+ specific domain only. Otherwise, remote accounts must be
+ specified with space-separated USERNAMES.
LONG_DESC
- def refresh(username = nil)
+ def refresh(*usernames)
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
if options[:domain] || options[:all]
@@ -397,19 +397,25 @@ def refresh(username = nil)
end
say("Refreshed #{processed} accounts#{dry_run}", :green, true)
- elsif username.present?
- username, domain = username.split('@')
- account = Account.find_remote(username, domain)
+ elsif !usernames.empty?
+ usernames.each do |user|
+ user, domain = user.split('@')
+ account = Account.find_remote(user, domain)
+
+ if account.nil?
+ say('No such account', :red)
+ exit(1)
+ end
- if account.nil?
- say('No such account', :red)
- exit(1)
- end
+ next if options[:dry_run]
- unless options[:dry_run]
- account.reset_avatar!
- account.reset_header!
- account.save
+ begin
+ account.reset_avatar!
+ account.reset_header!
+ account.save
+ rescue Mastodon::UnexpectedResponseError
+ say("Account failed: #{user}@#{domain}", :red)
+ end
end
say("OK#{dry_run}", :green)
@@ -536,7 +542,7 @@ def approve(username = nil)
User.pending.find_each(&:approve!)
say('OK', :green)
elsif options[:number]
- User.pending.limit(options[:number]).each(&:approve!)
+ User.pending.order(created_at: :asc).limit(options[:number]).each(&:approve!)
say('OK', :green)
elsif username.present?
account = Account.find_local(username)
@@ -631,7 +637,7 @@ def migrate(username)
exit(1)
end
- unless options[:force] || migration.target_acount_id == account.moved_to_account_id
+ unless options[:force] || migration.target_account_id == account.moved_to_account_id
say('The specified account is not redirecting to its last migration target. Use --force if you want to replay the migration anyway', :red)
exit(1)
end
diff --git a/lib/mastodon/cli_helper.rb b/lib/mastodon/cli_helper.rb
index a78a28e2734b2d..4e304c903539b5 100644
--- a/lib/mastodon/cli_helper.rb
+++ b/lib/mastodon/cli_helper.rb
@@ -53,14 +53,16 @@ def parallelize_with_progress(scope)
progress.log("Processing #{item.id}") if options[:verbose]
- result = ActiveRecord::Base.connection_pool.with_connection do
- yield(item)
- ensure
- RedisConfiguration.pool.checkin if Thread.current[:redis]
- Thread.current[:redis] = nil
+ Chewy.strategy(:mastodon) do
+ result = ActiveRecord::Base.connection_pool.with_connection do
+ yield(item)
+ ensure
+ RedisConfiguration.pool.checkin if Thread.current[:redis]
+ Thread.current[:redis] = nil
+ end
+
+ aggregate.increment(result) if result.is_a?(Integer)
end
-
- aggregate.increment(result) if result.is_a?(Integer)
rescue => e
progress.log pastel.red("Error processing #{item.id}: #{e}")
ensure
diff --git a/lib/mastodon/sidekiq_middleware.rb b/lib/mastodon/sidekiq_middleware.rb
index c75e8401f5fbc2..9832e1a27c96ca 100644
--- a/lib/mastodon/sidekiq_middleware.rb
+++ b/lib/mastodon/sidekiq_middleware.rb
@@ -3,8 +3,8 @@
class Mastodon::SidekiqMiddleware
BACKTRACE_LIMIT = 3
- def call(*)
- yield
+ def call(*, &block)
+ Chewy.strategy(:mastodon, &block)
rescue Mastodon::HostValidationError
# Do not retry
rescue => e
diff --git a/lib/mastodon/version.rb b/lib/mastodon/version.rb
index 8101e61dd28c1f..29a22eff3b8ea6 100644
--- a/lib/mastodon/version.rb
+++ b/lib/mastodon/version.rb
@@ -13,7 +13,7 @@ def minor
end
def patch
- 0
+ 15
end
def flags
diff --git a/lib/paperclip/media_type_spoof_detector_extensions.rb b/lib/paperclip/media_type_spoof_detector_extensions.rb
new file mode 100644
index 00000000000000..a406ef312fd3c7
--- /dev/null
+++ b/lib/paperclip/media_type_spoof_detector_extensions.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Paperclip
+ module MediaTypeSpoofDetectorExtensions
+ def calculated_content_type
+ return @calculated_content_type if defined?(@calculated_content_type)
+
+ @calculated_content_type = type_from_file_command.chomp
+
+ # The `file` command fails to recognize some MP3 files as such
+ @calculated_content_type = type_from_marcel if @calculated_content_type == 'application/octet-stream' && type_from_marcel == 'audio/mpeg'
+ @calculated_content_type
+ end
+
+ def type_from_marcel
+ @type_from_marcel ||= Marcel::MimeType.for Pathname.new(@file.path),
+ name: @file.path
+ end
+ end
+end
+
+Paperclip::MediaTypeSpoofDetector.prepend(Paperclip::MediaTypeSpoofDetectorExtensions)
diff --git a/lib/paperclip/response_with_limit_adapter.rb b/lib/paperclip/response_with_limit_adapter.rb
index deb89717a40591..ff7a938abb3967 100644
--- a/lib/paperclip/response_with_limit_adapter.rb
+++ b/lib/paperclip/response_with_limit_adapter.rb
@@ -16,7 +16,7 @@ def initialize(target, options = {})
private
def cache_current_values
- @original_filename = filename_from_content_disposition.presence || filename_from_path.presence || 'data'
+ @original_filename = truncated_filename
@tempfile = copy_to_tempfile(@target)
@content_type = ContentTypeDetector.new(@tempfile.path).detect
@size = File.size(@tempfile)
@@ -43,6 +43,13 @@ def copy_to_tempfile(source)
source.response.connection.close
end
+ def truncated_filename
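+    # Cap remote filenames to 20 characters of base name plus a dot and up to 4 characters of extension.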
+ filename = filename_from_content_disposition.presence || filename_from_path.presence || 'data'
+ extension = File.extname(filename)
+ basename = File.basename(filename, extension)
+ [basename[...20], extension[..4]].compact_blank.join
+ end
+
def filename_from_content_disposition
disposition = @target.response.headers['content-disposition']
disposition&.match(/filename="([^"]*)"/)&.captures&.first
diff --git a/lib/paperclip/transcoder.rb b/lib/paperclip/transcoder.rb
index afd9f58ff695c0..0f2e30f7d5e45e 100644
--- a/lib/paperclip/transcoder.rb
+++ b/lib/paperclip/transcoder.rb
@@ -19,10 +19,7 @@ def initialize(file, options = {}, attachment = nil)
def make
metadata = VideoMetadataExtractor.new(@file.path)
- unless metadata.valid?
- Paperclip.log("Unsupported file #{@file.path}")
- return File.open(@file.path)
- end
+ raise Paperclip::Error, "Error while transcoding #{@file.path}: unsupported file" unless metadata.valid?
update_attachment_type(metadata)
update_options_from_metadata(metadata)
@@ -40,12 +37,14 @@ def make
@output_options['f'] = 'image2'
@output_options['vframes'] = 1
when 'mp4'
- @output_options['acodec'] = 'aac'
- @output_options['strict'] = 'experimental'
-
- if high_vfr?(metadata) && !eligible_to_passthrough?(metadata)
- @output_options['vsync'] = 'vfr'
- @output_options['r'] = @vfr_threshold
+ unless eligible_to_passthrough?(metadata)
+ @output_options['acodec'] = 'aac'
+ @output_options['strict'] = 'experimental'
+
+ if high_vfr?(metadata)
+ @output_options['vsync'] = 'vfr'
+ @output_options['r'] = @vfr_threshold
+ end
end
end
diff --git a/lib/public_file_server_middleware.rb b/lib/public_file_server_middleware.rb
new file mode 100644
index 00000000000000..7e02e37a08464a
--- /dev/null
+++ b/lib/public_file_server_middleware.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'action_dispatch/middleware/static'
+
+class PublicFileServerMiddleware
+ SERVICE_WORKER_TTL = 7.days.to_i
+ CACHE_TTL = 28.days.to_i
+
+ def initialize(app)
+ @app = app
+ @file_handler = ActionDispatch::FileHandler.new(Rails.application.paths['public'].first)
+ end
+
+ def call(env)
+ file = @file_handler.attempt(env)
+
+ # If the request is not a static file, move on!
+ return @app.call(env) if file.nil?
+
+ status, headers, response = file
+
+ # Set cache headers on static files. Some paths require different cache headers
+ headers['Cache-Control'] = begin
+ request_path = env['REQUEST_PATH']
+
+ if request_path.start_with?('/sw.js')
+ "public, max-age=#{SERVICE_WORKER_TTL}, must-revalidate"
+ elsif request_path.start_with?(paperclip_root_url)
+ "public, max-age=#{CACHE_TTL}, immutable"
+ else
+ "public, max-age=#{CACHE_TTL}, must-revalidate"
+ end
+ end
+
+ # Override the default CSP header set by the CSP middleware
+ headers['Content-Security-Policy'] = "default-src 'none'; form-action 'none'" if request_path.start_with?(paperclip_root_url)
+
+ headers['X-Content-Type-Options'] = 'nosniff'
+
+ [status, headers, response]
+ end
+
+ private
+
+ def paperclip_root_url
+ ENV.fetch('PAPERCLIP_ROOT_URL', '/system')
+ end
+end
diff --git a/lib/sanitize_ext/sanitize_config.rb b/lib/sanitize_ext/sanitize_config.rb
index baf65266263fd8..703ba8b0598e83 100644
--- a/lib/sanitize_ext/sanitize_config.rb
+++ b/lib/sanitize_ext/sanitize_config.rb
@@ -94,26 +94,26 @@ module Config
]
)
- MASTODON_OEMBED ||= freeze_config merge(
- RELAXED,
- elements: RELAXED[:elements] + %w(audio embed iframe source video),
+ MASTODON_OEMBED ||= freeze_config(
+ elements: %w(audio embed iframe source video),
- attributes: merge(
- RELAXED[:attributes],
+ attributes: {
'audio' => %w(controls),
'embed' => %w(height src type width),
'iframe' => %w(allowfullscreen frameborder height scrolling src width),
'source' => %w(src type),
'video' => %w(controls height loop width),
- 'div' => [:data]
- ),
+ },
- protocols: merge(
- RELAXED[:protocols],
+ protocols: {
'embed' => { 'src' => HTTP_PROTOCOLS },
'iframe' => { 'src' => HTTP_PROTOCOLS },
- 'source' => { 'src' => HTTP_PROTOCOLS }
- )
+ 'source' => { 'src' => HTTP_PROTOCOLS },
+ },
+
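+      # Always add a sandbox attribute to embedded iframes so third-party embeds run with restricted capabilities.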
+ add_attributes: {
+ 'iframe' => { 'sandbox' => 'allow-scripts allow-same-origin allow-popups allow-popups-to-escape-sandbox allow-forms' },
+ }
)
end
end
diff --git a/lib/tasks/branding.rake b/lib/tasks/branding.rake
index d1c1c9dede791d..d97c97c99edc71 100644
--- a/lib/tasks/branding.rake
+++ b/lib/tasks/branding.rake
@@ -40,7 +40,7 @@ namespace :branding do
output_dest = Rails.root.join('app', 'javascript', 'icons')
rsvg_convert = Terrapin::CommandLine.new('rsvg-convert', '-w :size -h :size --keep-aspect-ratio :input -o :output')
- convert = Terrapin::CommandLine.new('convert', ':input :output')
+ convert = Terrapin::CommandLine.new('convert', ':input :output', environment: { 'MAGICK_CONFIGURE_PATH' => nil })
favicon_sizes = [16, 32, 48]
apple_icon_sizes = [57, 60, 72, 76, 114, 120, 144, 152, 167, 180, 1024]
diff --git a/lib/tasks/sidekiq_unique_jobs.rake b/lib/tasks/sidekiq_unique_jobs.rake
new file mode 100644
index 00000000000000..bedc8fe4c650c4
--- /dev/null
+++ b/lib/tasks/sidekiq_unique_jobs.rake
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+namespace :sidekiq_unique_jobs do
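+  # Maintenance task: clear every uniqueness lock digest (current and expiring), e.g. when stale locks keep jobs from being enqueued.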
+ task delete_all_locks: :environment do
+ digests = SidekiqUniqueJobs::Digests.new
+ digests.delete_by_pattern('*', count: digests.count)
+
+ expiring_digests = SidekiqUniqueJobs::ExpiringDigests.new
+ expiring_digests.delete_by_pattern('*', count: expiring_digests.count)
+ end
+end
diff --git a/spec/controllers/admin/statuses_controller_spec.rb b/spec/controllers/admin/statuses_controller_spec.rb
index 7f912c1c07bb24..877c7e63ebfb30 100644
--- a/spec/controllers/admin/statuses_controller_spec.rb
+++ b/spec/controllers/admin/statuses_controller_spec.rb
@@ -40,24 +40,36 @@
end
describe 'POST #batch' do
- before do
- post :batch, params: { account_id: account.id, action => '', admin_status_batch_action: { status_ids: status_ids } }
- end
+ subject { post :batch, params: { :account_id => account.id, action => '', :admin_status_batch_action => { status_ids: status_ids } } }
let(:status_ids) { [media_attached_status.id] }
- context 'when action is report' do
+ shared_examples 'when action is report' do
let(:action) { 'report' }
it 'creates a report' do
+ subject
+
report = Report.last
expect(report.target_account_id).to eq account.id
expect(report.status_ids).to eq status_ids
end
it 'redirects to report page' do
+ subject
+
expect(response).to redirect_to(admin_report_path(Report.last.id))
end
end
+
+ it_behaves_like 'when action is report'
+
+ context 'when the moderator is blocked by the author' do
+ before do
+ account.block!(user.account)
+ end
+
+ it_behaves_like 'when action is report'
+ end
end
end
diff --git a/spec/controllers/api/v1/conversations_controller_spec.rb b/spec/controllers/api/v1/conversations_controller_spec.rb
index 5add7cf1d4214f..1ec26d52036fa6 100644
--- a/spec/controllers/api/v1/conversations_controller_spec.rb
+++ b/spec/controllers/api/v1/conversations_controller_spec.rb
@@ -16,6 +16,7 @@
before do
PostStatusService.new.call(other.account, text: 'Hey @alice', visibility: 'direct')
+ PostStatusService.new.call(user.account, text: 'Hey, nobody here', visibility: 'direct')
end
it 'returns http success' do
@@ -31,7 +32,26 @@
it 'returns conversations' do
get :index
json = body_as_json
- expect(json.size).to eq 1
+ expect(json.size).to eq 2
+ expect(json[0][:accounts].size).to eq 1
+ end
+
+ context 'with since_id' do
+ context 'when requesting old posts' do
+ it 'returns conversations' do
+ get :index, params: { since_id: Mastodon::Snowflake.id_at(1.hour.ago, with_random: false) }
+ json = body_as_json
+ expect(json.size).to eq 2
+ end
+ end
+
+ context 'when requesting posts in the future' do
+ it 'returns no conversation' do
+ get :index, params: { since_id: Mastodon::Snowflake.id_at(1.hour.from_now, with_random: false) }
+ json = body_as_json
+ expect(json.size).to eq 0
+ end
+ end
end
end
end
diff --git a/spec/controllers/api/v1/statuses/histories_controller_spec.rb b/spec/controllers/api/v1/statuses/histories_controller_spec.rb
index 00677f1d2c37c6..99384c8ed5e914 100644
--- a/spec/controllers/api/v1/statuses/histories_controller_spec.rb
+++ b/spec/controllers/api/v1/statuses/histories_controller_spec.rb
@@ -23,6 +23,7 @@
it 'returns http success' do
expect(response).to have_http_status(200)
+ expect(body_as_json.size).to_not be 0
end
end
end
diff --git a/spec/controllers/api/v1/streaming_controller_spec.rb b/spec/controllers/api/v1/streaming_controller_spec.rb
index 4ab409a54103bc..96062ead7067cf 100644
--- a/spec/controllers/api/v1/streaming_controller_spec.rb
+++ b/spec/controllers/api/v1/streaming_controller_spec.rb
@@ -5,7 +5,7 @@
describe Api::V1::StreamingController do
around(:each) do |example|
before = Rails.configuration.x.streaming_api_base_url
- Rails.configuration.x.streaming_api_base_url = Rails.configuration.x.web_domain
+ Rails.configuration.x.streaming_api_base_url = "wss://#{Rails.configuration.x.web_domain}"
example.run
Rails.configuration.x.streaming_api_base_url = before
end
diff --git a/spec/controllers/api/v1/timelines/tag_controller_spec.rb b/spec/controllers/api/v1/timelines/tag_controller_spec.rb
index 718911083362de..1c60798fcf6fed 100644
--- a/spec/controllers/api/v1/timelines/tag_controller_spec.rb
+++ b/spec/controllers/api/v1/timelines/tag_controller_spec.rb
@@ -5,36 +5,66 @@
describe Api::V1::Timelines::TagController do
render_views
- let(:user) { Fabricate(:user) }
+ let(:user) { Fabricate(:user) }
+ let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read:statuses') }
before do
allow(controller).to receive(:doorkeeper_token) { token }
end
- context 'with a user context' do
- let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id) }
+ describe 'GET #show' do
+ subject do
+ get :show, params: { id: 'test' }
+ end
- describe 'GET #show' do
- before do
- PostStatusService.new.call(user.account, text: 'It is a #test')
+ before do
+ PostStatusService.new.call(user.account, text: 'It is a #test')
+ end
+
+ context 'when the instance allows public preview' do
+ context 'when the user is not authenticated' do
+ let(:token) { nil }
+
+ it 'returns http success', :aggregate_failures do
+ subject
+
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
- it 'returns http success' do
- get :show, params: { id: 'test' }
- expect(response).to have_http_status(200)
- expect(response.headers['Link'].links.size).to eq(2)
+ context 'when the user is authenticated' do
+ it 'returns http success', :aggregate_failures do
+ subject
+
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
end
- end
- context 'without a user context' do
- let(:token) { Fabricate(:accessible_access_token, resource_owner_id: nil) }
+ context 'when the instance does not allow public preview' do
+ before do
+ Form::AdminSettings.new(timeline_preview: false).save
+ end
+
+ context 'when the user is not authenticated' do
+ let(:token) { nil }
+
+ it 'returns http unauthorized' do
+ subject
+
+ expect(response).to have_http_status(401)
+ end
+ end
+
+ context 'when the user is authenticated' do
+ it 'returns http success', :aggregate_failures do
+ subject
- describe 'GET #show' do
- it 'returns http success' do
- get :show, params: { id: 'test' }
- expect(response).to have_http_status(200)
- expect(response.headers['Link']).to be_nil
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
end
end
diff --git a/spec/controllers/api/v2/admin/accounts_controller_spec.rb b/spec/controllers/api/v2/admin/accounts_controller_spec.rb
index 2508a9e0557cec..ebec4a13e8227d 100644
--- a/spec/controllers/api/v2/admin/accounts_controller_spec.rb
+++ b/spec/controllers/api/v2/admin/accounts_controller_spec.rb
@@ -69,5 +69,13 @@
end
end
end
+
+ context 'with limit param' do
+ let(:params) { { limit: 1 } }
+
+ it 'sets the correct pagination headers' do
+ expect(response.headers['Link'].find_link(%w(rel next)).href).to eq api_v2_admin_accounts_url(limit: 1, max_id: admin_account.id)
+ end
+ end
end
end
diff --git a/spec/controllers/auth/sessions_controller_spec.rb b/spec/controllers/auth/sessions_controller_spec.rb
index d3db7aa1ab2da9..0941e2cb3d3515 100644
--- a/spec/controllers/auth/sessions_controller_spec.rb
+++ b/spec/controllers/auth/sessions_controller_spec.rb
@@ -262,7 +262,27 @@
end
end
- context 'using a valid OTP' do
+ context 'when repeatedly using an invalid TOTP code before using a valid code' do
+ before do
+ stub_const('Auth::SessionsController::MAX_2FA_ATTEMPTS_PER_HOUR', 2)
+ end
+
+ it 'does not log the user in' do
+ # Travel to the beginning of an hour to avoid crossing rate-limit buckets
+ travel_to '2023-12-20T10:00:00Z'
+
+ Auth::SessionsController::MAX_2FA_ATTEMPTS_PER_HOUR.times do
+ post :create, params: { user: { otp_attempt: '1234' } }, session: { attempt_user_id: user.id, attempt_user_updated_at: user.updated_at.to_s }
+ expect(controller.current_user).to be_nil
+ end
+
+ post :create, params: { user: { otp_attempt: user.current_otp } }, session: { attempt_user_id: user.id, attempt_user_updated_at: user.updated_at.to_s }
+ expect(controller.current_user).to be_nil
+ expect(flash[:alert]).to match I18n.t('users.rate_limited')
+ end
+ end
+
+ context 'when using a valid OTP' do
before do
post :create, params: { user: { otp_attempt: user.current_otp } }, session: { attempt_user_id: user.id, attempt_user_updated_at: user.updated_at.to_s }
end
diff --git a/spec/controllers/concerns/cache_concern_spec.rb b/spec/controllers/concerns/cache_concern_spec.rb
index a34d7d72676964..21daa19921007e 100644
--- a/spec/controllers/concerns/cache_concern_spec.rb
+++ b/spec/controllers/concerns/cache_concern_spec.rb
@@ -13,12 +13,17 @@ def empty_array
def empty_relation
render plain: cache_collection(Status.none, Status).size
end
+
+ def account_statuses_favourites
+ render plain: cache_collection(Status.where(account_id: params[:id]), Status).map(&:favourites_count)
+ end
end
before do
routes.draw do
- get 'empty_array' => 'anonymous#empty_array'
- post 'empty_relation' => 'anonymous#empty_relation'
+ get 'empty_array' => 'anonymous#empty_array'
+ get 'empty_relation' => 'anonymous#empty_relation'
+ get 'account_statuses_favourites' => 'anonymous#account_statuses_favourites'
end
end
@@ -36,5 +41,20 @@ def empty_relation
expect(response.body).to eq '0'
end
end
+
+ context 'when given a collection of statuses' do
+ let!(:account) { Fabricate(:account) }
+ let!(:status) { Fabricate(:status, account: account) }
+
+ it 'correctly updates with new interactions' do
+ get :account_statuses_favourites, params: { id: account.id }
+ expect(response.body).to eq '[0]'
+
+ FavouriteService.new.call(account, status)
+
+ get :account_statuses_favourites, params: { id: account.id }
+ expect(response.body).to eq '[1]'
+ end
+ end
end
end
diff --git a/spec/controllers/concerns/signature_verification_spec.rb b/spec/controllers/concerns/signature_verification_spec.rb
deleted file mode 100644
index 13655f31334829..00000000000000
--- a/spec/controllers/concerns/signature_verification_spec.rb
+++ /dev/null
@@ -1,303 +0,0 @@
-# frozen_string_literal: true
-
-require 'rails_helper'
-
-describe ApplicationController, type: :controller do
- class WrappedActor
- attr_reader :wrapped_account
-
- def initialize(wrapped_account)
- @wrapped_account = wrapped_account
- end
-
- delegate :uri, :keypair, to: :wrapped_account
- end
-
- controller do
- include SignatureVerification
-
- before_action :require_actor_signature!, only: [:signature_required]
-
- def success
- head 200
- end
-
- def alternative_success
- head 200
- end
-
- def signature_required
- head 200
- end
- end
-
- before do
- routes.draw do
- match via: [:get, :post], 'success' => 'anonymous#success'
- match via: [:get, :post], 'signature_required' => 'anonymous#signature_required'
- end
- end
-
- context 'without signature header' do
- before do
- get :success
- end
-
- describe '#signed_request?' do
- it 'returns false' do
- expect(controller.signed_request?).to be false
- end
- end
-
- describe '#signed_request_account' do
- it 'returns nil' do
- expect(controller.signed_request_account).to be_nil
- end
- end
- end
-
- context 'with signature header' do
- let!(:author) { Fabricate(:account, domain: 'example.com', uri: 'https://example.com/actor') }
-
- context 'without body' do
- before do
- get :success
-
- fake_request = Request.new(:get, request.url)
- fake_request.on_behalf_of(author)
-
- request.headers.merge!(fake_request.headers)
- end
-
- describe '#signed_request?' do
- it 'returns true' do
- expect(controller.signed_request?).to be true
- end
- end
-
- describe '#signed_request_account' do
- it 'returns an account' do
- expect(controller.signed_request_account).to eq author
- end
-
- it 'returns nil when path does not match' do
- request.path = '/alternative-path'
- expect(controller.signed_request_account).to be_nil
- end
-
- it 'returns nil when method does not match' do
- post :success
- expect(controller.signed_request_account).to be_nil
- end
- end
- end
-
- context 'with a valid actor that is not an Account' do
- let(:actor) { WrappedActor.new(author) }
-
- before do
- get :success
-
- fake_request = Request.new(:get, request.url)
- fake_request.on_behalf_of(author)
-
- request.headers.merge!(fake_request.headers)
-
- allow(ActivityPub::TagManager.instance).to receive(:uri_to_actor).with(anything) do
- actor
- end
- end
-
- describe '#signed_request?' do
- it 'returns true' do
- expect(controller.signed_request?).to be true
- end
- end
-
- describe '#signed_request_account' do
- it 'returns nil' do
- expect(controller.signed_request_account).to be_nil
- end
- end
-
- describe '#signed_request_actor' do
- it 'returns the expected actor' do
- expect(controller.signed_request_actor).to eq actor
- end
- end
- end
-
- context 'with request with unparseable Date header' do
- before do
- get :success
-
- fake_request = Request.new(:get, request.url)
- fake_request.add_headers({ 'Date' => 'wrong date' })
- fake_request.on_behalf_of(author)
-
- request.headers.merge!(fake_request.headers)
- end
-
- describe '#signed_request?' do
- it 'returns true' do
- expect(controller.signed_request?).to be true
- end
- end
-
- describe '#signed_request_account' do
- it 'returns nil' do
- expect(controller.signed_request_account).to be_nil
- end
- end
-
- describe '#signature_verification_failure_reason' do
- it 'contains an error description' do
- controller.signed_request_account
- expect(controller.signature_verification_failure_reason[:error]).to eq 'Invalid Date header: not RFC 2616 compliant date: "wrong date"'
- end
- end
- end
-
- context 'with request older than a day' do
- before do
- get :success
-
- fake_request = Request.new(:get, request.url)
- fake_request.add_headers({ 'Date' => 2.days.ago.utc.httpdate })
- fake_request.on_behalf_of(author)
-
- request.headers.merge!(fake_request.headers)
- end
-
- describe '#signed_request?' do
- it 'returns true' do
- expect(controller.signed_request?).to be true
- end
- end
-
- describe '#signed_request_account' do
- it 'returns nil' do
- expect(controller.signed_request_account).to be_nil
- end
- end
-
- describe '#signature_verification_failure_reason' do
- it 'contains an error description' do
- controller.signed_request_account
- expect(controller.signature_verification_failure_reason[:error]).to eq 'Signed request date outside acceptable time window'
- end
- end
- end
-
- context 'with inaccessible key' do
- before do
- get :success
-
- author = Fabricate(:account, domain: 'localhost:5000', uri: 'http://localhost:5000/actor')
- fake_request = Request.new(:get, request.url)
- fake_request.on_behalf_of(author)
- author.destroy
-
- request.headers.merge!(fake_request.headers)
-
- stub_request(:get, 'http://localhost:5000/actor#main-key').to_raise(Mastodon::HostValidationError)
- end
-
- describe '#signed_request?' do
- it 'returns true' do
- expect(controller.signed_request?).to be true
- end
- end
-
- describe '#signed_request_account' do
- it 'returns nil' do
- expect(controller.signed_request_account).to be_nil
- end
- end
- end
-
- context 'with body' do
- before do
- allow(controller).to receive(:actor_refresh_key!).and_return(author)
- post :success, body: 'Hello world'
-
- fake_request = Request.new(:post, request.url, body: 'Hello world')
- fake_request.on_behalf_of(author)
-
- request.headers.merge!(fake_request.headers)
- end
-
- describe '#signed_request?' do
- it 'returns true' do
- expect(controller.signed_request?).to be true
- end
- end
-
- describe '#signed_request_account' do
- it 'returns an account' do
- expect(controller.signed_request_account).to eq author
- end
- end
-
- context 'when path does not match' do
- before do
- request.path = '/alternative-path'
- end
-
- describe '#signed_request_account' do
- it 'returns nil' do
- expect(controller.signed_request_account).to be_nil
- end
- end
-
- describe '#signature_verification_failure_reason' do
- it 'contains an error description' do
- controller.signed_request_account
- expect(controller.signature_verification_failure_reason[:error]).to include('using rsa-sha256 (RSASSA-PKCS1-v1_5 with SHA-256)')
- expect(controller.signature_verification_failure_reason[:signed_string]).to include("(request-target): post /alternative-path\n")
- end
- end
- end
-
- context 'when method does not match' do
- before do
- get :success
- end
-
- describe '#signed_request_account' do
- it 'returns nil' do
- expect(controller.signed_request_account).to be_nil
- end
- end
- end
-
- context 'when body has been tampered' do
- before do
- post :success, body: 'doo doo doo'
- end
-
- describe '#signed_request_account' do
- it 'returns nil when body has been tampered' do
- expect(controller.signed_request_account).to be_nil
- end
- end
- end
- end
- end
-
- context 'when a signature is required' do
- before do
- get :signature_required
- end
-
- context 'without signature header' do
- it 'returns HTTP 401' do
- expect(response).to have_http_status(401)
- end
-
- it 'returns an error' do
- expect(Oj.load(response.body)['error']).to eq 'Request not signed'
- end
- end
- end
-end
diff --git a/spec/controllers/relationships_controller_spec.rb b/spec/controllers/relationships_controller_spec.rb
index 2056a2ac294176..bcdcfa9051f3e9 100644
--- a/spec/controllers/relationships_controller_spec.rb
+++ b/spec/controllers/relationships_controller_spec.rb
@@ -55,7 +55,7 @@
end
context 'when select parameter is provided' do
- subject { patch :update, params: { form_account_batch: { account_ids: [poopfeast.id] }, block_domains: '' } }
+ subject { patch :update, params: { form_account_batch: { account_ids: [poopfeast.id] }, remove_domains_from_followers: '' } }
it 'soft-blocks followers from selected domains' do
poopfeast.follow!(user.account)
@@ -66,6 +66,15 @@
expect(poopfeast.following?(user.account)).to be false
end
+ it 'does not unfollow users from selected domains' do
+ user.account.follow!(poopfeast)
+
+ sign_in user, scope: :user
+ subject
+
+ expect(user.account.following?(poopfeast)).to be true
+ end
+
include_examples 'authenticate user'
include_examples 'redirects back to followers page'
end
diff --git a/spec/controllers/settings/two_factor_authentication/webauthn_credentials_controller_spec.rb b/spec/controllers/settings/two_factor_authentication/webauthn_credentials_controller_spec.rb
index fe53b4dfc26c17..269c4d685a3f6a 100644
--- a/spec/controllers/settings/two_factor_authentication/webauthn_credentials_controller_spec.rb
+++ b/spec/controllers/settings/two_factor_authentication/webauthn_credentials_controller_spec.rb
@@ -248,7 +248,7 @@ def add_webauthn_credential(user)
post :create, params: { credential: new_webauthn_credential, nickname: 'USB Key' }
- expect(response).to have_http_status(500)
+ expect(response).to have_http_status(422)
expect(flash[:error]).to be_present
end
end
@@ -268,7 +268,7 @@ def add_webauthn_credential(user)
post :create, params: { credential: new_webauthn_credential, nickname: nickname }
- expect(response).to have_http_status(500)
+ expect(response).to have_http_status(422)
expect(flash[:error]).to be_present
end
end
diff --git a/spec/controllers/well_known/webfinger_controller_spec.rb b/spec/controllers/well_known/webfinger_controller_spec.rb
index 8574d369d19d5e..0e7b34f471b0ef 100644
--- a/spec/controllers/well_known/webfinger_controller_spec.rb
+++ b/spec/controllers/well_known/webfinger_controller_spec.rb
@@ -4,6 +4,10 @@
render_views
describe 'GET #show' do
+ subject(:perform_show!) do
+ get :show, params: { resource: resource }, format: :json
+ end
+
let(:alternate_domains) { [] }
let(:alice) { Fabricate(:account, username: 'alice') }
let(:resource) { nil }
@@ -15,10 +19,6 @@
Rails.configuration.x.alternate_domains = tmp
end
- subject do
- get :show, params: { resource: resource }, format: :json
- end
-
shared_examples 'a successful response' do
it 'returns http success' do
expect(response).to have_http_status(200)
@@ -43,7 +43,7 @@
let(:resource) { alice.to_webfinger_s }
before do
- subject
+ perform_show!
end
it_behaves_like 'a successful response'
@@ -54,7 +54,7 @@
before do
alice.suspend!
- subject
+ perform_show!
end
it_behaves_like 'a successful response'
@@ -66,7 +66,7 @@
before do
alice.suspend!
alice.deletion_request.destroy
- subject
+ perform_show!
end
it 'returns http gone' do
@@ -78,7 +78,7 @@
let(:resource) { 'acct:not@existing.com' }
before do
- subject
+ perform_show!
end
it 'returns http not found' do
@@ -90,7 +90,7 @@
let(:alternate_domains) { ['foo.org'] }
before do
- subject
+ perform_show!
end
context 'when an account exists' do
@@ -114,11 +114,39 @@
end
end
+ context 'when the old name scheme is used to query the instance actor' do
+ let(:resource) do
+ "#{Rails.configuration.x.local_domain}@#{Rails.configuration.x.local_domain}"
+ end
+
+ before do
+ perform_show!
+ end
+
+ it 'returns http success' do
+ expect(response).to have_http_status(200)
+ end
+
+ it 'does not set a Vary header' do
+ expect(response.headers['Vary']).to be_nil
+ end
+
+ it 'returns application/jrd+json' do
+ expect(response.media_type).to eq 'application/jrd+json'
+ end
+
+ it 'returns links for the internal account' do
+ json = body_as_json
+ expect(json[:subject]).to eq 'acct:mastodon.internal@cb6e6126.ngrok.io'
+ expect(json[:aliases]).to eq ['https://cb6e6126.ngrok.io/actor']
+ end
+ end
+
context 'with no resource parameter' do
let(:resource) { nil }
before do
- subject
+ perform_show!
end
it 'returns http bad request' do
@@ -130,7 +158,7 @@
let(:resource) { 'df/:dfkj' }
before do
- subject
+ perform_show!
end
it 'returns http bad request' do
diff --git a/spec/fabricators/account_stat_fabricator.rb b/spec/fabricators/account_stat_fabricator.rb
index 2b06b4790920de..20272fb22f202d 100644
--- a/spec/fabricators/account_stat_fabricator.rb
+++ b/spec/fabricators/account_stat_fabricator.rb
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
Fabricator(:account_stat) do
- account nil
- statuses_count ""
- following_count ""
- followers_count ""
+ account { Fabricate.build(:account) }
+ statuses_count '123'
+ following_count '456'
+ followers_count '789'
end
diff --git a/spec/fixtures/files/attachment-jpg.123456_abcd b/spec/fixtures/files/attachment-jpg.123456_abcd
new file mode 100644
index 00000000000000..f1d40539ac0484
Binary files /dev/null and b/spec/fixtures/files/attachment-jpg.123456_abcd differ
diff --git a/spec/fixtures/files/boop.mp3 b/spec/fixtures/files/boop.mp3
new file mode 100644
index 00000000000000..ba106a3a32d414
Binary files /dev/null and b/spec/fixtures/files/boop.mp3 differ
diff --git a/spec/helpers/jsonld_helper_spec.rb b/spec/helpers/jsonld_helper_spec.rb
index 744a14f26096f7..54355b8482647a 100644
--- a/spec/helpers/jsonld_helper_spec.rb
+++ b/spec/helpers/jsonld_helper_spec.rb
@@ -56,15 +56,15 @@
describe '#fetch_resource' do
context 'when the second argument is false' do
it 'returns resource even if the retrieved ID and the given URI does not match' do
- stub_request(:get, 'https://bob.test/').to_return body: '{"id": "https://alice.test/"}'
- stub_request(:get, 'https://alice.test/').to_return body: '{"id": "https://alice.test/"}'
+ stub_request(:get, 'https://bob.test/').to_return(body: '{"id": "https://alice.test/"}', headers: { 'Content-Type': 'application/activity+json' })
+ stub_request(:get, 'https://alice.test/').to_return(body: '{"id": "https://alice.test/"}', headers: { 'Content-Type': 'application/activity+json' })
expect(fetch_resource('https://bob.test/', false)).to eq({ 'id' => 'https://alice.test/' })
end
it 'returns nil if the object identified by the given URI and the object identified by the retrieved ID does not match' do
- stub_request(:get, 'https://mallory.test/').to_return body: '{"id": "https://marvin.test/"}'
- stub_request(:get, 'https://marvin.test/').to_return body: '{"id": "https://alice.test/"}'
+ stub_request(:get, 'https://mallory.test/').to_return(body: '{"id": "https://marvin.test/"}', headers: { 'Content-Type': 'application/activity+json' })
+ stub_request(:get, 'https://marvin.test/').to_return(body: '{"id": "https://alice.test/"}', headers: { 'Content-Type': 'application/activity+json' })
expect(fetch_resource('https://mallory.test/', false)).to eq nil
end
@@ -72,7 +72,7 @@
context 'when the second argument is true' do
it 'returns nil if the retrieved ID and the given URI does not match' do
- stub_request(:get, 'https://mallory.test/').to_return body: '{"id": "https://alice.test/"}'
+ stub_request(:get, 'https://mallory.test/').to_return(body: '{"id": "https://alice.test/"}', headers: { 'Content-Type': 'application/activity+json' })
expect(fetch_resource('https://mallory.test/', true)).to eq nil
end
end
@@ -80,12 +80,12 @@
describe '#fetch_resource_without_id_validation' do
it 'returns nil if the status code is not 200' do
- stub_request(:get, 'https://host.test/').to_return status: 400, body: '{}'
+ stub_request(:get, 'https://host.test/').to_return(status: 400, body: '{}', headers: { 'Content-Type': 'application/activity+json' })
expect(fetch_resource_without_id_validation('https://host.test/')).to eq nil
end
it 'returns hash' do
- stub_request(:get, 'https://host.test/').to_return status: 200, body: '{}'
+ stub_request(:get, 'https://host.test/').to_return(status: 200, body: '{}', headers: { 'Content-Type': 'application/activity+json' })
expect(fetch_resource_without_id_validation('https://host.test/')).to eq({})
end
end
diff --git a/spec/lib/account_reach_finder_spec.rb b/spec/lib/account_reach_finder_spec.rb
new file mode 100644
index 00000000000000..1da95ba6b3a66e
--- /dev/null
+++ b/spec/lib/account_reach_finder_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+RSpec.describe AccountReachFinder do
+ let(:account) { Fabricate(:account) }
+
+ let(:follower1) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-1') }
+ let(:follower2) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-2') }
+ let(:follower3) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://foo.bar/users/a/inbox', shared_inbox_url: 'https://foo.bar/inbox') }
+
+ let(:mentioned1) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://foo.bar/users/b/inbox', shared_inbox_url: 'https://foo.bar/inbox') }
+ let(:mentioned2) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-3') }
+ let(:mentioned3) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-4') }
+
+ let(:unrelated_account) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/unrelated-inbox') }
+
+ before do
+ follower1.follow!(account)
+ follower2.follow!(account)
+ follower3.follow!(account)
+
+ Fabricate(:status, account: account).tap do |status|
+ status.mentions << Mention.new(account: follower1)
+ status.mentions << Mention.new(account: mentioned1)
+ end
+
+ Fabricate(:status, account: account)
+
+ Fabricate(:status, account: account).tap do |status|
+ status.mentions << Mention.new(account: mentioned2)
+ status.mentions << Mention.new(account: mentioned3)
+ end
+
+ Fabricate(:status).tap do |status|
+ status.mentions << Mention.new(account: unrelated_account)
+ end
+ end
+
+ describe '#inboxes' do
+ it 'includes the preferred inbox URL of followers' do
+ expect(described_class.new(account).inboxes).to include(*[follower1, follower2, follower3].map(&:preferred_inbox_url))
+ end
+
+ it 'includes the preferred inbox URL of recently-mentioned accounts' do
+ expect(described_class.new(account).inboxes).to include(*[mentioned1, mentioned2, mentioned3].map(&:preferred_inbox_url))
+ end
+
+ it 'does not include the inbox of unrelated users' do
+ expect(described_class.new(account).inboxes).to_not include(unrelated_account.preferred_inbox_url)
+ end
+ end
+end
diff --git a/spec/lib/activitypub/activity/announce_spec.rb b/spec/lib/activitypub/activity/announce_spec.rb
index e9cd6c68c1d1f8..2ca70712a8e9cf 100644
--- a/spec/lib/activitypub/activity/announce_spec.rb
+++ b/spec/lib/activitypub/activity/announce_spec.rb
@@ -33,7 +33,7 @@
context 'when sender is followed by a local account' do
before do
Fabricate(:account).follow!(sender)
- stub_request(:get, 'https://example.com/actor/hello-world').to_return(body: Oj.dump(unknown_object_json))
+ stub_request(:get, 'https://example.com/actor/hello-world').to_return(body: Oj.dump(unknown_object_json), headers: { 'Content-Type': 'application/activity+json' })
subject.perform
end
@@ -118,7 +118,7 @@
subject { described_class.new(json, sender, relayed_through_actor: relay_account) }
before do
- stub_request(:get, 'https://example.com/actor/hello-world').to_return(body: Oj.dump(unknown_object_json))
+ stub_request(:get, 'https://example.com/actor/hello-world').to_return(body: Oj.dump(unknown_object_json), headers: { 'Content-Type': 'application/activity+json' })
end
context 'and the relay is enabled' do
diff --git a/spec/lib/activitypub/activity/create_spec.rb b/spec/lib/activitypub/activity/create_spec.rb
index 1a25395fad390f..378ba0cd1b5c87 100644
--- a/spec/lib/activitypub/activity/create_spec.rb
+++ b/spec/lib/activitypub/activity/create_spec.rb
@@ -29,29 +29,67 @@
subject.perform
end
- context 'object has been edited' do
+ context 'when object publication date is below ISO8601 range' do
let(:object_json) do
{
id: [ActivityPub::TagManager.instance.uri_for(sender), '#bar'].join,
type: 'Note',
content: 'Lorem ipsum',
- published: '2022-01-22T15:00:00Z',
- updated: '2022-01-22T16:00:00Z',
+ published: '-0977-11-03T08:31:22Z',
}
end
- it 'creates status' do
+ it 'creates status with a valid creation date', :aggregate_failures do
+ status = sender.statuses.first
+
+ expect(status).to_not be_nil
+ expect(status.text).to eq 'Lorem ipsum'
+
+ expect(status.created_at).to be_within(30).of(Time.now.utc)
+ end
+ end
+
+ context 'when object publication date is above ISO8601 range' do
+ let(:object_json) do
+ {
+ id: [ActivityPub::TagManager.instance.uri_for(sender), '#bar'].join,
+ type: 'Note',
+ content: 'Lorem ipsum',
+ published: '10000-11-03T08:31:22Z',
+ }
+ end
+
+ it 'creates status with a valid creation date', :aggregate_failures do
status = sender.statuses.first
expect(status).to_not be_nil
expect(status.text).to eq 'Lorem ipsum'
+
+ expect(status.created_at).to be_within(30).of(Time.now.utc)
end
+ end
- it 'marks status as edited' do
+ context 'when object has been edited' do
+ let(:object_json) do
+ {
+ id: [ActivityPub::TagManager.instance.uri_for(sender), '#bar'].join,
+ type: 'Note',
+ content: 'Lorem ipsum',
+ published: '2022-01-22T15:00:00Z',
+ updated: '2022-01-22T16:00:00Z',
+ }
+ end
+
+ it 'creates status with appropriate creation and edition dates', :aggregate_failures do
status = sender.statuses.first
expect(status).to_not be_nil
- expect(status.edited?).to eq true
+ expect(status.text).to eq 'Lorem ipsum'
+
+ expect(status.created_at).to eq '2022-01-22T15:00:00Z'.to_datetime
+
+ expect(status.edited?).to be true
+ expect(status.edited_at).to eq '2022-01-22T16:00:00Z'.to_datetime
end
end
diff --git a/spec/lib/activitypub/activity/flag_spec.rb b/spec/lib/activitypub/activity/flag_spec.rb
index 2f2d13876760df..6d7a8a7ec2e8dd 100644
--- a/spec/lib/activitypub/activity/flag_spec.rb
+++ b/spec/lib/activitypub/activity/flag_spec.rb
@@ -37,6 +37,37 @@
end
end
+ context 'when the report comment is excessively long' do
+ subject do
+ described_class.new({
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: flag_id,
+ type: 'Flag',
+ content: long_comment,
+ actor: ActivityPub::TagManager.instance.uri_for(sender),
+ object: [
+ ActivityPub::TagManager.instance.uri_for(flagged),
+ ActivityPub::TagManager.instance.uri_for(status),
+ ],
+ }.with_indifferent_access, sender)
+ end
+
+ let(:long_comment) { Faker::Lorem.characters(number: 6000) }
+
+ before do
+ subject.perform
+ end
+
+ it 'creates a report but with a truncated comment' do
+ report = Report.find_by(account: sender, target_account: flagged)
+
+ expect(report).to_not be_nil
+ expect(report.comment.length).to eq 5000
+ expect(report.comment).to eq long_comment[0...5000]
+ expect(report.status_ids).to eq [status.id]
+ end
+ end
+
context 'when the reported status is private and should not be visible to the remote server' do
let(:status) { Fabricate(:status, account: flagged, uri: 'foobar', visibility: :private) }
diff --git a/spec/lib/activitypub/linked_data_signature_spec.rb b/spec/lib/activitypub/linked_data_signature_spec.rb
index d55a7c7fa85e94..8b7e18c8867095 100644
--- a/spec/lib/activitypub/linked_data_signature_spec.rb
+++ b/spec/lib/activitypub/linked_data_signature_spec.rb
@@ -36,6 +36,40 @@
end
end
+ context 'when local account record is missing a public key' do
+ let(:raw_signature) do
+ {
+ 'creator' => 'http://example.com/alice',
+ 'created' => '2017-09-23T20:21:34Z',
+ }
+ end
+
+ let(:signature) { raw_signature.merge('type' => 'RsaSignature2017', 'signatureValue' => sign(sender, raw_signature, raw_json)) }
+
+ let(:service_stub) { instance_double(ActivityPub::FetchRemoteKeyService) }
+
+ before do
+ # Ensure signature is computed with the old key
+ signature
+
+ # Unset key
+ old_key = sender.public_key
+ sender.update!(private_key: '', public_key: '')
+
+ allow(ActivityPub::FetchRemoteKeyService).to receive(:new).and_return(service_stub)
+
+ allow(service_stub).to receive(:call).with('http://example.com/alice') do
+ sender.update!(public_key: old_key)
+ sender
+ end
+ end
+
+ it 'fetches key and returns creator' do
+ expect(subject.verify_actor!).to eq sender
+ expect(service_stub).to have_received(:call).with('http://example.com/alice').once
+ end
+ end
+
context 'when signature is missing' do
let(:signature) { nil }
diff --git a/spec/lib/activitypub/tag_manager_spec.rb b/spec/lib/activitypub/tag_manager_spec.rb
index 606a1de2e562d7..2afdeba7da66f1 100644
--- a/spec/lib/activitypub/tag_manager_spec.rb
+++ b/spec/lib/activitypub/tag_manager_spec.rb
@@ -110,6 +110,14 @@
expect(subject.cc(status)).to include(subject.uri_for(foo))
expect(subject.cc(status)).to_not include(subject.uri_for(alice))
end
+
+ it 'returns poster of reblogged post, if reblog' do
+ bob = Fabricate(:account, username: 'bob', domain: 'example.com', inbox_url: 'http://example.com/bob')
+ alice = Fabricate(:account, username: 'alice')
+ status = Fabricate(:status, visibility: :public, account: bob)
+ reblog = Fabricate(:status, visibility: :public, account: alice, reblog: status)
+ expect(subject.cc(reblog)).to include(subject.uri_for(bob))
+ end
end
describe '#local_uri?' do
diff --git a/spec/lib/plain_text_formatter_spec.rb b/spec/lib/plain_text_formatter_spec.rb
index c3d0ee630121ac..81e4ae286e698b 100644
--- a/spec/lib/plain_text_formatter_spec.rb
+++ b/spec/lib/plain_text_formatter_spec.rb
@@ -4,7 +4,7 @@
describe '#to_s' do
subject { described_class.new(status.text, status.local?).to_s }
- context 'given a post with local status' do
+ context 'when status is local' do
let(:status) { Fabricate(:status, text: '<p>a text by a nerd who uses an HTML tag in text</p>', uri: nil) }
it 'returns the raw text' do
@@ -12,12 +12,63 @@
end
end
- context 'given a post with remote status' do
+ context 'when status is remote' do
let(:remote_account) { Fabricate(:account, domain: 'remote.test', username: 'bob', url: 'https://remote.test/') }
- let(:status) { Fabricate(:status, account: remote_account, text: '<p>Hello</p>') }
- it 'returns tag-stripped text' do
- is_expected.to eq 'Hello'
+ context 'when text contains inline HTML tags' do
+ let(:status) { Fabricate(:status, account: remote_account, text: '<strong>Lorem</strong> <em>ipsum</em>') }
+
+ it 'strips the tags' do
+ expect(subject).to eq 'Lorem ipsum'
+ end
+ end
+
+ context 'when text contains <p> tags' do
+ let(:status) { Fabricate(:status, account: remote_account, text: '<p>Lorem</p><p>ipsum</p>') }
+
+ it 'inserts a newline' do
+ expect(subject).to eq "Lorem\nipsum"
+ end
+ end
+
+ context 'when text contains a single <br> tag' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem<br>ipsum') }
+
+ it 'inserts a newline' do
+ expect(subject).to eq "Lorem\nipsum"
+ end
+ end
+
+ context 'when text contains consecutive <br> tags' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem<br><br>ipsum') }
+
+ it 'inserts a single newline' do
+ expect(subject).to eq "Lorem\nipsum"
+ end
+ end
+
+ context 'when text contains HTML entities' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem &amp; ipsum &#x2764;') }
+
+ it 'unescapes the entity' do
+ expect(subject).to eq 'Lorem & ipsum ❤'
+ end
+ end
+
+ context 'when text contains <script> tags' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem <script> alert("Booh!") </script>ipsum') }
+
+ it 'strips the tag and its contents' do
+ expect(subject).to eq 'Lorem ipsum'
+ end
+ end
+
+ context 'when text contains an HTML comment' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem <!-- Booh! -->ipsum') }
+
+ it 'strips the comment' do
+ expect(subject).to eq 'Lorem ipsum'
+ end
end
end
end
diff --git a/spec/lib/vacuum/access_tokens_vacuum_spec.rb b/spec/lib/vacuum/access_tokens_vacuum_spec.rb
index 0244c34492688a..39c8cdb3973609 100644
--- a/spec/lib/vacuum/access_tokens_vacuum_spec.rb
+++ b/spec/lib/vacuum/access_tokens_vacuum_spec.rb
@@ -5,9 +5,11 @@
describe '#perform' do
let!(:revoked_access_token) { Fabricate(:access_token, revoked_at: 1.minute.ago) }
+ let!(:expired_access_token) { Fabricate(:access_token, expires_in: 59.minutes.to_i, created_at: 1.hour.ago) }
let!(:active_access_token) { Fabricate(:access_token) }
let!(:revoked_access_grant) { Fabricate(:access_grant, revoked_at: 1.minute.ago) }
+ let!(:expired_access_grant) { Fabricate(:access_grant, expires_in: 59.minutes.to_i, created_at: 1.hour.ago) }
let!(:active_access_grant) { Fabricate(:access_grant) }
before do
@@ -18,10 +20,18 @@
expect { revoked_access_token.reload }.to raise_error ActiveRecord::RecordNotFound
end
+ it 'deletes expired access tokens' do
+ expect { expired_access_token.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+
it 'deletes revoked access grants' do
expect { revoked_access_grant.reload }.to raise_error ActiveRecord::RecordNotFound
end
+ it 'deletes expired access grants' do
+ expect { expired_access_grant.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+
it 'does not delete active access tokens' do
expect { active_access_token.reload }.to_not raise_error
end
diff --git a/spec/models/account_spec.rb b/spec/models/account_spec.rb
index 6cd769dc84bf1f..35fc7b721c6b69 100644
--- a/spec/models/account_spec.rb
+++ b/spec/models/account_spec.rb
@@ -695,7 +695,7 @@
expect(subject.match('Check this out https://medium.com/@alice/some-article#.abcdef123')).to be_nil
end
- xit 'does not match URL querystring' do
+ it 'does not match URL query string' do
expect(subject.match('https://example.com/?x=@alice')).to be_nil
end
end
diff --git a/spec/models/form/account_batch_spec.rb b/spec/models/form/account_batch_spec.rb
new file mode 100644
index 00000000000000..fd8e90901065ec
--- /dev/null
+++ b/spec/models/form/account_batch_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+RSpec.describe Form::AccountBatch do
+ let(:account_batch) { described_class.new }
+
+ describe '#save' do
+ subject { account_batch.save }
+
+ let(:account) { Fabricate(:user, role: UserRole.find_by(name: 'Admin')).account }
+ let(:account_ids) { [] }
+ let(:query) { Account.none }
+
+ before do
+ account_batch.assign_attributes(
+ action: action,
+ current_account: account,
+ account_ids: account_ids,
+ query: query,
+ select_all_matching: select_all_matching
+ )
+ end
+
+ context 'when action is "suspend"' do
+ let(:action) { 'suspend' }
+
+ let(:target_account) { Fabricate(:account) }
+ let(:target_account2) { Fabricate(:account) }
+
+ before do
+ Fabricate(:report, target_account: target_account)
+ Fabricate(:report, target_account: target_account2)
+ end
+
+ context 'when accounts are passed as account_ids' do
+ let(:select_all_matching) { '0' }
+ let(:account_ids) { [target_account.id, target_account2.id] }
+
+ it 'suspends the expected users' do
+ expect { subject }.to change { [target_account.reload.suspended?, target_account2.reload.suspended?] }.from([false, false]).to([true, true])
+ end
+
+ it 'closes open reports targeting the suspended users' do
+ expect { subject }.to change { Report.unresolved.where(target_account: [target_account, target_account2]).count }.from(2).to(0)
+ end
+ end
+
+ context 'when accounts are passed as a query' do
+ let(:select_all_matching) { '1' }
+ let(:query) { Account.where(id: [target_account.id, target_account2.id]) }
+
+ it 'suspends the expected users' do
+ expect { subject }.to change { [target_account.reload.suspended?, target_account2.reload.suspended?] }.from([false, false]).to([true, true])
+ end
+
+ it 'closes open reports targeting the suspended users' do
+ expect { subject }.to change { Report.unresolved.where(target_account: [target_account, target_account2]).count }.from(2).to(0)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/identity_spec.rb b/spec/models/identity_spec.rb
index 689c9b797f4f63..081c254d8200f3 100644
--- a/spec/models/identity_spec.rb
+++ b/spec/models/identity_spec.rb
@@ -1,16 +1,16 @@
require 'rails_helper'
RSpec.describe Identity, type: :model do
- describe '.find_for_oauth' do
+ describe '.find_for_omniauth' do
let(:auth) { Fabricate(:identity, user: Fabricate(:user)) }
it 'calls .find_or_create_by' do
expect(described_class).to receive(:find_or_create_by).with(uid: auth.uid, provider: auth.provider)
- described_class.find_for_oauth(auth)
+ described_class.find_for_omniauth(auth)
end
it 'returns an instance of Identity' do
- expect(described_class.find_for_oauth(auth)).to be_instance_of Identity
+ expect(described_class.find_for_omniauth(auth)).to be_instance_of Identity
end
end
end
diff --git a/spec/models/media_attachment_spec.rb b/spec/models/media_attachment_spec.rb
index 29fd313aec2e49..c3283ccb04f14d 100644
--- a/spec/models/media_attachment_spec.rb
+++ b/spec/models/media_attachment_spec.rb
@@ -150,6 +150,26 @@
end
end
+ describe 'mp3 with large cover art' do
+ let(:media) { described_class.create(account: Fabricate(:account), file: attachment_fixture('boop.mp3')) }
+
+ it 'detects it as an audio file' do
+ expect(media.type).to eq 'audio'
+ end
+
+ it 'sets meta for the duration' do
+ expect(media.file.meta['original']['duration']).to be_within(0.05).of(0.235102)
+ end
+
+ it 'extracts thumbnail' do
+ expect(media.thumbnail.present?).to be true
+ end
+
+ it 'gives the file a random name' do
+ expect(media.file_file_name).to_not eq 'boop.mp3'
+ end
+ end
+
describe 'jpeg' do
let(:media) { MediaAttachment.create(account: Fabricate(:account), file: attachment_fixture('attachment.jpg')) }
diff --git a/spec/models/relationship_filter_spec.rb b/spec/models/relationship_filter_spec.rb
index 7c0f37a06f299e..fccd42aaad0622 100644
--- a/spec/models/relationship_filter_spec.rb
+++ b/spec/models/relationship_filter_spec.rb
@@ -6,32 +6,60 @@
let(:account) { Fabricate(:account) }
describe '#results' do
- context 'when default params are used' do
- let(:subject) do
- RelationshipFilter.new(account, 'order' => 'active').results
- end
+ let(:account_of_7_months) { Fabricate(:account_stat, statuses_count: 1, last_status_at: 7.months.ago).account }
+ let(:account_of_1_day) { Fabricate(:account_stat, statuses_count: 1, last_status_at: 1.day.ago).account }
+ let(:account_of_3_days) { Fabricate(:account_stat, statuses_count: 1, last_status_at: 3.days.ago).account }
+ let(:silent_account) { Fabricate(:account_stat, statuses_count: 0, last_status_at: nil).account }
+
+ before do
+ account.follow!(account_of_7_months)
+ account.follow!(account_of_1_day)
+ account.follow!(account_of_3_days)
+ account.follow!(silent_account)
+ end
- before do
- add_following_account_with(last_status_at: 7.days.ago)
- add_following_account_with(last_status_at: 1.day.ago)
- add_following_account_with(last_status_at: 3.days.ago)
+ context 'when ordering by last activity' do
+ context 'when not filtering' do
+ subject do
+ described_class.new(account, 'order' => 'active').results
+ end
+
+ it 'returns followings ordered by last activity' do
+ expect(subject).to eq [account_of_1_day, account_of_3_days, account_of_7_months, silent_account]
+ end
end
- it 'returns followings ordered by last activity' do
- expected_result = account.following.eager_load(:account_stat).reorder(nil).by_recent_status
+ context 'when filtering for dormant accounts' do
+ subject do
+ described_class.new(account, 'order' => 'active', 'activity' => 'dormant').results
+ end
- expect(subject).to eq expected_result
+ it 'returns dormant followings ordered by last activity' do
+ expect(subject).to eq [account_of_7_months, silent_account]
+ end
end
end
- end
- def add_following_account_with(last_status_at:)
- following_account = Fabricate(:account)
- Fabricate(:account_stat, account: following_account,
- last_status_at: last_status_at,
- statuses_count: 1,
- following_count: 0,
- followers_count: 0)
- Fabricate(:follow, account: account, target_account: following_account).account
+ context 'when ordering by account creation' do
+ context 'when not filtering' do
+ subject do
+ described_class.new(account, 'order' => 'recent').results
+ end
+
+ it 'returns followings ordered by last account creation' do
+ expect(subject).to eq [silent_account, account_of_3_days, account_of_1_day, account_of_7_months]
+ end
+ end
+
+ context 'when filtering for dormant accounts' do
+ subject do
+ described_class.new(account, 'order' => 'recent', 'activity' => 'dormant').results
+ end
+
+ it 'returns dormant followings ordered by last activity' do
+ expect(subject).to eq [silent_account, account_of_7_months]
+ end
+ end
+ end
end
end
diff --git a/spec/models/report_spec.rb b/spec/models/report_spec.rb
index 874be41328cb50..c485a4a3c9ad12 100644
--- a/spec/models/report_spec.rb
+++ b/spec/models/report_spec.rb
@@ -125,10 +125,17 @@
expect(report).to be_valid
end
- it 'is invalid if comment is longer than 1000 characters' do
+ let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
+
+ it 'is invalid if the comment is longer than 1000 characters and the reporter is local' do
report = Fabricate.build(:report, comment: Faker::Lorem.characters(number: 1001))
- report.valid?
+ expect(report.valid?).to be false
expect(report).to model_have_error_on_field(:comment)
end
+
+ it 'is valid if comment is longer than 1000 characters and reporter is not local' do
+ report = Fabricate.build(:report, account: remote_account, comment: Faker::Lorem.characters(number: 1001))
+ expect(report.valid?).to be true
+ end
end
end
diff --git a/spec/models/tag_spec.rb b/spec/models/tag_spec.rb
index 102d2f62514c8a..19ff6955996f89 100644
--- a/spec/models/tag_spec.rb
+++ b/spec/models/tag_spec.rb
@@ -31,44 +31,52 @@
expect(subject.match('https://en.wikipedia.org/wiki/Ghostbusters_(song)#Lawsuit')).to be_nil
end
+ it 'does not match URLs with hashtag-like anchors after a numeral' do
+ expect(subject.match('https://gcc.gnu.org/bugzilla/show_bug.cgi?id=111895#c4')).to be_nil
+ end
+
+ it 'does not match URLs with hashtag-like anchors after an empty query parameter' do
+ expect(subject.match('https://en.wikipedia.org/wiki/Ghostbusters_(song)?foo=#Lawsuit')).to be_nil
+ end
+
it 'matches #aesthetic' do
- expect(subject.match('this is #aesthetic').to_s).to eq ' #aesthetic'
+ expect(subject.match('this is #aesthetic').to_s).to eq '#aesthetic'
end
it 'matches digits at the start' do
- expect(subject.match('hello #3d').to_s).to eq ' #3d'
+ expect(subject.match('hello #3d').to_s).to eq '#3d'
end
it 'matches digits in the middle' do
- expect(subject.match('hello #l33ts35k').to_s).to eq ' #l33ts35k'
+ expect(subject.match('hello #l33ts35k').to_s).to eq '#l33ts35k'
end
it 'matches digits at the end' do
- expect(subject.match('hello #world2016').to_s).to eq ' #world2016'
+ expect(subject.match('hello #world2016').to_s).to eq '#world2016'
end
it 'matches underscores at the beginning' do
- expect(subject.match('hello #_test').to_s).to eq ' #_test'
+ expect(subject.match('hello #_test').to_s).to eq '#_test'
end
it 'matches underscores at the end' do
- expect(subject.match('hello #test_').to_s).to eq ' #test_'
+ expect(subject.match('hello #test_').to_s).to eq '#test_'
end
it 'matches underscores in the middle' do
- expect(subject.match('hello #one_two_three').to_s).to eq ' #one_two_three'
+ expect(subject.match('hello #one_two_three').to_s).to eq '#one_two_three'
end
it 'matches middle dots' do
- expect(subject.match('hello #one·two·three').to_s).to eq ' #one·two·three'
+ expect(subject.match('hello #one·two·three').to_s).to eq '#one·two·three'
end
it 'matches ・unicode in ぼっち・ざ・ろっく correctly' do
- expect(subject.match('testing #ぼっち・ざ・ろっく').to_s).to eq ' #ぼっち・ざ・ろっく'
+ expect(subject.match('testing #ぼっち・ざ・ろっく').to_s).to eq '#ぼっち・ざ・ろっく'
end
it 'matches ZWNJ' do
- expect(subject.match('just add #نرمافزار and').to_s).to eq ' #نرمافزار'
+ expect(subject.match('just add #نرمافزار and').to_s).to eq '#نرمافزار'
end
it 'does not match middle dots at the start' do
@@ -76,7 +84,7 @@
end
it 'does not match middle dots at the end' do
- expect(subject.match('hello #one·two·three·').to_s).to eq ' #one·two·three'
+ expect(subject.match('hello #one·two·three·').to_s).to eq '#one·two·three'
end
it 'does not match purely-numeric hashtags' do
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 4b3d6101fd1b70..6414917a096ad3 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -439,7 +439,10 @@ def fabricate
let!(:access_token) { Fabricate(:access_token, resource_owner_id: user.id) }
let!(:web_push_subscription) { Fabricate(:web_push_subscription, access_token: access_token) }
+ let(:redis_pipeline_stub) { instance_double(Redis::Namespace, publish: nil) }
+
before do
+ allow(redis).to receive(:pipelined).and_yield(redis_pipeline_stub)
user.reset_password!
end
@@ -455,6 +458,10 @@ def fabricate
expect(Doorkeeper::AccessToken.active_for(user).count).to eq 0
end
+ it 'revokes streaming access for all access tokens' do
+ expect(redis_pipeline_stub).to have_received(:publish).with("timeline:access_token:#{access_token.id}", Oj.dump(event: :kill)).once
+ end
+
it 'removes push subscriptions' do
expect(Web::PushSubscription.where(user: user).or(Web::PushSubscription.where(access_token: access_token)).count).to eq 0
end
diff --git a/spec/requests/api/v1/accounts/featured_tags_spec.rb b/spec/requests/api/v1/accounts/featured_tags_spec.rb
new file mode 100644
index 00000000000000..bae7d448b6daee
--- /dev/null
+++ b/spec/requests/api/v1/accounts/featured_tags_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+RSpec.describe 'account featured tags API' do
+ let(:user) { Fabricate(:user) }
+ let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: scopes) }
+ let(:scopes) { 'read:accounts' }
+ let(:headers) { { 'Authorization' => "Bearer #{token.token}" } }
+ let(:account) { Fabricate(:account) }
+
+ describe 'GET /api/v1/accounts/:id/featured_tags' do
+ subject do
+ get "/api/v1/accounts/#{account.id}/featured_tags", headers: headers
+ end
+
+ before do
+ account.featured_tags.create!(name: 'foo')
+ account.featured_tags.create!(name: 'bar')
+ end
+
+ it 'returns the expected tags', :aggregate_failures do
+ subject
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json).to contain_exactly(a_hash_including({
+ name: 'bar',
+ url: "https://cb6e6126.ngrok.io/@#{account.username}/tagged/bar",
+ }), a_hash_including({
+ name: 'foo',
+ url: "https://cb6e6126.ngrok.io/@#{account.username}/tagged/foo",
+ }))
+ end
+
+ context 'when the account is remote' do
+ it 'returns the expected tags', :aggregate_failures do
+ subject
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json).to contain_exactly(a_hash_including({
+ name: 'bar',
+ url: "https://cb6e6126.ngrok.io/@#{account.pretty_acct}/tagged/bar",
+ }), a_hash_including({
+ name: 'foo',
+ url: "https://cb6e6126.ngrok.io/@#{account.pretty_acct}/tagged/foo",
+ }))
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/v1/directories_spec.rb b/spec/requests/api/v1/directories_spec.rb
new file mode 100644
index 00000000000000..0a1864d136cc5e
--- /dev/null
+++ b/spec/requests/api/v1/directories_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe 'Directories API' do
+ let(:user) { Fabricate(:user, confirmed_at: nil) }
+ let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: scopes) }
+ let(:scopes) { 'read:follows' }
+ let(:headers) { { 'Authorization' => "Bearer #{token.token}" } }
+
+ describe 'GET /api/v1/directories' do
+ context 'with no params' do
+ before do
+ local_unconfirmed_account = Fabricate(
+ :account,
+ domain: nil,
+ user: Fabricate(:user, confirmed_at: nil, approved: true),
+ username: 'local_unconfirmed'
+ )
+ local_unconfirmed_account.create_account_stat!
+
+ local_unapproved_account = Fabricate(
+ :account,
+ domain: nil,
+ user: Fabricate(:user, confirmed_at: 10.days.ago),
+ username: 'local_unapproved'
+ )
+ local_unapproved_account.create_account_stat!
+ local_unapproved_account.user.update(approved: false)
+
+ local_undiscoverable_account = Fabricate(
+ :account,
+ domain: nil,
+ user: Fabricate(:user, confirmed_at: 10.days.ago, approved: true),
+ discoverable: false,
+ username: 'local_undiscoverable'
+ )
+ local_undiscoverable_account.create_account_stat!
+
+ excluded_from_timeline_account = Fabricate(
+ :account,
+ domain: 'host.example',
+ discoverable: true,
+ username: 'remote_excluded_from_timeline'
+ )
+ excluded_from_timeline_account.create_account_stat!
+ Fabricate(:block, account: user.account, target_account: excluded_from_timeline_account)
+
+ domain_blocked_account = Fabricate(
+ :account,
+ domain: 'test.example',
+ discoverable: true,
+ username: 'remote_domain_blocked'
+ )
+ domain_blocked_account.create_account_stat!
+ Fabricate(:account_domain_block, account: user.account, domain: 'test.example')
+
+ local_discoverable_account.create_account_stat!
+ eligible_remote_account.create_account_stat!
+ end
+
+ let(:local_discoverable_account) do
+ Fabricate(
+ :account,
+ domain: nil,
+ user: Fabricate(:user, confirmed_at: 10.days.ago, approved: true),
+ discoverable: true,
+ username: 'local_discoverable'
+ )
+ end
+
+ let(:eligible_remote_account) do
+ Fabricate(
+ :account,
+ domain: 'host.example',
+ discoverable: true,
+ username: 'eligible_remote'
+ )
+ end
+
+ it 'returns the local discoverable account and the remote discoverable account' do
+ get '/api/v1/directory', headers: headers
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json.size).to eq(2)
+ expect(body_as_json.pluck(:id)).to contain_exactly(eligible_remote_account.id.to_s, local_discoverable_account.id.to_s)
+ end
+ end
+
+ context 'when asking for local accounts only' do
+ let(:user) { Fabricate(:user, confirmed_at: 10.days.ago, approved: true) }
+ let(:local_account) { Fabricate(:account, domain: nil, user: user) }
+ let(:remote_account) { Fabricate(:account, domain: 'host.example') }
+
+ before do
+ local_account.create_account_stat!
+ remote_account.create_account_stat!
+ end
+
+ it 'returns only the local accounts' do
+ get '/api/v1/directory', headers: headers, params: { local: '1' }
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json.size).to eq(1)
+ expect(body_as_json.first[:id]).to include(local_account.id.to_s)
+ expect(response.body).to_not include(remote_account.id.to_s)
+ end
+ end
+
+ context 'when ordered by active' do
+ it 'returns accounts in order of most recent status activity' do
+ old_stat = Fabricate(:account_stat, last_status_at: 1.day.ago)
+ new_stat = Fabricate(:account_stat, last_status_at: 1.minute.ago)
+
+ get '/api/v1/directory', headers: headers, params: { order: 'active' }
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json.size).to eq(2)
+ expect(body_as_json.first[:id]).to include(new_stat.account_id.to_s)
+ expect(body_as_json.second[:id]).to include(old_stat.account_id.to_s)
+ end
+ end
+
+ context 'when ordered by new' do
+ it 'returns accounts in order of creation' do
+ account_old = Fabricate(:account_stat).account
+ travel_to 10.seconds.from_now
+ account_new = Fabricate(:account_stat).account
+
+ get '/api/v1/directory', headers: headers, params: { order: 'new' }
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json.size).to eq(2)
+ expect(body_as_json.first[:id]).to include(account_new.id.to_s)
+ expect(body_as_json.second[:id]).to include(account_old.id.to_s)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/v2/media_spec.rb b/spec/requests/api/v2/media_spec.rb
new file mode 100644
index 00000000000000..89384d0ca36206
--- /dev/null
+++ b/spec/requests/api/v2/media_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+RSpec.describe 'Media API', paperclip_processing: true do
+ let(:user) { Fabricate(:user) }
+ let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: scopes) }
+ let(:scopes) { 'write' }
+ let(:headers) { { 'Authorization' => "Bearer #{token.token}" } }
+
+ describe 'POST /api/v2/media' do
+ it 'returns http success' do
+ post '/api/v2/media', headers: headers, params: { file: fixture_file_upload('attachment-jpg.123456_abcd', 'image/jpeg') }
+ expect(File.exist?(user.account.media_attachments.first.file.path(:small))).to be true
+ expect(response).to have_http_status(200)
+ end
+ end
+end
diff --git a/spec/requests/content_security_policy_spec.rb b/spec/requests/content_security_policy_spec.rb
new file mode 100644
index 00000000000000..7eb27d61d615ca
--- /dev/null
+++ b/spec/requests/content_security_policy_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe 'Content-Security-Policy' do
+ it 'sets the expected CSP headers' do
+ allow(SecureRandom).to receive(:base64).with(16).and_return('ZbA+JmE7+bK8F5qvADZHuQ==')
+
+ get '/'
+ expect(response.headers['Content-Security-Policy'].split(';').map(&:strip)).to contain_exactly(
+ "base-uri 'none'",
+ "default-src 'none'",
+ "frame-ancestors 'none'",
+ "font-src 'self' https://cb6e6126.ngrok.io",
+ "img-src 'self' https: data: blob: https://cb6e6126.ngrok.io",
+ "style-src 'self' https://cb6e6126.ngrok.io 'nonce-ZbA+JmE7+bK8F5qvADZHuQ=='",
+ "media-src 'self' https: data: https://cb6e6126.ngrok.io",
+ "frame-src 'self' https:",
+ "manifest-src 'self' https://cb6e6126.ngrok.io",
+ "form-action 'self'",
+ "child-src 'self' blob: https://cb6e6126.ngrok.io",
+ "worker-src 'self' blob: https://cb6e6126.ngrok.io",
+ "connect-src 'self' data: blob: https://cb6e6126.ngrok.io https://cb6e6126.ngrok.io ws://localhost:4000",
+ "script-src 'self' https://cb6e6126.ngrok.io 'wasm-unsafe-eval'"
+ )
+ end
+end
diff --git a/spec/requests/disabled_oauth_endpoints_spec.rb b/spec/requests/disabled_oauth_endpoints_spec.rb
new file mode 100644
index 00000000000000..7c2c09f3804bf3
--- /dev/null
+++ b/spec/requests/disabled_oauth_endpoints_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe 'Disabled OAuth routes' do
+ # These routes are disabled via the doorkeeper configuration for
+ # `admin_authenticator`, as these routes should only be accessible by server
+ # administrators. For now, these routes are not properly designed and
+ # integrated into Mastodon, so we're disabling them completely
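+ #
+ # For context, a minimal sketch (illustrative only, not necessarily the exact
+ # Mastodon configuration) of how such a lockout can be expressed in
+ # config/initializers/doorkeeper.rb:
+ #
+ #   Doorkeeper.configure do
+ #     admin_authenticator do
+ #       # Refuse everyone, effectively disabling the applications UI
+ #       head 403
+ #     end
+ #   end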
+ describe 'GET /oauth/applications' do
+ it 'returns 403 forbidden' do
+ get oauth_applications_path
+
+ expect(response).to have_http_status(403)
+ end
+ end
+
+ describe 'POST /oauth/applications' do
+ it 'returns 403 forbidden' do
+ post oauth_applications_path
+
+ expect(response).to have_http_status(403)
+ end
+ end
+
+ describe 'GET /oauth/applications/new' do
+ it 'returns 403 forbidden' do
+ get new_oauth_application_path
+
+ expect(response).to have_http_status(403)
+ end
+ end
+
+ describe 'GET /oauth/applications/:id' do
+ let(:application) { Fabricate(:application, scopes: 'read') }
+
+ it 'returns 403 forbidden' do
+ get oauth_application_path(application)
+
+ expect(response).to have_http_status(403)
+ end
+ end
+
+ describe 'PATCH /oauth/applications/:id' do
+ let(:application) { Fabricate(:application, scopes: 'read') }
+
+ it 'returns 403 forbidden' do
+ patch oauth_application_path(application)
+
+ expect(response).to have_http_status(403)
+ end
+ end
+
+ describe 'PUT /oauth/applications/:id' do
+ let(:application) { Fabricate(:application, scopes: 'read') }
+
+ it 'returns 403 forbidden' do
+ put oauth_application_path(application)
+
+ expect(response).to have_http_status(403)
+ end
+ end
+
+ describe 'DELETE /oauth/applications/:id' do
+ let(:application) { Fabricate(:application, scopes: 'read') }
+
+ it 'returns 403 forbidden' do
+ delete oauth_application_path(application)
+
+ expect(response).to have_http_status(403)
+ end
+ end
+
+ describe 'GET /oauth/applications/:id/edit' do
+ let(:application) { Fabricate(:application, scopes: 'read') }
+
+ it 'returns 403 forbidden' do
+ get edit_oauth_application_path(application)
+
+ expect(response).to have_http_status(403)
+ end
+ end
+end
diff --git a/spec/requests/omniauth_callbacks_spec.rb b/spec/requests/omniauth_callbacks_spec.rb
new file mode 100644
index 00000000000000..095535e48598e0
--- /dev/null
+++ b/spec/requests/omniauth_callbacks_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe 'OmniAuth callbacks' do
+ shared_examples 'omniauth provider callbacks' do |provider|
+ subject { post send :"user_#{provider}_omniauth_callback_path" }
+
+ context 'with full information in response' do
+ before do
+ mock_omniauth(provider, {
+ provider: provider.to_s,
+ uid: '123',
+ info: {
+ verified: 'true',
+ email: 'user@host.example',
+ },
+ })
+ end
+
+ context 'without a matching user' do
+ it 'creates a user and an identity and redirects to root path' do
+ expect { subject }
+ .to change(User, :count)
+ .by(1)
+ .and change(Identity, :count)
+ .by(1)
+ .and change(LoginActivity, :count)
+ .by(1)
+
+ expect(User.last.email).to eq('user@host.example')
+ expect(Identity.find_by(user: User.last).uid).to eq('123')
+ expect(response).to redirect_to(root_path)
+ end
+ end
+
+ context 'with a matching user and no matching identity' do
+ before do
+ Fabricate(:user, email: 'user@host.example')
+ end
+
+ context 'when ALLOW_UNSAFE_AUTH_PROVIDER_REATTACH is set to true' do
+ around do |example|
+ ClimateControl.modify ALLOW_UNSAFE_AUTH_PROVIDER_REATTACH: 'true' do
+ example.run
+ end
+ end
+
+ it 'matches the existing user, creates an identity, and redirects to root path' do
+ expect { subject }
+ .to not_change(User, :count)
+ .and change(Identity, :count)
+ .by(1)
+ .and change(LoginActivity, :count)
+ .by(1)
+
+ expect(Identity.find_by(user: User.last).uid).to eq('123')
+ expect(response).to redirect_to(root_path)
+ end
+ end
+
+ context 'when ALLOW_UNSAFE_AUTH_PROVIDER_REATTACH is not set to true' do
+ it 'does not match the existing user or create an identity, and redirects to login page' do
+ expect { subject }
+ .to not_change(User, :count)
+ .and not_change(Identity, :count)
+ .and not_change(LoginActivity, :count)
+
+ expect(response).to redirect_to(new_user_session_url)
+ end
+ end
+ end
+
+ context 'with a matching user and a matching identity' do
+ before do
+ user = Fabricate(:user, email: 'user@host.example')
+ Fabricate(:identity, user: user, uid: '123', provider: provider)
+ end
+
+ it 'matches the existing records and redirects to root path' do
+ expect { subject }
+ .to not_change(User, :count)
+ .and not_change(Identity, :count)
+ .and change(LoginActivity, :count)
+ .by(1)
+
+ expect(response).to redirect_to(root_path)
+ end
+ end
+ end
+
+ context 'with a response missing email address' do
+ before do
+ mock_omniauth(provider, {
+ provider: provider.to_s,
+ uid: '123',
+ info: {
+ verified: 'true',
+ },
+ })
+ end
+
+ it 'redirects to the auth setup page' do
+ expect { subject }
+ .to change(User, :count)
+ .by(1)
+ .and change(Identity, :count)
+ .by(1)
+ .and change(LoginActivity, :count)
+ .by(1)
+
+ expect(response).to redirect_to(auth_setup_path(missing_email: '1'))
+ end
+ end
+
+ context 'when a user cannot be built' do
+ before do
+ allow(User).to receive(:find_for_omniauth).and_return(User.new)
+ end
+
+ it 'redirects to the new user signup page' do
+ expect { subject }
+ .to not_change(User, :count)
+ .and not_change(Identity, :count)
+ .and not_change(LoginActivity, :count)
+
+ expect(response).to redirect_to(new_user_registration_url)
+ end
+ end
+ end
+
+ describe '#openid_connect', if: ENV['OIDC_ENABLED'] == 'true' && ENV['OIDC_SCOPE'].present? do
+ include_examples 'omniauth provider callbacks', :openid_connect
+ end
+
+ describe '#cas', if: ENV['CAS_ENABLED'] == 'true' do
+ include_examples 'omniauth provider callbacks', :cas
+ end
+
+ describe '#saml', if: ENV['SAML_ENABLED'] == 'true' do
+ include_examples 'omniauth provider callbacks', :saml
+ end
+end
diff --git a/spec/requests/signature_verification_spec.rb b/spec/requests/signature_verification_spec.rb
new file mode 100644
index 00000000000000..401828c4a3c5d1
--- /dev/null
+++ b/spec/requests/signature_verification_spec.rb
@@ -0,0 +1,398 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe 'signature verification concern' do
+ before do
+ stub_tests_controller
+
+ # Signature checking is time-dependent, so travel to a fixed date
+ travel_to '2023-12-20T10:00:00Z'
+ end
+
+ after { Rails.application.reload_routes! }
+
+ # Include the private key so the tests can be easily adjusted and reviewed
+ let(:actor_keypair) do
+ OpenSSL::PKey.read(<<~PEM_TEXT)
+ -----BEGIN RSA PRIVATE KEY-----
+ MIIEowIBAAKCAQEAqIAYvNFGbZ5g4iiK6feSdXD4bDStFM58A7tHycYXaYtzZQpI
+ eHXAmaXuZzXIwtrP4N0gIk8JNwZvXj2UPS+S07t0V9wNK94he01LV5EMz/GN4eNn
+ FmDL64HIEuKLvV8TvgjbUPRD6Y5X0UpKi2ZIFLSb96Q5w0Z/k7ntpVKV52y8kz5F
+ jr/O/0JuHryZe0yItzJh8kzFfeMf0EXzfSnaKvT7P9jhgC6uTre+jXyvVZjiHDrn
+ qvvucdI3I7DRfXo1OqARBrLjy+TdseUAjNYJ+OuPRI1URIWQI01DCHqcohVu9+Ar
+ +BiCjFp3ua+XMuJvrvbD61d1Fvig/9nbBRR+8QIDAQABAoIBAAgySHnFWI6gItR3
+ fkfiqIm80cHCN3Xk1C6iiVu+3oBOZbHpW9R7vl9e/WOA/9O+LPjiSsQOegtWnVvd
+ RRjrl7Hj20VDlZKv5Mssm6zOGAxksrcVbqwdj+fUJaNJCL0AyyseH0x/IE9T8rDC
+ I1GH+3tB3JkhkIN/qjipdX5ab8MswEPu8IC4ViTpdBgWYY/xBcAHPw4xuL0tcwzh
+ FBlf4DqoEVQo8GdK5GAJ2Ny0S4xbXHUURzx/R4y4CCts7niAiLGqd9jmLU1kUTMk
+ QcXfQYK6l+unLc7wDYAz7sFEHh04M48VjWwiIZJnlCqmQbLda7uhhu8zkF1DqZTu
+ ulWDGQECgYEA0TIAc8BQBVab979DHEEmMdgqBwxLY3OIAk0b+r50h7VBGWCDPRsC
+ STD73fQY3lNet/7/jgSGwwAlAJ5PpMXxXiZAE3bUwPmHzgF7pvIOOLhA8O07tHSO
+ L2mvQe6NPzjZ+6iAO2U9PkClxcvGvPx2OBvisfHqZLmxC9PIVxzruQECgYEAzjM6
+ BTUXa6T/qHvLFbN699BXsUOGmHBGaLRapFDBfVvgZrwqYQcZpBBhesLdGTGSqwE7
+ gWsITPIJ+Ldo+38oGYyVys+w/V67q6ud7hgSDTW3hSvm+GboCjk6gzxlt9hQ0t9X
+ 8vfDOYhEXvVUJNv3mYO60ENqQhILO4bQ0zi+VfECgYBb/nUccfG+pzunU0Cb6Dp3
+ qOuydcGhVmj1OhuXxLFSDG84Tazo7juvHA9mp7VX76mzmDuhpHPuxN2AzB2SBEoE
+ cSW0aYld413JRfWukLuYTc6hJHIhBTCRwRQFFnae2s1hUdQySm8INT2xIc+fxBXo
+ zrp+Ljg5Wz90SAnN5TX0AQKBgDaatDOq0o/r+tPYLHiLtfWoE4Dau+rkWJDjqdk3
+ lXWn/e3WyHY3Vh/vQpEqxzgju45TXjmwaVtPATr+/usSykCxzP0PMPR3wMT+Rm1F
+ rIoY/odij+CaB7qlWwxj0x/zRbwB7x1lZSp4HnrzBpxYL+JUUwVRxPLIKndSBTza
+ GvVRAoGBAIVBcNcRQYF4fvZjDKAb4fdBsEuHmycqtRCsnkGOz6ebbEQznSaZ0tZE
+ +JuouZaGjyp8uPjNGD5D7mIGbyoZ3KyG4mTXNxDAGBso1hrNDKGBOrGaPhZx8LgO
+ 4VXJ+ybXrATf4jr8ccZYsZdFpOphPzz+j55Mqg5vac5P1XjmsGTb
+ -----END RSA PRIVATE KEY-----
+ PEM_TEXT
+ end
+
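+ # For reference: the Signature headers below follow the draft-cavage HTTP
+ # Signatures scheme, where the signed string lists each header named in the
+ # "headers" parameter, in order. A sketch for the GET examples in this file:
+ #
+ #   date: Wed, 20 Dec 2023 10:00:00 GMT
+ #   host: www.example.com
+ #   (request-target): get /activitypub/success
+ #
+ # The build_signature_string helper used below builds the expected Signature
+ # header from the same inputs.
+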
+ context 'without a Signature header' do
+ it 'does not treat the request as signed' do
+ get '/activitypub/success'
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json).to match(
+ signed_request: false,
+ signature_actor_id: nil,
+ error: 'Request not signed'
+ )
+ end
+
+ context 'when a signature is required' do
+ it 'returns http unauthorized with appropriate error' do
+ get '/activitypub/signature_required'
+
+ expect(response).to have_http_status(401)
+ expect(body_as_json).to match(
+ error: 'Request not signed'
+ )
+ end
+ end
+ end
+
+ context 'with an HTTP Signature from a known account' do
+ let!(:actor) { Fabricate(:account, domain: 'remote.domain', uri: 'https://remote.domain/users/bob', private_key: nil, public_key: actor_keypair.public_key.to_pem) }
+
+ context 'with a valid signature on a GET request' do
+ let(:signature_header) do
+ 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="date host (request-target)",signature="Z8ilar3J7bOwqZkMp7sL8sRs4B1FT+UorbmvWoE+A5UeoOJ3KBcUmbsh+k3wQwbP5gMNUrra9rEWabpasZGphLsbDxfbsWL3Cf0PllAc7c1c7AFEwnewtExI83/qqgEkfWc2z7UDutXc2NfgAx89Ox8DXU/fA2GG0jILjB6UpFyNugkY9rg6oI31UnvfVi3R7sr3/x8Ea3I9thPvqI2byF6cojknSpDAwYzeKdngX3TAQEGzFHz3SDWwyp3jeMWfwvVVbM38FxhvAnSumw7YwWW4L7M7h4M68isLimoT3yfCn2ucBVL5Dz8koBpYf/40w7QidClAwCafZQFC29yDOg=="' # rubocop:disable Layout/LineLength
+ end
+
+ it 'successfully verifies signature', :aggregate_failures do
+ expect(signature_header).to eq build_signature_string(actor_keypair, 'https://remote.domain/users/bob#main-key', 'get /activitypub/success', { 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT', 'Host' => 'www.example.com' })
+
+ get '/activitypub/success', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Signature' => signature_header,
+ }
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: actor.id.to_s
+ )
+ end
+ end
+
+ context 'with a valid signature on a GET request that has a query string' do
+ let(:signature_header) do
+ 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="date host (request-target)",signature="SDMa4r/DQYMXYxVgYO2yEqGWWUXugKjVuz0I8dniQAk+aunzBaF2aPu+4grBfawAshlx1Xytl8lhb0H2MllEz16/tKY7rUrb70MK0w8ohXgpb0qs3YvQgdj4X24L1x2MnkFfKHR/J+7TBlnivq0HZqXm8EIkPWLv+eQxu8fbowLwHIVvRd/3t6FzvcfsE0UZKkoMEX02542MhwSif6cu7Ec/clsY9qgKahb9JVGOGS1op9Lvg/9y1mc8KCgD83U5IxVygYeYXaVQ6gixA9NgZiTCwEWzHM5ELm7w5hpdLFYxYOHg/3G3fiqJzpzNQAcCD4S4JxfE7hMI0IzVlNLT6A=="' # rubocop:disable Layout/LineLength
+ end
+
+ it 'successfully verifies signature', :aggregate_failures do
+ expect(signature_header).to eq build_signature_string(actor_keypair, 'https://remote.domain/users/bob#main-key', 'get /activitypub/success?foo=42', { 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT', 'Host' => 'www.example.com' })
+
+ get '/activitypub/success?foo=42', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Signature' => signature_header,
+ }
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: actor.id.to_s
+ )
+ end
+ end
+
+ context 'when the query string is missing from the signature verification (compatibility quirk)' do
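+ # As exercised below: some implementations sign the (request-target) without
+ # the query string, and the verifier accepts such signatures for requests
+ # that do carry a query string.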
+ let(:signature_header) do
+ 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="date host (request-target)",signature="Z8ilar3J7bOwqZkMp7sL8sRs4B1FT+UorbmvWoE+A5UeoOJ3KBcUmbsh+k3wQwbP5gMNUrra9rEWabpasZGphLsbDxfbsWL3Cf0PllAc7c1c7AFEwnewtExI83/qqgEkfWc2z7UDutXc2NfgAx89Ox8DXU/fA2GG0jILjB6UpFyNugkY9rg6oI31UnvfVi3R7sr3/x8Ea3I9thPvqI2byF6cojknSpDAwYzeKdngX3TAQEGzFHz3SDWwyp3jeMWfwvVVbM38FxhvAnSumw7YwWW4L7M7h4M68isLimoT3yfCn2ucBVL5Dz8koBpYf/40w7QidClAwCafZQFC29yDOg=="' # rubocop:disable Layout/LineLength
+ end
+
+ it 'successfully verifies signature', :aggregate_failures do
+ expect(signature_header).to eq build_signature_string(actor_keypair, 'https://remote.domain/users/bob#main-key', 'get /activitypub/success', { 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT', 'Host' => 'www.example.com' })
+
+ get '/activitypub/success?foo=42', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Signature' => signature_header,
+ }
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: actor.id.to_s
+ )
+ end
+ end
+
+ context 'with mismatching query string' do
+ let(:signature_header) do
+ 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="date host (request-target)",signature="SDMa4r/DQYMXYxVgYO2yEqGWWUXugKjVuz0I8dniQAk+aunzBaF2aPu+4grBfawAshlx1Xytl8lhb0H2MllEz16/tKY7rUrb70MK0w8ohXgpb0qs3YvQgdj4X24L1x2MnkFfKHR/J+7TBlnivq0HZqXm8EIkPWLv+eQxu8fbowLwHIVvRd/3t6FzvcfsE0UZKkoMEX02542MhwSif6cu7Ec/clsY9qgKahb9JVGOGS1op9Lvg/9y1mc8KCgD83U5IxVygYeYXaVQ6gixA9NgZiTCwEWzHM5ELm7w5hpdLFYxYOHg/3G3fiqJzpzNQAcCD4S4JxfE7hMI0IzVlNLT6A=="' # rubocop:disable Layout/LineLength
+ end
+
+ it 'fails to verify signature', :aggregate_failures do
+ expect(signature_header).to eq build_signature_string(actor_keypair, 'https://remote.domain/users/bob#main-key', 'get /activitypub/success?foo=42', { 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT', 'Host' => 'www.example.com' })
+
+ get '/activitypub/success?foo=43', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Signature' => signature_header,
+ }
+
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: nil,
+ error: anything
+ )
+ end
+ end
+
+ context 'with a mismatching path' do
+ it 'fails to verify signature', :aggregate_failures do
+ get '/activitypub/alternative-path', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Signature' => 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="date host (request-target)",signature="Z8ilar3J7bOwqZkMp7sL8sRs4B1FT+UorbmvWoE+A5UeoOJ3KBcUmbsh+k3wQwbP5gMNUrra9rEWabpasZGphLsbDxfbsWL3Cf0PllAc7c1c7AFEwnewtExI83/qqgEkfWc2z7UDutXc2NfgAx89Ox8DXU/fA2GG0jILjB6UpFyNugkY9rg6oI31UnvfVi3R7sr3/x8Ea3I9thPvqI2byF6cojknSpDAwYzeKdngX3TAQEGzFHz3SDWwyp3jeMWfwvVVbM38FxhvAnSumw7YwWW4L7M7h4M68isLimoT3yfCn2ucBVL5Dz8koBpYf/40w7QidClAwCafZQFC29yDOg=="', # rubocop:disable Layout/LineLength
+ }
+
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: nil,
+ error: anything
+ )
+ end
+ end
+
+ context 'with a mismatching method' do
+ it 'fails to verify signature', :aggregate_failures do
+ post '/activitypub/success', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Signature' => 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="date host (request-target)",signature="Z8ilar3J7bOwqZkMp7sL8sRs4B1FT+UorbmvWoE+A5UeoOJ3KBcUmbsh+k3wQwbP5gMNUrra9rEWabpasZGphLsbDxfbsWL3Cf0PllAc7c1c7AFEwnewtExI83/qqgEkfWc2z7UDutXc2NfgAx89Ox8DXU/fA2GG0jILjB6UpFyNugkY9rg6oI31UnvfVi3R7sr3/x8Ea3I9thPvqI2byF6cojknSpDAwYzeKdngX3TAQEGzFHz3SDWwyp3jeMWfwvVVbM38FxhvAnSumw7YwWW4L7M7h4M68isLimoT3yfCn2ucBVL5Dz8koBpYf/40w7QidClAwCafZQFC29yDOg=="', # rubocop:disable Layout/LineLength
+ }
+
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: nil,
+ error: anything
+ )
+ end
+ end
+
+ context 'with an unparsable date' do
+ let(:signature_header) do
+ 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="date host (request-target)",signature="d4B7nfx8RJcfdJDu1J//5WzPzK/hgtPkdzZx49lu5QhnE7qdV3lgyVimmhCFrO16bwvzIp9iRMyRLkNFxLiEeVaa1gqeKbldGSnU0B0OMjx7rFBa65vLuzWQOATDitVGiBEYqoK4v0DMuFCz2DtFaA/DIUZ3sty8bZ/Ea3U1nByLOO6MacARA3zhMSI0GNxGqsSmZmG0hPLavB3jIXoE3IDoQabMnC39jrlcO/a8h1iaxBm2WD8TejrImJullgqlJIFpKhIHI3ipQkvTGPlm9dx0y+beM06qBvWaWQcmT09eRIUefVsOAzIhUtS/7FVb/URhZvircIJDa7vtiFcmZQ=="' # rubocop:disable Layout/LineLength
+ end
+
+ it 'fails to verify signature', :aggregate_failures do
+ expect(signature_header).to eq build_signature_string(actor_keypair, 'https://remote.domain/users/bob#main-key', 'get /activitypub/success', { 'Date' => 'wrong date', 'Host' => 'www.example.com' })
+
+ get '/activitypub/success', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'wrong date',
+ 'Signature' => signature_header,
+ }
+
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: nil,
+ error: 'Invalid Date header: not RFC 2616 compliant date: "wrong date"'
+ )
+ end
+ end
+
+ context 'with a request older than a day' do
+ let(:signature_header) do
+ 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="date host (request-target)",signature="G1NuJv4zgoZ3B/ZIjzDWZHK4RC+5pYee74q8/LJEMCWXhcnAomcb9YHaqk1QYfQvcBUIXw3UZ3Q9xO8F9y0i8G5mzJHfQ+OgHqCoJk8EmGwsUXJMh5s1S5YFCRt8TT12TmJZz0VMqLq85ubueSYBM7QtUE/FzFIVLvz4RysgXxaXQKzdnM6+gbUEEKdCURpXdQt2NXQhp4MAmZH3+0lQoR6VxdsK0hx0Ji2PNp1nuqFTlYqNWZazVdLBN+9rETLRmvGXknvg9jOxTTppBVWnkAIl26HtLS3wwFVvz4pJzi9OQDOvLziehVyLNbU61hky+oJ215e2HuKSe2hxHNl1MA=="' # rubocop:disable Layout/LineLength
+ end
+
+ it 'fails to verify signature', :aggregate_failures do
+ expect(signature_header).to eq build_signature_string(actor_keypair, 'https://remote.domain/users/bob#main-key', 'get /activitypub/success', { 'Date' => 'Wed, 18 Dec 2023 10:00:00 GMT', 'Host' => 'www.example.com' })
+
+ get '/activitypub/success', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 18 Dec 2023 10:00:00 GMT',
+ 'Signature' => signature_header,
+ }
+
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: nil,
+ error: 'Signed request date outside acceptable time window'
+ )
+ end
+ end
+
+ context 'with a valid signature on a POST request' do
+ let(:digest_header) { 'SHA-256=ZOyIygCyaOW6GjVnihtTFtIS9PNmskdyMlNKiuyjfzw=' }
+ let(:signature_header) do
+ 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="host date digest (request-target)",signature="gmhMjgMROGElJU3fpehV2acD5kMHeELi8EFP2UPHOdQ54H0r55AxIpji+J3lPe+N2qSb/4H1KXIh6f0lRu8TGSsu12OQmg5hiO8VA9flcA/mh9Lpk+qwlQZIPRqKP9xUEfqD+Z7ti5wPzDKrWAUK/7FIqWgcT/mlqB1R1MGkpMFc/q4CIs2OSNiWgA4K+Kp21oQxzC2kUuYob04gAZ7cyE/FTia5t08uv6lVYFdRsn4XNPn1MsHgFBwBMRG79ng3SyhoG4PrqBEi5q2IdLq3zfre/M6He3wlCpyO2VJNdGVoTIzeZ0Zz8jUscPV3XtWUchpGclLGSaKaq/JyNZeiYQ=="' # rubocop:disable Layout/LineLength
+ end
+
+      it 'successfully verifies signature', :aggregate_failures do
+ expect(digest_header).to eq digest_value('Hello world')
+ expect(signature_header).to eq build_signature_string(actor_keypair, 'https://remote.domain/users/bob#main-key', 'post /activitypub/success', { 'Host' => 'www.example.com', 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT', 'Digest' => digest_header })
+
+ post '/activitypub/success', params: 'Hello world', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Digest' => digest_header,
+ 'Signature' => signature_header,
+ }
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: actor.id.to_s
+ )
+ end
+ end
+
+ context 'when the Digest of a POST request is not signed' do
+ let(:digest_header) { 'SHA-256=ZOyIygCyaOW6GjVnihtTFtIS9PNmskdyMlNKiuyjfzw=' }
+ let(:signature_header) do
+ 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="host date (request-target)",signature="CPD704CG8aCm8X8qIP8kkkiGp1qwFLk/wMVQHOGP0Txxan8c2DZtg/KK7eN8RG8tHx8br/yS2hJs51x4kXImYukGzNJd7ihE3T8lp+9RI1tCcdobTzr/VcVJHDFySdQkg266GCMijRQRZfNvqlJLiisr817PI+gNVBI5qV+vnVd1XhWCEZ+YSmMe8UqYARXAYNqMykTheojqGpTeTFGPUpTQA2Fmt2BipwIjcFDm2Hpihl2kB0MUS0x3zPmHDuadvzoBbN6m3usPDLgYrpALlh+wDs1dYMntcwdwawRKY1oE1XNtgOSum12wntDq3uYL4gya2iPdcw3c929b4koUzw=="' # rubocop:disable Layout/LineLength
+ end
+
+ it 'fails to verify signature', :aggregate_failures do
+ expect(digest_header).to eq digest_value('Hello world')
+ expect(signature_header).to eq build_signature_string(actor_keypair, 'https://remote.domain/users/bob#main-key', 'post /activitypub/success', { 'Host' => 'www.example.com', 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT' })
+
+ post '/activitypub/success', params: 'Hello world', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Digest' => digest_header,
+ 'Signature' => signature_header,
+ }
+
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: nil,
+ error: 'Mastodon requires the Digest header to be signed when doing a POST request'
+ )
+ end
+ end
+
+ context 'with a tampered body on a POST request' do
+ let(:digest_header) { 'SHA-256=ZOyIygCyaOW6GjVnihtTFtIS9PNmskdyMlNKiuyjfzw=' }
+ let(:signature_header) do
+ 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="host date digest (request-target)",signature="gmhMjgMROGElJU3fpehV2acD5kMHeELi8EFP2UPHOdQ54H0r55AxIpji+J3lPe+N2qSb/4H1KXIh6f0lRu8TGSsu12OQmg5hiO8VA9flcA/mh9Lpk+qwlQZIPRqKP9xUEfqD+Z7ti5wPzDKrWAUK/7FIqWgcT/mlqB1R1MGkpMFc/q4CIs2OSNiWgA4K+Kp21oQxzC2kUuYob04gAZ7cyE/FTia5t08uv6lVYFdRsn4XNPn1MsHgFBwBMRG79ng3SyhoG4PrqBEi5q2IdLq3zfre/M6He3wlCpyO2VJNdGVoTIzeZ0Zz8jUscPV3XtWUchpGclLGSaKaq/JyNZeiYQ=="' # rubocop:disable Layout/LineLength
+ end
+
+ it 'fails to verify signature', :aggregate_failures do
+ expect(digest_header).to_not eq digest_value('Hello world!')
+ expect(signature_header).to eq build_signature_string(actor_keypair, 'https://remote.domain/users/bob#main-key', 'post /activitypub/success', { 'Host' => 'www.example.com', 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT', 'Digest' => digest_header })
+
+ post '/activitypub/success', params: 'Hello world!', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Digest' => 'SHA-256=ZOyIygCyaOW6GjVnihtTFtIS9PNmskdyMlNKiuyjfzw=',
+ 'Signature' => signature_header,
+ }
+
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: nil,
+ error: 'Invalid Digest value. Computed SHA-256 digest: wFNeS+K3n/2TKRMFQ2v4iTFOSj+uwF7P/Lt98xrZ5Ro=; given: ZOyIygCyaOW6GjVnihtTFtIS9PNmskdyMlNKiuyjfzw='
+ )
+ end
+ end
+
+ context 'with a tampered path in a POST request' do
+ it 'fails to verify signature', :aggregate_failures do
+ post '/activitypub/alternative-path', params: 'Hello world', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Digest' => 'SHA-256=ZOyIygCyaOW6GjVnihtTFtIS9PNmskdyMlNKiuyjfzw=',
+ 'Signature' => 'keyId="https://remote.domain/users/bob#main-key",algorithm="rsa-sha256",headers="host date digest (request-target)",signature="gmhMjgMROGElJU3fpehV2acD5kMHeELi8EFP2UPHOdQ54H0r55AxIpji+J3lPe+N2qSb/4H1KXIh6f0lRu8TGSsu12OQmg5hiO8VA9flcA/mh9Lpk+qwlQZIPRqKP9xUEfqD+Z7ti5wPzDKrWAUK/7FIqWgcT/mlqB1R1MGkpMFc/q4CIs2OSNiWgA4K+Kp21oQxzC2kUuYob04gAZ7cyE/FTia5t08uv6lVYFdRsn4XNPn1MsHgFBwBMRG79ng3SyhoG4PrqBEi5q2IdLq3zfre/M6He3wlCpyO2VJNdGVoTIzeZ0Zz8jUscPV3XtWUchpGclLGSaKaq/JyNZeiYQ=="', # rubocop:disable Layout/LineLength
+ }
+
+ expect(response).to have_http_status(200)
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: nil,
+ error: anything
+ )
+ end
+ end
+ end
+
+ context 'with an inaccessible key' do
+ before do
+ stub_request(:get, 'https://remote.domain/users/alice#main-key').to_return(status: 404)
+ end
+
+ it 'fails to verify signature', :aggregate_failures do
+ get '/activitypub/success', headers: {
+ 'Host' => 'www.example.com',
+ 'Date' => 'Wed, 20 Dec 2023 10:00:00 GMT',
+ 'Signature' => 'keyId="https://remote.domain/users/alice#main-key",algorithm="rsa-sha256",headers="date host (request-target)",signature="Z8ilar3J7bOwqZkMp7sL8sRs4B1FT+UorbmvWoE+A5UeoOJ3KBcUmbsh+k3wQwbP5gMNUrra9rEWabpasZGphLsbDxfbsWL3Cf0PllAc7c1c7AFEwnewtExI83/qqgEkfWc2z7UDutXc2NfgAx89Ox8DXU/fA2GG0jILjB6UpFyNugkY9rg6oI31UnvfVi3R7sr3/x8Ea3I9thPvqI2byF6cojknSpDAwYzeKdngX3TAQEGzFHz3SDWwyp3jeMWfwvVVbM38FxhvAnSumw7YwWW4L7M7h4M68isLimoT3yfCn2ucBVL5Dz8koBpYf/40w7QidClAwCafZQFC29yDOg=="', # rubocop:disable Layout/LineLength
+ }
+
+ expect(body_as_json).to match(
+ signed_request: true,
+ signature_actor_id: nil,
+ error: 'Unable to fetch key JSON at https://remote.domain/users/alice#main-key'
+ )
+ end
+ end
+
+ private
+
+ def stub_tests_controller
+ stub_const('ActivityPub::TestsController', activitypub_tests_controller)
+
+ Rails.application.routes.draw do
+ # NOTE: RouteSet#draw removes all routes, so we need to re-insert one
+ resource :instance_actor, path: 'actor', only: [:show]
+
+ match :via => [:get, :post], '/activitypub/success' => 'activitypub/tests#success'
+ match :via => [:get, :post], '/activitypub/alternative-path' => 'activitypub/tests#alternative_success'
+ match :via => [:get, :post], '/activitypub/signature_required' => 'activitypub/tests#signature_required'
+ end
+ end
+
+ def activitypub_tests_controller
+ Class.new(ApplicationController) do
+ include SignatureVerification
+
+ before_action :require_actor_signature!, only: [:signature_required]
+
+ def success
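+        # Expose the signature verification outcome (and the failure reason,
+        # when there is one) so the request specs above can assert on it.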
+ render json: {
+ signed_request: signed_request?,
+ signature_actor_id: signed_request_actor&.id&.to_s,
+ }.merge(signature_verification_failure_reason || {})
+ end
+
+ alias_method :alternative_success, :success
+ alias_method :signature_required, :success
+ end
+ end
+
+ def digest_value(body)
+ "SHA-256=#{Digest::SHA256.base64digest(body)}"
+ end
+
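+  # Builds a Signature header the same way a remote server would: each signed
+  # header is serialized as "name: value" (including the "(request-target)"
+  # pseudo-header), the lines are joined with newlines, and the result is
+  # signed with RSA-SHA256 and base64-encoded.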
+ def build_signature_string(keypair, key_id, request_target, headers)
+ algorithm = 'rsa-sha256'
+ signed_headers = headers.merge({ '(request-target)' => request_target })
+ signed_string = signed_headers.map { |key, value| "#{key.downcase}: #{value}" }.join("\n")
+ signature = Base64.strict_encode64(keypair.sign(OpenSSL::Digest.new('SHA256'), signed_string))
+
+ "keyId=\"#{key_id}\",algorithm=\"#{algorithm}\",headers=\"#{signed_headers.keys.join(' ').downcase}\",signature=\"#{signature}\""
+ end
+end
diff --git a/spec/services/activitypub/fetch_featured_collection_service_spec.rb b/spec/services/activitypub/fetch_featured_collection_service_spec.rb
index e6336dc1b19785..398fa510a7e6bc 100644
--- a/spec/services/activitypub/fetch_featured_collection_service_spec.rb
+++ b/spec/services/activitypub/fetch_featured_collection_service_spec.rb
@@ -60,10 +60,10 @@
shared_examples 'sets pinned posts' do
before do
- stub_request(:get, 'https://example.com/account/pinned/1').to_return(status: 200, body: Oj.dump(status_json_1))
- stub_request(:get, 'https://example.com/account/pinned/2').to_return(status: 200, body: Oj.dump(status_json_2))
+ stub_request(:get, 'https://example.com/account/pinned/1').to_return(status: 200, body: Oj.dump(status_json_1), headers: { 'Content-Type': 'application/activity+json' })
+ stub_request(:get, 'https://example.com/account/pinned/2').to_return(status: 200, body: Oj.dump(status_json_2), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/account/pinned/3').to_return(status: 404)
- stub_request(:get, 'https://example.com/account/pinned/4').to_return(status: 200, body: Oj.dump(status_json_4))
+ stub_request(:get, 'https://example.com/account/pinned/4').to_return(status: 200, body: Oj.dump(status_json_4), headers: { 'Content-Type': 'application/activity+json' })
subject.call(actor, note: true, hashtag: false)
end
@@ -76,7 +76,7 @@
describe '#call' do
context 'when the endpoint is a Collection' do
before do
- stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'sets pinned posts'
@@ -93,10 +93,25 @@
end
before do
- stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'sets pinned posts'
+
+ context 'when there is a single item, with the array compacted away' do
+ let(:items) { 'https://example.com/account/pinned/4' }
+
+ before do
+ stub_request(:get, 'https://example.com/account/pinned/4').to_return(status: 200, body: Oj.dump(status_json_4), headers: { 'Content-Type': 'application/activity+json' })
+ subject.call(actor, note: true, hashtag: false)
+ end
+
+ it 'sets expected posts as pinned posts' do
+ expect(actor.pinned_statuses.pluck(:uri)).to contain_exactly(
+ 'https://example.com/account/pinned/4'
+ )
+ end
+ end
end
context 'when the endpoint is a paginated Collection' do
@@ -114,10 +129,25 @@
end
before do
- stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, actor.featured_collection_url).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'sets pinned posts'
+
+ context 'when there is a single item, with the array compacted away' do
+ let(:items) { 'https://example.com/account/pinned/4' }
+
+ before do
+ stub_request(:get, 'https://example.com/account/pinned/4').to_return(status: 200, body: Oj.dump(status_json_4), headers: { 'Content-Type': 'application/activity+json' })
+ subject.call(actor, note: true, hashtag: false)
+ end
+
+ it 'sets expected posts as pinned posts' do
+ expect(actor.pinned_statuses.pluck(:uri)).to contain_exactly(
+ 'https://example.com/account/pinned/4'
+ )
+ end
+ end
end
end
end
diff --git a/spec/services/activitypub/fetch_featured_tags_collection_service_spec.rb b/spec/services/activitypub/fetch_featured_tags_collection_service_spec.rb
index 6ca22c9fc66e61..ba02f9259188af 100644
--- a/spec/services/activitypub/fetch_featured_tags_collection_service_spec.rb
+++ b/spec/services/activitypub/fetch_featured_tags_collection_service_spec.rb
@@ -36,7 +36,7 @@
describe '#call' do
context 'when the endpoint is a Collection' do
before do
- stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'sets featured tags'
@@ -44,7 +44,7 @@
context 'when the account already has featured tags' do
before do
- stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
actor.featured_tags.create!(name: 'FoO')
actor.featured_tags.create!(name: 'baz')
@@ -65,7 +65,7 @@
end
before do
- stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'sets featured tags'
@@ -86,7 +86,7 @@
end
before do
- stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_url).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'sets featured tags'
diff --git a/spec/services/activitypub/fetch_remote_account_service_spec.rb b/spec/services/activitypub/fetch_remote_account_service_spec.rb
index ec6f1f41d8f6ce..2b8024cca3531f 100644
--- a/spec/services/activitypub/fetch_remote_account_service_spec.rb
+++ b/spec/services/activitypub/fetch_remote_account_service_spec.rb
@@ -16,7 +16,7 @@
end
describe '#call' do
- let(:account) { subject.call('https://example.com/alice', id: true) }
+ let(:account) { subject.call('https://example.com/alice') }
shared_examples 'sets profile data' do
it 'returns an account' do
@@ -42,7 +42,7 @@
before do
actor[:inbox] = nil
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
@@ -65,7 +65,7 @@
let!(:webfinger) { { subject: 'acct:alice@example.com', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
@@ -91,7 +91,7 @@
let!(:webfinger) { { subject: 'acct:alice@iscool.af', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
stub_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:alice@iscool.af').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
@@ -123,7 +123,7 @@
let!(:webfinger) { { subject: 'acct:alice@example.com', links: [{ rel: 'self', href: 'https://example.com/bob' }] } }
before do
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
@@ -146,7 +146,7 @@
let!(:webfinger) { { subject: 'acct:alice@iscool.af', links: [{ rel: 'self', href: 'https://example.com/bob' }] } }
before do
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
stub_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:alice@iscool.af').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
diff --git a/spec/services/activitypub/fetch_remote_actor_service_spec.rb b/spec/services/activitypub/fetch_remote_actor_service_spec.rb
index 20117c66d04764..ad7bf0d1b262af 100644
--- a/spec/services/activitypub/fetch_remote_actor_service_spec.rb
+++ b/spec/services/activitypub/fetch_remote_actor_service_spec.rb
@@ -16,7 +16,7 @@
end
describe '#call' do
- let(:account) { subject.call('https://example.com/alice', id: true) }
+ let(:account) { subject.call('https://example.com/alice') }
shared_examples 'sets profile data' do
it 'returns an account' do
@@ -42,7 +42,7 @@
before do
actor[:inbox] = nil
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
@@ -65,7 +65,7 @@
let!(:webfinger) { { subject: 'acct:alice@example.com', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
@@ -91,7 +91,7 @@
let!(:webfinger) { { subject: 'acct:alice@iscool.af', links: [{ rel: 'self', href: 'https://example.com/alice' }] } }
before do
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
stub_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:alice@iscool.af').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
@@ -123,7 +123,7 @@
let!(:webfinger) { { subject: 'acct:alice@example.com', links: [{ rel: 'self', href: 'https://example.com/bob' }] } }
before do
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
@@ -146,7 +146,7 @@
let!(:webfinger) { { subject: 'acct:alice@iscool.af', links: [{ rel: 'self', href: 'https://example.com/bob' }] } }
before do
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
stub_request(:get, 'https://iscool.af/.well-known/webfinger?resource=acct:alice@iscool.af').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
diff --git a/spec/services/activitypub/fetch_remote_key_service_spec.rb b/spec/services/activitypub/fetch_remote_key_service_spec.rb
index 3186c4270d7e3d..53582789985682 100644
--- a/spec/services/activitypub/fetch_remote_key_service_spec.rb
+++ b/spec/services/activitypub/fetch_remote_key_service_spec.rb
@@ -38,16 +38,16 @@
end
before do
- stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor))
+ stub_request(:get, 'https://example.com/alice').to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
stub_request(:get, 'https://example.com/.well-known/webfinger?resource=acct:alice@example.com').to_return(body: Oj.dump(webfinger), headers: { 'Content-Type': 'application/jrd+json' })
end
describe '#call' do
- let(:account) { subject.call(public_key_id, id: false) }
+ let(:account) { subject.call(public_key_id) }
context 'when the key is a sub-object from the actor' do
before do
- stub_request(:get, public_key_id).to_return(body: Oj.dump(actor))
+ stub_request(:get, public_key_id).to_return(body: Oj.dump(actor), headers: { 'Content-Type': 'application/activity+json' })
end
it 'returns the expected account' do
@@ -59,7 +59,7 @@
let(:public_key_id) { 'https://example.com/alice-public-key.json' }
before do
- stub_request(:get, public_key_id).to_return(body: Oj.dump(key_json.merge({ '@context': ['https://www.w3.org/ns/activitystreams', 'https://w3id.org/security/v1'] })))
+ stub_request(:get, public_key_id).to_return(body: Oj.dump(key_json.merge({ '@context': ['https://www.w3.org/ns/activitystreams', 'https://w3id.org/security/v1'] })), headers: { 'Content-Type': 'application/activity+json' })
end
it 'returns the expected account' do
@@ -72,7 +72,7 @@
let(:actor_public_key) { 'https://example.com/alice-public-key.json' }
before do
- stub_request(:get, public_key_id).to_return(body: Oj.dump(key_json.merge({ '@context': ['https://www.w3.org/ns/activitystreams', 'https://w3id.org/security/v1'] })))
+ stub_request(:get, public_key_id).to_return(body: Oj.dump(key_json.merge({ '@context': ['https://www.w3.org/ns/activitystreams', 'https://w3id.org/security/v1'] })), headers: { 'Content-Type': 'application/activity+json' })
end
it 'returns the nil' do
diff --git a/spec/services/activitypub/fetch_replies_service_spec.rb b/spec/services/activitypub/fetch_replies_service_spec.rb
index fe49b18c195db8..00ce1ab0f5a64a 100644
--- a/spec/services/activitypub/fetch_replies_service_spec.rb
+++ b/spec/services/activitypub/fetch_replies_service_spec.rb
@@ -32,6 +32,18 @@
describe '#call' do
context 'when the payload is a Collection with inlined replies' do
+ context 'when there is a single reply, with the array compacted away' do
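+        # JSON-LD compaction allows a single-element array to be serialized as
+        # the bare value, so `items` may be a lone URI string rather than an array.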
+ let(:items) { 'http://example.com/self-reply-1' }
+
+ it 'queues the expected worker' do
+ allow(FetchReplyWorker).to receive(:push_bulk)
+
+ subject.call(status, payload)
+
+ expect(FetchReplyWorker).to have_received(:push_bulk).with(['http://example.com/self-reply-1'])
+ end
+ end
+
context 'when passing the collection itself' do
it 'spawns workers for up to 5 replies on the same server' do
expect(FetchReplyWorker).to receive(:push_bulk).with(['http://example.com/self-reply-1', 'http://example.com/self-reply-2', 'http://example.com/self-reply-3', 'http://example.com/self-reply-4', 'http://example.com/self-reply-5'])
@@ -41,7 +53,7 @@
context 'when passing the URL to the collection' do
before do
- stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it 'spawns workers for up to 5 replies on the same server' do
@@ -70,7 +82,7 @@
context 'when passing the URL to the collection' do
before do
- stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it 'spawns workers for up to 5 replies on the same server' do
@@ -103,7 +115,7 @@
context 'when passing the URL to the collection' do
before do
- stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it 'spawns workers for up to 5 replies on the same server' do
diff --git a/spec/services/activitypub/process_status_update_service_spec.rb b/spec/services/activitypub/process_status_update_service_spec.rb
index 750369d57fbfcf..09c7fe94a0c427 100644
--- a/spec/services/activitypub/process_status_update_service_spec.rb
+++ b/spec/services/activitypub/process_status_update_service_spec.rb
@@ -41,12 +41,12 @@ def poll_option_json(name, votes)
describe '#call' do
it 'updates text' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.reload.text).to eq 'Hello universe'
end
it 'updates content warning' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.reload.spoiler_text).to eq 'Show more'
end
@@ -64,7 +64,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -87,7 +87,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -135,7 +135,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -188,7 +188,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -216,11 +216,11 @@ def poll_option_json(name, votes)
end
it 'does not create any edits' do
- expect { subject.call(status, json) }.not_to change { status.reload.edits.pluck(&:id) }
+ expect { subject.call(status, json, json) }.to_not(change { status.reload.edits.pluck(&:id) })
end
it 'does not update the text, spoiler_text or edited_at' do
- expect { subject.call(status, json) }.not_to change { s = status.reload; [s.text, s.spoiler_text, s.edited_at] }
+ expect { subject.call(status, json, json) }.to_not(change { s = status.reload; [s.text, s.spoiler_text, s.edited_at] })
end
end
@@ -235,7 +235,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -259,7 +259,7 @@ def poll_option_json(name, votes)
before do
status.update(ordered_media_attachment_ids: nil)
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -273,7 +273,7 @@ def poll_option_json(name, votes)
context 'originally without tags' do
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates tags' do
@@ -299,7 +299,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates tags' do
@@ -309,7 +309,7 @@ def poll_option_json(name, votes)
context 'originally without mentions' do
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates mentions' do
@@ -321,7 +321,7 @@ def poll_option_json(name, votes)
let(:mentions) { [alice, bob] }
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates mentions' do
@@ -332,7 +332,7 @@ def poll_option_json(name, votes)
context 'originally without media attachments' do
before do
stub_request(:get, 'https://example.com/foo.png').to_return(body: attachment_fixture('emojo.png'))
- subject.call(status, json)
+ subject.call(status, json, json)
end
let(:payload) do
@@ -382,7 +382,7 @@ def poll_option_json(name, votes)
before do
allow(RedownloadMediaWorker).to receive(:perform_async)
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates the existing media attachment in-place' do
@@ -410,7 +410,7 @@ def poll_option_json(name, votes)
before do
poll = Fabricate(:poll, status: status)
status.update(preloadable_poll: poll)
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'removes poll' do
@@ -440,7 +440,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'creates a poll' do
@@ -456,12 +456,12 @@ def poll_option_json(name, votes)
end
it 'creates edit history' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.edits.reload.map(&:text)).to eq ['Hello world', 'Hello universe']
end
it 'sets edited timestamp' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.reload.edited_at.to_s).to eq '2021-09-08 22:39:25 UTC'
end
end
diff --git a/spec/services/activitypub/synchronize_followers_service_spec.rb b/spec/services/activitypub/synchronize_followers_service_spec.rb
index 75dcf204b79517..7b4a5f8ffe2393 100644
--- a/spec/services/activitypub/synchronize_followers_service_spec.rb
+++ b/spec/services/activitypub/synchronize_followers_service_spec.rb
@@ -58,7 +58,7 @@
describe '#call' do
context 'when the endpoint is a Collection of actor URIs' do
before do
- stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'synchronizes followers'
@@ -75,7 +75,7 @@
end
before do
- stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'synchronizes followers'
@@ -96,7 +96,7 @@
end
before do
- stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload))
+ stub_request(:get, collection_uri).to_return(status: 200, body: Oj.dump(payload), headers: { 'Content-Type': 'application/activity+json' })
end
it_behaves_like 'synchronizes followers'
diff --git a/spec/services/fetch_link_card_service_spec.rb b/spec/services/fetch_link_card_service_spec.rb
index 4914c275326eb9..7a758f910fbf93 100644
--- a/spec/services/fetch_link_card_service_spec.rb
+++ b/spec/services/fetch_link_card_service_spec.rb
@@ -10,6 +10,7 @@
stub_request(:get, 'http://example.com/koi8-r').to_return(request_fixture('koi8-r.txt'))
stub_request(:get, 'http://example.com/日本語').to_return(request_fixture('sjis.txt'))
stub_request(:get, 'https://github.com/qbi/WannaCry').to_return(status: 404)
+ stub_request(:get, 'http://example.com/test?data=file.gpx%5E1').to_return(status: 200)
stub_request(:get, 'http://example.com/test-').to_return(request_fixture('idn.txt'))
stub_request(:get, 'http://example.com/windows-1251').to_return(request_fixture('windows-1251.txt'))
@@ -85,6 +86,15 @@
expect(a_request(:get, 'http://example.com/sjis')).to_not have_been_made
end
end
+
+ context do
+ let(:status) { Fabricate(:status, text: 'test http://example.com/test?data=file.gpx^1') }
+
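+    # A caret is not a valid URI character; the service is expected to
+    # percent-encode it (%5E) rather than truncate the query string.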
+ it 'does fetch URLs with a caret in search params' do
+ expect(a_request(:get, 'http://example.com/test?data=file.gpx')).to_not have_been_made
+ expect(a_request(:get, 'http://example.com/test?data=file.gpx%5E1')).to have_been_made.once
+ end
+ end
end
context 'in a remote status' do
diff --git a/spec/services/fetch_resource_service_spec.rb b/spec/services/fetch_resource_service_spec.rb
index c0c96ab69c95f6..412c410575fd38 100644
--- a/spec/services/fetch_resource_service_spec.rb
+++ b/spec/services/fetch_resource_service_spec.rb
@@ -54,7 +54,7 @@
let(:json) do
{
- id: 1,
+ id: 'http://example.com/foo',
'@context': ActivityPub::TagManager::CONTEXT,
type: 'Note',
}.to_json
@@ -79,14 +79,14 @@
let(:content_type) { 'application/activity+json; charset=utf-8' }
let(:body) { json }
- it { is_expected.to eq [1, { prefetched_body: body, id: true }] }
+ it { is_expected.to eq ['http://example.com/foo', { prefetched_body: body }] }
end
context 'when content type is ld+json with profile' do
let(:content_type) { 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"' }
let(:body) { json }
- it { is_expected.to eq [1, { prefetched_body: body, id: true }] }
+ it { is_expected.to eq ['http://example.com/foo', { prefetched_body: body }] }
end
before do
@@ -97,14 +97,14 @@
context 'when link header is present' do
      let(:headers) { { 'Link' => '<http://example.com/foo>; rel="alternate"; type="application/activity+json"', } }
- it { is_expected.to eq [1, { prefetched_body: json, id: true }] }
+ it { is_expected.to eq ['http://example.com/foo', { prefetched_body: json }] }
end
context 'when content type is text/html' do
let(:content_type) { 'text/html' }
let(:body) { '' }
- it { is_expected.to eq [1, { prefetched_body: json, id: true }] }
+ it { is_expected.to eq ['http://example.com/foo', { prefetched_body: json }] }
end
end
end
diff --git a/spec/services/reblog_service_spec.rb b/spec/services/reblog_service_spec.rb
index c0ae5eedcc166b..24770b4ccb1f3e 100644
--- a/spec/services/reblog_service_spec.rb
+++ b/spec/services/reblog_service_spec.rb
@@ -69,9 +69,5 @@
it 'distributes to followers' do
expect(ActivityPub::DistributionWorker).to have_received(:perform_async)
end
-
- it 'sends an announce activity to the author' do
- expect(a_request(:post, bob.inbox_url)).to have_been_made.once
- end
end
end
diff --git a/spec/services/remove_status_service_spec.rb b/spec/services/remove_status_service_spec.rb
index 482068d58f8296..2b789ed84d14fa 100644
--- a/spec/services/remove_status_service_spec.rb
+++ b/spec/services/remove_status_service_spec.rb
@@ -108,4 +108,22 @@
)).to have_been_made.once
end
end
+
+ context 'when removed status is a reblog of a non-follower' do
+ let!(:original_status) { Fabricate(:status, account: bill, text: 'Hello ThisIsASecret', visibility: :public) }
+ let!(:status) { ReblogService.new.call(alice, original_status) }
+
+ it 'sends Undo activity to followers' do
+ subject.call(status)
+ expect(a_request(:post, bill.inbox_url).with(
+ body: hash_including({
+ 'type' => 'Undo',
+ 'object' => hash_including({
+ 'type' => 'Announce',
+ 'object' => ActivityPub::TagManager.instance.uri_for(original_status),
+ }),
+ })
+ )).to have_been_made.once
+ end
+ end
end
diff --git a/spec/services/report_service_spec.rb b/spec/services/report_service_spec.rb
index 02bc42ac170d60..1737a05ae38101 100644
--- a/spec/services/report_service_spec.rb
+++ b/spec/services/report_service_spec.rb
@@ -4,6 +4,14 @@
subject { described_class.new }
let(:source_account) { Fabricate(:account) }
+ let(:target_account) { Fabricate(:account) }
+
+ context 'with a local account' do
+ it 'has a uri' do
+ report = subject.call(source_account, target_account)
+ expect(report.uri).to_not be_nil
+ end
+ end
context 'for a remote account' do
let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
diff --git a/spec/services/resolve_url_service_spec.rb b/spec/services/resolve_url_service_spec.rb
index b3e3defbff2ccd..85a672524878ff 100644
--- a/spec/services/resolve_url_service_spec.rb
+++ b/spec/services/resolve_url_service_spec.rb
@@ -139,11 +139,42 @@
stub_request(:get, url).to_return(status: 302, headers: { 'Location' => status_url })
body = ActiveModelSerializers::SerializableResource.new(status, serializer: ActivityPub::NoteSerializer, adapter: ActivityPub::Adapter).to_json
stub_request(:get, status_url).to_return(body: body, headers: { 'Content-Type' => 'application/activity+json' })
+ stub_request(:get, uri).to_return(body: body, headers: { 'Content-Type' => 'application/activity+json' })
end
it 'returns status by url' do
expect(subject.call(url, on_behalf_of: account)).to eq(status)
end
end
+
+ context 'when searching for a local link of a remote private status' do
+ let(:account) { Fabricate(:account) }
+ let(:poster) { Fabricate(:account, username: 'foo', domain: 'example.com') }
+ let(:url) { 'https://example.com/@foo/42' }
+ let(:uri) { 'https://example.com/users/foo/statuses/42' }
+ let!(:status) { Fabricate(:status, url: url, uri: uri, account: poster, visibility: :private) }
+ let(:search_url) { "https://#{Rails.configuration.x.local_domain}/@foo@example.com/#{status.id}" }
+
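+    # Both remote URLs return 404, so the status can only be found in the
+    # local database; this is what the follower-visibility checks below exercise.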
+ before do
+ stub_request(:get, url).to_return(status: 404) if url.present?
+ stub_request(:get, uri).to_return(status: 404)
+ end
+
+ context 'when the account follows the poster' do
+ before do
+ account.follow!(poster)
+ end
+
+ it 'returns the status' do
+ expect(subject.call(search_url, on_behalf_of: account)).to eq(status)
+ end
+ end
+
+ context 'when the account does not follow the poster' do
+ it 'does not return the status' do
+ expect(subject.call(search_url, on_behalf_of: account)).to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/support/omniauth_mocks.rb b/spec/support/omniauth_mocks.rb
new file mode 100644
index 00000000000000..9883adec7a6cad
--- /dev/null
+++ b/spec/support/omniauth_mocks.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+OmniAuth.config.test_mode = true
+
+def mock_omniauth(provider, data)
+ OmniAuth.config.mock_auth[provider] = OmniAuth::AuthHash.new(data)
+end
diff --git a/spec/workers/activitypub/fetch_replies_worker_spec.rb b/spec/workers/activitypub/fetch_replies_worker_spec.rb
index 91ef3c4b928fd7..64cfcd8cbf74d4 100644
--- a/spec/workers/activitypub/fetch_replies_worker_spec.rb
+++ b/spec/workers/activitypub/fetch_replies_worker_spec.rb
@@ -21,7 +21,7 @@
describe 'perform' do
it 'performs a request if the collection URI is from the same host' do
- stub_request(:get, 'https://example.com/statuses_replies/1').to_return(status: 200, body: json)
+ stub_request(:get, 'https://example.com/statuses_replies/1').to_return(status: 200, body: json, headers: { 'Content-Type': 'application/activity+json' })
subject.perform(status.id, 'https://example.com/statuses_replies/1')
expect(a_request(:get, 'https://example.com/statuses_replies/1')).to have_been_made.once
end
diff --git a/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb b/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb
index 8f20725c87f256..0b0c4dd487e3f6 100644
--- a/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb
+++ b/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb
@@ -7,11 +7,13 @@
let!(:account2) { Fabricate(:account, domain: nil) }
let!(:account3) { Fabricate(:account, domain: nil) }
let!(:account4) { Fabricate(:account, domain: nil) }
+ let!(:account5) { Fabricate(:account, domain: nil) }
let!(:remote) { Fabricate(:account) }
let!(:policy1) { Fabricate(:account_statuses_cleanup_policy, account: account1) }
let!(:policy2) { Fabricate(:account_statuses_cleanup_policy, account: account3) }
let!(:policy3) { Fabricate(:account_statuses_cleanup_policy, account: account4, enabled: false) }
+ let!(:policy4) { Fabricate(:account_statuses_cleanup_policy, account: account5) }
let(:queue_size) { 0 }
let(:queue_latency) { 0 }
@@ -23,7 +25,6 @@
},
]
end
- let(:retry_size) { 0 }
before do
queue_stub = double
@@ -33,7 +34,6 @@
allow(Sidekiq::ProcessSet).to receive(:new).and_return(process_set_stub)
sidekiq_stats_stub = double
- allow(sidekiq_stats_stub).to receive(:retry_size).and_return(retry_size)
allow(Sidekiq::Stats).to receive(:new).and_return(sidekiq_stats_stub)
# Create a bunch of old statuses
@@ -42,6 +42,7 @@
Fabricate(:status, account: account2, created_at: 3.years.ago)
Fabricate(:status, account: account3, created_at: 3.years.ago)
Fabricate(:status, account: account4, created_at: 3.years.ago)
+ Fabricate(:status, account: account5, created_at: 3.years.ago)
Fabricate(:status, account: remote, created_at: 3.years.ago)
end
@@ -70,17 +71,9 @@
expect(subject.under_load?).to be true
end
end
-
- context 'when there is a huge amount of jobs to retry' do
- let(:retry_size) { 1_000_000 }
-
- it 'returns true' do
- expect(subject.under_load?).to be true
- end
- end
end
- describe '#get_budget' do
+ describe '#compute_budget' do
context 'on a single thread' do
let(:process_set_stub) { [ { 'concurrency' => 1, 'queues' => ['push', 'default'] } ] }
@@ -119,8 +112,48 @@
expect { subject.perform }.to_not change { account4.statuses.count }
end
- it 'eventually deletes every deletable toot' do
- expect { subject.perform; subject.perform; subject.perform; subject.perform }.to change { Status.count }.by(-20)
+ it 'eventually deletes every deletable toot given enough runs' do
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 4
+
+ expect { 10.times { subject.perform } }.to change { Status.count }.by(-30)
+ end
+
+ it 'correctly round-trips between users across several runs' do
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 3
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::PER_ACCOUNT_BUDGET', 2
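+      # With a total budget of 3 and at most 2 deletions per account per run,
+      # every run has to move on to at least a second account, so three runs
+      # should spread the 9 deletions across the accounts with enabled policies.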
+
+ expect { 3.times { subject.perform } }
+ .to change { Status.count }.by(-3 * 3)
+ .and change { account1.statuses.count }
+ .and change { account3.statuses.count }
+ .and change { account5.statuses.count }
+ end
+
+ context 'when given a big budget' do
+ let(:process_set_stub) { [{ 'concurrency' => 400, 'queues' => %w(push default) }] }
+
+ before do
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 400
+ end
+
+ it 'correctly handles looping in a single run' do
+ expect(subject.compute_budget).to eq(400)
+ expect { subject.perform }.to change { Status.count }.by(-30)
+ end
+ end
+
+ context 'when there is no work to be done' do
+ let(:process_set_stub) { [{ 'concurrency' => 400, 'queues' => %w(push default) }] }
+
+ before do
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 400
+ subject.perform
+ end
+
+ it 'does not get stuck' do
+ expect(subject.compute_budget).to eq(400)
+ expect { subject.perform }.to_not change { Status.count }
+ end
end
end
end
diff --git a/streaming/index.js b/streaming/index.js
index 3b3ed8f08998f6..00e8bd098e5a77 100644
--- a/streaming/index.js
+++ b/streaming/index.js
@@ -92,18 +92,31 @@ const redisUrlToClient = async (defaultConfig, redisUrl) => {
const numWorkers = +process.env.STREAMING_CLUSTER_NUM || (env === 'development' ? 1 : Math.max(os.cpus().length - 1, 1));
/**
+ * Attempts to safely parse a string as JSON. It is used both when receiving a
+ * message from redis and when receiving a message from a client over a
+ * websocket connection, which is why it accepts a `req` argument.
* @param {string} json
- * @param {any} req
- * @return {Object.<string, any>|null}
+ * @param {any?} req
+ * @returns {Object.<string, any>|null}
*/
const parseJSON = (json, req) => {
try {
return JSON.parse(json);
} catch (err) {
- if (req.accountId) {
- log.warn(req.requestId, `Error parsing message from user ${req.accountId}: ${err}`);
+ /* FIXME: This logging isn't great, and should probably be done at the
+ * call-site of parseJSON, not in the method, but this would require changing
+ * the signature of parseJSON to return something akin to a Result type:
+     * [Error|null, null|Object<string, any>]
const redisPrefix = redisNamespace ? `${redisNamespace}:` : '';
/**
-     * @type {Object.<string, Array.<function(string): void>>}
+     * @type {Object.<string, Array.<function(Object<string, any>): void>>}
*/
const subs = {};
@@ -207,12 +220,21 @@ const startWorker = async (workerId) => {
return;
}
- callbacks.forEach(callback => callback(message));
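+    // Parse the payload from redis once here, so every subscription listener
+    // receives an already-parsed object instead of a raw JSON string.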
+ const json = parseJSON(message, null);
+ if (!json) return;
+
+ callbacks.forEach(callback => callback(json));
};
+ /**
+ * @callback SubscriptionListener
+   * @param {ReturnType<parseJSON>} json of the message
+ * @returns void
+ */
+
/**
* @param {string} channel
- * @param {function(string): void} callback
+ * @param {SubscriptionListener} callback
*/
const subscribe = (channel, callback) => {
log.silly(`Adding listener for ${channel}`);
@@ -229,6 +251,7 @@ const startWorker = async (workerId) => {
/**
* @param {string} channel
+ * @param {SubscriptionListener} callback
*/
const unsubscribe = (channel, callback) => {
log.silly(`Removing listener for ${channel}`);
@@ -378,7 +401,7 @@ const startWorker = async (workerId) => {
/**
* @param {any} req
- * @return {string}
+ * @returns {string|undefined}
*/
const channelNameFromPath = req => {
const { path, query } = req;
@@ -487,15 +510,11 @@ const startWorker = async (workerId) => {
/**
* @param {any} req
* @param {SystemMessageHandlers} eventHandlers
- * @return {function(string): void}
+ * @returns {function(object): void}
*/
const createSystemMessageListener = (req, eventHandlers) => {
return message => {
- const json = parseJSON(message, req);
-
- if (!json) return;
-
- const { event } = json;
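+      // The redis subscription handler has already parsed the message, so it
+      // arrives here as an object rather than a JSON string.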
+ const { event } = message;
log.silly(req.requestId, `System message for ${req.accountId}: ${event}`);
@@ -610,54 +629,66 @@ const startWorker = async (workerId) => {
* @param {string[]} ids
* @param {any} req
* @param {function(string, string): void} output
- * @param {function(string[], function(string): void): void} attachCloseHandler
+ * @param {undefined | function(string[], SubscriptionListener): void} attachCloseHandler
* @param {boolean=} needsFiltering
- * @return {function(string): void}
+ * @returns {SubscriptionListener}
*/
const streamFrom = (ids, req, output, attachCloseHandler, needsFiltering = false) => {
const accountId = req.accountId || req.remoteAddress;
log.verbose(req.requestId, `Starting stream from ${ids.join(', ')} for ${accountId}`);
- const listener = message => {
- const json = parseJSON(message, req);
-
- if (!json) return;
-
- const { event, payload, queued_at } = json;
+ const transmit = (event, payload) => {
+ // TODO: Replace "string"-based delete payloads with object payloads:
+ const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
- const transmit = () => {
- const now = new Date().getTime();
- const delta = now - queued_at;
- const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
-
- log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload} Delay: ${delta}ms`);
- output(event, encodedPayload);
- };
+ log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload}`);
+ output(event, encodedPayload);
+ };
- // Only messages that may require filtering are statuses, since notifications
- // are already personalized and deletes do not matter
- if (!needsFiltering || event !== 'update') {
- transmit();
+ // The listener used to process each message off the redis subscription,
+ // message here is an object with an `event` and `payload` property. Some
+ // events also include a queued_at value, but this is being removed shortly.
+ /** @type {SubscriptionListener} */
+ const listener = message => {
+ const { event, payload } = message;
+
+ // Streaming only needs to apply filtering to some channels and only to
+  // some events. This is because the majority of the filtering happens on the
+ // Ruby on Rails side when producing the event for streaming.
+ //
+ // The only events that require filtering from the streaming server are
+ // `update` and `status.update`, all other events are transmitted to the
+ // client as soon as they're received (pass-through).
+ //
+ // The channels that need filtering are determined in the function
+ // `channelNameToIds` defined below:
+ if (!needsFiltering || (event !== 'update' && event !== 'status.update')) {
+ transmit(event, payload);
return;
}
- const unpackedPayload = payload;
- const targetAccountIds = [unpackedPayload.account.id].concat(unpackedPayload.mentions.map(item => item.id));
- const accountDomain = unpackedPayload.account.acct.split('@')[1];
+ // The rest of the logic from here on in this function is to handle
+ // filtering of statuses:
- if (Array.isArray(req.chosenLanguages) && unpackedPayload.language !== null && req.chosenLanguages.indexOf(unpackedPayload.language) === -1) {
- log.silly(req.requestId, `Message ${unpackedPayload.id} filtered by language (${unpackedPayload.language})`);
+ // Filter based on language:
+ if (Array.isArray(req.chosenLanguages) && payload.language !== null && req.chosenLanguages.indexOf(payload.language) === -1) {
+ log.silly(req.requestId, `Message ${payload.id} filtered by language (${payload.language})`);
return;
}
// When the account is not logged in, it is not necessary to confirm the block or mute
if (!req.accountId) {
- transmit();
+ transmit(event, payload);
return;
}
- pgPool.connect((err, client, done) => {
+ // Filter based on domain blocks, blocks, mutes, or custom filters:
+ const targetAccountIds = [payload.account.id].concat(payload.mentions.map(item => item.id));
+ const accountDomain = payload.account.acct.split('@')[1];
+
+ // TODO: Move this logic out of the message handling loop
+ pgPool.connect((err, client, releasePgConnection) => {
if (err) {
log.error(err);
return;
@@ -672,40 +703,57 @@ const startWorker = async (workerId) => {
SELECT 1
FROM mutes
WHERE account_id = $1
- AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, unpackedPayload.account.id].concat(targetAccountIds)),
+ AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, payload.account.id].concat(targetAccountIds)),
];
if (accountDomain) {
queries.push(client.query('SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2', [req.accountId, accountDomain]));
}
- if (!unpackedPayload.filtered && !req.cachedFilters) {
+ if (!payload.filtered && !req.cachedFilters) {
queries.push(client.query('SELECT filter.id AS id, filter.phrase AS title, filter.context AS context, filter.expires_at AS expires_at, filter.action AS filter_action, keyword.keyword AS keyword, keyword.whole_word AS whole_word FROM custom_filter_keywords keyword JOIN custom_filters filter ON keyword.custom_filter_id = filter.id WHERE filter.account_id = $1 AND (filter.expires_at IS NULL OR filter.expires_at > NOW())', [req.accountId]));
}
Promise.all(queries).then(values => {
- done();
+ releasePgConnection();
+ // Handling blocks & mutes and domain blocks: If one of those applies,
+ // then we don't transmit the payload of the event to the client
if (values[0].rows.length > 0 || (accountDomain && values[1].rows.length > 0)) {
return;
}
- if (!unpackedPayload.filtered && !req.cachedFilters) {
+ // If the payload already contains the `filtered` property, it means
+              // that filtering has been applied on the Ruby on Rails side; as
+              // such, we don't need to construct or apply the filters in streaming:
+ if (Object.prototype.hasOwnProperty.call(payload, "filtered")) {
+ transmit(event, payload);
+ return;
+ }
+
+ // Handling for constructing the custom filters and caching them on the request
+ // TODO: Move this logic out of the message handling lifecycle
+ if (!req.cachedFilters) {
const filterRows = values[accountDomain ? 2 : 1].rows;
- req.cachedFilters = filterRows.reduce((cache, row) => {
- if (cache[row.id]) {
- cache[row.id].keywords.push([row.keyword, row.whole_word]);
+ req.cachedFilters = filterRows.reduce((cache, filter) => {
+ if (cache[filter.id]) {
+ cache[filter.id].keywords.push([filter.keyword, filter.whole_word]);
} else {
- cache[row.id] = {
- keywords: [[row.keyword, row.whole_word]],
- expires_at: row.expires_at,
- repr: {
- id: row.id,
- title: row.title,
- context: row.context,
- expires_at: row.expires_at,
- filter_action: ['warn', 'hide'][row.filter_action],
+ cache[filter.id] = {
+ keywords: [[filter.keyword, filter.whole_word]],
+ expires_at: filter.expires_at,
+ filter: {
+ id: filter.id,
+ title: filter.title,
+ context: filter.context,
+ expires_at: filter.expires_at,
+ // filter.filter_action is the value from the
+ // custom_filters.action database column, it is an integer
+ // representing a value in an enum defined by Ruby on Rails:
+ //
+ // enum { warn: 0, hide: 1 }
+ filter_action: ['warn', 'hide'][filter.filter_action],
},
};
}
@@ -713,6 +761,10 @@ const startWorker = async (workerId) => {
return cache;
}, {});
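Worked example of the cache built by the reduce above, using hypothetical filter rows: the database returns one row per keyword, and the cache groups them by filter id while keeping the API-facing `filter` object alongside the raw keywords:

```js
// Hypothetical sample rows, shaped like the custom_filter_keywords JOIN above.
const filterRows = [
  { id: 1, title: 'spoilers', context: ['home'], expires_at: null, filter_action: 0, keyword: 'finale', whole_word: true },
  { id: 1, title: 'spoilers', context: ['home'], expires_at: null, filter_action: 0, keyword: 'ending', whole_word: true },
];

const cachedFilters = filterRows.reduce((cache, filter) => {
  if (cache[filter.id]) {
    cache[filter.id].keywords.push([filter.keyword, filter.whole_word]);
  } else {
    cache[filter.id] = {
      keywords: [[filter.keyword, filter.whole_word]],
      expires_at: filter.expires_at,
      filter: {
        id: filter.id,
        title: filter.title,
        context: filter.context,
        expires_at: filter.expires_at,
        filter_action: ['warn', 'hide'][filter.filter_action],
      },
    };
  }
  return cache;
}, {});

console.log(cachedFilters[1].keywords);             // => [['finale', true], ['ending', true]]
console.log(cachedFilters[1].filter.filter_action); // => 'warn'
```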
+ // Construct the regular expressions for the custom filters: This
+ // needs to be done in a separate loop as the database returns one
+ // filterRow per keyword, so we need all the keywords before
+ // constructing the regular expression
Object.keys(req.cachedFilters).forEach((key) => {
req.cachedFilters[key].regexp = new RegExp(req.cachedFilters[key].keywords.map(([keyword, whole_word]) => {
let expr = keyword.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
@@ -732,31 +784,58 @@ const startWorker = async (workerId) => {
});
}
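The rest of the regular-expression construction falls outside this hunk. Here is a self-contained sketch of the approach (escape each keyword, optionally add word boundaries, join the alternatives into one case-insensitive pattern); the exact word-boundary rules in the real file may differ:

```js
// Sketch of building one case-insensitive regexp per filter from its keywords.
const buildFilterRegexp = (keywords) =>
  new RegExp(keywords.map(([keyword, wholeWord]) => {
    // Escape regexp metacharacters in the keyword itself.
    let expr = keyword.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

    if (wholeWord) {
      // Only anchor on \b where the keyword starts/ends with a word character,
      // otherwise the boundary could never match.
      if (/^[\w]/.test(expr)) expr = `\\b${expr}`;
      if (/[\w]$/.test(expr)) expr = `${expr}\\b`;
    }

    return expr;
  }).join('|'), 'i');

// Example:
const regexp = buildFilterRegexp([['finale', true], ['c++', false]]);
console.log(regexp.test('Season finale tonight')); // => true
console.log(regexp.test('finales'));               // => false (whole-word boundary)
```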
- // Check filters
- if (req.cachedFilters && !unpackedPayload.filtered) {
- const status = unpackedPayload;
- const searchContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>/g, '\n\n');
- const searchIndex = JSDOM.fragment(searchContent).textContent;
+ // Apply cachedFilters against the payload, constructing a
+ // `filter_results` array of FilterResult entities
+ if (req.cachedFilters) {
+ const status = payload;
+ // TODO: Calculate searchableContent in Ruby on Rails:
+ const searchableContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>/g, '\n\n');
+ const searchableTextContent = JSDOM.fragment(searchableContent).textContent;
const now = new Date();
- payload.filtered = [];
- Object.values(req.cachedFilters).forEach((cachedFilter) => {
- if ((cachedFilter.expires_at === null || cachedFilter.expires_at > now)) {
- const keyword_matches = searchIndex.match(cachedFilter.regexp);
- if (keyword_matches) {
- payload.filtered.push({
- filter: cachedFilter.repr,
- keyword_matches,
- });
- }
+ const filter_results = Object.values(req.cachedFilters).reduce((results, cachedFilter) => {
+ // Check the filter hasn't expired before applying:
+ if (cachedFilter.expires_at !== null && cachedFilter.expires_at < now) {
+ return results;
+ }
+
+ // Just in case JSDOM fails to find textContent in searchableContent
+ if (!searchableTextContent) {
+ return results;
}
+
+ const keyword_matches = searchableTextContent.match(cachedFilter.regexp);
+ if (keyword_matches) {
+ // results is an Array of FilterResult; status_matches is always
+ // null as we are only applying the keyword-based custom
+ // filters, not the status-based custom filters.
+ // https://docs.joinmastodon.org/entities/FilterResult/
+ results.push({
+ filter: cachedFilter.filter,
+ keyword_matches,
+ status_matches: null
+ });
+ }
+
+ return results;
+ }, []);
+
+ // Send the payload + the FilterResults as the `filtered` property
+ // to the streaming connection. To reach this code, the `event` must
+ // have been either `update` or `status.update`, meaning the
+ // `payload` is a Status entity, which has a `filtered` property:
+ //
+ // filtered: https://docs.joinmastodon.org/entities/Status/#filtered
+ transmit(event, {
+ ...payload,
+ filtered: filter_results
});
+ } else {
+ transmit(event, payload);
}
-
- transmit();
}).catch(err => {
+ releasePgConnection();
log.error(err);
- done();
});
});
};
@@ -765,7 +844,7 @@ const startWorker = async (workerId) => {
subscribe(`${redisPrefix}${id}`, listener);
});
- if (attachCloseHandler) {
+ if (typeof attachCloseHandler === 'function') {
attachCloseHandler(ids.map(id => `${redisPrefix}${id}`), listener);
}
@@ -802,12 +881,13 @@ const startWorker = async (workerId) => {
/**
* @param {any} req
* @param {function(): void} [closeHandler]
- * @return {function(string[]): void}
+ * @returns {function(string[], SubscriptionListener): void}
*/
- const streamHttpEnd = (req, closeHandler = undefined) => (ids) => {
+
+ const streamHttpEnd = (req, closeHandler = undefined) => (ids, listener) => {
req.on('close', () => {
ids.forEach(id => {
- unsubscribe(id);
+ unsubscribe(id, listener);
});
if (closeHandler) {
@@ -856,15 +936,15 @@ const startWorker = async (workerId) => {
res.write('# TYPE connected_channels gauge\n');
res.write('# HELP connected_channels The number of Redis channels the streaming server is subscribed to\n');
res.write(`connected_channels ${Object.keys(subs).length}.0\n`);
- res.write('# TYPE pg.pool.total_connections gauge \n');
- res.write('# HELP pg.pool.total_connections The total number of clients existing within the pool\n');
- res.write(`pg.pool.total_connections ${pgPool.totalCount}.0\n`);
- res.write('# TYPE pg.pool.idle_connections gauge \n');
- res.write('# HELP pg.pool.idle_connections The number of clients which are not checked out but are currently idle in the pool\n');
- res.write(`pg.pool.idle_connections ${pgPool.idleCount}.0\n`);
- res.write('# TYPE pg.pool.waiting_queries gauge \n');
- res.write('# HELP pg.pool.waiting_queries The number of queued requests waiting on a client when all clients are checked out\n');
- res.write(`pg.pool.waiting_queries ${pgPool.waitingCount}.0\n`);
+ res.write('# TYPE pg_pool_total_connections gauge\n');
+ res.write('# HELP pg_pool_total_connections The total number of clients existing within the pool\n');
+ res.write(`pg_pool_total_connections ${pgPool.totalCount}.0\n`);
+ res.write('# TYPE pg_pool_idle_connections gauge\n');
+ res.write('# HELP pg_pool_idle_connections The number of clients which are not checked out but are currently idle in the pool\n');
+ res.write(`pg_pool_idle_connections ${pgPool.idleCount}.0\n`);
+ res.write('# TYPE pg_pool_waiting_queries gauge\n');
+ res.write('# HELP pg_pool_waiting_queries The number of queued requests waiting on a client when all clients are checked out\n');
+ res.write(`pg_pool_waiting_queries ${pgPool.waitingCount}.0\n`);
res.write('# EOF\n');
res.end();
}));
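The rename is needed because Prometheus metric names must match `[a-zA-Z_:][a-zA-Z0-9_:]*`, so the dotted `pg.pool.*` names are not valid exposition-format identifiers. A quick check of the rule:

```js
// Prometheus metric names must match [a-zA-Z_:][a-zA-Z0-9_:]*; dots are not
// allowed, which is why the pg.pool.* names are renamed to pg_pool_*.
const isValidMetricName = (name) => /^[a-zA-Z_:][a-zA-Z0-9_:]*$/.test(name);

console.log(isValidMetricName('pg.pool.total_connections')); // => false
console.log(isValidMetricName('pg_pool_total_connections')); // => true
```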
@@ -1067,7 +1147,7 @@ const startWorker = async (workerId) => {
* @typedef WebSocketSession
* @property {any} socket
* @property {any} request
- * @property {Object.<string, { listener: function(string): void, stopHeartbeat: function(): void }>} subscriptions
+ * @property {Object.<string, { listener: SubscriptionListener, stopHeartbeat: function(): void }>} subscriptions
*/
/**
@@ -1207,8 +1287,15 @@ const startWorker = async (workerId) => {
ws.on('close', onEnd);
ws.on('error', onEnd);
- ws.on('message', data => {
- const json = parseJSON(data, session.request);
+ ws.on('message', (data, isBinary) => {
+ if (isBinary) {
+ log.warn('socket', 'Received binary data, closing connection');
+ ws.close(1003, 'The mastodon streaming server does not support binary messages');
+ return;
+ }
+ const message = data.toString('utf8');
+
+ const json = parseJSON(message, session.request);
if (!json) return;
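With the ws library (v8 and later), `'message'` listeners receive `(data, isBinary)` and `data` is a Buffer even for text frames, hence the explicit rejection of binary frames with close code 1003 and the UTF-8 decode above. A standalone sketch of the same guard, with a hypothetical port and a plain `JSON.parse` standing in for `parseJSON`:

```js
// Minimal sketch of the same guard in isolation (hypothetical port and handler).
const { WebSocketServer } = require('ws');

const wss = new WebSocketServer({ port: 8080 });

wss.on('connection', (ws) => {
  ws.on('message', (data, isBinary) => {
    if (isBinary) {
      // 1003 = "unsupported data": the peer sent a data type we cannot accept.
      ws.close(1003, 'Binary messages are not supported');
      return;
    }

    // With ws v8+, `data` is a Buffer even for text frames, so decode it first.
    const message = data.toString('utf8');

    let json;
    try {
      json = JSON.parse(message);
    } catch {
      return; // ignore malformed JSON, mirroring parseJSON returning null
    }

    console.log('received', json);
  });
});
```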