diff --git a/fixtures/backup/fresh-install.json b/fixtures/backup/fresh-install.json
index 9b832d80680077..94c9234732084a 100644
--- a/fixtures/backup/fresh-install.json
+++ b/fixtures/backup/fresh-install.json
@@ -52,10 +52,18 @@
     "model": "sentry.email",
     "pk": 1,
     "fields": {
-        "email": "testing@example.com",
+        "email": "admin@example.com",
         "date_added": "2023-06-22T22:59:55.531Z"
     }
 },
+{
+    "model": "sentry.email",
+    "pk": 2,
+    "fields": {
+        "email": "member@example.com",
+        "date_added": "2023-06-22T22:59:56.531Z"
+    }
+},
 {
     "model": "sentry.organization",
     "pk": 1,
@@ -75,9 +83,9 @@
     "fields": {
         "password": "pbkdf2_sha256$150000$iEvdIknqYjTr$+QsGn0tfIJ1FZLxQI37mVU1gL2KbL/wqjMtG/dFhsMA=",
         "last_login": null,
-        "username": "testing@example.com",
+        "username": "admin@example.com",
         "name": "",
-        "email": "testing@example.com",
+        "email": "admin@example.com",
         "is_staff": true,
         "is_active": true,
         "is_superuser": true,
@@ -94,6 +102,31 @@
         "avatar_url": null
     }
 },
+{
+    "model": "sentry.user",
+    "pk": 2,
+    "fields": {
+        "password": "pbkdf2_sha256$150000$iEvdIknqYjTr$+Atix29ch10FZLxQI37mVU1gL2KbL/qtru7a/eu9A17=",
+        "last_login": null,
+        "username": "member@example.com",
+        "name": "",
+        "email": "member@example.com",
+        "is_staff": true,
+        "is_active": true,
+        "is_superuser": true,
+        "is_managed": false,
+        "is_sentry_app": null,
+        "is_password_expired": false,
+        "is_unclaimed": false,
+        "last_password_change": "2023-06-22T22:59:58.023Z",
+        "flags": "0",
+        "session_nonce": null,
+        "date_joined": "2023-06-22T22:59:56.488Z",
+        "last_active": "2023-06-22T22:59:56.489Z",
+        "avatar_type": 0,
+        "avatar_url": null
+    }
+},
 {
     "model": "sentry.relayusage",
     "pk": 1,
@@ -127,17 +160,39 @@
         "config": "\"\""
     }
 },
+{
+    "model": "sentry.authenticator",
+    "pk": 2,
+    "fields": {
+        "user": 2,
+        "created_at": "2023-06-22T22:59:56.602Z",
+        "last_used_at": null,
+        "type": 1,
+        "config": "\"\""
+    }
+},
 {
     "model": "sentry.useremail",
     "pk": 1,
     "fields": {
         "user": 1,
-        "email": "testing@example.com",
+        "email": "admin@example.com",
         "validation_hash": "mCnWesSVvYQcq7qXQ36AZHwosAd6cghE",
         "date_hash_added": "2023-06-22T22:59:55.521Z",
         "is_verified": false
     }
 },
+{
+    "model": "sentry.useremail",
+    "pk": 2,
+    "fields": {
+        "user": 2,
+        "email": "member@example.com",
+        "validation_hash": "sJui890evYQcq7qXQ36AZHwochus8f0g",
+        "date_hash_added": "2023-06-22T22:59:56.521Z",
+        "is_verified": false
+    }
+},
 {
     "model": "sentry.userrole",
     "pk": 1,
@@ -189,7 +244,27 @@
         "invite_status": 0,
         "type": 50,
         "user_is_active": true,
-        "user_email": "testing@example.com"
+        "user_email": "admin@example.com"
     }
 },
+{
+    "model": "sentry.organizationmember",
+    "pk": 2,
+    "fields": {
+        "organization": 1,
+        "user_id": 2,
+        "email": null,
+        "role": "member",
+        "flags": "0",
+        "token": null,
+        "date_added": "2023-06-22T22:59:56.561Z",
+        "token_expires_at": null,
+        "has_global_access": true,
+        "inviter_id": null,
+        "invite_status": 0,
+        "type": 50,
+        "user_is_active": true,
+        "user_email": "member@example.com"
+    }
+},
 {
@@ -261,6 +336,16 @@
         "role": null
     }
 },
+{
+    "model": "sentry.organizationmemberteam",
+    "pk": 2,
+    "fields": {
+        "team": 1,
+        "organizationmember": 2,
+        "is_active": true,
+        "role": null
+    }
+},
 {
     "model": "sentry.projectoption",
     "pk": 1,
diff --git a/src/sentry/api/endpoints/relocations/index.py b/src/sentry/api/endpoints/relocations/index.py
index 2083c8a9fbaf65..1da3d64314a2b2 100644
--- a/src/sentry/api/endpoints/relocations/index.py
+++ b/src/sentry/api/endpoints/relocations/index.py
@@ -83,7 +83,7 @@ def post(self, request: Request) -> Response:
         ).exists():
             return Response({"detail": ERR_DUPLICATE_RELOCATION}, status=409)
 
-        # TODO(getsentry/team-ospo#203): check import size, and maybe throttle based on that
+        # TODO(getsentry/team-ospo#216): check import size, and maybe throttle based on that
         # information.
         file = File.objects.create(name="raw-relocation-data.tar", type=RELOCATION_FILE_TYPE)
 
diff --git a/src/sentry/models/lostpasswordhash.py b/src/sentry/models/lostpasswordhash.py
index 4702582c9c7970..c09e9afd8050fd 100644
--- a/src/sentry/models/lostpasswordhash.py
+++ b/src/sentry/models/lostpasswordhash.py
@@ -41,7 +41,18 @@ def is_valid(self) -> bool:
         return self.date_added > timezone.now() - timedelta(hours=1)
 
     @classmethod
-    def send_email(cls, user, hash, request, mode="recover") -> None:
+    def send_recover_password_email(cls, user, hash, ip_address) -> None:
+        extra = {
+            "ip_address": ip_address,
+        }
+        cls._send_email("recover_password", user, hash, extra)
+
+    @classmethod
+    def send_relocate_account_email(cls, user, hash) -> None:
+        cls._send_email("relocate_account", user, hash, {})
+
+    @classmethod
+    def _send_email(cls, mode, user, hash, extra) -> None:
         from sentry import options
         from sentry.http import get_server_hostname
         from sentry.utils.email import MessageBuilder
@@ -51,7 +62,7 @@ def send_email(cls, user, hash, request, mode="recover") -> None:
             "domain": get_server_hostname(),
             "url": cls.get_lostpassword_url(user.id, hash, mode),
             "datetime": timezone.now(),
-            "ip_address": request.META["REMOTE_ADDR"],
+            **extra,
         }
 
         subject = "Password Recovery"
diff --git a/src/sentry/models/relocation.py b/src/sentry/models/relocation.py
index 4f21c9e0175c4a..2b888504858d56 100644
--- a/src/sentry/models/relocation.py
+++ b/src/sentry/models/relocation.py
@@ -135,7 +135,7 @@ class Kind(Enum):
         RAW_USER_DATA = 1
         # A normalized version of the user data.
         #
-        # TODO(getsentry/team-ospo#203): Add a normalization step to the relocation flow
+        # TODO(getsentry/team-ospo#216): Add a normalization step to the relocation flow
         NORMALIZED_USER_DATA = 2
         # The global configuration we're going to validate against - pulled from the live Sentry
         # instance, not supplied by the user.
diff --git a/src/sentry/tasks/relocation.py b/src/sentry/tasks/relocation.py
index 6ba0bc37712132..0e85a57f67b7ea 100644
--- a/src/sentry/tasks/relocation.py
+++ b/src/sentry/tasks/relocation.py
@@ -28,7 +28,8 @@
 from sentry.filestore.gcs import GoogleCloudStorage
 from sentry.models.files.file import File
 from sentry.models.files.utils import get_storage
-from sentry.models.importchunk import RegionImportChunk
+from sentry.models.importchunk import ControlImportChunkReplica, RegionImportChunk
+from sentry.models.lostpasswordhash import LostPasswordHash as LostPasswordHash
 from sentry.models.organization import Organization
 from sentry.models.relocation import (
     Relocation,
@@ -38,7 +39,9 @@
     ValidationStatus,
 )
 from sentry.models.user import User
+from sentry.services.hybrid_cloud.lost_password_hash import lost_password_hash_service
 from sentry.services.hybrid_cloud.organization import organization_service
+from sentry.services.hybrid_cloud.user.service import user_service
 from sentry.silo import SiloMode
 from sentry.tasks.base import instrumented_task
 from sentry.utils import json
@@ -117,7 +120,7 @@
 ERR_COMPLETED_INTERNAL = "Internal error during relocation wrap-up."
 
 
-# TODO(getsentry/team-ospo#203): We should split this task in two, one for "small" imports of say
+# TODO(getsentry/team-ospo#216): We should split this task in two, one for "small" imports of say
 # <=10MB, and one for large imports >10MB. Then we should limit the number of daily executions of
 # the latter.
 @instrumented_task(
@@ -359,7 +362,7 @@ def preprocessing_baseline_config(uuid: str) -> None:
         attempts_left,
         ERR_PREPROCESSING_INTERNAL,
     ):
-        # TODO(getsentry/team-ospo#203): A very nice optimization here is to only pull this down
+        # TODO(getsentry/team-ospo#216): A very nice optimization here is to only pull this down
         # once a day - if we've already done a relocation today, we should just copy that file
         # instead of doing this (expensive!) global export again.
         fp = BytesIO()
@@ -1015,7 +1018,7 @@ def postprocessing(uuid: str) -> None:
         ):
             imported_org_ids = imported_org_ids.union(set(chunk.inserted_map.values()))
 
-        # Do a sanity check on pk-mapping before we go an make anyone the owner of an org they did
+        # Do a sanity check on pk-mapping before we go and make anyone the owner of an org they did
         # not import - are all of these orgs plausibly ones that the user requested, based on slug
         # matching?
         imported_orgs = Organization.objects.filter(id__in=imported_org_ids)
@@ -1040,8 +1043,7 @@ def postprocessing(uuid: str) -> None:
                 role="owner",
             )
 
-    # TODO(getsentry/team-ospo#203): Call notifying_users here.
-    notifying_owner.delay(uuid)
+    notifying_users.delay(uuid)
 
 
 @instrumented_task(
@@ -1058,8 +1060,51 @@ def notifying_users(uuid: str) -> None:
     """
     Send an email to all users that have been imported, telling them to claim their accounts.
     """
-    # TODO(getsentry/team-ospo#203): Implement this.
-    pass
+    relocation: Optional[Relocation]
+    attempts_left: int
+    (relocation, attempts_left) = start_relocation_task(
+        uuid=uuid,
+        step=Relocation.Step.NOTIFYING,
+        task=OrderedTask.NOTIFYING_USERS,
+        allowed_task_attempts=MAX_FAST_TASK_ATTEMPTS,
+    )
+    if relocation is None:
+        return
+
+    with retry_task_or_fail_relocation(
+        relocation,
+        OrderedTask.NOTIFYING_USERS,
+        attempts_left,
+        ERR_NOTIFYING_INTERNAL,
+    ):
+        imported_user_ids: set[int] = set()
+        chunks = ControlImportChunkReplica.objects.filter(
+            import_uuid=str(uuid), model="sentry.user"
+        )
+        for chunk in chunks:
+            imported_user_ids = imported_user_ids.union(set(chunk.inserted_map.values()))
+
+        # Do a sanity check on pk-mapping before we go and reset the passwords of random users - are
+        # all of these usernames plausibly ones that were included in the import, based on username
+        # prefix matching?
+        imported_users = user_service.get_many(filter={"user_ids": list(imported_user_ids)})
+        for user in imported_users:
+            matched_prefix = False
+            for username_prefix in relocation.want_usernames:
+                if user.username.startswith(username_prefix):
+                    matched_prefix = True
+                    break
+
+            # This should always be treated as an internal logic error, since we just wrote these
+            # users, so probably there is a serious bug with pk mapping.
+            assert matched_prefix is True
+
+        # Okay, everything seems fine - go ahead and send those emails.
+        for user in imported_users:
+            hash = lost_password_hash_service.get_or_create(user_id=user.id).hash
+            LostPasswordHash.send_relocate_account_email(user, hash)
+
+    notifying_owner.delay(uuid)
 
 
 @instrumented_task(
diff --git a/src/sentry/utils/relocation.py b/src/sentry/utils/relocation.py
index b78d50e7e2b716..d79dee249bcde0 100644
--- a/src/sentry/utils/relocation.py
+++ b/src/sentry/utils/relocation.py
@@ -37,8 +37,9 @@ class OrderedTask(Enum):
     VALIDATING_COMPLETE = 8
     IMPORTING = 9
     POSTPROCESSING = 10
-    NOTIFYING_OWNER = 11
-    COMPLETED = 12
+    NOTIFYING_USERS = 11
+    NOTIFYING_OWNER = 12
+    COMPLETED = 13
 
 
 # The file type for a relocation export tarball of any kind.
@@ -61,10 +62,10 @@ class OrderedTask(Enum):
 # be imported, a `/workspace/out` directory for exports that will be generated, and
 # `/workspace/findings` for findings.
 #
-# TODO(getsentry/team-ospo#203): Make `get-self-hosted-repo` pull a pinned version, not
+# TODO(getsentry/team-ospo#190): Make `get-self-hosted-repo` pull a pinned version, not
 # mainline.
 #
-# TODO(getsentry/team-ospo#203): Use script in self-hosted to completely flush db instead of
+# TODO(getsentry/team-ospo#216): Use script in self-hosted to completely flush db instead of
 # using truncation tables.
 CLOUDBUILD_YAML_TEMPLATE = Template(
     """
@@ -548,7 +549,7 @@ def create_cloudbuild_yaml(relocation: Relocation) -> bytes:
             kind=RelocationFile.Kind.COLLIDING_USERS_VALIDATION_DATA,
             args=[],
         ),
-        # TODO(getsentry/team-ospo#203): Add compare-raw-relocation-data as well.
+        # TODO(getsentry/team-ospo#216): Add compare-raw-relocation-data as well.
     ]
 
     deps = dependencies()
diff --git a/src/sentry/web/frontend/accounts.py b/src/sentry/web/frontend/accounts.py
index e91fd87b29b805..45ed56144bac6e 100644
--- a/src/sentry/web/frontend/accounts.py
+++ b/src/sentry/web/frontend/accounts.py
@@ -44,7 +44,7 @@ def login_redirect(request):
 
 def expired(request, user):
     hash = lost_password_hash_service.get_or_create(user_id=user.id).hash
-    LostPasswordHash.send_email(user, hash, request)
+    LostPasswordHash.send_recover_password_email(user, hash, request.META["REMOTE_ADDR"])
 
     context = {"email": user.email}
     return render_to_response(get_template("recover", "expired"), context, request)
@@ -80,7 +80,9 @@ def recover(request):
         email = form.cleaned_data["user"]
         if email:
             password_hash = lost_password_hash_service.get_or_create(user_id=email.id)
-            LostPasswordHash.send_email(email, password_hash.hash, request)
+            LostPasswordHash.send_recover_password_email(
+                email, password_hash.hash, request.META["REMOTE_ADDR"]
+            )
 
             extra["passwordhash_id"] = password_hash.id
             extra["user_id"] = password_hash.user_id
diff --git a/tests/sentry/models/test_lostpasswordhash.py b/tests/sentry/models/test_lostpasswordhash.py
index 7d5a49358e43b6..d68fa54ccde333 100644
--- a/tests/sentry/models/test_lostpasswordhash.py
+++ b/tests/sentry/models/test_lostpasswordhash.py
@@ -1,5 +1,4 @@
 from django.core import mail
-from django.http import HttpRequest
 from django.urls import reverse
 
 from sentry.models.lostpasswordhash import LostPasswordHash
@@ -12,12 +11,8 @@ class LostPasswordTest(TestCase):
     def test_send_recover_mail(self):
         password_hash = LostPasswordHash.objects.create(user=self.user)
 
-        request = HttpRequest()
-        request.method = "GET"
-        request.META["REMOTE_ADDR"] = "1.1.1.1"
-
         with self.options({"system.url-prefix": "http://testserver"}), self.tasks():
-            LostPasswordHash.send_email(self.user, password_hash.hash, request)
+            LostPasswordHash.send_recover_password_email(self.user, password_hash.hash, "1.1.1.1")
 
         assert len(mail.outbox) == 1
         msg = mail.outbox[0]
@@ -32,12 +27,8 @@ def test_send_recover_mail(self):
     def test_send_relocation_mail(self):
         password_hash = LostPasswordHash.objects.create(user=self.user)
 
-        request = HttpRequest()
-        request.method = "GET"
-        request.META["REMOTE_ADDR"] = "1.1.1.1"
-
         with self.options({"system.url-prefix": "http://testserver"}), self.tasks():
-            LostPasswordHash.send_email(self.user, password_hash.hash, request, "relocate_account")
+            LostPasswordHash.send_relocate_account_email(self.user, password_hash.hash)
 
         assert len(mail.outbox) == 1
         msg = mail.outbox[0]
diff --git a/tests/sentry/tasks/test_relocation.py b/tests/sentry/tasks/test_relocation.py
index 2d99687b79a1d8..c3a9ca9bd8b98c 100644
--- a/tests/sentry/tasks/test_relocation.py
+++ b/tests/sentry/tasks/test_relocation.py
@@ -24,7 +24,11 @@
 from sentry.backup.imports import import_in_organization_scope
 from sentry.models.files.file import File
 from sentry.models.files.utils import get_storage
-from sentry.models.importchunk import ControlImportChunk, RegionImportChunk
+from sentry.models.importchunk import (
+    ControlImportChunk,
+    ControlImportChunkReplica,
+    RegionImportChunk,
+)
 from sentry.models.organization import Organization
 from sentry.models.organizationmember import OrganizationMember
 from sentry.models.relocation import (
@@ -53,9 +57,11 @@
     ERR_VALIDATING_MAX_RUNS,
     MAX_FAST_TASK_RETRIES,
     MAX_VALIDATION_POLLS,
+    LostPasswordHash,
     completed,
     importing,
     notifying_owner,
+    notifying_users,
     postprocessing,
     preprocessing_baseline_config,
     preprocessing_colliding_users,
@@ -286,7 +292,10 @@ def test_success_admin_assisted_relocation(
         )
 
         assert preprocessing_baseline_config_mock.call_count == 1
-        assert Relocation.objects.get(uuid=self.uuid).want_usernames == ["testing@example.com"]
+        assert Relocation.objects.get(uuid=self.uuid).want_usernames == [
+            "admin@example.com",
+            "member@example.com",
+        ]
 
     def test_success_self_service_relocation(
         self,
@@ -309,7 +318,10 @@ def test_success_self_service_relocation(
 
         assert preprocessing_baseline_config_mock.call_count == 1
 
-        assert Relocation.objects.get(uuid=self.uuid).want_usernames == ["testing@example.com"]
+        assert Relocation.objects.get(uuid=self.uuid).want_usernames == [
+            "admin@example.com",
+            "member@example.com",
+        ]
 
     def test_retry_if_attempts_left(
         self,
@@ -488,7 +500,7 @@ def test_fail_too_many_users(
 
         relocation = Relocation.objects.get(uuid=self.uuid)
         assert relocation.status == Relocation.Status.FAILURE.value
-        assert relocation.failure_reason == ERR_PREPROCESSING_TOO_MANY_USERS.substitute(count=1)
+        assert relocation.failure_reason == ERR_PREPROCESSING_TOO_MANY_USERS.substitute(count=2)
 
     def test_fail_no_orgs(
         self,
@@ -1455,7 +1467,7 @@ def test_success(
 
 
 @patch("sentry.utils.relocation.MessageBuilder")
-@patch("sentry.tasks.relocation.notifying_owner.delay")
+@patch("sentry.tasks.relocation.notifying_users.delay")
 @region_silo_test
 class PostprocessingTest(RelocationTaskTestCase):
     def setUp(self):
@@ -1485,7 +1497,7 @@ def setUp(self):
 
     def test_success(
         self,
-        notifying_owner_mock: Mock,
+        notifying_users_mock: Mock,
         fake_message_builder: Mock,
     ):
         self.mock_message_builder(fake_message_builder)
@@ -1501,8 +1513,7 @@ def test_success(
 
         postprocessing(self.uuid)
 
-        # TODO(getsentry/team-ospo#203): Should notify users instead.
-        assert notifying_owner_mock.call_count == 1
+        assert notifying_users_mock.call_count == 1
 
         assert (
             OrganizationMember.objects.filter(
@@ -1516,7 +1527,7 @@ def test_success(
 
     def test_retry_if_attempts_left(
         self,
-        notifying_owner_mock: Mock,
+        notifying_users_mock: Mock,
         fake_message_builder: Mock,
     ):
         self.mock_message_builder(fake_message_builder)
@@ -1527,14 +1538,16 @@ def test_retry_if_attempts_left(
         with pytest.raises(Exception):
             postprocessing(self.uuid)
 
+        assert fake_message_builder.call_count == 0
+        assert notifying_users_mock.call_count == 0
+
         relocation = Relocation.objects.get(uuid=self.uuid)
         assert relocation.status == Relocation.Status.IN_PROGRESS.value
         assert not relocation.failure_reason
-        assert notifying_owner_mock.call_count == 0
 
     def test_fail_if_no_attempts_left(
         self,
-        notifying_owner_mock: Mock,
+        notifying_users_mock: Mock,
         fake_message_builder: Mock,
     ):
         self.mock_message_builder(fake_message_builder)
@@ -1546,7 +1559,13 @@ def test_fail_if_no_attempts_left(
         with pytest.raises(Exception):
             postprocessing(self.uuid)
 
-        assert notifying_owner_mock.call_count == 0
+        assert fake_message_builder.call_count == 1
+        assert fake_message_builder.call_args.kwargs["type"] == "relocation.failed"
+        fake_message_builder.return_value.send_async.assert_called_once_with(
+            to=[self.owner.email, self.superuser.email]
+        )
+
+        assert notifying_users_mock.call_count == 0
 
         relocation = Relocation.objects.get(uuid=self.uuid)
         assert relocation.status == Relocation.Status.FAILURE.value
@@ -1554,14 +1573,105 @@
 
 
 @patch("sentry.utils.relocation.MessageBuilder")
-@patch("sentry.tasks.relocation.completed.delay")
+@patch("sentry.tasks.relocation.notifying_owner.delay")
 @region_silo_test
-class NotifyingOwnerTest(RelocationTaskTestCase):
+class NotifyingUsersTest(RelocationTaskTestCase):
     def setUp(self):
         RelocationTaskTestCase.setUp(self)
         TransactionTestCase.setUp(self)
         self.relocation.step = Relocation.Step.POSTPROCESSING.value
         self.relocation.latest_task = "POSTPROCESSING"
+        self.relocation.want_usernames = ["admin@example.com", "member@example.com"]
+        self.relocation.save()
+
+        with open(IMPORT_JSON_FILE_PATH, "rb") as fp:
+            import_in_organization_scope(
+                fp,
+                flags=ImportFlags(
+                    merge_users=False, overwrite_configs=False, import_uuid=str(self.uuid)
+                ),
+                org_filter=set(self.relocation.want_org_slugs),
+            )
+
+        self.imported_users = ControlImportChunkReplica.objects.get(
+            import_uuid=self.uuid, model="sentry.user"
+        )
+
+        assert len(self.imported_users.inserted_map) == 2
+
+    def test_success(
+        self,
+        notifying_owner_mock: Mock,
+        fake_message_builder: Mock,
+    ):
+        self.mock_message_builder(fake_message_builder)
+
+        with patch.object(LostPasswordHash, "send_relocate_account_email") as mock_relocation_email:
+            notifying_users(self.uuid)
+
+            # Called once for each user imported, which is 2 for `fresh-install.json`
+            assert mock_relocation_email.call_count == 2
+            assert mock_relocation_email.call_args_list[0][0][0].username == "admin@example.com"
+            assert mock_relocation_email.call_args_list[1][0][0].username == "member@example.com"
+
+        assert fake_message_builder.call_count == 0
+        assert notifying_owner_mock.call_count == 1
+
+    def test_retry_if_attempts_left(
+        self,
+        notifying_owner_mock: Mock,
+        fake_message_builder: Mock,
+    ):
+        self.mock_message_builder(fake_message_builder)
+        self.relocation.want_usernames = ["doesnotexist"]
+        self.relocation.save()
+
+        # An exception being raised will trigger a retry in celery.
+        with pytest.raises(Exception):
+            notifying_users(self.uuid)
+
+        assert fake_message_builder.call_count == 0
+        assert notifying_owner_mock.call_count == 0
+
+        relocation = Relocation.objects.get(uuid=self.uuid)
+        assert relocation.status == Relocation.Status.IN_PROGRESS.value
+        assert not relocation.failure_reason
+
+    def test_fail_if_no_attempts_left(
+        self,
+        notifying_owner_mock: Mock,
+        fake_message_builder: Mock,
+    ):
+        self.mock_message_builder(fake_message_builder)
+        self.relocation.latest_task = "NOTIFYING_USERS"
+        self.relocation.latest_task_attempts = MAX_FAST_TASK_RETRIES
+        self.relocation.want_usernames = ["doesnotexist"]
+        self.relocation.save()
+
+        with pytest.raises(Exception):
+            notifying_users(self.uuid)
+
+        assert fake_message_builder.call_count == 1
+        assert fake_message_builder.call_args.kwargs["type"] == "relocation.failed"
+        fake_message_builder.return_value.send_async.assert_called_once_with(
+            to=[self.owner.email, self.superuser.email]
+        )
+        assert notifying_owner_mock.call_count == 0
+
+        relocation = Relocation.objects.get(uuid=self.uuid)
+        assert relocation.status == Relocation.Status.FAILURE.value
+        assert relocation.failure_reason == ERR_NOTIFYING_INTERNAL
+
+
+@patch("sentry.utils.relocation.MessageBuilder")
+@patch("sentry.tasks.relocation.completed.delay")
+@region_silo_test
+class NotifyingOwnerTest(RelocationTaskTestCase):
+    def setUp(self):
+        RelocationTaskTestCase.setUp(self)
+        TransactionTestCase.setUp(self)
+        self.relocation.step = Relocation.Step.NOTIFYING.value
+        self.relocation.latest_task = "NOTIFYING_USERS"
         self.relocation.save()
 
     def test_success_admin_assisted_relocation(
@@ -1705,9 +1815,13 @@ def test_valid_no_retries(
         self.mock_message_builder(fake_message_builder)
 
         org_count = Organization.objects.filter(slug__startswith="testing").count()
-        with self.tasks():
+        with self.tasks(), patch.object(
+            LostPasswordHash, "send_relocate_account_email"
+        ) as mock_relocation_email:
             uploading_complete(self.relocation.uuid)
 
+        assert mock_relocation_email.call_count == 2
+
         assert fake_cloudbuild_client.create_build.call_count == 1
         assert fake_cloudbuild_client.get_build.call_count == 1
 
@@ -1754,9 +1868,13 @@ def test_invalid_no_retries(
         mock_invalid_finding(self.storage, self.uuid)
 
         org_count = Organization.objects.filter(slug__startswith="testing").count()
-        with self.tasks():
+        with self.tasks(), patch.object(
+            LostPasswordHash, "send_relocate_account_email"
+        ) as mock_relocation_email:
             uploading_complete(self.relocation.uuid)
 
+        assert mock_relocation_email.call_count == 0
+
         assert fake_cloudbuild_client.create_build.call_count == 1
         assert fake_cloudbuild_client.get_build.call_count == 1