Skip to content

Commit

Permalink
feat(backup): Generate new OrgAuthToken on input
Browse files Browse the repository at this point in the history
Instead of making a duplicate of the existing token when we copy an org
across regions, which could cause problems for downstream code that
relies on this uniqueness, we simply make a new instance of the token
value. This will break operations that rely on this token as users move
from the old org to the new one, but this is an okay price to pay to
preserve uniqueness.

Issue: getsentry/team-ospo#193
  • Loading branch information
azaslavsky committed Sep 13, 2023
1 parent 2a46053 commit dfb047a
Show file tree
Hide file tree
Showing 6 changed files with 174 additions and 33 deletions.
45 changes: 44 additions & 1 deletion src/sentry/models/orgauthtoken.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,16 @@
from __future__ import annotations

from typing import Optional, Tuple

from django.core.exceptions import ValidationError
from django.db import models
from django.forms import model_to_dict
from django.utils import timezone
from django.utils.encoding import force_str

from sentry.backup.scopes import RelocationScope
from sentry.backup.dependencies import ImportKind, PrimaryKeyMap
from sentry.backup.helpers import ImportFlags
from sentry.backup.scopes import ImportScope, RelocationScope
from sentry.conf.server import SENTRY_SCOPES
from sentry.db.models import (
ArrayField,
Expand All @@ -16,6 +21,7 @@
sane_repr,
)
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
from sentry.services.hybrid_cloud.organization import organization_service
from sentry.services.hybrid_cloud.orgauthtoken import orgauthtoken_service

MAX_NAME_LENGTH = 255
Expand Down Expand Up @@ -76,6 +82,43 @@ def has_scope(self, scope):
def is_active(self) -> bool:
return self.date_deactivated is None

def write_relocation_import(
    self, pk_map: PrimaryKeyMap, scope: ImportScope, flags: ImportFlags
) -> Optional[Tuple[int, int, ImportKind]]:
    """Import this token during a relocation, minting a fresh token value on collision.

    If the incoming `token_hashed` is empty or already exists in the destination region,
    a brand-new token is generated instead of duplicating the old one, preserving the
    uniqueness of `token_hashed` at the cost of invalidating the old token value for
    relocated users.

    Returns a `(old_pk, new_pk, ImportKind)` triple, or `None` if the model could not be
    imported (unmapped foreign keys, or the owning organization cannot be resolved).
    """
    # TODO(getsentry/team-ospo#190): Prevents a circular import; could probably split up the
    # source module in such a way that this is no longer an issue.
    from sentry.api.utils import generate_region_url
    from sentry.utils.security.orgauthtoken_token import generate_token, hash_token

    old_pk = super()._normalize_before_relocation_import(pk_map, scope, flags)
    if old_pk is None:
        return None

    # If there is a token collision, or the token does not exist for some reason, generate
    # a new one.
    colliding_token = self.__class__.objects.filter(token_hashed=self.token_hashed).first()
    if not self.token_hashed or colliding_token is not None:
        org_context = organization_service.get_organization_by_id(id=self.organization_id)
        if org_context is None:
            return None

        token_str = generate_token(org_context.organization.slug, generate_region_url())
        self.token_hashed = hash_token(token_str)
        self.token_last_characters = token_str[-4:]

    # NOTE: `get_or_create` never returns a falsy object as its first element, so no
    # truthiness guard is needed before adopting its primary key.
    token, created = OrgAuthToken.objects.get_or_create(
        token_hashed=self.token_hashed,
        token_last_characters=self.token_last_characters,
        defaults=model_to_dict(self),
    )
    self.pk = token.pk
    self.save()

    return (old_pk, self.pk, ImportKind.Inserted if created else ImportKind.Existing)


def is_org_auth_token_auth(auth: object) -> bool:
""":returns True when an API token is hitting the API."""
Expand Down
3 changes: 0 additions & 3 deletions src/sentry/models/projectkey.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,9 +288,6 @@ def write_relocation_import(
return None

# If there is a key collision, generate new keys.
#
# TODO(getsentry/team-ospo#190): Is this the right behavior? Another option is to just
# update the SQL schema to no longer require uniqueness.
matching_public_key = self.__class__.objects.filter(public_key=self.public_key).first()
if not self.public_key or matching_public_key:
self.public_key = self.generate_api_key()
Expand Down
19 changes: 11 additions & 8 deletions src/sentry/testutils/helpers/backups.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,14 +262,6 @@ def create_exhaustive_organization(
)

# Auth*
OrgAuthToken.objects.create(
organization_id=org.id,
name=f"token 1 for {slug}",
token_hashed=f"ABCDEF{slug}",
token_last_characters="xyz1",
scope_list=["org:ci"],
date_last_used=None,
)
ApiKey.objects.create(key=uuid4().hex, organization_id=org.id)
auth_provider = AuthProvider.objects.create(organization_id=org.id, provider="sentry")
AuthIdentity.objects.create(
Expand Down Expand Up @@ -298,6 +290,17 @@ def create_exhaustive_organization(
)
ProjectRedirect.record(project, f"project_slug_in_{slug}")

# OrgAuthToken
OrgAuthToken.objects.create(
organization_id=org.id,
name=f"token 1 for {slug}",
token_hashed=f"ABCDEF{slug}",
token_last_characters="xyz1",
scope_list=["org:ci"],
date_last_used=None,
project_last_used_id=project.id,
)

# Integration*
integration = Integration.objects.create(
provider="slack", name=f"Slack for {slug}", external_id=f"slack:{slug}"
Expand Down
4 changes: 2 additions & 2 deletions tests/sentry/backup/test_coverage.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from __future__ import annotations

from sentry.backup.helpers import get_exportable_sentry_models
from tests.sentry.backup.test_exhaustive import RELEASE_TESTED_MODELS
from tests.sentry.backup.test_exhaustive import EXHAUSTIVELY_TESTED_MODELS
from tests.sentry.backup.test_models import UNIT_TESTED_MODELS

ALL_EXPORTABLE_MODELS = {c.__name__ for c in get_exportable_sentry_models()}
Expand All @@ -13,5 +13,5 @@ def test_exportable_final_derivations_of_sentry_model_are_unit_tested():


def test_exportable_final_derivations_of_sentry_model_are_exhaustively_tested():
untested = ALL_EXPORTABLE_MODELS - RELEASE_TESTED_MODELS
untested = ALL_EXPORTABLE_MODELS - EXHAUSTIVELY_TESTED_MODELS
assert not untested
76 changes: 68 additions & 8 deletions tests/sentry/backup/test_exhaustive.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,21 @@
from __future__ import annotations

import tempfile
from pathlib import Path
from typing import Literal, Type

from sentry.backup.helpers import get_exportable_sentry_models
from sentry.testutils.helpers.backups import BackupTestCase
from sentry.backup.imports import import_in_global_scope, import_in_organization_scope
from sentry.backup.scopes import ExportScope
from sentry.testutils.helpers.backups import (
NOOP_PRINTER,
BackupTestCase,
clear_database,
export_to_file,
)
from tests.sentry.backup import run_backup_tests_only_on_single_db, targets

RELEASE_TESTED_MODELS = set()
EXHAUSTIVELY_TESTED_MODELS = set()


def mark(*marking: Type | Literal["__all__"]):
Expand All @@ -19,23 +28,74 @@ def mark(*marking: Type | Literal["__all__"]):
for model in marking:
if model == all:
all_models = get_exportable_sentry_models()
RELEASE_TESTED_MODELS.update({c.__name__ for c in all_models})
EXHAUSTIVELY_TESTED_MODELS.update({c.__name__ for c in all_models})
return list(all_models)

RELEASE_TESTED_MODELS.add(model.__name__)
EXHAUSTIVELY_TESTED_MODELS.add(model.__name__)
return marking


@run_backup_tests_only_on_single_db
class ReleaseTests(BackupTestCase):
"""Ensure that the all Sentry models are still exportable."""
class ExhaustiveTests(BackupTestCase):
"""Ensure that a database with all exportable models filled out still works."""

@targets(mark("__all__"))
def test_at_head_clean_pks(self):
def test_exhaustive_clean_pks(self):
self.create_exhaustive_instance(is_superadmin=True)
return self.import_export_then_validate(self._testMethodName, reset_pks=True)

@targets(mark("__all__"))
def test_at_head_dirty_pks(self):
def test_exhaustive_dirty_pks(self):
self.create_exhaustive_instance(is_superadmin=True)
return self.import_export_then_validate(self._testMethodName, reset_pks=False)


@run_backup_tests_only_on_single_db
class UniquenessTests(BackupTestCase):
    """Ensure that required uniqueness (ie, model fields marked `unique=True`) is honored."""

    def export_to_tmp_file_and_clear_database(self, tmp_dir, reset_pks) -> Path:
        """Export the entire database to a JSON file under `tmp_dir`, then wipe the database."""
        tmp_path = Path(tmp_dir).joinpath(f"{self._testMethodName}.expect.json")
        export_to_file(tmp_path, ExportScope.Global)
        clear_database(reset_pks=reset_pks)
        return tmp_path

    def _import_twice_then_export(self, reset_pks: bool):
        """Shared body for the clean/dirty-pk variants.

        Export, clear the database, then import the same data twice, so that all random
        values in the export (UUIDs etc) are identical on the second pass — testing that
        these are properly replaced and handled. Returns the resulting re-export.
        """
        with tempfile.TemporaryDirectory() as tmp_dir:
            tmp_actual = Path(tmp_dir).joinpath(f"{self._testMethodName}.actual.json")
            tmp_expect = self.export_to_tmp_file_and_clear_database(tmp_dir, reset_pks)

            with open(tmp_expect) as tmp_file:
                import_in_global_scope(tmp_file, printer=NOOP_PRINTER)
            with open(tmp_expect) as tmp_file:
                # Back-to-back global scope imports are disallowed (global scope assumes a
                # clean database), so use organization scope instead.
                import_in_organization_scope(tmp_file, printer=NOOP_PRINTER)

            return export_to_file(tmp_actual, ExportScope.Global)

    @targets(mark("__all__"))
    def test_uniqueness_clean_pks(self):
        self.create_exhaustive_instance(is_superadmin=True)
        return self._import_twice_then_export(reset_pks=True)

    @targets(mark("__all__"))
    def test_uniqueness_dirty_pks(self):
        self.create_exhaustive_instance(is_superadmin=True)
        return self._import_twice_then_export(reset_pks=False)
60 changes: 49 additions & 11 deletions tests/sentry/backup/test_imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -623,6 +623,47 @@ class CollisionTests(ImportTestCase):
Ensure that collisions are properly handled in different flag modes.
"""

def test_colliding_org_auth_token(self):
"""A colliding `OrgAuthToken` in the destination should cause the imported copy to be
re-minted with a fresh token, so both rows survive with unique hashes."""
owner = self.create_exhaustive_user("owner")
invited = self.create_exhaustive_user("invited")
member = self.create_exhaustive_user("member")
self.create_exhaustive_organization("some-org", owner, invited, [member])

# Take note of the `OrgAuthToken` that was created by the exhaustive organization - this is
# the one we'll be importing.
colliding = OrgAuthToken.objects.filter().first()

with tempfile.TemporaryDirectory() as tmp_dir:
tmp_path = self.export_to_tmp_file_and_clear_database(tmp_dir)

# After exporting and clearing the database, insert a copy of the same `OrgAuthToken` as
# the one found in the import, attached to a freshly created org/project.
org = self.create_organization()
colliding.organization_id = org.id
colliding.project_last_used_id = self.create_project(organization=org).id
colliding.save()

# Sanity check: exactly one token with the colliding hash exists pre-import.
assert OrgAuthToken.objects.count() == 1
assert OrgAuthToken.objects.filter(token_hashed=colliding.token_hashed).count() == 1
assert (
OrgAuthToken.objects.filter(
token_last_characters=colliding.token_last_characters
).count()
== 1
)

with open(tmp_path) as tmp_file:
import_in_organization_scope(tmp_file, printer=NOOP_PRINTER)

# The imported token must have been re-minted: two tokens total, but still only one
# row carrying the original (colliding) hash and last-characters.
assert OrgAuthToken.objects.count() == 2
assert OrgAuthToken.objects.filter(token_hashed=colliding.token_hashed).count() == 1
assert (
OrgAuthToken.objects.filter(
token_last_characters=colliding.token_last_characters
).count()
== 1
)

def test_colliding_project_key(self):
owner = self.create_exhaustive_user("owner")
invited = self.create_exhaustive_user("invited")
Expand All @@ -632,28 +673,25 @@ def test_colliding_project_key(self):
# Take note of the `ProjectKey` that was created by the exhaustive organization - this is
# the one we'll be importing.
colliding = ProjectKey.objects.filter().first()
colliding_public_key = colliding.public_key
colliding_secret_key = colliding.secret_key

with tempfile.TemporaryDirectory() as tmp_dir:
tmp_path = self.export_to_tmp_file_and_clear_database(tmp_dir)

# After exporting and clearing the database, insert a copy of the same `ProjectKey` as
# the one found in the import.
project = self.create_project()
ProjectKey.objects.create(
project=project,
label="Test",
public_key=colliding_public_key,
secret_key=colliding_secret_key,
)
colliding.project = self.create_project()
colliding.save()

assert ProjectKey.objects.count() < 4
assert ProjectKey.objects.filter(public_key=colliding.public_key).count() == 1
assert ProjectKey.objects.filter(secret_key=colliding.secret_key).count() == 1

with open(tmp_path) as tmp_file:
import_in_organization_scope(tmp_file, printer=NOOP_PRINTER)

assert ProjectKey.objects.count() == 4
assert ProjectKey.objects.filter(public_key=colliding_public_key).count() == 1
assert ProjectKey.objects.filter(secret_key=colliding_secret_key).count() == 1
assert ProjectKey.objects.filter(public_key=colliding.public_key).count() == 1
assert ProjectKey.objects.filter(secret_key=colliding.secret_key).count() == 1

def test_colliding_user_with_merging_enabled_in_user_scope(self):
self.create_exhaustive_user(username="owner", email="[email protected]")
Expand Down

0 comments on commit dfb047a

Please sign in to comment.