ref: fix QuerySet soundness of event_frequency (#75150)
asottile-sentry authored Jul 29, 2024
1 parent 56154a1 commit 2e4a0d4
Showing 1 changed file with 23 additions and 11 deletions.
34 changes: 23 additions & 11 deletions src/sentry/rules/conditions/event_frequency.py
@@ -6,7 +6,7 @@
 from collections import defaultdict
 from collections.abc import Callable, Mapping
 from datetime import datetime, timedelta
-from typing import Any, Literal, NotRequired
+from typing import Any, Literal, NotRequired, TypedDict

 from django import forms
 from django.core.cache import cache
@@ -120,6 +120,13 @@ def clean(self) -> dict[str, Any] | None:
         return cleaned_data


+class _QSTypedDict(TypedDict):
+    id: int
+    type: int
+    project_id: int
+    project__organization_id: int
+
+
 class BaseEventFrequencyCondition(EventCondition, abc.ABC):
     intervals = STANDARD_INTERVALS
     form_cls = EventFrequencyForm
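A note on the row type (not part of the commit): under django-stubs-style typing, QuerySet takes a second type parameter describing the rows a query yields, and .values(...) yields plain dicts rather than Group instances. _QSTypedDict names that dict shape so key access on the rows can be type-checked. A minimal standard-library sketch of the idea:

    from typing import TypedDict

    class _QSTypedDict(TypedDict):
        id: int
        type: int
        project_id: int
        project__organization_id: int

    # Stand-in for one row produced by Group.objects.filter(...).values(
    #     "id", "type", "project_id", "project__organization_id")
    row: _QSTypedDict = {
        "id": 1,
        "type": 1,
        "project_id": 42,
        "project__organization_id": 7,
    }

    # Indexing a declared key is typed as int; a misspelled key would be
    # rejected by the type checker instead of surfacing at runtime.
    print(row["project__organization_id"])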
@@ -377,7 +384,8 @@ def get_chunked_result(
         return batch_totals

     def get_error_and_generic_group_ids(
-        self, groups: list[QuerySet]
+        self,
+        groups: QuerySet[Group, _QSTypedDict],
     ) -> tuple[list[int], list[int]]:
         """
         Separate group ids into error group ids and generic group ids
@@ -386,14 +394,18 @@
         error_issue_ids = []

         for group in groups:
-            issue_type = get_group_type_by_type_id(group.get("type"))
+            issue_type = get_group_type_by_type_id(group["type"])
             if GroupCategory(issue_type.category) == GroupCategory.ERROR:
-                error_issue_ids.append(group.get("id"))
+                error_issue_ids.append(group["id"])
             else:
-                generic_issue_ids.append(group.get("id"))
+                generic_issue_ids.append(group["id"])
         return (error_issue_ids, generic_issue_ids)

-    def get_value_from_groups(self, groups: list[QuerySet] | None, value: str) -> int | None:
+    def get_value_from_groups(
+        self,
+        groups: QuerySet[Group, _QSTypedDict] | None,
+        value: Literal["id", "project_id", "project__organization_id"],
+    ) -> int | None:
         result = None
         if groups:
             group = groups[0]
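Two typing details in this hunk. Under the old list[QuerySet] annotation, each group was typed as a QuerySet, so group.get("type") type-checked against the ORM's QuerySet.get rather than dict access — presumably the soundness problem named in the commit title. With TypedDict rows, group["type"] is typed int, while group.get("type") would typically be typed int | None. Separately, the Literal annotation on value confines callers to keys the .values() query actually selects. A hedged sketch with illustrative names (not from the commit):

    from typing import Literal, TypedDict

    class Row(TypedDict):
        id: int
        project_id: int
        project__organization_id: int

    Key = Literal["id", "project_id", "project__organization_id"]

    def first_value(rows: list[Row], key: Key) -> int | None:
        # Mirrors get_value_from_groups: the key from the first row, or
        # None when there are no rows; rows[0][key] is typed int, whereas
        # rows[0].get(key) would be int | None under most type checkers.
        if rows:
            return rows[0][key]
        return None

    rows: list[Row] = [{"id": 1, "project_id": 2, "project__organization_id": 3}]
    print(first_value(rows, "project_id"))  # 2
    # first_value(rows, "org_id")  # rejected: not one of the permitted Literals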
@@ -426,7 +438,7 @@ def batch_query_hook(
     ) -> dict[int, int]:
         batch_sums: dict[int, int] = defaultdict(int)
         groups = Group.objects.filter(id__in=group_ids).values(
-            "id", "type", "project__organization_id"
+            "id", "type", "project_id", "project__organization_id"
        )
         error_issue_ids, generic_issue_ids = self.get_error_and_generic_group_ids(groups)
         organization_id = self.get_value_from_groups(groups, "project__organization_id")
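Adding "project_id" to the selected columns evidently keeps every .values() call site producing the full _QSTypedDict row (the sessions-based hook further down reads it). A values-style row only contains the keys that were requested, as this small runnable sketch shows:

    # A .values()-style row holds only the keys that were selected, so a
    # row built without "project_id" cannot satisfy _QSTypedDict.
    row = {"id": 1, "type": 1, "project__organization_id": 7}
    try:
        row["project_id"]
    except KeyError:
        print("project_id must be selected for the row to match _QSTypedDict")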
@@ -489,7 +501,7 @@ def batch_query_hook(
     ) -> dict[int, int]:
         batch_totals: dict[int, int] = defaultdict(int)
         groups = Group.objects.filter(id__in=group_ids).values(
-            "id", "type", "project__organization_id"
+            "id", "type", "project_id", "project__organization_id"
        )
         error_issue_ids, generic_issue_ids = self.get_error_and_generic_group_ids(groups)
         organization_id = self.get_value_from_groups(groups, "project__organization_id")
@@ -664,21 +676,21 @@ def batch_query_hook(
         project_id = self.get_value_from_groups(groups, "project_id")

         if not project_id:
-            return {group.get("id"): 0 for group in groups}
+            return {group["id"]: 0 for group in groups}

         session_count_last_hour = self.get_session_count(project_id, environment_id, start, end)
         avg_sessions_in_interval = self.get_session_interval(
             session_count_last_hour, self.get_option("interval")
         )

         if not avg_sessions_in_interval:
-            return {group.get("id"): 0 for group in groups}
+            return {group["id"]: 0 for group in groups}

         error_issue_ids, generic_issue_ids = self.get_error_and_generic_group_ids(groups)
         organization_id = self.get_value_from_groups(groups, "project__organization_id")

         if not (error_issue_ids and organization_id):
-            return {group.get("id"): 0 for group in groups}
+            return {group["id"]: 0 for group in groups}

         error_issue_count = self.get_chunked_result(
             tsdb_function=self.tsdb.get_sums,
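The repeated dict comprehension is a zero-fill early return: when a prerequisite is missing (project id, average session count, or error ids plus organization id), every group still appears in the result with a count of 0 instead of being dropped. A minimal sketch:

    groups = [{"id": 11}, {"id": 12}]
    # Every group id stays present in the result, each mapped to 0.
    print({group["id"]: 0 for group in groups})  # {11: 0, 12: 0}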
