Skip to content

Commit 8e37705

Browse files
authored
chore(post-process): Remove Event.build_group_events (#69472)
This isn't necessary anymore — events can have at most one group, so we can use `Event.for_group` instead. We'll probably continue simplifying this further.
1 parent 11ae138 commit 8e37705

File tree

5 files changed

+17
-83
lines changed

5 files changed

+17
-83
lines changed

src/sentry/eventstore/models.py

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import abc
44
import logging
55
import string
6-
from collections.abc import Generator, Mapping, MutableMapping, Sequence
6+
from collections.abc import Mapping, MutableMapping, Sequence
77
from copy import deepcopy
88
from datetime import datetime, timezone
99
from hashlib import md5
@@ -703,13 +703,6 @@ def groups(self, values: Sequence[Group] | None):
703703
self._groups_cache = values
704704
self._group_ids = [group.id for group in values] if values else None
705705

706-
def build_group_events(self) -> Generator[GroupEvent, None, None]:
707-
"""
708-
Yields a GroupEvent for each Group associated with this Event.
709-
"""
710-
for group in self.groups:
711-
yield GroupEvent.from_event(self, group)
712-
713706
def for_group(self, group: Group) -> GroupEvent:
714707
return GroupEvent.from_event(self, group)
715708

src/sentry/tasks/post_process.py

Lines changed: 8 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import logging
44
import uuid
5-
from collections.abc import Mapping, Sequence
5+
from collections.abc import Sequence
66
from datetime import datetime, timedelta
77
from time import time
88
from typing import TYPE_CHECKING, Any, TypedDict
@@ -55,7 +55,7 @@
5555

5656

5757
class PostProcessJob(TypedDict, total=False):
58-
event: Event | GroupEvent
58+
event: GroupEvent
5959
group_state: GroupState
6060
is_reprocessed: bool
6161
has_reappeared: bool
@@ -667,34 +667,24 @@ def get_event_raise_exception() -> Event:
667667
"is_new_group_environment": is_new_group_environment,
668668
}
669669

670-
update_event_group(event, group_state)
670+
group_event = update_event_group(event, group_state)
671671
bind_organization_context(event.project.organization)
672672
_capture_event_stats(event)
673673
if should_update_escalating_metrics(event, is_transaction_event):
674674
_update_escalating_metrics(event)
675675

676-
group_events: Mapping[int, GroupEvent] = {
677-
ge.group_id: ge for ge in list(event.build_group_events())
678-
}
679-
if occurrence is not None:
680-
for ge in group_events.values():
681-
ge.occurrence = occurrence
676+
group_event.occurrence = occurrence
682677

683678
group_job: PostProcessJob = {
684-
"event": group_events[group_state["id"]],
679+
"event": group_event,
685680
"group_state": group_state,
686681
"is_reprocessed": is_reprocessed,
687682
"has_reappeared": bool(not group_state["is_new"]),
688683
"has_alert": False,
689684
"has_escalated": False,
690685
}
691686
run_post_process_job(group_job)
692-
693-
if group_events:
694-
# In practice, we only have one group here and will be removing the list of jobs. For now, just grab a
695-
# random one
696-
group_event = list(group_events.values())[0]
697-
metric_tags["occurrence_type"] = group_event.group.issue_type.slug
687+
metric_tags["occurrence_type"] = group_event.group.issue_type.slug
698688

699689
if not is_reprocessed and event.data.get("received"):
700690
duration = time() - event.data["received"]
@@ -795,7 +785,7 @@ def process_event(data: dict, group_id: int | None) -> Event:
795785
return event
796786

797787

798-
def update_event_group(event: Event, group_state: GroupState) -> None:
788+
def update_event_group(event: Event, group_state: GroupState) -> GroupEvent:
799789
# NOTE: we must pass through the full Event object, and not an
800790
# event_id since the Event object may not actually have been stored
801791
# in the database due to sampling.
@@ -821,6 +811,7 @@ def update_event_group(event: Event, group_state: GroupState) -> None:
821811
event.group = rebound_group
822812

823813
event.groups = [rebound_group]
814+
return event.for_group(rebound_group)
824815

825816

826817
def process_inbox_adds(job: PostProcessJob) -> None:

tests/sentry/eventstore/test_models.py

Lines changed: 0 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -534,57 +534,6 @@ def test_from_group_snuba(self):
534534
assert event.groups == [self.group]
535535

536536

537-
class EventBuildGroupEventsTest(TestCase):
538-
def test_none(self):
539-
event = Event(
540-
event_id="a" * 32,
541-
data={
542-
"level": "info",
543-
"message": "Foo bar",
544-
"culprit": "app/components/events/eventEntries in map",
545-
"type": "transaction",
546-
"contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
547-
},
548-
project_id=self.project.id,
549-
)
550-
assert list(event.build_group_events()) == []
551-
552-
def test(self):
553-
event = Event(
554-
event_id="a" * 32,
555-
data={
556-
"level": "info",
557-
"message": "Foo bar",
558-
"culprit": "app/components/events/eventEntries in map",
559-
"type": "transaction",
560-
"contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
561-
},
562-
project_id=self.project.id,
563-
groups=[self.group],
564-
)
565-
assert list(event.build_group_events()) == [GroupEvent.from_event(event, self.group)]
566-
567-
def test_multiple(self):
568-
self.group_2 = self.create_group()
569-
event = Event(
570-
event_id="a" * 32,
571-
data={
572-
"level": "info",
573-
"message": "Foo bar",
574-
"culprit": "app/components/events/eventEntries in map",
575-
"type": "transaction",
576-
"contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
577-
},
578-
project_id=self.project.id,
579-
groups=[self.group, self.group_2],
580-
)
581-
sort_key = lambda group_event: (group_event.event_id, group_event.group_id)
582-
assert sorted(event.build_group_events(), key=sort_key) == sorted(
583-
[GroupEvent.from_event(event, self.group), GroupEvent.from_event(event, self.group_2)],
584-
key=sort_key,
585-
)
586-
587-
588537
class EventForGroupTest(TestCase):
589538
def test(self):
590539
event = Event(

tests/sentry/integrations/slack/test_unfurl.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -212,7 +212,7 @@ def test_unfurl_issues(self):
212212
assert (
213213
unfurls[links[1].url]
214214
== SlackIssuesMessageBuilder(
215-
group2, next(iter(event.build_group_events())), link_to_event=True
215+
group2, event.for_group(group2), link_to_event=True
216216
).build()
217217
)
218218

@@ -242,7 +242,7 @@ def test_unfurl_issues_block_kit(self):
242242
assert (
243243
unfurls[links[1].url]
244244
== SlackIssuesMessageBuilder(
245-
group2, next(iter(event.build_group_events())), link_to_event=True
245+
group2, event.for_group(group2), link_to_event=True
246246
).build()
247247
)
248248

tests/sentry/rules/processing/test_processor.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
from datetime import UTC, datetime, timedelta
2+
from typing import cast
23
from unittest import mock
34
from unittest.mock import patch
45

@@ -8,7 +9,7 @@
89
from django.utils import timezone
910

1011
from sentry.constants import ObjectStatus
11-
from sentry.models.group import GroupStatus
12+
from sentry.models.group import Group, GroupStatus
1213
from sentry.models.grouprulestatus import GroupRuleStatus
1314
from sentry.models.projectownership import ProjectOwnership
1415
from sentry.models.rule import Rule
@@ -46,7 +47,7 @@ def passes(self, event, state):
4647
class RuleProcessorTest(TestCase):
4748
def setUp(self):
4849
event = self.store_event(data={}, project_id=self.project.id)
49-
self.group_event = next(event.build_group_events())
50+
self.group_event = event.for_group(cast(Group, event.group))
5051

5152
Rule.objects.filter(project=self.group_event.project).delete()
5253
ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True)
@@ -402,7 +403,7 @@ class RuleProcessorTestFilters(TestCase):
402403

403404
def setUp(self):
404405
event = self.store_event(data={}, project_id=self.project.id)
405-
self.group_event = next(event.build_group_events())
406+
self.group_event = event.for_group(cast(Group, event.group))
406407

407408
@patch("sentry.constants._SENTRY_RULES", MOCK_SENTRY_RULES_WITH_FILTERS)
408409
def test_filter_passes(self):
@@ -591,7 +592,7 @@ def test_latest_release(self):
591592
self.create_release(project=self.project, version="2021-02.newRelease")
592593

593594
event = self.store_event(data={"release": "2021-02.newRelease"}, project_id=self.project.id)
594-
self.group_event = next(event.build_group_events())
595+
self.group_event = event.for_group(cast(Group, event.group))
595596

596597
Rule.objects.filter(project=self.group_event.project).delete()
597598
ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True)
@@ -639,7 +640,7 @@ def test_latest_release_environment(self):
639640
},
640641
project_id=self.project.id,
641642
)
642-
self.group_event = next(event.build_group_events())
643+
self.group_event = event.for_group(cast(Group, event.group))
643644

644645
Rule.objects.filter(project=self.group_event.project).delete()
645646
ProjectOwnership.objects.create(project_id=self.project.id, fallthrough=True)

0 commit comments

Comments (0)