
Commit 2a24e6c

Merge branch 'master' into kcons/filtered
2 parents 98a808f + 2e16025

File tree

37 files changed: +622, -307 lines changed


migrations_lockfile.txt

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@ remote_subscriptions: 0003_drop_remote_subscription
 
 replays: 0005_drop_replay_index
 
-sentry: 0894_split_discover_dataset_saved_queries
+sentry: 0895_relocation_provenance_smallint
 
 social_auth: 0002_default_auto_field

src/sentry/grouping/grouptype.py

Lines changed: 6 additions & 2 deletions
@@ -5,9 +5,13 @@
 from sentry.models.group import DEFAULT_TYPE_ID
 from sentry.types.group import PriorityLevel
 from sentry.workflow_engine.endpoints.validators.error_detector import ErrorDetectorValidator
-from sentry.workflow_engine.handlers.detector.base import DetectorEvaluationResult, DetectorHandler
+from sentry.workflow_engine.handlers.detector.base import DetectorHandler
 from sentry.workflow_engine.models.data_source import DataPacket
-from sentry.workflow_engine.types import DetectorGroupKey, DetectorSettings
+from sentry.workflow_engine.types import (
+    DetectorEvaluationResult,
+    DetectorGroupKey,
+    DetectorSettings,
+)
 
 T = TypeVar("T")

src/sentry/incidents/grouptype.py

Lines changed: 3 additions & 1 deletion
@@ -18,6 +18,7 @@
 )
 from sentry.workflow_engine.handlers.detector.base import EvidenceData
 from sentry.workflow_engine.models.data_source import DataPacket
+from sentry.workflow_engine.processors.data_condition_group import ProcessedDataConditionGroup
 from sentry.workflow_engine.types import DetectorGroupKey, DetectorPriorityLevel, DetectorSettings
 
 COMPARISON_DELTA_CHOICES: list[None | int] = [choice.value for choice in ComparisonDeltaChoices]
@@ -32,7 +33,8 @@ class MetricIssueEvidenceData(EvidenceData):
 class MetricAlertDetectorHandler(StatefulGroupingDetectorHandler[QuerySubscriptionUpdate, int]):
     def create_occurrence(
         self,
-        value: int,
+        evaluation_result: ProcessedDataConditionGroup,
+        data_packet: DataPacket[QuerySubscriptionUpdate],
         priority: DetectorPriorityLevel,
     ) -> tuple[DetectorOccurrence, dict[str, Any]]:
         # Returning a placeholder for now, this may require us passing more info
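
The signature change gives the concrete handler the processed condition group and the originating data packet instead of a single pre-extracted value. A minimal sketch of an implementation under the new contract (MyDataPacket, MyGroupType, and all field values are hypothetical, not from this commit):

from typing import Any

from sentry.workflow_engine.handlers.detector.base import DetectorOccurrence
from sentry.workflow_engine.models.data_source import DataPacket
from sentry.workflow_engine.processors.data_condition_group import ProcessedDataConditionGroup
from sentry.workflow_engine.types import DetectorPriorityLevel


def create_occurrence(
    self,
    evaluation_result: ProcessedDataConditionGroup,
    data_packet: DataPacket["MyDataPacket"],  # hypothetical packet type
    priority: DetectorPriorityLevel,
) -> tuple[DetectorOccurrence, dict[str, Any]]:
    # Both the matched condition group and the full packet are in scope here,
    # so evidence can be derived at occurrence-creation time rather than
    # passed in as a bare value.
    occurrence = DetectorOccurrence(
        issue_title="Example detector fired",  # illustrative values only
        subtitle="threshold exceeded",
        type=MyGroupType,  # hypothetical GroupType subclass
        level="error",
        culprit="",
        priority=priority,
    )
    return occurrence, {}
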
src/sentry/migrations/0895_relocation_provenance_smallint.py

Lines changed: 35 additions & 0 deletions

@@ -0,0 +1,35 @@
+# Generated by Django 5.2.1 on 2025-05-14 18:28
+
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.new_migrations.monkey.special import SafeRunSQL
+
+
+class Migration(CheckedMigration):
+    # This flag is used to mark that a migration shouldn't be automatically run in production.
+    # This should only be used for operations where it's safe to run the migration after your
+    # code has deployed. So this should not be used for most operations that alter the schema
+    # of a table.
+    # Here are some things that make sense to mark as post deployment:
+    # - Large data migrations. Typically we want these to be run manually so that they can be
+    #   monitored and not block the deploy for a long period of time while they run.
+    # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+    #   run this outside deployments so that we don't block them. Note that while adding an index
+    #   is a schema change, it's completely safe to run the operation after the code has deployed.
+    # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+    is_post_deployment = True
+
+    dependencies = [
+        ("sentry", "0894_split_discover_dataset_saved_queries"),
+    ]
+
+    operations = [
+        SafeRunSQL(
+            """
+            ALTER TABLE sentry_relocation
+            ALTER COLUMN provenance TYPE smallint;
+            """,
+            reverse_sql="",
+            hints={"tables": ["sentry_relocation"]},
+        ),
+    ]
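
Because the migration is post-deployment and run manually, the column type can be sanity-checked from a Django shell before and after it is applied. A sketch, assuming a Postgres backend:

from django.db import connection

with connection.cursor() as cursor:
    cursor.execute(
        """
        SELECT data_type
        FROM information_schema.columns
        WHERE table_name = 'sentry_relocation'
          AND column_name = 'provenance'
        """
    )
    # Expect ('smallint',) once the migration has run.
    print(cursor.fetchone())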

src/sentry/monitors/consumers/monitor_consumer.py

Lines changed: 4 additions & 10 deletions
@@ -6,7 +6,7 @@
 from collections.abc import Mapping
 from concurrent.futures import ThreadPoolExecutor, wait
 from copy import deepcopy
-from datetime import UTC, datetime, timedelta
+from datetime import UTC, datetime
 from functools import partial
 from typing import Any, Literal, NotRequired, TypedDict
 
@@ -811,20 +811,14 @@ def _process_checkin(item: CheckinItem, txn: Transaction | Span) -> None:
     # 03-B
     # Create a brand new check-in object
     except MonitorCheckIn.DoesNotExist:
-        # Infer the original start time of the check-in from the duration.
-        # Note that the clock of this worker may be off from what Relay is reporting.
-        date_added = start_time
-        if duration is not None:
-            date_added -= timedelta(milliseconds=duration)
-
         # When was this check-in expected to have happened?
         expected_time = monitor_environment.next_checkin
 
         # denormalize the monitor configration into the check-in.
         # Useful to show details about the configuration of the
         # monitor at the time of the check-in
         monitor_config = monitor.get_validated_config()
-        timeout_at = get_timeout_at(monitor_config, status, date_added)
+        timeout_at = get_timeout_at(monitor_config, status, start_time)
 
         # The "date_clock" is recorded as the "clock time" of when the
         # check-in was processed. The clock time is derived from the
@@ -839,9 +833,9 @@ def _process_checkin(item: CheckinItem, txn: Transaction | Span) -> None:
         defaults={
             "duration": duration,
             "status": status,
-            "date_added": date_added,
-            "date_clock": clock_time,
+            "date_added": start_time,
             "date_updated": start_time,
+            "date_clock": clock_time,
             "expected_time": expected_time,
             "timeout_at": timeout_at,
             "monitor_config": monitor_config,

src/sentry/monitors/models.py

Lines changed: 16 additions & 19 deletions
@@ -465,14 +465,25 @@ class MonitorCheckIn(Model):
     check-in.
     """
 
+    date_created = models.DateTimeField(default=timezone.now, null=True)
+    """
+    Represents when the check-in was actually recorded into the database. This
+    is a real wall-clock time and is not tied to the "clock" time that
+    check-ins are processed in the context of.
+    """
+
     date_added = models.DateTimeField(default=timezone.now, db_index=True)
     """
-    Represents the time the checkin was made. This CAN BE back-dated in some
-    cases, and does not necessarily represent the insertion time of the row in
-    the database.
+    Represents the time the checkin was made. This date comes from the time
+    relay received the envelope containing the check-in.
+    """
 
-    This date comes from the time relay reiceved the envelope containing the
-    check-in.
+    date_updated = models.DateTimeField(default=timezone.now)
+    """
+    Represents the last time a check-in was updated. This will typically be
+    when the terminal state is set. Currently only updated when an in_progress
+    check-in is sent with this check-in's guid. Can be used to extend the
+    lifetime of a check-in so that it does not time out.
     """
 
     date_clock = models.DateTimeField(null=True)
@@ -484,20 +495,6 @@ class MonitorCheckIn(Model):
     as detecting misses)
     """
 
-    date_created = models.DateTimeField(default=timezone.now, null=True)
-    """
-    Represents when the check-in was actually recorded into the database. This
-    is a real wall-clock time and is not tied to the "clock" time that
-    check-ins are processed in the contenxt of.
-    """
-
-    date_updated = models.DateTimeField(default=timezone.now)
-    """
-    Currently only updated when a in_progress check-in is sent with this
-    check-in's guid. Can be used to extend the lifetime of a check-in so that
-    it does not time out.
-    """
-
     expected_time = models.DateTimeField(null=True)
     """
     Holds the exact time we expected to receive this check-in

src/sentry/monitors/serializers.py

Lines changed: 4 additions & 0 deletions
@@ -275,7 +275,10 @@ class MonitorCheckInSerializerResponse(MonitorCheckInSerializerResponseOptional):
     environment: str
     status: str
     duration: int
+    # TODO(epurkhiser): Should be replaced with the actual date_created, right
+    # now this is the same as dateAdded
    dateCreated: datetime
+    dateAdded: datetime
     expectedTime: datetime
     monitorConfig: MonitorConfigSerializerResponse
@@ -342,6 +345,7 @@ def serialize(self, obj, attrs, user, **kwargs) -> MonitorCheckInSerializerResponse:
         "status": obj.get_status_display(),
         "duration": obj.duration,
         "dateCreated": obj.date_added,
+        "dateAdded": obj.date_added,
         "expectedTime": obj.expected_time,
         "monitorConfig": cast(MonitorConfigSerializerResponse, config),
     }
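
With the new key, a serialized check-in carries both timestamps, and for now they hold the same value. A hypothetical response fragment (values invented for illustration):

{
    "environment": "production",
    "status": "ok",
    "duration": 1500,
    "dateCreated": "2025-05-14T18:28:00Z",  # currently mirrors dateAdded
    "dateAdded": "2025-05-14T18:28:00Z",
    "expectedTime": "2025-05-14T18:30:00Z",
}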

src/sentry/rules/processing/buffer_processing.py

Lines changed: 1 addition & 0 deletions
@@ -104,6 +104,7 @@ def process_in_batches(project_id: int, processing_type: str) -> None:
     metrics.incr(
         f"{processing_type}.num_groups", tags={"num_groups": bucket_num_groups(event_count)}
     )
+    metrics.distribution(f"{processing_type}.event_count", event_count)
 
     if event_count < batch_size:
         return task.delay(project_id)
src/sentry/workflow_engine/handlers/detector/__init__.py

Lines changed: 1 addition & 8 deletions

@@ -1,18 +1,11 @@
 __all__ = [
     "DataPacketEvaluationType",
     "DataPacketType",
-    "DetectorEvaluationResult",
     "DetectorHandler",
     "DetectorOccurrence",
     "DetectorStateData",
     "StatefulGroupingDetectorHandler",
 ]
 
-from .base import (
-    DataPacketEvaluationType,
-    DataPacketType,
-    DetectorEvaluationResult,
-    DetectorHandler,
-    DetectorOccurrence,
-)
+from .base import DataPacketEvaluationType, DataPacketType, DetectorHandler, DetectorOccurrence
 from .stateful import DetectorStateData, StatefulGroupingDetectorHandler

src/sentry/workflow_engine/handlers/detector/base.py

Lines changed: 13 additions & 22 deletions
@@ -8,15 +8,21 @@
 
 from sentry.issues.grouptype import GroupType
 from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence
-from sentry.issues.status_change_message import StatusChangeMessage
 from sentry.types.actor import Actor
 from sentry.workflow_engine.models import Condition, DataConditionGroup, DataPacket, Detector
-from sentry.workflow_engine.types import DetectorGroupKey, DetectorPriorityLevel
+from sentry.workflow_engine.processors.data_condition_group import ProcessedDataConditionGroup
+from sentry.workflow_engine.types import (
+    DetectorEvaluationResult,
+    DetectorGroupKey,
+    DetectorPriorityLevel,
+)
 
 logger = logging.getLogger(__name__)
 
 DataPacketType = TypeVar("DataPacketType")
 DataPacketEvaluationType = TypeVar("DataPacketEvaluationType")
+
+# TODO - get more info about how this is used in issue platform
 EventData = dict[str, Any]
 
 
@@ -35,14 +41,14 @@ class EvidenceData(Generic[DataPacketEvaluationType]):
 class DetectorOccurrence:
     issue_title: str
     subtitle: str
-    resource_id: str | None = None
     evidence_data: Mapping[str, Any] = dataclasses.field(default_factory=dict)
     evidence_display: Sequence[IssueEvidence] = dataclasses.field(default_factory=list)
     type: type[GroupType]
     level: str
     culprit: str
+    resource_id: str | None = None
     assignee: Actor | None = None
+    priority: DetectorPriorityLevel | None = None
 
     def to_issue_occurrence(
         self,
@@ -73,20 +79,6 @@ def to_issue_occurrence(
         )
 
 
-@dataclasses.dataclass(frozen=True)
-class DetectorEvaluationResult:
-    # TODO - Should group key live at this level?
-    group_key: DetectorGroupKey
-    # TODO: Are these actually necessary? We're going to produce the occurrence in the detector, so we probably don't
-    # need to know the other results externally
-    is_triggered: bool
-    priority: DetectorPriorityLevel
-    # TODO: This is only temporarily optional. We should always have a value here if returning a result
-    result: IssueOccurrence | StatusChangeMessage | None = None
-    # Event data to supplement the `IssueOccurrence`, if passed.
-    event_data: dict[str, Any] | None = None
-
-
 # TODO - DetectorHandler -> AbstractDetectorHandler? (then DetectorHandler is the base implementation)
 class DetectorHandler(abc.ABC, Generic[DataPacketType, DataPacketEvaluationType]):
     def __init__(self, detector: Detector):
@@ -109,7 +101,7 @@ def __init__(self, detector: Detector):
     @abc.abstractmethod
     def evaluate(
         self, data_packet: DataPacket[DataPacketType]
-    ) -> dict[DetectorGroupKey, DetectorEvaluationResult]:
+    ) -> dict[DetectorGroupKey, DetectorEvaluationResult] | None:
         """
         This method is used to evaluate the data packet's value against the conditions on the detector.
         """
@@ -118,10 +110,9 @@ def evaluate(
     @abc.abstractmethod
     def create_occurrence(
         self,
-        value: DataPacketEvaluationType,
+        evaluation_result: ProcessedDataConditionGroup,
+        data_packet: DataPacket[DataPacketType],
         priority: DetectorPriorityLevel,
-        # data_packet: DataPacketType, # TODO - having access to all the data being evaluated seems good
-        # data_conditions: list[DataCondition], # TODO - list of the failing conditions might be nice
     ) -> tuple[DetectorOccurrence, EventData]:
         """
         This method provides the value that was evaluated against, the data packet that was
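
With DetectorEvaluationResult relocated to sentry.workflow_engine.types and evaluate() now allowed to return None, a concrete handler must signal "nothing to report" explicitly. A minimal sketch of the updated contract, assuming the result dataclass keeps the fields shown above and with create_occurrence omitted for brevity (the dict packet shape is hypothetical):

from sentry.workflow_engine.handlers.detector.base import DetectorHandler
from sentry.workflow_engine.models.data_source import DataPacket
from sentry.workflow_engine.types import (
    DetectorEvaluationResult,
    DetectorGroupKey,
    DetectorPriorityLevel,
)


class ExampleHandler(DetectorHandler[dict, int]):
    def evaluate(
        self, data_packet: DataPacket[dict]
    ) -> dict[DetectorGroupKey, DetectorEvaluationResult] | None:
        value = data_packet.packet.get("value")  # hypothetical payload shape
        if value is None:
            # None is now a legal "no result" outcome; callers must handle
            # it instead of assuming a dict always comes back.
            return None
        return {
            None: DetectorEvaluationResult(
                group_key=None,
                is_triggered=value > 0,
                priority=DetectorPriorityLevel.HIGH,
            )
        }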
