
Commit fc7dbef

cathteng authored and andrewshie-sentry committed
fix(aci): don't cast percentage frequency to int in delayed workflow processing (#89350)
1 parent b12478f commit fc7dbef

File tree

6 files changed: +81 −53 lines changed

src/sentry/workflow_engine/handlers/condition/event_frequency_query_handlers.py

Lines changed: 10 additions & 9 deletions
```diff
@@ -32,6 +32,7 @@
 from sentry.workflow_engine.models.data_condition import Condition
 
 QueryFilter = dict[str, Any]
+QueryResult = dict[int, int | float]
 
 
 class TSDBFunction(Protocol):
@@ -269,7 +270,7 @@ def batch_query(
         end: datetime,
         environment_id: int | None,
         filters: list[QueryFilter] | None = None,
-    ) -> dict[int, int]:
+    ) -> QueryResult:
         """
         Abstract method that specifies how to query Snuba for multiple groups
         depending on the condition. Must be implemented by subclasses.
@@ -284,7 +285,7 @@ def get_rate_bulk(
         current_time: datetime,
         comparison_interval: timedelta | None,
         filters: list[QueryFilter] | None,
-    ) -> dict[int, int]:
+    ) -> QueryResult:
         """
         Make a batch query for multiple groups. The return value is a dictionary
         of group_id to the result for that group.
@@ -324,8 +325,8 @@ def batch_query(
         end: datetime,
         environment_id: int | None,
         filters: list[QueryFilter] | None = None,
-    ) -> dict[int, int]:
-        batch_sums: dict[int, int] = defaultdict(int)
+    ) -> QueryResult:
+        batch_sums: QueryResult = defaultdict(int)
         groups = Group.objects.filter(id__in=group_ids).values(
             "id", "type", "project_id", "project__organization_id"
         )
@@ -369,8 +370,8 @@ def batch_query(
         end: datetime,
         environment_id: int | None,
         filters: list[QueryFilter] | None = None,
-    ) -> dict[int, int]:
-        batch_sums: dict[int, int] = defaultdict(int)
+    ) -> QueryResult:
+        batch_sums: QueryResult = defaultdict(int)
         groups = Group.objects.filter(id__in=group_ids).values(
             "id", "type", "project_id", "project__organization_id"
         )
@@ -439,8 +440,8 @@ def batch_query(
         end: datetime,
         environment_id: int | None,
         filters: list[QueryFilter] | None = None,
-    ) -> dict[int, int]:
-        batch_percents: dict[int, int] = defaultdict(int)
+    ) -> QueryResult:
+        batch_percents: QueryResult = {}
         groups = Group.objects.filter(id__in=group_ids).values(
             "id", "type", "project_id", "project__organization_id"
         )
@@ -484,7 +485,7 @@ def batch_query(
             filters=filters,
         )
         for group_id, count in results.items():
-            percent: int = int(100 * round(count / avg_sessions_in_interval, 4))
+            percent: float = 100 * round(count / avg_sessions_in_interval, 4)
             batch_percents[group_id] = percent
 
         return batch_percents
```
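
The behavioral fix is the last hunk: the percent-of-sessions result is no longer truncated through `int()`, and the widened `QueryResult` alias lets the float flow through the handler return types. A toy sketch of what the old cast did to sub-1% rates (illustrative numbers only, not Sentry code):

```python
# Suppose 3 matching events against an average of 400 sessions in the interval.
count = 3
avg_sessions_in_interval = 400

old_percent = int(100 * round(count / avg_sessions_in_interval, 4))  # int(0.75) -> 0
new_percent = 100 * round(count / avg_sessions_in_interval, 4)       # 0.75

print(old_percent, new_percent)  # 0 0.75
# The old cast collapsed every rate below 1% to 0, so a fractional
# "percent of sessions" threshold could never be crossed.
```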

src/sentry/workflow_engine/migration_helpers/issue_alert_conditions.py

Lines changed: 12 additions & 2 deletions
```diff
@@ -233,14 +233,18 @@ def create_latest_adopted_release_data_condition(
 
 
 def create_base_event_frequency_data_condition(
-    data: dict[str, Any], dcg: DataConditionGroup, count_type: Condition, percent_type: Condition
+    value: int | float,
+    data: dict[str, Any],
+    dcg: DataConditionGroup,
+    count_type: Condition,
+    percent_type: Condition,
 ) -> DataConditionKwargs:
     comparison_type = data.get(
         "comparisonType", ComparisonType.COUNT
     )  # this is camelCase, age comparison is snake_case
     comparison_type = ComparisonType(comparison_type)
 
-    value = max(int(data["value"]), 0)  # force to 0 if negative
+    value = max(value, 0)  # force to 0 if negative
     comparison = {
         "interval": data["interval"],
         "value": value,
@@ -263,7 +267,9 @@ def create_base_event_frequency_data_condition(
 def create_event_frequency_data_condition(
     data: dict[str, Any], dcg: DataConditionGroup
 ) -> DataConditionKwargs:
+    value = int(data["value"])
     return create_base_event_frequency_data_condition(
+        value=value,
         data=data,
         dcg=dcg,
         count_type=Condition.EVENT_FREQUENCY_COUNT,
@@ -274,7 +280,9 @@ def create_event_frequency_data_condition(
 def create_event_unique_user_frequency_data_condition(
     data: dict[str, Any], dcg: DataConditionGroup
 ) -> DataConditionKwargs:
+    value = int(data["value"])
     return create_base_event_frequency_data_condition(
+        value=value,
         data=data,
         dcg=dcg,
         count_type=Condition.EVENT_UNIQUE_USER_FREQUENCY_COUNT,
@@ -285,7 +293,9 @@ def create_event_unique_user_frequency_data_condition(
 def create_percent_sessions_data_condition(
     data: dict[str, Any], dcg: DataConditionGroup
 ) -> DataConditionKwargs:
+    value = float(data["value"])
    return create_base_event_frequency_data_condition(
+        value=value,
         data=data,
         dcg=dcg,
         count_type=Condition.PERCENT_SESSIONS_COUNT,
```
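
The base helper now takes an already-coerced `value`, and each wrapper picks the coercion: count conditions keep `int(...)`, while percent-of-sessions uses `float(...)`. A standalone sketch of why the split matters, with hypothetical function names (not Sentry's API):

```python
# Hypothetical demo of the two coercion paths.
def coerce_count(raw) -> int:
    return max(int(raw), 0)      # count thresholds are whole numbers; negatives floor to 0

def coerce_percent(raw) -> float:
    return max(float(raw), 0.0)  # percentage thresholds keep their fraction

print(coerce_count("50"))       # 50
print(coerce_percent("17.2"))   # 17.2
# The old single path applied int() to everything: int(17.2) silently
# truncated to 17, and int("17.2") raised ValueError outright.
```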

src/sentry/workflow_engine/processors/delayed_workflow.py

Lines changed: 4 additions & 3 deletions
```diff
@@ -31,6 +31,7 @@
 from sentry.utils.safe import safe_execute
 from sentry.workflow_engine.handlers.condition.event_frequency_query_handlers import (
     BaseEventFrequencyQueryHandler,
+    QueryResult,
     slow_condition_query_handler_registry,
 )
 from sentry.workflow_engine.models import DataCondition, DataConditionGroup, Workflow
@@ -232,7 +233,7 @@ def get_condition_query_groups(
 
 def get_condition_group_results(
     queries_to_groups: dict[UniqueConditionQuery, set[int]],
-) -> dict[UniqueConditionQuery, dict[int, int]]:
+) -> dict[UniqueConditionQuery, QueryResult]:
     condition_group_results = {}
     current_time = timezone.now()
 
@@ -266,7 +267,7 @@ def get_groups_to_fire(
     workflows_to_envs: WorkflowEnvMapping,
     dcg_to_workflow: dict[int, int],
     dcg_to_groups: DataConditionGroupGroups,
-    condition_group_results: dict[UniqueConditionQuery, dict[int, int]],
+    condition_group_results: dict[UniqueConditionQuery, QueryResult],
 ) -> dict[int, set[DataConditionGroup]]:
     groups_to_fire: dict[int, set[DataConditionGroup]] = defaultdict(set)
     for dcg in data_condition_groups:
@@ -276,7 +277,7 @@ def get_groups_to_fire(
         workflow_env = workflows_to_envs[workflow_id] if workflow_id else None
 
         for group_id in dcg_to_groups[dcg.id]:
-            conditions_to_evaluate: list[tuple[DataCondition, list[int]]] = []
+            conditions_to_evaluate: list[tuple[DataCondition, list[int | float]]] = []
             for condition in slow_conditions:
                 unique_queries = generate_unique_queries(condition, workflow_env)
                 query_values = [
```
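
Downstream, the same `QueryResult` alias replaces the hand-written `dict[int, int]` annotations, so delayed workflow processing can carry percent-sessions floats from the batch query all the way into condition evaluation. A minimal sketch of the shape, assuming only the alias from the diff:

```python
from collections import defaultdict

QueryResult = dict[int, int | float]  # group_id -> count or percentage

counts: QueryResult = defaultdict(int)
counts[123] += 5           # count handlers still accumulate integers

percents: QueryResult = {}
percents[123] = 0.75       # percent-sessions results now keep the fraction

print(counts[123], percents[123])  # 5 0.75
```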

tests/sentry/workflow_engine/handlers/condition/test_event_frequency_handlers.py

Lines changed: 34 additions & 38 deletions
```diff
@@ -25,7 +25,7 @@ class TestEventFrequencyCountCondition(ConditionTestCase):
     def setUp(self):
         super().setUp()
         self.condition = Condition.EVENT_FREQUENCY_COUNT
-        self.payload: dict[str, int | str] = {
+        self.payload: dict[str, str | int | float] = {
             "interval": "1h",
             "id": EventFrequencyCondition.id,
             "value": 50,
@@ -74,14 +74,14 @@ def test_count_with_filters(self):
         results = [dc.comparison["value"] - 1]
         self.assert_slow_condition_does_not_pass(dc, results)
 
-    def _test_dual_write(self, value: int):
+    def _test_dual_write(self, value):
         dcg = self.create_data_condition_group()
         dc = self.translate_to_data_condition(self.payload, dcg)
 
         assert dc.type == self.condition
         assert dc.comparison == {
             "interval": self.payload["interval"],
-            "value": value,
+            "value": self.payload["value"],
         }
         assert dc.condition_result is True
         assert dc.condition_group == dcg
@@ -93,22 +93,21 @@ def _test_dual_write(self, value: int):
         assert dc.type == self.condition
         assert dc.comparison == {
             "interval": self.payload["interval"],
-            "value": value,
+            "value": self.payload["value"],
         }
         assert dc.condition_result is True
         assert dc.condition_group == dcg
 
     def test_dual_write_count(self):
-        self._test_dual_write(int(self.payload["value"]))
+        self._test_dual_write(self.payload["value"])
 
     def test_dual_write_count__string_value(self):
-        self.payload["value"] = str(self.payload["value"])
-        self._test_dual_write(int(self.payload["value"]))
+        self._test_dual_write(str(self.payload["value"]))
 
     def test_dual_write__value_floor(self):
         # forces negative to zero for migration
-        self.payload["value"] = -1
-        self._test_dual_write(0)
+        self.payload["value"] = 0  # expected value
+        self._test_dual_write(-1)
 
     def test_json_schema(self):
         with pytest.raises(ValidationError):
@@ -161,7 +160,7 @@ def test_json_schema(self):
 class TestEventFrequencyPercentCondition(ConditionTestCase):
     def setUp(self):
         self.condition = Condition.EVENT_FREQUENCY_PERCENT
-        self.payload: dict[str, int | str] = {
+        self.payload: dict[str, str | int | float] = {
             "interval": "1h",
             "id": EventFrequencyCondition.id,
             "value": 50,
@@ -218,30 +217,29 @@ def test_percent_with_filters(self):
         results = [10, 10]
         self.assert_slow_condition_does_not_pass(dc, results)
 
-    def _test_dual_write(self, value: int):
+    def _test_dual_write(self, value):
         dcg = self.create_data_condition_group()
         dc = self.translate_to_data_condition(self.payload, dcg)
 
         assert dc.type == self.condition
         assert dc.comparison == {
             "interval": self.payload["interval"],
-            "value": value,
+            "value": self.payload["value"],
             "comparison_interval": self.payload["comparisonInterval"],
         }
         assert dc.condition_result is True
         assert dc.condition_group == dcg
 
     def test_dual_write_percent(self):
-        self._test_dual_write(int(self.payload["value"]))
+        self._test_dual_write(self.payload["value"])
 
     def test_dual_write_percent__string_value(self):
-        self.payload["value"] = str(self.payload["value"])
-        self._test_dual_write(int(self.payload["value"]))
+        self._test_dual_write(str(self.payload["value"]))
 
     def test_dual_write__value_floor(self):
         # forces negative to zero for migration
-        self.payload["value"] = -1
-        self._test_dual_write(0)
+        self.payload["value"] = 0  # expected value
+        self._test_dual_write(-1)
 
     def test_json_schema(self):
         with pytest.raises(ValidationError):
@@ -318,7 +316,7 @@ class TestEventUniqueUserFrequencyCountCondition(TestEventFrequencyCountCondition):
     def setUp(self):
         super().setUp()
         self.condition = Condition.EVENT_UNIQUE_USER_FREQUENCY_COUNT
-        self.payload: dict[str, int | str] = {
+        self.payload: dict[str, str | int | float] = {
             "interval": "1h",
             "id": EventUniqueUserFrequencyCondition.id,
             "value": 50,
@@ -330,7 +328,7 @@ class TestEventUniqueUserFrequencyPercentCondition(TestEventFrequencyPercentCondition):
     def setUp(self):
         super().setUp()
         self.condition = Condition.EVENT_UNIQUE_USER_FREQUENCY_PERCENT
-        self.payload: dict[str, int | str] = {
+        self.payload: dict[str, str | int | float] = {
             "interval": "1h",
             "id": EventUniqueUserFrequencyCondition.id,
             "value": 50,
@@ -343,10 +341,10 @@ class TestPercentSessionsCountCondition(TestEventFrequencyCountCondition):
     def setUp(self):
         super().setUp()
         self.condition = Condition.PERCENT_SESSIONS_COUNT
-        self.payload: dict[str, int | str] = {
+        self.payload: dict[str, str | int | float] = {
             "interval": "30m",  # only percent sessions allows 30m
             "id": EventFrequencyPercentCondition.id,
-            "value": 17,
+            "value": 17.2,
             "comparisonType": ComparisonType.COUNT,
         }
         self.intervals = PERCENT_INTERVALS
@@ -363,10 +361,10 @@ class TestPercentSessionsPercentCondition(TestEventFrequencyPercentCondition):
     def setUp(self):
         super().setUp()
         self.condition = Condition.PERCENT_SESSIONS_PERCENT
-        self.payload: dict[str, int | str] = {
+        self.payload: dict[str, str | int | float] = {
             "interval": "30m",  # only percent sessions allows 30m
             "id": EventFrequencyPercentCondition.id,
-            "value": 17,
+            "value": 17.2,
             "comparisonType": ComparisonType.PERCENT,
             "comparisonInterval": "1d",
         }
@@ -385,7 +383,7 @@ class TestEventUniqueUserFrequencyConditionWithConditions(ConditionTestCase):
     def setUp(self):
         super().setUp()
         self.condition = Condition.EVENT_UNIQUE_USER_FREQUENCY_COUNT
-        self.payload: dict[str, int | str] = {
+        self.payload: dict[str, str | int | float] = {
             "interval": "1h",
             "id": EventUniqueUserFrequencyConditionWithConditions.id,
             "value": 50,
@@ -426,33 +424,32 @@ def setUp(self):
         ]
         self.dcg = self.create_data_condition_group()
 
-    def _test_dual_write_count(self, value: int):
+    def _test_dual_write_count(self, value):
         dc = create_event_unique_user_frequency_condition_with_conditions(
             self.payload, self.dcg, self.conditions
         )
 
         assert dc.type == self.condition
         assert dc.comparison == {
             "interval": self.payload["interval"],
-            "value": value,
+            "value": self.payload["value"],
             "filters": self.expected_filters,
         }
         assert dc.condition_result is True
         assert dc.condition_group == self.dcg
 
     def test_dual_write_count(self):
-        self._test_dual_write_count(int(self.payload["value"]))
+        self._test_dual_write_count(self.payload["value"])
 
     def test_dual_write_count__string_value(self):
-        self.payload["value"] = str(self.payload["value"])
-        self._test_dual_write_count(int(self.payload["value"]))
+        self._test_dual_write_count(str(self.payload["value"]))
 
     def test_dual_write_count__value_floor(self):
         # forces negative to zero for migration
-        self.payload["value"] = -1
-        self._test_dual_write_count(0)
+        self.payload["value"] = 0  # expected
+        self._test_dual_write_count(-1)
 
-    def _test_dual_write_percent(self, value: int):
+    def _test_dual_write_percent(self, value):
         self.payload.update({"comparisonType": ComparisonType.PERCENT, "comparisonInterval": "1d"})
         dc = create_event_unique_user_frequency_condition_with_conditions(
             self.payload, self.dcg, self.conditions
@@ -461,24 +458,23 @@ def _test_dual_write_percent(self, value: int):
         assert dc.type == Condition.EVENT_UNIQUE_USER_FREQUENCY_PERCENT
         assert dc.comparison == {
             "interval": self.payload["interval"],
-            "value": value,
+            "value": self.payload["value"],
             "comparison_interval": self.payload["comparisonInterval"],
             "filters": self.expected_filters,
         }
         assert dc.condition_result is True
         assert dc.condition_group == self.dcg
 
     def test_dual_write_percent(self):
-        self._test_dual_write_percent(int(self.payload["value"]))
+        self._test_dual_write_percent(self.payload["value"])
 
     def test_dual_write_percent__string_value(self):
-        self.payload["value"] = str(self.payload["value"])
-        self._test_dual_write_percent(int(self.payload["value"]))
+        self._test_dual_write_percent(str(self.payload["value"]))
 
     def test_dual_write_count__percent_floor(self):
         # forces negative to zero for migration
-        self.payload["value"] = -1
-        self._test_dual_write_percent(0)
+        self.payload["value"] = 0  # expected
+        self._test_dual_write_percent(-1)
 
     def test_dual_write__invalid(self):
         with pytest.raises(KeyError):
```
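
Note the inverted convention in the rewritten test helpers: the argument is now the raw value handed to the dual-write migration, while `self.payload["value"]` holds the expected post-migration value asserted against `dc.comparison`. That is why the floor tests set the payload to the expected `0` and pass the raw `-1`. In miniature (a sketch of the convention, not the test code itself):

```python
payload = {"value": 0}   # expected value after migration (negatives floored)
raw_value = -1           # raw input passed to the dual-write helper
assert max(raw_value, 0) == payload["value"]
```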
