Commit 98c6175

Merge branch 'master' into jb/admin/deprecateddropdown
2 parents e068aea + 6959cf6 · commit 98c6175

File tree

23 files changed: +596 −153 lines changed


src/sentry/incidents/endpoints/serializers/workflow_engine_action.py

Lines changed: 53 additions & 13 deletions
@@ -1,19 +1,65 @@
+from collections.abc import Mapping
+from typing import Any
+
+from django.contrib.auth.models import AnonymousUser
+from django.db.models import Subquery
 from sentry.api.serializers import Serializer
 from sentry.incidents.endpoints.serializers.alert_rule_trigger_action import (
     get_identifier_from_action,
     get_input_channel_id,
     human_desc,
 )
-from sentry.incidents.models.alert_rule import AlertRuleTriggerAction
-from sentry.notifications.models.notificationaction import ActionService
+from sentry.notifications.models.notificationaction import ActionService, ActionTarget
 from sentry.notifications.notification_action.group_type_notification_registry.handlers.metric_alert_registry_handler import (
     MetricAlertRegistryHandler,
 )
-from sentry.workflow_engine.models import Action, ActionAlertRuleTriggerAction
+from sentry.users.models.user import User
+from sentry.users.services.user.model import RpcUser
+from sentry.workflow_engine.models import (
+    Action,
+    ActionAlertRuleTriggerAction,
+    DataCondition,
+    DataConditionAlertRuleTrigger,
+    DataConditionGroupAction,
+    DetectorWorkflow,
+    WorkflowDataConditionGroup,
+)
+from sentry.workflow_engine.models.data_condition import Condition


 class WorkflowEngineActionSerializer(Serializer):
-    def serialize(self, obj: Action, attrs, user, **kwargs):
+    def get_alert_rule_trigger_id(self, action: Action) -> int | None:
+        """
+        Fetches the alert rule trigger id for the detector trigger related to the given action
+        """
+        action_filter_data_condition = DataCondition.objects.filter(
+            condition_group__in=Subquery(
+                DataConditionGroupAction.objects.filter(action=action).values("condition_group")
+            ),
+            type=Condition.ISSUE_PRIORITY_EQUALS,
+            condition_result=True,
+        )
+        detector_dcg = DetectorWorkflow.objects.filter(
+            workflow__in=Subquery(
+                WorkflowDataConditionGroup.objects.filter(
+                    condition_group__in=Subquery(
+                        action_filter_data_condition.values("condition_group")
+                    )
+                ).values("workflow")
+            )
+        ).values("detector__workflow_condition_group")
+        detector_trigger = DataCondition.objects.filter(
+            condition_result__in=Subquery(action_filter_data_condition.values("comparison")),
+            condition_group__in=detector_dcg,
+        )
+        return DataConditionAlertRuleTrigger.objects.values_list(
+            "alert_rule_trigger_id", flat=True
+        ).get(data_condition__in=detector_trigger)
+
+    def serialize(
+        self, obj: Action, attrs: Mapping[str, Any], user: User | RpcUser | AnonymousUser, **kwargs
+    ) -> dict[str, Any]:
         """
         Temporary serializer to take an Action and serialize it for the old metric alert rule endpoints
         """
@@ -34,19 +80,13 @@ def serialize(self, obj: Action, attrs, user, **kwargs):
         sentry_app_id = int(obj.config.get("target_identifier"))
         sentry_app_config = obj.data.get("settings")

-        trigger_action = AlertRuleTriggerAction.objects.get(id=aarta.alert_rule_trigger_action_id)
-
         result = {
             "id": str(aarta.alert_rule_trigger_action_id),
-            "alertRuleTriggerId": str(trigger_action.alert_rule_trigger.id),
+            "alertRuleTriggerId": str(self.get_alert_rule_trigger_id(aarta.action)),
             "type": obj.type,
-            "targetType": ACTION_TARGET_TYPE_TO_STRING[
-                AlertRuleTriggerAction.TargetType(target_type)
-            ],
+            "targetType": ACTION_TARGET_TYPE_TO_STRING[ActionTarget(target_type)],
             "targetIdentifier": get_identifier_from_action(
-                type_value,
-                str(target_identifier),
-                target_display,
+                type_value, str(target_identifier), target_display
             ),
             "inputChannelId": get_input_channel_id(type_value, target_identifier),
             "integrationId": obj.integration_id,

src/sentry/incidents/subscription_processor.py

Lines changed: 5 additions & 1 deletion
@@ -436,7 +436,11 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
         logger.info(
             "dual processing results for alert rule %s",
             self.alert_rule.id,
-            extra={"results": results},
+            extra={
+                "results": results,
+                "num_results": len(results),
+                "value": aggregation_value,
+            },
         )

         has_anomaly_detection = features.has(
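
Worth noting: keys passed via extra become attributes on the LogRecord, so num_results and value can be indexed by structured log pipelines without parsing the message. A self-contained illustration (logger name and values are demo-only):

import logging

logging.basicConfig(format="%(message)s | n=%(num_results)s value=%(value)s")
logger = logging.getLogger("demo")

results = {"trigger_1": "fired", "trigger_2": "resolved"}
logger.warning(
    "dual processing results for alert rule %s",
    123,
    extra={"results": results, "num_results": len(results), "value": 42.0},
)
# prints: dual processing results for alert rule 123 | n=2 value=42.0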

src/sentry/integrations/slack/message_builder/issues.py

Lines changed: 4 additions & 3 deletions
@@ -75,6 +75,7 @@

 MAX_BLOCK_TEXT_LENGTH = 256
 USER_FEEDBACK_MAX_BLOCK_TEXT_LENGTH = 1500
+MAX_SUMMARY_HEADLINE_LENGTH = 50


 def get_group_users_count(group: Group, rules: list[Rule] | None = None) -> int:
@@ -480,9 +481,9 @@ def get_title_block(
         text = text.lstrip(" ")
         if "\n" in text:
             text = text.strip().split("\n")[0] + "..."
-        if len(text) > 100:
-            text = text[:100] + "..."
-        headline = f"{error_type}: `{text}`"
+        if len(text) > MAX_SUMMARY_HEADLINE_LENGTH:
+            text = text[:MAX_SUMMARY_HEADLINE_LENGTH] + "..."
+        headline = f"{error_type}: {text}" if text else error_type

         title = headline if headline else build_attachment_title(event_or_group)
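
The new headline logic truncates at 50 characters instead of 100, drops the backticks around the summary text, and falls back to the bare error type when the text is empty. A standalone sketch of the updated behavior:

MAX_SUMMARY_HEADLINE_LENGTH = 50


def build_headline(error_type: str, text: str) -> str:
    # Mirrors the updated get_title_block truncation logic.
    text = text.lstrip(" ")
    if "\n" in text:
        text = text.strip().split("\n")[0] + "..."
    if len(text) > MAX_SUMMARY_HEADLINE_LENGTH:
        text = text[:MAX_SUMMARY_HEADLINE_LENGTH] + "..."
    return f"{error_type}: {text}" if text else error_type


print(build_headline("TypeError", ""))        # TypeError
print(build_headline("TypeError", "a" * 60))  # TypeError: aaa... (50 chars + "...")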

src/sentry/rules/conditions/event_frequency.py

Lines changed: 26 additions & 23 deletions
@@ -190,7 +190,7 @@ def passes(self, event: GroupEvent, state: EventState) -> bool:
         return current_value > value

     def passes_activity_frequency(
-        self, activity: ConditionActivity, buckets: dict[datetime, int]
+        self, activity: ConditionActivity, buckets: dict[datetime, int | float]
     ) -> bool:
         interval, value = self._get_options()
         if not (interval and value is not None):
@@ -221,15 +221,17 @@
     def get_preview_aggregate(self) -> tuple[str, str]:
         raise NotImplementedError

-    def query(self, event: GroupEvent, start: datetime, end: datetime, environment_id: int) -> int:
+    def query(
+        self, event: GroupEvent, start: datetime, end: datetime, environment_id: int
+    ) -> int | float:
         """
         Queries Snuba for a unique condition for a single group.
         """
         return self.query_hook(event, start, end, environment_id)

     def query_hook(
         self, event: GroupEvent, start: datetime, end: datetime, environment_id: int
-    ) -> int:
+    ) -> int | float:
         """
         Abstract method that specifies how to query Snuba for a single group
         depending on the condition. Must be implemented by subclasses.
@@ -238,15 +240,15 @@ def query_hook(

     def batch_query(
         self, group_ids: set[int], start: datetime, end: datetime, environment_id: int
-    ) -> dict[int, int]:
+    ) -> dict[int, int | float]:
         """
         Queries Snuba for a unique condition for multiple groups.
         """
         return self.batch_query_hook(group_ids, start, end, environment_id)

     def batch_query_hook(
         self, group_ids: set[int], start: datetime, end: datetime, environment_id: int
-    ) -> dict[int, int]:
+    ) -> dict[int, int | float]:
         """
         Abstract method that specifies how to query Snuba for multiple groups
         depending on the condition. Must be implemented by subclasses.
@@ -279,7 +281,7 @@ def get_rate(
         event: GroupEvent,
         environment_id: int,
         comparison_type: str,
-    ) -> int:
+    ) -> int | float:
         current_time = timezone.now()
         start, end = self.get_query_window(end=current_time, duration=duration)
         with self.disable_consistent_snuba_mode(duration):
@@ -302,7 +304,7 @@ def get_rate_bulk(
         environment_id: int,
         current_time: datetime,
         comparison_interval: timedelta | None,
-    ) -> dict[int, int]:
+    ) -> dict[int, int | float]:
         """
         Make a batch query for multiple groups. The return value is a dictionary
         of group_id to the result for that group.
@@ -434,8 +436,8 @@ def query_hook(

     def batch_query_hook(
         self, group_ids: set[int], start: datetime, end: datetime, environment_id: int
-    ) -> dict[int, int]:
-        batch_sums: dict[int, int] = defaultdict(int)
+    ) -> dict[int, int | float]:
+        batch_sums: dict[int, int | float] = defaultdict(int)
         groups = Group.objects.filter(id__in=group_ids).values(
             "id", "type", "project_id", "project__organization_id"
         )
@@ -497,8 +499,8 @@ def query_hook(

     def batch_query_hook(
         self, group_ids: set[int], start: datetime, end: datetime, environment_id: int
-    ) -> dict[int, int]:
-        batch_totals: dict[int, int] = defaultdict(int)
+    ) -> dict[int, int | float]:
+        batch_totals: dict[int, int | float] = defaultdict(int)
         groups = Group.objects.filter(id__in=group_ids).values(
             "id", "type", "project_id", "project__organization_id"
         )
@@ -583,7 +585,7 @@ def query_hook(

     def batch_query_hook(
         self, group_ids: set[int], start: datetime, end: datetime, environment_id: int
-    ) -> dict[int, int]:
+    ) -> dict[int, int | float]:
         logger = logging.getLogger(
             "sentry.rules.event_frequency.EventUniqueUserFrequencyConditionWithConditions"
         )
@@ -609,7 +611,7 @@ def batch_query_hook(
             raise NotImplementedError(
                 "EventUniqueUserFrequencyConditionWithConditions does not support filter_match == any"
             )
-        batch_totals: dict[int, int] = defaultdict(int)
+        batch_totals: dict[int, int | float] = defaultdict(int)
         groups = Group.objects.filter(id__in=group_ids).values(
             "id", "type", "project_id", "project__organization_id"
         )
@@ -723,7 +725,7 @@ def get_chunked_result(

     @staticmethod
     def convert_rule_condition_to_snuba_condition(
-        condition: dict[str, Any]
+        condition: dict[str, Any],
     ) -> tuple[str, str, str | list[str]] | None:
         if condition["id"] != "sentry.rules.filters.tagged_event.TaggedEventFilter":
             return None
@@ -863,7 +865,7 @@ def get_session_interval(self, session_count: int, interval: str) -> int | None:

     def query_hook(
         self, event: GroupEvent, start: datetime, end: datetime, environment_id: int
-    ) -> int:
+    ) -> float:
         project_id = event.project_id
         session_count_last_hour = self.get_session_count(project_id, environment_id, start, end)
         avg_sessions_in_interval = self.get_session_interval(
@@ -892,14 +894,14 @@ def query_hook(
                     "avg_sessions_in_interval": avg_sessions_in_interval,
                 },
             )
-            percent: int = int(100 * round(issue_count / avg_sessions_in_interval, 4))
+            percent: float = 100 * round(issue_count / avg_sessions_in_interval, 4)
             return percent

         return 0

     def batch_query_hook(
         self, group_ids: set[int], start: datetime, end: datetime, environment_id: int
-    ) -> dict[int, int]:
+    ) -> dict[int, int | float]:
         groups = Group.objects.filter(id__in=group_ids).values(
             "id", "type", "project_id", "project__organization_id"
         )
@@ -933,34 +935,35 @@ def batch_query_hook(
             referrer_suffix="batch_alert_event_frequency_percent",
         )

-        batch_percents: dict[int, int] = {}
+        batch_percents: dict[int, int | float] = {}
         for group_id, count in error_issue_count.items():
-            percent: int = int(100 * round(count / avg_sessions_in_interval, 4))
+            percent: float = 100 * round(count / avg_sessions_in_interval, 4)
             batch_percents[group_id] = percent

         # We do not have sessions for non-error issue types
         for group in generic_issue_ids:
             batch_percents[group] = 0
-
         return batch_percents

     def passes_activity_frequency(
-        self, activity: ConditionActivity, buckets: dict[datetime, int]
+        self, activity: ConditionActivity, buckets: dict[datetime, int | float]
     ) -> bool:
         raise NotImplementedError

     def get_form_instance(self) -> EventFrequencyPercentForm:
         return EventFrequencyPercentForm(self.data)


-def bucket_count(start: datetime, end: datetime, buckets: dict[datetime, int]) -> int:
+def bucket_count(
+    start: datetime, end: datetime, buckets: dict[datetime, int | float]
+) -> int | float:
     rounded_end = round_to_five_minute(end)
     rounded_start = round_to_five_minute(start)
     count = buckets.get(rounded_end, 0) - buckets.get(rounded_start, 0)
     return count


-def percent_increase(result: int, comparison_result: int) -> int:
+def percent_increase(result: int | float, comparison_result: int | float) -> int:
     return (
         int(max(0, ((result - comparison_result) / comparison_result * 100)))
         if comparison_result > 0
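
The int | float widening matters because the old int(...) coercion rounded sub-percent session rates down to zero. A quick before/after check:

issue_count, avg_sessions_in_interval = 3, 400

old_percent = int(100 * round(issue_count / avg_sessions_in_interval, 4))
new_percent = 100 * round(issue_count / avg_sessions_in_interval, 4)

print(old_percent)  # 0     (the 0.75% rate was truncated away)
print(new_percent)  # 0.75  (preserved as a float)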

src/sentry/rules/processing/delayed_processing.py

Lines changed: 4 additions & 4 deletions
@@ -213,7 +213,7 @@ def bulk_fetch_events(event_ids: list[str], project_id: int) -> dict[str, Event]


 def parse_rulegroup_to_event_data(
-    rulegroup_to_event_data: dict[str, str]
+    rulegroup_to_event_data: dict[str, str],
 ) -> dict[tuple[int, int], dict[str, str]]:
     parsed_rulegroup_to_event_data = {}
     for rule_group, instance_data in rulegroup_to_event_data.items():
@@ -326,7 +326,7 @@ def get_group_to_groupevent(

 def get_condition_group_results(
     condition_groups: dict[UniqueConditionQuery, DataAndGroups], project: Project
-) -> dict[UniqueConditionQuery, dict[int, int]] | None:
+) -> dict[UniqueConditionQuery, dict[int, int | float]] | None:
     condition_group_results = {}
     current_time = datetime.now(tz=timezone.utc)
     project_id = project.id
@@ -377,7 +377,7 @@ def get_condition_group_results(


 def passes_comparison(
-    condition_group_results: dict[UniqueConditionQuery, dict[int, int]],
+    condition_group_results: dict[UniqueConditionQuery, dict[int, int | float]],
     condition_data: EventFrequencyConditionData,
     group_id: int,
     environment_id: int,
@@ -414,7 +414,7 @@ def passes_comparison(


 def get_rules_to_fire(
-    condition_group_results: dict[UniqueConditionQuery, dict[int, int]],
+    condition_group_results: dict[UniqueConditionQuery, dict[int, int | float]],
     rules_to_slow_conditions: DefaultDict[Rule, list[EventFrequencyConditionData]],
     rules_to_groups: DefaultDict[int, set[int]],
     project_id: int,
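
These signature changes only propagate the int | float widening from event_frequency.py through the delayed-processing call chain: once a producer can return floats, every consumer annotation must match. A minimal sketch of the propagation:

def get_results() -> dict[int, int | float]:
    # Percent-based conditions may now yield floats alongside integer counts.
    return {101: 0.75, 102: 3}


def passes_threshold(results: dict[int, int | float], group_id: int, value: float) -> bool:
    return results.get(group_id, 0) > value


print(passes_threshold(get_results(), 101, 0.5))  # True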

src/sentry/search/eap/columns.py

Lines changed: 2 additions & 0 deletions
@@ -121,6 +121,8 @@ class VirtualColumnDefinition:
     ) = None
     filter_column: str | None = None
     default_value: str | None = None
+    # Processor is the function run in the post process step to transform a row into the final result
+    processor: Callable[[Any], Any] | None = None


 @dataclass(frozen=True, kw_only=True)
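
A sketch of how an optional processor hook like this can be applied in a post-process step — ColumnDef below is a hypothetical stand-in, not Sentry's actual VirtualColumnDefinition:

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any


@dataclass(frozen=True, kw_only=True)
class ColumnDef:  # hypothetical stand-in
    filter_column: str | None = None
    default_value: str | None = None
    # Optional hook run after the query to transform a raw row value.
    processor: Callable[[Any], Any] | None = None


col = ColumnDef(processor=str.upper)
raw_value = "release-1.2.3"
final = col.processor(raw_value) if col.processor else raw_value
print(final)  # RELEASE-1.2.3

The resolver change in the next file wires exactly this kind of hook through to ResolvedAttribute.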

src/sentry/search/eap/resolver.py

Lines changed: 4 additions & 1 deletion
@@ -718,7 +718,10 @@ def resolve_attribute(
         if column in self.definitions.contexts:
             column_context = self.definitions.contexts[column]
             column_definition = ResolvedAttribute(
-                public_alias=column, internal_name=column, search_type="string"
+                public_alias=column,
+                internal_name=column,
+                search_type="string",
+                processor=column_context.processor,
             )
         elif column in self.definitions.columns:
             column_context = None
