19
19
from sentry .models .group import Group
20
20
from sentry .models .project import Project
21
21
from sentry .profiles .utils import get_from_profiling_service
22
+ from sentry .search .events .types import EventsResponse , SnubaParams
22
23
from sentry .seer .seer_setup import get_seer_org_acknowledgement
23
24
from sentry .seer .signed_seer_api import sign_with_seer_secret
25
+ from sentry .snuba import ourlogs
24
26
from sentry .snuba .dataset import Dataset
25
27
from sentry .snuba .referrer import Referrer
26
28
from sentry .tasks .autofix import check_autofix_status
33
35
TIMEOUT_SECONDS = 60 * 30 # 30 minutes
34
36
35
37
38
def _get_logs_for_event(event: Event | GroupEvent, project: Project) -> list[dict] | None:
    """Fetch trace-correlated logs to give Autofix runtime context for an event.

    Queries up to 100 logs sharing the event's ``trace_id`` across all active
    projects in the organization (within +/- 1 day of the event), keeps a
    window of up to 80 logs before and 20 logs after the log closest in time
    to the event, and collapses consecutive runs with identical message and
    severity.

    Returns:
        ``None`` when the event has no ``trace_id``; otherwise a list of log
        dicts carrying ``project_slug``, ``code_file_path``,
        ``code_function_name`` and, for merged runs, ``consecutive_count``.
    """
    trace_id = event.trace_id
    if not trace_id:
        return None

    projects_qs = Project.objects.filter(
        organization=project.organization, status=ObjectStatus.ACTIVE
    )
    projects = list(projects_qs)
    project_id_to_slug = dict(projects_qs.values_list("id", "slug"))

    # Search a generous window around the event; trace logs may precede or
    # trail the error itself.
    start = event.datetime - timedelta(days=1)
    end = event.datetime + timedelta(days=1)

    snuba_params = SnubaParams(
        start=start,
        end=end,
        projects=projects,
        organization=project.organization,
    )

    results: EventsResponse = ourlogs.query(
        selected_columns=[
            "project.id",
            "timestamp",
            "message",
            "severity",
            "code.file.path",
            "code.function.name",
        ],
        query=f"trace_id:{trace_id}",
        snuba_params=snuba_params,
        orderby=["-timestamp"],
        offset=0,
        limit=100,
        referrer=Referrer.API_GROUP_AI_AUTOFIX,
    )
    data = results["data"]

    _attach_parsed_timestamps_and_sort(data, event.datetime)
    window = _window_around_closest_log(data, event.datetime)
    return _merge_consecutive_logs(window, project_id_to_slug)


def _attach_parsed_timestamps_and_sort(data: list[dict], event_ts: datetime) -> None:
    """Parse each log's ISO timestamp into ``_parsed_ts`` and sort ascending.

    Mutates ``data`` in place. Unparseable timestamps get ``_parsed_ts=None``
    and sort first.
    """
    for log in data:
        raw = log.get("timestamp")
        parsed: datetime | None = None
        if raw:
            try:
                parsed = datetime.fromisoformat(raw)
            except (ValueError, TypeError):
                parsed = None
        # NOTE(review): assumes log timestamps share event.datetime's
        # timezone-awareness; normalize naive values so aware/naive
        # comparisons below cannot raise TypeError — TODO confirm format.
        if parsed is not None and parsed.tzinfo is None and event_ts.tzinfo is not None:
            parsed = parsed.replace(tzinfo=event_ts.tzinfo)
        log["_parsed_ts"] = parsed

    # Sentinel matches event_ts's awareness: a naive datetime.min mixed with
    # aware parsed timestamps would make list.sort raise TypeError.
    sentinel = datetime.min.replace(tzinfo=event_ts.tzinfo)
    data.sort(key=lambda log: log["_parsed_ts"] or sentinel)


def _window_around_closest_log(data: list[dict], event_ts: datetime) -> list[dict]:
    """Return up to 80 logs before and 20 after (inclusive) the log whose
    timestamp is closest to ``event_ts``.

    Falls back to the start of ``data`` when no log has a parsed timestamp;
    returns ``[]`` for empty input.
    """
    closest_idx = 0
    timestamped = [
        (i, log["_parsed_ts"]) for i, log in enumerate(data) if log["_parsed_ts"] is not None
    ]
    if timestamped:
        closest_idx = min(
            timestamped, key=lambda pair: abs((pair[1] - event_ts).total_seconds())
        )[0]

    start_idx = max(0, closest_idx - 80)
    end_idx = min(len(data), closest_idx + 21)
    return data[start_idx:end_idx]


def _merge_consecutive_logs(window: list[dict], project_id_to_slug: dict) -> list[dict]:
    """Rename snuba-style columns and collapse consecutive duplicate logs.

    Consecutive logs with identical ``message`` and ``severity`` are merged
    into the first log of the run, annotated with ``consecutive_count`` when
    the run length exceeds 1. Mutates the log dicts in ``window`` in place
    (column renames) but returns copies in the merged output.
    """
    merged_logs: list[dict] = []
    prev_log: dict | None = None
    count = 0
    for log in window:
        project_id = log.get("project.id")
        log["project_slug"] = project_id_to_slug.get(project_id) if project_id else None
        log["code_file_path"] = log.get("code.file.path")
        log["code_function_name"] = log.get("code.function.name")
        log.pop("code.file.path", None)
        log.pop("code.function.name", None)
        log.pop("_parsed_ts", None)
        log.pop("project.id", None)

        msg = log.get("message")
        sev = log.get("severity")
        if prev_log is not None and msg == prev_log["message"] and sev == prev_log["severity"]:
            count += 1
        else:
            _flush_log_run(merged_logs, prev_log, count)
            prev_log = log.copy()
            count = 1
    _flush_log_run(merged_logs, prev_log, count)

    return merged_logs


def _flush_log_run(merged_logs: list[dict], prev_log: dict | None, count: int) -> None:
    """Append the pending run's representative log, annotating repeat counts."""
    if prev_log is None:
        return
    if count > 1:
        prev_log["consecutive_count"] = count
    merged_logs.append(prev_log)
140
+
141
+
36
142
def build_spans_tree (spans_data : list [dict ]) -> list [dict ]:
37
143
"""
38
144
Builds a hierarchical tree structure from a flat list of spans.
@@ -676,6 +782,7 @@ def _call_autofix(
676
782
serialized_event : dict [str , Any ],
677
783
profile : dict [str , Any ] | None ,
678
784
trace_tree : dict [str , Any ] | None ,
785
+ logs : list [dict ] | None ,
679
786
instruction : str | None = None ,
680
787
timeout_secs : int = TIMEOUT_SECONDS ,
681
788
pr_to_comment_on_url : str | None = None ,
@@ -696,6 +803,7 @@ def _call_autofix(
696
803
},
697
804
"profile" : profile ,
698
805
"trace_tree" : trace_tree ,
806
+ "logs" : logs ,
699
807
"instruction" : instruction ,
700
808
"timeout_secs" : timeout_secs ,
701
809
"last_updated" : datetime .now ().isoformat (),
@@ -791,6 +899,13 @@ def trigger_autofix(
791
899
logger .exception ("Failed to get profile from trace tree" )
792
900
profile = None
793
901
902
+ # get logs for this event
903
+ try :
904
+ logs = _get_logs_for_event (event , group .project ) if event else None
905
+ except Exception :
906
+ logger .exception ("Failed to get logs for event" )
907
+ logs = None
908
+
794
909
try :
795
910
run_id = _call_autofix (
796
911
user = user ,
@@ -799,6 +914,7 @@ def trigger_autofix(
799
914
serialized_event = serialized_event ,
800
915
profile = profile ,
801
916
trace_tree = trace_tree ,
917
+ logs = logs ,
802
918
instruction = instruction ,
803
919
timeout_secs = TIMEOUT_SECONDS ,
804
920
pr_to_comment_on_url = pr_to_comment_on_url ,
0 commit comments