@@ -312,6 +312,14 @@ def get_comparison_aggregation_value(
 
         if not comparison_aggregate:
             metrics.incr("incidents.alert_rules.skipping_update_comparison_value_invalid")
+            logger.info(
+                "No comparison aggregate",
+                extra={
+                    "alert_rule_id": self.alert_rule.id,
+                    "subscription_id": subscription_update.get("subscription_id"),
+                    "organization_id": self.alert_rule.organization_id,
+                },
+            )
             return None
 
         return (aggregation_value / comparison_aggregate) * 100
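
For reference, a minimal standalone sketch of the guard this hunk touches (not the actual Sentry module; `comparison_ratio` and the bare `logging` setup are stand-ins for the real method and its `metrics`/`logger` plumbing): when the comparison aggregate is missing, the method now logs context and returns `None` instead of skipping silently.

```python
# Illustrative sketch only -- the real code lives in the subscription
# processor and also increments the
# "incidents.alert_rules.skipping_update_comparison_value_invalid" metric.
import logging

logger = logging.getLogger(__name__)


def comparison_ratio(aggregation_value: float, comparison_aggregate: float | None) -> float | None:
    """Return the comparison value as a percentage, or None if it cannot be computed."""
    if not comparison_aggregate:
        # New behaviour from this hunk: record why the update is skipped.
        logger.info("No comparison aggregate", extra={"aggregation_value": aggregation_value})
        return None
    return (aggregation_value / comparison_aggregate) * 100
```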
@@ -408,33 +416,34 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
             },
         )
 
-        if features.has(
-            "organizations:workflow-engine-metric-alert-processing",
-            self.subscription.project.organization,
-        ):
-            packet = MetricDetectorUpdate(
-                entity=subscription_update.get("entity", ""),
-                subscription_id=subscription_update["subscription_id"],
-                values={"value": aggregation_value},
-                timestamp=self.last_update,
-            )
-            data_packet = DataPacket[MetricDetectorUpdate](
-                source_id=str(self.subscription.id), packet=packet
-            )
-            results = process_data_packets([data_packet], DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION)
+        if aggregation_value is not None:
             if features.has(
-                "organizations:workflow-engine-metric-alert-dual-processing-logs",
-                self.alert_rule.organization,
+                "organizations:workflow-engine-metric-alert-processing",
+                self.subscription.project.organization,
             ):
-                logger.info(
-                    "dual processing results for alert rule",
-                    extra={
-                        "results": results,
-                        "num_results": len(results),
-                        "value": aggregation_value,
-                        "rule_id": self.alert_rule.id,
-                    },
+                packet = MetricDetectorUpdate(
+                    entity=subscription_update.get("entity", ""),
+                    subscription_id=subscription_update["subscription_id"],
+                    values={"value": aggregation_value},
+                    timestamp=self.last_update,
                 )
+                data_packet = DataPacket[MetricDetectorUpdate](
+                    source_id=str(self.subscription.id), packet=packet
+                )
+                results = process_data_packets([data_packet], DATA_SOURCE_SNUBA_QUERY_SUBSCRIPTION)
+                if features.has(
+                    "organizations:workflow-engine-metric-alert-dual-processing-logs",
+                    self.alert_rule.organization,
+                ):
+                    logger.info(
+                        "dual processing results for alert rule",
+                        extra={
+                            "results": results,
+                            "num_results": len(results),
+                            "value": aggregation_value,
+                            "rule_id": self.alert_rule.id,
+                        },
+                    )
 
         has_anomaly_detection = features.has(
             "organizations:anomaly-detection-alerts", self.subscription.project.organization