@@ -258,14 +258,14 @@ def test_process_pending_partitions_none(self, process_pending, process_incr):
     def group_rule_data_by_project_id(self, buffer, project_ids):
         project_ids_to_rule_data = defaultdict(list)
         for proj_id in project_ids[0]:
-            rule_group_pairs = buffer.get_hash(Project, {"project_id": proj_id})
+            rule_group_pairs = buffer.get_hash(Project, {"project_id": proj_id[0]})
             for pair in rule_group_pairs:
                 for k, v in pair.items():
                     if isinstance(k, bytes):
                         k = k.decode("utf-8")
                     if isinstance(v, bytes):
                         v = v.decode("utf-8")
-                    project_ids_to_rule_data[int(proj_id)].append({k: v})
+                    project_ids_to_rule_data[int(proj_id[0])].append({k: v})
         return project_ids_to_rule_data
 
     def test_enqueue(self):
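
A note on the `proj_id[0]` change above: with project IDs stored in a Redis sorted set, reads that include scores come back as `(member, score)` tuples rather than bare members, so callers have to index into each tuple. A minimal sketch of that behavior in plain redis-py, assuming a local Redis server; none of this is the Sentry buffer API itself:

import time

import redis

r = redis.Redis()  # assumes a local Redis server on the default port

# Sorted-set members carry a score; here the score is the insertion time.
r.zadd("projects", {"1": time.time()})
r.zadd("projects", {"2": time.time()})

# Reading with scores yields (member, score) tuples, which is why the
# helper above indexes proj_id[0] before using the member.
for member, score in r.zrange("projects", 0, -1, withscores=True):
    # Without decode_responses=True, redis-py returns members as bytes.
    raw = member.decode("utf-8") if isinstance(member, bytes) else member
    print(int(raw), score)
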
@@ -283,8 +283,8 @@ def test_enqueue(self):
         event4_id = 11
 
         # store the project ids
-        self.buf.push_to_set(key=PROJECT_ID_BUFFER_LIST_KEY, value=project_id)
-        self.buf.push_to_set(key=PROJECT_ID_BUFFER_LIST_KEY, value=project_id2)
+        self.buf.push_to_sorted_set(key=PROJECT_ID_BUFFER_LIST_KEY, value=project_id)
+        self.buf.push_to_sorted_set(key=PROJECT_ID_BUFFER_LIST_KEY, value=project_id2)
 
         # store the rules and group per project
         self.buf.push_to_hash(
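
The switch from `push_to_set` to `push_to_sorted_set` is what makes time-based cleanup possible later in the diff: a sorted set scores every member, and scoring by insertion timestamp lets `delete_key` prune entries by time range. A rough sketch of how such a method could wrap ZADD; the method names mirror the diff, but the bodies are assumptions, not the real `RedisBuffer` internals:

import time

import redis


class SortedSetBuffer:
    """Toy stand-in for the sorted-set half of a Redis-backed buffer."""

    def __init__(self) -> None:
        self.client = redis.Redis()

    def push_to_sorted_set(self, key: str, value: int) -> None:
        # ZADD with the current time as the score, so members can later
        # be selected or pruned by timestamp range.
        self.client.zadd(key, {str(value): time.time()})

    def get_set(self, key: str):
        # Oldest-first list of (member, score) pairs.
        return self.client.zrange(key, 0, -1, withscores=True)
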
@@ -350,6 +350,67 @@ def test_process_batch(self):
         assert mock.call_count == 1
         assert mock.call_args[0][0] == self.buf
 
+    def test_delete_batch(self):
+        """Test that after we add things to redis we can clean it up"""
+        project_id = 1
+        rule_id = 2
+        group_id = 3
+        event_id = 4
+
+        project2_id = 5
+        rule2_id = 6
+        group2_id = 7
+        event2_id = 8
+
+        now = datetime.datetime(2024, 4, 15, 3, 30, 00, tzinfo=datetime.UTC)
+        one_minute_from_now = (now).replace(minute=31)
+
+        # add a set and a hash to the buffer
+        with freeze_time(now):
+            self.buf.push_to_sorted_set(key=PROJECT_ID_BUFFER_LIST_KEY, value=project_id)
+            self.buf.push_to_hash(
+                model=Project,
+                filters={"project_id": project_id},
+                field=f"{rule_id}:{group_id}",
+                value=event_id,
+            )
+        with freeze_time(one_minute_from_now):
+            self.buf.push_to_sorted_set(key=PROJECT_ID_BUFFER_LIST_KEY, value=project2_id)
+            self.buf.push_to_hash(
+                model=Project,
+                filters={"project_id": project2_id},
+                field=f"{rule2_id}:{group2_id}",
+                value=event2_id,
+            )
+
+        # retrieve them
+        project_ids = self.buf.get_set(PROJECT_ID_BUFFER_LIST_KEY)
+        assert len(project_ids[0]) == 2
+        rule_group_pairs = self.buf.get_hash(Project, {"project_id": project_id})
+        assert len(rule_group_pairs)
+
+        # delete only the first project ID by time
+        self.buf.delete_key(PROJECT_ID_BUFFER_LIST_KEY, min=0, max=now.timestamp())
+
+        # retrieve again to make sure only project_id was removed
+        project_ids = self.buf.get_set(PROJECT_ID_BUFFER_LIST_KEY)
+        if isinstance(project_ids[0][0][0], bytes):
+            assert project_ids == [
+                [(bytes(str(project2_id), "utf-8"), one_minute_from_now.timestamp())]
+            ]
+        else:
+            assert project_ids == [[(str(project2_id), one_minute_from_now.timestamp())]]
+
+        # delete the project_id hash
+        self.buf.delete_hash(
+            model=Project,
+            filters={"project_id": project_id},
+            field=f"{rule_id}:{group_id}",
+        )
+
+        rule_group_pairs = self.buf.get_hash(Project, {"project_id": project_id})
+        assert rule_group_pairs == [{}]
+
     @mock.patch("sentry.buffer.redis.RedisBuffer._make_key", mock.Mock(return_value="foo"))
     @mock.patch("sentry.buffer.base.Buffer.process")
     def test_process_uses_signal_only(self, process):
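
For orientation, the cleanup calls exercised by `test_delete_batch` map naturally onto two Redis primitives: a score-bounded ZREMRANGEBYSCORE for the sorted set and HDEL for the hash field. A sketch in plain redis-py with hypothetical key names, since the real keys are derived by `RedisBuffer._make_key`:

import datetime

import redis

r = redis.Redis()

now = datetime.datetime(2024, 4, 15, 3, 30, 0, tzinfo=datetime.UTC)

# delete_key(PROJECT_ID_BUFFER_LIST_KEY, min=0, max=now.timestamp())
# plausibly reduces to dropping every member scored within [0, now]:
r.zremrangebyscore("project-id-buffer", min=0, max=now.timestamp())  # key name is made up

# delete_hash(model=Project, filters=..., field="2:3") plausibly reduces
# to removing a single field from the per-project hash:
r.hdel("project-hash-key", "2:3")  # key name is made up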