@@ -383,6 +383,74 @@ def select_rows(
     def tenant_ids(self) -> dict[str, str]:
         return {"referrer": self.referrer, "organization_id": self.organization.id}
 
+    def test_simple_issue_platform(self) -> None:
+        # Adding this query here to make sure that the cache is not being used
+        assert self.select_error_events(self.project.id) is None
+        initial_issue_platform_data = self.select_issue_platform_events(self.project.id)
+        assert initial_issue_platform_data is None, (
+            f"Expected no issue platform events for new project {self.project.id}, "
+            f"but found: event_id={initial_issue_platform_data.get('event_id') if initial_issue_platform_data else None}, "
+            f"group_id={initial_issue_platform_data.get('group_id') if initial_issue_platform_data else None}, "
+            f"occurrence_id={initial_issue_platform_data.get('occurrence_id') if initial_issue_platform_data else None}. "
+            f"This suggests the reset_snuba fixture is not properly cleaning up the search_issues dataset."
+        )
+
+        # Create an initial error event and an occurrence related to it; two different groups will exist
+        event = self.store_event(data={}, project_id=self.project.id)
+        # XXX: We need a different way of creating occurrences which will insert into the nodestore
+        occurrence_event, issue_platform_group = self.create_occurrence(
+            event, type_id=FeedbackGroup.type_id
+        )
+
+        # Assertions after creation
+        assert occurrence_event.id != event.event_id
+        assert event.group_id != issue_platform_group.id
+        assert event.group.issue_category == GroupCategory.ERROR
+        assert issue_platform_group.issue_category == GroupCategory.FEEDBACK
+        assert issue_platform_group.type == FeedbackGroup.type_id
+
+        # Assert that the error event has been inserted in the nodestore & Snuba
+        event_node_id = Event.generate_node_id(event.project_id, event.event_id)
+        assert nodestore.backend.get(event_node_id)
+        expected_error = {"event_id": event.event_id, "group_id": event.group_id}
+        assert self.select_error_events(self.project.id) == expected_error
+
+        # Assert that the occurrence event has been inserted in the nodestore & Snuba
+        # occurrence_node_id = Event.generate_node_id(
+        #     occurrence_event.project_id, occurrence_event.id
+        # )
+        # assert nodestore.backend.get(occurrence_node_id)
+        expected_occurrence_event = {
+            "event_id": occurrence_event.event_id,
+            "group_id": issue_platform_group.id,
+            "occurrence_id": occurrence_event.id,
+        }
+        assert self.select_issue_platform_events(self.project.id) == expected_occurrence_event
+
+        # This will delete the group and the events from the nodestore and Snuba
+        with self.tasks():
+            delete_groups_for_project(
+                object_ids=[issue_platform_group.id],
+                transaction_id=uuid4().hex,
+                project_id=self.project.id,
+            )
+
+        # The original error event and group still exist
+        assert Group.objects.filter(id=event.group_id).exists()
+        assert nodestore.backend.get(event_node_id)
+        assert self.select_error_events(self.project.id) == expected_error
+
+        # The Issue Platform group and occurrence have been deleted
+        assert not Group.objects.filter(id=issue_platform_group.id).exists()
+        # assert not nodestore.backend.get(occurrence_node_id)
+        final_issue_platform_data = self.select_issue_platform_events(self.project.id)
+        assert final_issue_platform_data is None, (
+            f"Expected issue platform events to be deleted, but found data in Snuba.\n"
+            f"Expected (deleted): event_id={occurrence_event.event_id}, "
+            f"group_id={issue_platform_group.id}, occurrence_id={occurrence_event.id}\n"
+            f"Found: {final_issue_platform_data}"
+        )
+
     @mock.patch("sentry.deletions.tasks.nodestore.bulk_snuba_queries")
     def test_issue_platform_batching(self, mock_bulk_snuba_queries: mock.Mock) -> None:
         # Patch max_rows_to_delete to a small value for testing
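
Note: the test above relies on select_error_events and select_issue_platform_events helpers defined earlier in this file, outside this hunk. As a rough sketch only, a helper in the spirit of select_issue_platform_events could query the search_issues dataset with snuba-sdk along the lines below; the column names, referrer, tenant values, and the raw_snql_query call are assumptions for illustration, not the file's actual implementation.

# Hypothetical sketch (not part of this patch): query the search_issues dataset
# for the single row stored for a project, returning None when nothing is found.
from datetime import datetime, timedelta, timezone
from typing import Any

from snuba_sdk import Column, Condition, Entity, Op, Query, Request

from sentry.utils.snuba import raw_snql_query  # assumed utility; the real helper may differ


def select_issue_platform_events(project_id: int) -> dict[str, Any] | None:
    now = datetime.now(timezone.utc)
    query = Query(
        match=Entity("search_issues"),
        select=[Column("event_id"), Column("group_id"), Column("occurrence_id")],
        where=[
            Condition(Column("project_id"), Op.EQ, project_id),
            Condition(Column("timestamp"), Op.GTE, now - timedelta(days=1)),
            Condition(Column("timestamp"), Op.LT, now + timedelta(minutes=1)),
        ],
    )
    request = Request(
        dataset="search_issues",
        app_id="tests",
        query=query,
        tenant_ids={"referrer": "tests", "organization_id": 1},  # assumed tenant values
    )
    rows = raw_snql_query(request, referrer="tests")["data"]
    return rows[0] if rows else None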