Commit 4335498

try again to make the test_simple_issue_platform test end-to-end and pass in CI
1 parent ddab562 commit 4335498

File tree

1 file changed (+18, -119 lines)

tests/sentry/deletions/test_group.py

Lines changed: 18 additions & 119 deletions
@@ -6,7 +6,6 @@
 from unittest import mock
 from uuid import uuid4

-import pytest
 from snuba_sdk import Column, Condition, Entity, Function, Op, Query, Request

 from sentry import deletions, nodestore
@@ -384,15 +383,13 @@ def select_rows(
     def tenant_ids(self) -> dict[str, str]:
         return {"referrer": self.referrer, "organization_id": self.organization.id}

-    @mock.patch("sentry.deletions.tasks.nodestore.bulk_snuba_queries")
-    def test_simple_issue_platform(self, mock_bulk_snuba_queries: mock.Mock) -> None:
+    def test_simple_issue_platform(self) -> None:
         # Adding this query here to make sure that the cache is not being used
         assert self.select_error_events(self.project.id) is None
         assert self.select_issue_platform_events(self.project.id) is None

         # Create initial error event and occurrence related to it; two different groups will exist
         event = self.store_event(data={}, project_id=self.project.id)
-        # XXX: We need a different way of creating occurrences which will insert into the nodestore
         occurrence_event, issue_platform_group = self.create_occurrence(
             event, type_id=FeedbackGroup.type_id
         )
@@ -410,11 +407,7 @@ def test_simple_issue_platform(self, mock_bulk_snuba_queries: mock.Mock) -> None
         expected_error = {"event_id": event.event_id, "group_id": event.group_id}
         assert self.select_error_events(self.project.id) == expected_error

-        # Assert that the occurrence event has been inserted in the nodestore & Snuba
-        # occurrence_node_id = Event.generate_node_id(
-        #     occurrence_event.project_id, occurrence_event.id
-        # )
-        # assert nodestore.backend.get(occurrence_node_id)
+        # Assert that the occurrence event has been inserted in Snuba
         expected_occurrence_event = {
             "event_id": occurrence_event.event_id,
             "group_id": issue_platform_group.id,
@@ -437,34 +430,22 @@ def test_simple_issue_platform(self, mock_bulk_snuba_queries: mock.Mock) -> None

         # The Issue Platform group and occurrence have been deleted from Postgres
         assert not Group.objects.filter(id=issue_platform_group.id).exists()
-        # assert not nodestore.backend.get(occurrence_node_id)
-
-        # Verify that deletion was sent to Snuba
-        # We mock bulk_snuba_queries to avoid eventual consistency issues with ClickHouse light deletes
-        assert mock_bulk_snuba_queries.called
-        delete_calls = [
-            call for call in mock_bulk_snuba_queries.call_args_list if call[0]
-        ]  # Get calls with args
-        assert (
-            len(delete_calls) > 0
-        ), "Expected at least one call to bulk_snuba_queries for deletion"
-
-        # Verify a DeleteQuery was sent for the issue platform group
-        from snuba_sdk import DeleteQuery
-
-        found_delete = False
-        for call in delete_calls:
-            requests = call[0][0]  # First positional arg is the list of requests
-            for req in requests:
-                if isinstance(req.query, DeleteQuery):
-                    # Verify it's deleting the correct group
-                    if issue_platform_group.id in req.query.column_conditions.get("group_id", []):
-                        found_delete = True
-                        break
-            if found_delete:
-                break
-
-        assert found_delete, f"No DeleteQuery found for group {issue_platform_group.id}"
+
+        # Verify events are deleted from Snuba
+        # ClickHouse light deletes are eventually consistent, so poll with timeout
+        max_attempts = 50  # Up to 5 seconds
+        for attempt in range(max_attempts):
+            result = self.select_issue_platform_events(self.project.id)
+            if result is None:
+                break  # Success - events deleted
+            if attempt < max_attempts - 1:
+                sleep(0.1)
+        else:
+            # If we exhausted all attempts, fail with helpful message
+            result = self.select_issue_platform_events(self.project.id)
+            raise AssertionError(
+                f"Issue platform events not deleted from Snuba after 5 seconds. Found: {result}"
+            )

     @mock.patch("sentry.deletions.tasks.nodestore.bulk_snuba_queries")
     def test_issue_platform_batching(self, mock_bulk_snuba_queries: mock.Mock) -> None:
@@ -508,85 +489,3 @@ def test_issue_platform_batching(self, mock_bulk_snuba_queries: mock.Mock) -> None
         assert first_batch == [group2.id, group1.id]  # group2 has less times_seen than group1
         # group3 and group4 have the same times_seen, thus sorted by id
         assert second_batch == [group3.id, group4.id]
-
-    @pytest.mark.xfail(
-        strict=False,
-        reason="ClickHouse light deletes are eventually consistent and may be slow in CI",
-    )
-    def test_issue_platform_deletion_integration(self) -> None:
-        """
-        Integration test verifying issue platform events are actually deleted from Snuba.
-
-        Unlike test_simple_issue_platform which mocks Snuba calls for speed and reliability,
-        this test performs actual end-to-end deletion to verify the integration works.
-
-        Note: This test may be flaky in CI due to ClickHouse eventual consistency.
-        It's marked with xfail to avoid blocking CI, but will report if it passes.
-        """
-        # Adding this query here to make sure that the cache is not being used
-        assert self.select_error_events(self.project.id) is None
-        assert self.select_issue_platform_events(self.project.id) is None
-
-        # Create initial error event and occurrence related to it
-        event = self.store_event(data={}, project_id=self.project.id)
-        occurrence_event, issue_platform_group = self.create_occurrence(
-            event, type_id=FeedbackGroup.type_id
-        )
-
-        # Assertions after creation
-        assert occurrence_event.id != event.event_id
-        assert event.group_id != issue_platform_group.id
-        assert event.group.issue_category == GroupCategory.ERROR
-        assert issue_platform_group.issue_category == GroupCategory.FEEDBACK
-        assert issue_platform_group.type == FeedbackGroup.type_id
-
-        # Assert that the error event has been inserted in the nodestore & Snuba
-        event_node_id = Event.generate_node_id(event.project_id, event.event_id)
-        assert nodestore.backend.get(event_node_id)
-        expected_error = {"event_id": event.event_id, "group_id": event.group_id}
-        assert self.select_error_events(self.project.id) == expected_error
-
-        # Assert that the occurrence event has been inserted in Snuba
-        expected_occurrence_event = {
-            "event_id": occurrence_event.event_id,
-            "group_id": issue_platform_group.id,
-            "occurrence_id": occurrence_event.id,
-        }
-        assert self.select_issue_platform_events(self.project.id) == expected_occurrence_event
-
-        # This will delete the group and the events from the node store and Snuba
-        with self.tasks():
-            delete_groups_for_project(
-                object_ids=[issue_platform_group.id],
-                transaction_id=uuid4().hex,
-                project_id=self.project.id,
-            )
-
-        # The original error event and group still exist
-        assert Group.objects.filter(id=event.group_id).exists()
-        assert nodestore.backend.get(event_node_id)
-        assert self.select_error_events(self.project.id) == expected_error
-
-        # The Issue Platform group has been deleted from Postgres
-        assert not Group.objects.filter(id=issue_platform_group.id).exists()
-
-        # Poll for Snuba deletion with CI-friendly timeouts
-        # ClickHouse light deletes are eventually consistent, so we need to wait
-        max_attempts = 50  # Up to 5 seconds
-        delay = 0.1
-        deleted = False
-        for attempt in range(max_attempts):
-            result = self.select_issue_platform_events(self.project.id)
-            if result is None:
-                deleted = True
-                break
-            if attempt < max_attempts - 1:
-                sleep(delay)
-
-        if not deleted:
-            # Provide helpful debug information if test fails
-            result = self.select_issue_platform_events(self.project.id)
-            pytest.fail(
-                f"Issue platform events not deleted from Snuba after {max_attempts * delay}s. "
-                f"Found: {result}. This indicates ClickHouse replication lag or deletion failure."
-            )
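The new assertion replaces the mocked bulk_snuba_queries check with a poll against Snuba itself, since ClickHouse lightweight deletes are eventually consistent; the for/else in the diff runs the else branch only when the loop finishes without a break, i.e. when the delete was never observed. Below is a minimal, self-contained sketch of that poll-until-gone pattern in isolation. The wait_until_deleted helper and its query callable are illustrative only and not part of the Sentry test suite; the sketch raises after the loop instead of using for/else.

from time import sleep
from typing import Callable, Optional


def wait_until_deleted(
    query: Callable[[], Optional[dict]],
    max_attempts: int = 50,
    delay: float = 0.1,
) -> None:
    """Poll `query` until it returns None, failing after roughly max_attempts * delay seconds."""
    for attempt in range(max_attempts):
        if query() is None:
            return  # the eventually-consistent delete has landed
        if attempt < max_attempts - 1:
            sleep(delay)
    # Loop exhausted without observing the delete: surface whatever is still there.
    raise AssertionError(
        f"Rows still present after {max_attempts * delay:.1f}s: {query()}"
    )


# Hypothetical usage mirroring the new assertion in the test:
# wait_until_deleted(lambda: self.select_issue_platform_events(self.project.id))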

0 commit comments
