@@ -1,11 +1,12 @@
 import os
 import random
 from datetime import datetime, timedelta
-from time import time
+from time import sleep, time
 from typing import Any
 from unittest import mock
 from uuid import uuid4
 
+import pytest
 from snuba_sdk import Column, Condition, Entity, Function, Op, Query, Request
 
 from sentry import deletions, nodestore
@@ -507,3 +508,85 @@ def test_issue_platform_batching(self, mock_bulk_snuba_queries: mock.Mock) -> No
         assert first_batch == [group2.id, group1.id]  # group2 has less times_seen than group1
         # group3 and group4 have the same times_seen, thus sorted by id
         assert second_batch == [group3.id, group4.id]
+
+    @pytest.mark.xfail(
+        strict=False,
+        reason="ClickHouse lightweight deletes are eventually consistent and may be slow in CI",
+    )
+    def test_issue_platform_deletion_integration(self) -> None:
+        """
+        Integration test verifying that issue platform events are actually deleted from Snuba.
+
+        Unlike test_simple_issue_platform, which mocks Snuba calls for speed and reliability,
+        this test performs an actual end-to-end deletion to verify the integration works.
+
+        Note: this test may be flaky in CI due to ClickHouse's eventual consistency.
+        It is marked xfail to avoid blocking CI, but it still reports when it passes.
+        """
+        # Query first to make sure that the cache is not being used
+        assert self.select_error_events(self.project.id) is None
+        assert self.select_issue_platform_events(self.project.id) is None
+
+        # Create an initial error event and an occurrence related to it
+        event = self.store_event(data={}, project_id=self.project.id)
+        occurrence_event, issue_platform_group = self.create_occurrence(
+            event, type_id=FeedbackGroup.type_id
+        )
+
+        # Assertions after creation
+        assert occurrence_event.id != event.event_id
+        assert event.group_id != issue_platform_group.id
+        assert event.group.issue_category == GroupCategory.ERROR
+        assert issue_platform_group.issue_category == GroupCategory.FEEDBACK
+        assert issue_platform_group.type == FeedbackGroup.type_id
+
+        # Assert that the error event has been inserted in the nodestore & Snuba
+        event_node_id = Event.generate_node_id(event.project_id, event.event_id)
+        assert nodestore.backend.get(event_node_id)
+        expected_error = {"event_id": event.event_id, "group_id": event.group_id}
+        assert self.select_error_events(self.project.id) == expected_error
+
+        # Assert that the occurrence event has been inserted in Snuba
+        expected_occurrence_event = {
+            "event_id": occurrence_event.event_id,
+            "group_id": issue_platform_group.id,
+            "occurrence_id": occurrence_event.id,
+        }
+        assert self.select_issue_platform_events(self.project.id) == expected_occurrence_event
+
+        # This will delete the group and the events from the node store and Snuba
+        with self.tasks():
+            delete_groups_for_project(
+                object_ids=[issue_platform_group.id],
+                transaction_id=uuid4().hex,
+                project_id=self.project.id,
+            )
+
+        # The original error event and group still exist
+        assert Group.objects.filter(id=event.group_id).exists()
+        assert nodestore.backend.get(event_node_id)
+        assert self.select_error_events(self.project.id) == expected_error
+
+        # The Issue Platform group has been deleted from Postgres
+        assert not Group.objects.filter(id=issue_platform_group.id).exists()
+
+        # Poll for the Snuba deletion with CI-friendly timeouts.
+        # ClickHouse lightweight deletes are eventually consistent, so we need to wait.
+        max_attempts = 50  # 50 attempts x 0.1s sleep, i.e. up to ~5 seconds
+        delay = 0.1
+        deleted = False
+        for attempt in range(max_attempts):
+            result = self.select_issue_platform_events(self.project.id)
+            if result is None:
+                deleted = True
+                break
+            if attempt < max_attempts - 1:
+                sleep(delay)
+
+        if not deleted:
+            # Provide helpful debug information if the test fails
+            result = self.select_issue_platform_events(self.project.id)
+            pytest.fail(
+                f"Issue platform events not deleted from Snuba after {max_attempts * delay}s. "
+                f"Found: {result}. This indicates ClickHouse replication lag or deletion failure."
+            )
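
For readers unfamiliar with the pattern, the retry loop added above is the usual way to assert against an eventually consistent store: poll until the condition holds or a deadline passes, rather than relying on a single fixed sleep. Below is a minimal, self-contained sketch of the same idea; the helper name wait_until_deleted and its fetch/timeout/interval parameters are illustrative only and are not part of this PR or of Sentry's test utilities.

# Sketch only: a generic "poll until the rows are gone" helper, assuming the caller
# passes a zero-argument fetch function that returns None once deletion is visible.
from time import monotonic, sleep
from typing import Callable, Optional, TypeVar

T = TypeVar("T")


def wait_until_deleted(
    fetch: Callable[[], Optional[T]],
    timeout: float = 5.0,
    interval: float = 0.1,
) -> bool:
    """Return True once fetch() yields None, or False if the timeout elapses first."""
    deadline = monotonic() + timeout
    while True:
        if fetch() is None:
            return True
        if monotonic() >= deadline:
            return False
        sleep(interval)


# Hypothetical usage mirroring the test above:
#     assert wait_until_deleted(lambda: self.select_issue_platform_events(self.project.id))

Compared with the explicit attempt counter in the test, a deadline based on monotonic() keeps the total wait bounded even when each query itself is slow.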