91 changes: 36 additions & 55 deletions tests/sentry/deletions/test_group.py
@@ -1,16 +1,12 @@
import os
import random
from datetime import datetime, timedelta
from time import time
from typing import Any
from unittest import mock
from uuid import uuid4

from snuba_sdk import Column, Condition, Entity, Function, Op, Query, Request

from sentry import deletions, nodestore
from sentry.deletions.tasks.groups import delete_groups_for_project
from sentry.issues.grouptype import GroupCategory
from sentry.issues.grouptype import FeedbackGroup, GroupCategory
from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.models.eventattachment import EventAttachment
from sentry.models.group import Group
@@ -22,11 +18,8 @@
from sentry.models.groupredirect import GroupRedirect
from sentry.models.userreport import UserReport
from sentry.services.eventstore.models import Event
from sentry.snuba.dataset import Dataset, EntityKey
from sentry.snuba.referrer import Referrer
from sentry.testutils.cases import SnubaTestCase, TestCase
from sentry.testutils.helpers.datetime import before_now
from sentry.utils.snuba import bulk_snuba_queries
from tests.sentry.issues.test_utils import OccurrenceTestMixin


@@ -333,8 +326,6 @@ def test_delete_grouphashes_and_metadata(self) -> None:


class DeleteIssuePlatformTest(TestCase, SnubaTestCase, OccurrenceTestMixin):
referrer = Referrer.TESTING_TEST.value

def create_occurrence(self, event: Event, type_id: int) -> tuple[IssueOccurrence, Group]:
occurrence, group_info = self.process_occurrence(
project_id=self.project.id,
@@ -346,42 +337,33 @@ def create_occurrence(self, event: Event, type_id: int) -> tuple[IssueOccurrence
assert group_info is not None
return occurrence, group_info.group

def select_error_events(self, project_id: int) -> object:
columns = ["event_id", "group_id"]
return self.select_rows(Entity(EntityKey.Events.value), columns, project_id)

def select_issue_platform_events(self, project_id: int) -> object:
columns = ["event_id", "group_id", "occurrence_id"]
return self.select_rows(Entity(EntityKey.IssuePlatform.value), columns, project_id)

def select_rows(
self, entity: Entity, columns: list[str], project_id: int
) -> None | dict[str, object]:
# Adding the random microseconds is to circumvent Snuba's caching mechanism
now = datetime.now()
start_time = now - timedelta(days=1, microseconds=random.randint(0, 100000000))
end_time = now + timedelta(days=1, microseconds=random.randint(0, 100000000))

select = [Column(column) for column in columns]
where = [
Condition(Column("project_id"), Op.IN, Function("tuple", [project_id])),
Condition(Column("timestamp"), Op.GTE, start_time),
Condition(Column("timestamp"), Op.LT, end_time),
]
query = Query(match=entity, select=select, where=where)
request = Request(
# Using IssuePlatform dataset for occurrence queries
dataset=Dataset.IssuePlatform.value,
app_id=self.referrer,
query=query,
tenant_ids=self.tenant_ids,
def test_simple_issue_platform(self) -> None:
# Create initial error event and occurrence related to it; two different groups will exist
event = self.store_event(data={}, project_id=self.project.id)
occurrence_event, issue_platform_group = self.create_occurrence(
event, type_id=FeedbackGroup.type_id
)
results = bulk_snuba_queries([request])[0]["data"]
return results[0] if results else None

@property
def tenant_ids(self) -> dict[str, str]:
return {"referrer": self.referrer, "organization_id": self.organization.id}
# Assertions after creation
assert occurrence_event.id != event.event_id
assert event.group_id != issue_platform_group.id
assert event.group.issue_category == GroupCategory.ERROR
assert issue_platform_group.issue_category == GroupCategory.FEEDBACK
assert issue_platform_group.type == FeedbackGroup.type_id

# Delete the issue platform group
with self.tasks():
delete_groups_for_project(
object_ids=[issue_platform_group.id],
transaction_id=uuid4().hex,
project_id=self.project.id,
)

# The Issue Platform group has been deleted from Postgres
assert not Group.objects.filter(id=issue_platform_group.id).exists()

# The original error event and group still exist
assert Group.objects.filter(id=event.group_id).exists()

@mock.patch("sentry.deletions.tasks.nodestore.bulk_snuba_queries")
def test_issue_platform_batching(self, mock_bulk_snuba_queries: mock.Mock) -> None:
@@ -396,19 +378,18 @@ def test_issue_platform_batching(self, mock_bulk_snuba_queries: mock.Mock) -> No
group3 = self.create_group(project=self.project)
group4 = self.create_group(project=self.project)

# Set times_seen for each group
Group.objects.filter(id=group1.id).update(times_seen=3, type=GroupCategory.FEEDBACK)
Group.objects.filter(id=group2.id).update(times_seen=1, type=GroupCategory.FEEDBACK)
Group.objects.filter(id=group3.id).update(times_seen=3, type=GroupCategory.FEEDBACK)
Group.objects.filter(id=group4.id).update(times_seen=3, type=GroupCategory.FEEDBACK)
# Set times_seen and type for each group
Group.objects.filter(id=group1.id).update(times_seen=3, type=FeedbackGroup.type_id)
Group.objects.filter(id=group2.id).update(times_seen=1, type=FeedbackGroup.type_id)
Group.objects.filter(id=group3.id).update(times_seen=3, type=FeedbackGroup.type_id)
Group.objects.filter(id=group4.id).update(times_seen=3, type=FeedbackGroup.type_id)

# This will delete the group and the events from the node store and Snuba
with self.tasks():
delete_groups_for_project(
object_ids=[group1.id, group2.id, group3.id, group4.id],
transaction_id=uuid4().hex,
project_id=self.project.id,
)

assert mock_bulk_snuba_queries.call_count == 1
# There should be two batches with max_rows_to_delete=6
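For context on that last assertion: the four groups carry times_seen values of 3, 1, 3, and 3, so a greedy packer with max_rows_to_delete=6 would split them into a batch of groups 1-2 (4 rows) and a batch of groups 3-4 (6 rows). Below is a minimal sketch of that packing, assuming a hypothetical helper batch_by_times_seen; the actual batching logic lives in Sentry's deletion tasks and may differ in detail.

def batch_by_times_seen(groups: list[tuple[int, int]], max_rows: int) -> list[list[int]]:
    # Greedily pack (group_id, times_seen) pairs into batches whose
    # times_seen totals stay within max_rows. Hypothetical illustration only;
    # not the real Sentry implementation.
    batches: list[list[int]] = []
    current: list[int] = []
    current_rows = 0
    for group_id, times_seen in groups:
        if current and current_rows + times_seen > max_rows:
            batches.append(current)
            current, current_rows = [], 0
        current.append(group_id)
        current_rows += times_seen
    if current:
        batches.append(current)
    return batches

# times_seen of 3, 1, 3, 3 with max_rows=6 yields two batches, [1, 2] and [3, 4],
# consistent with the single bulk_snuba_queries call carrying two batched queries.
assert batch_by_times_seen([(1, 3), (2, 1), (3, 3), (4, 3)], max_rows=6) == [[1, 2], [3, 4]]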