fix(deletions): Prevent DB call from deleting issue #95915

Merged — 2 commits, Jul 21, 2025
6 changes: 3 additions & 3 deletions src/sentry/api/helpers/group_index/delete.py
@@ -73,6 +73,9 @@ def delete_group_list(
},
)

# Tell seer to delete grouping records for these groups
may_schedule_task_to_delete_hashes_from_seer(error_ids)
Member Author:

If this call raises an exception (as it used to do), the exception is caught by the endpoint, which returns a 500.

I moved this call up here because I don't want the groups' status to be changed if the call fails.

I have upcoming changes to make this function clearer and more robust.
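A minimal sketch of the ordering this change establishes in `delete_group_list` (a simplification for illustration: only `may_schedule_task_to_delete_hashes_from_seer`, `Group`, and `GroupStatus` come from the diff; the signature and the `error_ids` argument shown here are assumptions, not the real implementation):

```python
# Sketch only — assumed, simplified shape of the reordered helper.
# (Group, GroupStatus and the Seer helper are the names already used in delete.py;
# the argument list below is an illustrative placeholder.)
def delete_group_list(request, project, group_list, delete_type, error_ids=()):
    group_ids = [group.id for group in group_list]

    # 1) Tell Seer first. If this raises (e.g. the underlying hash query is
    #    cancelled and surfaces an OperationalError), nothing below runs and
    #    the groups keep their current status.
    may_schedule_task_to_delete_hashes_from_seer(error_ids)

    # 2) Only after the Seer call succeeds are the groups marked for deletion.
    Group.objects.filter(id__in=group_ids).exclude(
        status__in=[GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS]
    ).update(status=GroupStatus.PENDING_DELETION, substatus=None)
```

With this ordering, a Seer failure propagates up to the endpoint's new `try/except`, which logs it and returns a 500 instead of leaving groups stuck in `PENDING_DELETION`.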


Group.objects.filter(id__in=group_ids).exclude(
status__in=[GroupStatus.PENDING_DELETION, GroupStatus.DELETION_IN_PROGRESS]
).update(status=GroupStatus.PENDING_DELETION, substatus=None)
@@ -84,9 +87,6 @@ def delete_group_list(
# fails, we will still have a record of who requested the deletion.
create_audit_entries(request, project, group_list, delete_type, transaction_id)

# Tell seer to delete grouping records for these groups
may_schedule_task_to_delete_hashes_from_seer(error_ids)

# Removing GroupHash rows prevents new events from associating to the groups
# we just deleted.
GroupHash.objects.filter(project_id=project.id, group__id__in=group_ids).delete()
9 changes: 8 additions & 1 deletion src/sentry/issues/endpoints/organization_group_index.py
@@ -1,4 +1,5 @@
import functools
import logging
from collections.abc import Mapping, Sequence
from datetime import datetime, timedelta
from typing import Any
@@ -67,6 +68,8 @@
ERR_INVALID_STATS_PERIOD = "Invalid stats_period. Valid choices are '', '24h', '14d' and 'auto'"
allowed_inbox_search_terms = frozenset(["date", "status", "for_review", "assigned_or_suggested"])

logger = logging.getLogger(__name__)


def inbox_search(
projects: Sequence[Project],
@@ -501,4 +504,8 @@ def delete(self, request: Request, organization: Organization) -> Response:
self.get_environments(request, organization),
)

return delete_groups(request, projects, organization.id, search_fn)
try:
return delete_groups(request, projects, organization.id, search_fn)
except Exception:
logger.exception("Error deleting groups")
return Response({"detail": "Error deleting groups"}, status=500)
8 changes: 7 additions & 1 deletion src/sentry/issues/endpoints/project_group_index.py
@@ -1,4 +1,5 @@
import functools
import logging

from rest_framework.request import Request
from rest_framework.response import Response
@@ -30,6 +31,7 @@

ERR_INVALID_STATS_PERIOD = "Invalid stats_period. Valid choices are '', '24h', and '14d'"
ERR_HASHES_AND_OTHER_QUERY = "Cannot use 'hashes' with 'query'"
logger = logging.getLogger(__name__)


@region_silo_endpoint
@@ -299,4 +301,8 @@ def delete(self, request: Request, project: Project) -> Response:
:auth: required
"""
search_fn = functools.partial(prep_search, request, project)
return delete_groups(request, [project], project.organization_id, search_fn)
try:
return delete_groups(request, [project], project.organization_id, search_fn)
except Exception:
logger.exception("Error deleting groups")
return Response({"detail": "Error deleting groups"}, status=500)
25 changes: 24 additions & 1 deletion tests/sentry/issues/endpoints/test_organization_group_index.py
@@ -7,7 +7,7 @@
from unittest.mock import MagicMock, Mock, call, patch
from uuid import uuid4

from django.db import OperationalError
from django.db.utils import OperationalError
from django.urls import reverse
from django.utils import timezone

@@ -4455,3 +4455,26 @@ def test_bulk_delete_performance_issues(self) -> None:
assert response.status_code == 204

self.assert_deleted_groups(groups)

@patch("sentry.api.helpers.group_index.delete.may_schedule_task_to_delete_hashes_from_seer")
def test_do_not_mark_as_pending_deletion_if_seer_fails(self, mock_seer_delete: Mock):
"""
Test that the issue is not marked as pending deletion if the seer call fails.
"""
# When trying to gather the hashes, the query could be cancelled by the user
mock_seer_delete.side_effect = OperationalError(
"QueryCanceled('canceling statement due to user request\n')"
)
event = self.store_event(data={}, project_id=self.project.id)
group1 = Group.objects.get(id=event.group_id)
assert GroupHash.objects.filter(group=group1).exists()

self.login_as(user=self.user)
with self.tasks():
response = self.get_response(qs_params={"id": [group1.id]})
assert response.status_code == 500
assert response.data["detail"] == "Error deleting groups"

# The group has not been marked as pending deletion
assert Group.objects.get(id=group1.id).status == group1.status
assert GroupHash.objects.filter(group=group1).exists()
27 changes: 15 additions & 12 deletions tests/snuba/api/endpoints/test_project_group_index.py
@@ -9,6 +9,7 @@
from uuid import uuid4

from django.conf import settings
from django.db.utils import OperationalError
from django.utils import timezone

from sentry.integrations.models.external_issue import ExternalIssue
@@ -1637,26 +1638,28 @@ def test_bulk_delete_performance_issues(self):
self.assert_groups_are_gone(groups)

@patch("sentry.api.helpers.group_index.delete.may_schedule_task_to_delete_hashes_from_seer")
@patch("sentry.utils.audit.log_service.record_audit_log")
def test_audit_log_even_if_exception_raised(
Member Author:

The previous test no longer applies.

self, mock_record_audit_log: Mock, mock_seer_delete: Mock
):
def test_do_not_mark_as_pending_deletion_if_seer_fails(self, mock_seer_delete: Mock):
"""
Test that audit log is created even if an exception is raised after the audit log is created.
Test that the issue is not marked as pending deletion if the seer call fails.
"""
# Calling seer happens after creating the audit log entry
mock_seer_delete.side_effect = Exception("Seer error!")
group1 = self.create_group()
# When trying to gather the hashes, the query could be cancelled by the user
mock_seer_delete.side_effect = OperationalError(
"QueryCanceled('canceling statement due to user request\n')"
)
event = self.store_event(data={}, project_id=self.project.id)
group1 = Group.objects.get(id=event.group_id)
assert GroupHash.objects.filter(group=group1).exists()

self.login_as(user=self.user)
url = f"{self.path}?id={group1.id}"
with self.tasks():
response = self.client.delete(url, format="json")
assert response.status_code == 500
assert response.data["detail"] == "Error deleting groups"

self.assert_audit_log_entry([group1], mock_record_audit_log)

# They have been marked as pending deletion but the exception prevented their complete deletion
assert Group.objects.get(id=group1.id).status == GroupStatus.PENDING_DELETION
# The group has not been marked as pending deletion
assert Group.objects.get(id=group1.id).status == group1.status
assert GroupHash.objects.filter(group=group1).exists()

def test_new_event_for_pending_deletion_group_creates_new_group(self):
"""Test that after deleting a group, new events with the same fingerprint create a new group."""