Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
90 changes: 85 additions & 5 deletions src/sentry/seer/explorer/tools.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import logging
from datetime import datetime, timedelta, timezone
from datetime import UTC, datetime, timedelta, timezone
from typing import Any, Literal

from sentry import eventstore
Expand All @@ -23,6 +23,7 @@
from sentry.snuba.referrer import Referrer
from sentry.snuba.spans_rpc import Spans
from sentry.snuba.trace import query_trace_data
from sentry.utils.dates import parse_stats_period

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -322,6 +323,69 @@ def get_repository_definition(*, organization_id: int, repo_full_name: str) -> d
}


# Tuples of (total period, interval) (both in sentry stats period format).
# NOTE: entries must stay sorted ascending by total period — the selection
# logic in _get_issue_event_timeseries picks the FIRST entry whose period
# covers the issue's age, and falls back to the coarsest (last) resolution.
# Each pairing targets a small, human-readable bucket count (noted inline).
EVENT_TIMESERIES_RESOLUTIONS = (
    ("6h", "15m"), # 24 buckets
    ("24h", "1h"), # 24 buckets
    ("3d", "3h"), # 24 buckets
    ("7d", "6h"), # 28 buckets
    ("14d", "12h"), # 28 buckets
    ("30d", "24h"), # 30 buckets
    ("90d", "3d"), # 30 buckets
)


def _get_issue_event_timeseries(
    *,
    organization: Organization,
    project_id: int,
    issue_short_id: str,
    first_seen_delta: timedelta,
) -> tuple[dict[str, Any], str, str] | None:
    """
    Get event counts over time for an issue by calling the events-stats endpoint.

    Chooses the smallest (statsPeriod, interval) pair from
    EVENT_TIMESERIES_RESOLUTIONS whose total period covers ``first_seen_delta``
    (the issue's age). Issues older than every configured period fall back to
    the coarsest (last) entry.

    Returns a tuple of (timeseries payload, stats period, interval), or None
    when the endpoint returns a non-200 status or no data points.
    """
    # Pick the first (smallest) resolution that spans the issue's lifetime.
    stats_period, interval = None, None
    for p, i in EVENT_TIMESERIES_RESOLUTIONS:
        delta = parse_stats_period(p)
        if delta and first_seen_delta <= delta:
            stats_period, interval = p, i
            break
    if stats_period is None or interval is None:
        # Issue is older than the largest configured period: use the coarsest
        # resolution. Read it from the table (rather than hard-coding "90d" /
        # "3d") so the fallback cannot drift if the table changes.
        stats_period, interval = EVENT_TIMESERIES_RESOLUTIONS[-1]

    params: dict[str, Any] = {
        "dataset": "issuePlatform",
        "query": f"issue:{issue_short_id}",
        "yAxis": "count()",
        "partial": "1",
        "statsPeriod": stats_period,
        "interval": interval,
        "project": project_id,
        "referrer": Referrer.SEER_RPC,
    }

    # Internal API call, authorized with a read-only org-scoped key.
    resp = client.get(
        auth=ApiKey(organization_id=organization.id, scope_list=["org:read", "project:read"]),
        user=None,
        path=f"/organizations/{organization.slug}/events-stats/",
        params=params,
    )
    if resp.status_code != 200 or not (resp.data or {}).get("data"):
        logger.warning(
            "Failed to get event counts for issue",
            extra={
                "organization_slug": organization.slug,
                "project_id": project_id,
                "issue_id": issue_short_id,
            },
        )
        return None

    return {"count()": {"data": resp.data["data"]}}, stats_period, interval


def get_issue_details(
*,
issue_id: str,
Expand Down Expand Up @@ -354,12 +418,12 @@ def get_issue_details(
)
return None

org_project_ids = Project.objects.filter(
organization=organization, status=ObjectStatus.ACTIVE
).values_list("id", flat=True)

try:
if issue_id.isdigit():
org_project_ids = Project.objects.filter(
organization=organization, status=ObjectStatus.ACTIVE
).values_list("id", flat=True)

group = Group.objects.get(project_id__in=org_project_ids, id=int(issue_id))
else:
group = Group.objects.by_qualified_short_id(organization_id, issue_id)
Expand Down Expand Up @@ -415,8 +479,24 @@ def get_issue_details(
)
tags_overview = None

ts_result = _get_issue_event_timeseries(
organization=organization,
project_id=group.project_id,
issue_short_id=group.qualified_short_id,
first_seen_delta=datetime.now(UTC) - group.first_seen,
)
if ts_result:
timeseries, stats_period, interval = ts_result
else:
timeseries = None
stats_period = None
interval = None

return {
"issue": serialized_group,
"event_timeseries": timeseries,
"timeseries_stats_period": stats_period,
"timeseries_interval": interval,
"tags_overview": tags_overview,
"event": serialized_event,
"event_id": event.event_id,
Expand Down
80 changes: 79 additions & 1 deletion tests/sentry/seer/explorer/test_tools.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,18 @@
import uuid
from datetime import datetime, timedelta
from datetime import UTC, datetime, timedelta
from typing import Literal
from unittest.mock import patch

import pytest
from pydantic import BaseModel

from sentry.api import client
from sentry.constants import ObjectStatus
from sentry.models.group import Group
from sentry.models.groupassignee import GroupAssignee
from sentry.models.repository import Repository
from sentry.seer.explorer.tools import (
EVENT_TIMESERIES_RESOLUTIONS,
execute_trace_query_chart,
execute_trace_query_table,
get_issue_details,
Expand All @@ -20,6 +22,7 @@
from sentry.seer.sentry_data_models import EAPTrace
from sentry.testutils.cases import APITransactionTestCase, SnubaTestCase, SpanTestCase
from sentry.testutils.helpers.datetime import before_now
from sentry.utils.dates import parse_stats_period
from sentry.utils.samples import load_data
from tests.sentry.issues.test_utils import OccurrenceTestMixin

Expand Down Expand Up @@ -625,6 +628,20 @@ class _SentryEventData(BaseModel):

class TestGetIssueDetails(APITransactionTestCase, SnubaTestCase, OccurrenceTestMixin):

def _validate_event_timeseries(self, timeseries: dict):
assert isinstance(timeseries, dict)
assert "count()" in timeseries
assert "data" in timeseries["count()"]
assert isinstance(timeseries["count()"]["data"], list)
for item in timeseries["count()"]["data"]:
assert len(item) == 2
assert isinstance(item[0], int)
assert isinstance(item[1], list)
assert len(item[1]) == 1
assert isinstance(item[1][0], dict)
assert "count" in item[1][0]
assert isinstance(item[1][0]["count"], int)

@patch("sentry.models.group.get_recommended_event")
@patch("sentry.seer.explorer.tools.get_all_tags_overview")
def _test_get_issue_details_success(
Expand Down Expand Up @@ -711,6 +728,9 @@ def _test_get_issue_details_success(
else:
assert result["event_trace_id"] is None

# Validate timeseries dict structure.
self._validate_event_timeseries(result["event_timeseries"])

def test_get_issue_details_success_int_id(self):
self._test_get_issue_details_success(use_short_id=False)

Expand Down Expand Up @@ -855,6 +875,64 @@ def test_get_issue_details_with_assigned_team(self, mock_get_tags, mock_get_reco
assert md.assignedTo.name == self.team.slug
assert md.assignedTo.email is None

    @patch("sentry.seer.explorer.tools.client")
    @patch("sentry.models.group.get_recommended_event")
    @patch("sentry.seer.explorer.tools.get_all_tags_overview")
    def test_get_issue_details_timeseries_resolution(
        self,
        mock_get_tags,
        mock_get_recommended_event,
        mock_api_client,
    ):
        """
        Test that groups with different first_seen ages select the expected
        (statsPeriod, interval) resolution when fetching the event timeseries.

        Iterates every entry of EVENT_TIMESERIES_RESOLUTIONS (skipping those
        beyond the 30d test-fixture retention), creating a fresh group aged to
        just inside each period, and asserts both the params passed to the
        events-stats API and the fields in the returned payload.
        """
        mock_get_tags.return_value = {"tags_overview": [{"key": "test_tag", "top_values": []}]}
        # Passthrough to real client - allows testing call args
        mock_api_client.get.side_effect = client.get

        for stats_period, interval in EVENT_TIMESERIES_RESOLUTIONS:
            delta = parse_stats_period(stats_period)
            assert delta is not None
            if delta > timedelta(days=30):
                # Skip the 90d test as the retention for testutils is 30d.
                continue

            # Set a first_seen date slightly newer than the stats period we're testing.
            first_seen = datetime.now(UTC) - delta + timedelta(minutes=6, seconds=7)
            data = load_data("python", timestamp=first_seen)
            data["exception"] = {"values": [{"type": "Exception", "value": "Test exception"}]}
            event = self.store_event(data=data, project_id=self.project.id)
            mock_get_recommended_event.return_value = event

            # Second newer event
            data = load_data("python", timestamp=first_seen + timedelta(minutes=6, seconds=7))
            data["exception"] = {"values": [{"type": "Exception", "value": "Test exception"}]}
            self.store_event(data=data, project_id=self.project.id)

            group = event.group
            assert isinstance(group, Group)
            # first_seen drives the resolution choice — it must match exactly.
            assert group.first_seen == first_seen

            result = get_issue_details(
                issue_id=str(group.id),
                organization_id=self.organization.id,
                selected_event="recommended",
            )

            # Assert expected stats params were passed to the API.
            _, kwargs = mock_api_client.get.call_args
            assert kwargs["path"] == f"/organizations/{self.organization.slug}/events-stats/"
            assert kwargs["params"]["statsPeriod"] == stats_period
            assert kwargs["params"]["interval"] == interval

            # Validate final results.
            assert result is not None
            self._validate_event_timeseries(result["event_timeseries"])
            assert result["timeseries_stats_period"] == stats_period
            assert result["timeseries_interval"] == interval

            # Ensure next iteration makes a fresh group.
            group.delete()


@pytest.mark.django_db(databases=["default", "control"])
class TestGetRepositoryDefinition(APITransactionTestCase):
Expand Down
Loading