feat(releases): Add organization issue-metrics endpoint #86626

Open · wants to merge 25 commits into base: master
Changes from 3 commits
137 changes: 137 additions & 0 deletions src/sentry/api/endpoints/organization_issue_breakdown.py
@@ -0,0 +1,137 @@
from datetime import datetime, timedelta
from typing import TypedDict

from django.db.models import Count, F, Q
from django.db.models.functions import TruncDay
from rest_framework.request import Request
from rest_framework.response import Response

from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import EnvironmentMixin, region_silo_endpoint
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.helpers.environments import get_environments
from sentry.api.utils import get_date_range_from_params
from sentry.models.group import Group, GroupCategory, GroupStatus
from sentry.models.organization import Organization
from sentry.models.project import Project

CATEGORY_MAP = {
"error": GroupCategory.ERROR,
"feedback": GroupCategory.FEEDBACK,
}


@region_silo_endpoint
class OrganizationIssueBreakdownEndpoint(OrganizationEndpoint, EnvironmentMixin):
owner = ApiOwner.REPLAY
publish_status = {"GET": ApiPublishStatus.PRIVATE}

def get(self, request: Request, organization: Organization) -> Response:
"""Stats bucketed by time."""
start, end = get_date_range_from_params(request.GET)
end = end.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
start = start.replace(hour=0, minute=0, second=0, microsecond=0)
environments = [e.id for e in get_environments(request, organization)]
projects = self.get_projects(request, organization)
issue_category = CATEGORY_MAP.get(request.GET.get("category", "error"), GroupCategory.ERROR)
group_by = request.GET.get("group_by", "new")

if group_by == "new":
response = query_new_issues(projects, environments, issue_category, start, end)
return Response({"data": response}, status=200)
if group_by == "resolved":
response = query_resolved_issues(projects, environments, issue_category, start, end)
return Response({"data": response}, status=200)
if group_by == "release":
response = query_issues_by_release(projects, environments, issue_category, start, end)
return Response({"data": response}, status=200)
else:
return Response("", status=404)


class BreakdownQueryResult(TypedDict):
bucket: str
count: int


def query_new_issues(
projects: list[Project],
environments: list[int],
issue_category: int,
start: datetime,
end: datetime,
) -> list[BreakdownQueryResult]:
# SELECT count(*), day(first_seen) FROM issues GROUP BY day(first_seen)
group_environment_filter = (
Q(groupenvironment__environment_id=environments[0]) if environments else Q()
)
issues_query = (
Group.objects.filter(
group_environment_filter,
first_seen__gte=start,
first_seen__lte=end,
project__in=projects,
type=issue_category,
)
.annotate(bucket=TruncDay("first_seen"))
.order_by("bucket")
.values("bucket")
.annotate(count=Count("id"))
)
return list(issues_query)


def query_resolved_issues(
projects: list[Project],
environments: list[int],
issue_category: int,
start: datetime,
end: datetime,
) -> list[BreakdownQueryResult]:
# SELECT count(*), day(resolved_at) FROM issues WHERE status = resolved GROUP BY day(resolved_at)
group_environment_filter = (
Q(groupenvironment__environment_id=environments[0]) if environments else Q()
)
resolved_issues_query = (
Group.objects.filter(
group_environment_filter,
first_seen__gte=start,
first_seen__lte=end,
project__in=projects,
type=issue_category,
status=GroupStatus.RESOLVED,
)
.annotate(bucket=TruncDay("resolved_at"))
.order_by("bucket")
.values("bucket")
.annotate(count=Count("id"))
)
return list(resolved_issues_query)


def query_issues_by_release(
projects: list[Project],
environments: list[int],
issue_category: int,
start: datetime,
end: datetime,
) -> list[BreakdownQueryResult]:
# SELECT count(*), first_release.version FROM issues JOIN release GROUP BY first_release.version
group_environment_filter = (
Q(groupenvironment__environment_id=environments[0]) if environments else Q()
)
issues_by_release_query = (
Group.objects.filter(
group_environment_filter,
first_seen__gte=start,
first_seen__lte=end,
project__in=projects,
type=issue_category,
)
.annotate(bucket=F("first_release__version"))
.order_by("bucket")
.values("bucket")
.annotate(count=Count("id"))
)
return list(issues_by_release_query)
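
A minimal sketch of the date-window normalization at the top of get(), shown in isolation for reviewers: the end of the requested range is snapped forward to the next midnight and the start back to the previous midnight, so every TruncDay bucket covers a whole day. The concrete dates below are illustrative only.

from datetime import datetime, timedelta, timezone

# Illustrative inputs; in the endpoint these come from get_date_range_from_params.
start = datetime(2025, 3, 10, 14, 25, tzinfo=timezone.utc)
end = datetime(2025, 3, 12, 9, 5, tzinfo=timezone.utc)

# Same normalization as OrganizationIssueBreakdownEndpoint.get():
end = end.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
start = start.replace(hour=0, minute=0, second=0, microsecond=0)

assert start == datetime(2025, 3, 10, tzinfo=timezone.utc)
assert end == datetime(2025, 3, 13, tzinfo=timezone.utc)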
6 changes: 6 additions & 0 deletions src/sentry/api/urls.py
@@ -503,6 +503,7 @@
from .endpoints.organization_events_trends_v2 import OrganizationEventsNewTrendsStatsEndpoint
from .endpoints.organization_events_vitals import OrganizationEventsVitalsEndpoint
from .endpoints.organization_index import OrganizationIndexEndpoint
from .endpoints.organization_issue_breakdown import OrganizationIssueBreakdownEndpoint
from .endpoints.organization_issues_resolved_in_release import (
OrganizationIssuesResolvedInReleaseEndpoint,
)
@@ -1609,6 +1610,11 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
OrganizationGroupIndexStatsEndpoint.as_view(),
name="sentry-api-0-organization-group-index-stats",
),
re_path(
r"^(?P<organization_id_or_slug>[^\/]+)/issues-breakdown/$",
OrganizationIssueBreakdownEndpoint.as_view(),
name="sentry-api-0-organization-issue-breakdown",
),
re_path(
r"^(?P<organization_id_or_slug>[^\/]+)/integrations/$",
OrganizationIntegrationsEndpoint.as_view(),
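
As a rough usage sketch, assuming the pattern is mounted under the usual /api/0/organizations/ prefix (the base URL, org slug, and token below are placeholders, not values from this PR), a client call and the response shape the tests expect would look roughly like:

import requests

SENTRY_URL = "https://sentry.example.com"  # placeholder host
ORG = "my-org"                             # placeholder org slug
TOKEN = "<auth-token>"                     # placeholder token

resp = requests.get(
    f"{SENTRY_URL}/api/0/organizations/{ORG}/issues-breakdown/",
    params={"statsPeriod": "7d", "category": "error", "group_by": "new"},
    headers={"Authorization": f"Bearer {TOKEN}"},
)
# Expected shape per the endpoint and its tests:
# {"data": [{"bucket": "2025-03-10T00:00:00Z", "count": 2}, ...]}
print(resp.json()["data"])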
128 changes: 128 additions & 0 deletions tests/sentry/api/endpoints/test_organization_issue_breakdown.py
@@ -0,0 +1,128 @@
from datetime import datetime, timedelta, timezone

from django.urls import reverse

from sentry.testutils.cases import APITestCase
from sentry.testutils.helpers.datetime import freeze_time
from sentry.utils import json


@freeze_time()
class OrganizationIssueBreakdownTest(APITestCase):
endpoint = "sentry-api-0-organization-issue-breakdown"

def setUp(self):
super().setUp()
self.login_as(user=self.user)
self.url = reverse(self.endpoint, args=(self.organization.slug,))

def test_new_issues(self):
project1 = self.create_project(teams=[self.team], slug="foo")
project2 = self.create_project(teams=[self.team], slug="bar")

today = datetime.now(tz=timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
tomorrow = today + timedelta(days=1)
self.create_group(project=project1, status=0, first_seen=today, type=1)
self.create_group(project=project1, status=1, first_seen=today, type=1)
self.create_group(project=project2, status=1, first_seen=tomorrow, type=1)
self.create_group(project=project2, status=2, first_seen=tomorrow, type=1)
self.create_group(project=project2, status=2, first_seen=tomorrow, type=6)

response = self.client.get(self.url + "?statsPeriod=7d&category=error&group_by=new")
assert json.loads(response.content) == {
"data": [
{"bucket": today.isoformat().replace("+00:00", "Z"), "count": 2},
{"bucket": tomorrow.isoformat().replace("+00:00", "Z"), "count": 2},
]
}

def test_resolved_issues(self):
project1 = self.create_project(teams=[self.team], slug="foo")
project2 = self.create_project(teams=[self.team], slug="bar")

today = datetime.now(tz=timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
tomorrow = today + timedelta(days=1)
self.create_group(project=project1, status=0, resolved_at=today, type=1)
self.create_group(project=project1, status=1, resolved_at=today, type=1)
self.create_group(project=project2, status=1, resolved_at=tomorrow, type=1)
self.create_group(project=project2, status=1, resolved_at=tomorrow, type=6)
self.create_group(project=project2, status=2, resolved_at=tomorrow, type=1)

response = self.client.get(self.url + "?statsPeriod=7d&category=error&group_by=resolved")
assert json.loads(response.content) == {
"data": [
{"bucket": today.isoformat().replace("+00:00", "Z"), "count": 1},
{"bucket": tomorrow.isoformat().replace("+00:00", "Z"), "count": 1},
]
}

def test_issues_by_release(self):
project1 = self.create_project(teams=[self.team], slug="foo")
project2 = self.create_project(teams=[self.team], slug="bar")
release_one = self.create_release(project1, version="1.0.0")
release_two = self.create_release(project2, version="1.2.0")
self.create_group(project=project1, status=0, first_release=release_one, type=1)
self.create_group(project=project1, status=1, first_release=release_one, type=1)
self.create_group(project=project2, status=1, first_release=release_two, type=1)
self.create_group(project=project2, status=2, first_release=release_two, type=1)
self.create_group(project=project2, status=2, first_release=release_two, type=6)

response = self.client.get(self.url + "?statsPeriod=7d&category=error&group_by=release")
assert json.loads(response.content) == {
"data": [
{"bucket": "1.0.0", "count": 2},
{"bucket": "1.2.0", "count": 2},
]
}

def test_issues_invalid_group_by(self):
response = self.client.get(self.url + "?statsPeriod=7d&category=error&group_by=test")
assert response.status_code == 404

def test_new_feedback(self):
project1 = self.create_project(teams=[self.team], slug="foo")
project2 = self.create_project(teams=[self.team], slug="bar")

today = datetime.now(tz=timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
tomorrow = today + timedelta(days=1)
self.create_group(project=project1, status=0, first_seen=today, type=1)
self.create_group(project=project1, status=1, first_seen=today, type=1)
self.create_group(project=project2, status=1, first_seen=tomorrow, type=1)
self.create_group(project=project2, status=2, first_seen=tomorrow, type=1)
self.create_group(project=project2, status=2, first_seen=tomorrow, type=6)

response = self.client.get(self.url + "?statsPeriod=7d&category=feedback&group_by=new")
assert json.loads(response.content) == {
"data": [{"bucket": tomorrow.isoformat().replace("+00:00", "Z"), "count": 1}]
}

def test_resolved_feedback(self):
project1 = self.create_project(teams=[self.team], slug="foo")
project2 = self.create_project(teams=[self.team], slug="bar")

today = datetime.now(tz=timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
tomorrow = today + timedelta(days=1)
self.create_group(project=project1, status=0, resolved_at=today, type=1)
self.create_group(project=project1, status=1, resolved_at=today, type=1)
self.create_group(project=project2, status=1, resolved_at=tomorrow, type=1)
self.create_group(project=project2, status=1, resolved_at=tomorrow, type=6)
self.create_group(project=project2, status=2, resolved_at=tomorrow, type=1)

response = self.client.get(self.url + "?statsPeriod=7d&category=feedback&group_by=resolved")
assert json.loads(response.content) == {
"data": [{"bucket": tomorrow.isoformat().replace("+00:00", "Z"), "count": 1}]
}

def test_feedback_by_release(self):
project1 = self.create_project(teams=[self.team], slug="foo")
project2 = self.create_project(teams=[self.team], slug="bar")
release_one = self.create_release(project1, version="1.0.0")
release_two = self.create_release(project2, version="1.2.0")
self.create_group(project=project1, status=0, first_release=release_one, type=1)
self.create_group(project=project1, status=1, first_release=release_one, type=1)
self.create_group(project=project2, status=1, first_release=release_two, type=1)
self.create_group(project=project2, status=2, first_release=release_two, type=1)
self.create_group(project=project2, status=2, first_release=release_two, type=6)

response = self.client.get(self.url + "?statsPeriod=7d&category=feedback&group_by=release")
assert json.loads(response.content) == {"data": [{"bucket": "1.2.0", "count": 1}]}
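
One detail the assertions above rely on: the ORM returns each bucket as a timezone-aware datetime, and DRF's default JSON encoder renders it as ISO 8601 with the UTC offset normalized to a trailing "Z", which is why the expected values are built with the same replace. A tiny sketch with an illustrative date:

from datetime import datetime, timezone

today = datetime(2025, 3, 10, tzinfo=timezone.utc)
# isoformat() renders UTC as "+00:00"; the response body carries "Z" instead.
assert today.isoformat().replace("+00:00", "Z") == "2025-03-10T00:00:00Z"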