Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(releases): Add organization issue-metrics endpoint #86626

Merged
merged 28 commits into from
Mar 19, 2025
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
a471c89
Temp
cmanallen Mar 6, 2025
7c80341
Merge branch 'master' into cmanallen/releases-issue-counts
cmanallen Mar 6, 2025
26828ea
Add organization issue breakdown endpoint
cmanallen Mar 7, 2025
4a11495
Merge branch 'master' into cmanallen/releases-issue-counts
cmanallen Mar 10, 2025
b90a21e
Port issues by time to event-stats format
cmanallen Mar 11, 2025
a5251d7
Port releases to new format
cmanallen Mar 11, 2025
ee14cf8
Fix typing
cmanallen Mar 11, 2025
285d695
Update feedback coverage
cmanallen Mar 11, 2025
66d3369
Return everything but feedbacks
cmanallen Mar 11, 2025
84ad555
Rename to issue-metrics
cmanallen Mar 11, 2025
2a6d1b4
Update codeowners
cmanallen Mar 11, 2025
be20dda
Fix code location
cmanallen Mar 11, 2025
c8f27ac
Update codeowners
cmanallen Mar 11, 2025
76d487f
Update response output
cmanallen Mar 12, 2025
fafe313
Add test coverage
cmanallen Mar 12, 2025
d40e9a9
Update truncation logic to be 1 hour
cmanallen Mar 12, 2025
e95af27
Naming
cmanallen Mar 12, 2025
c9b87e3
First pass on new design
cmanallen Mar 13, 2025
72bd80d
Allow arbitrary intervals and cleanup old code
cmanallen Mar 14, 2025
659f015
Update schema
cmanallen Mar 14, 2025
53278bb
Fix typing
cmanallen Mar 14, 2025
2a0f5d2
Add coverage for interval parsing
cmanallen Mar 14, 2025
0f00ffc
Fix names
cmanallen Mar 14, 2025
55feabd
Merge branch 'master' into cmanallen/releases-issue-counts
cmanallen Mar 17, 2025
519b0d1
Group by other
cmanallen Mar 18, 2025
1d9e108
Return largest 5 groups
cmanallen Mar 19, 2025
3d38a87
Fix typing
cmanallen Mar 19, 2025
ec7382c
Merge branch 'master' into cmanallen/releases-issue-counts
cmanallen Mar 19, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 27 additions & 1 deletion src/sentry/issues/endpoints/organization_issue_metrics.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import collections
from collections.abc import Iterator
from datetime import datetime, timedelta
from heapq import nlargest
from itertools import chain
from typing import TypedDict

Expand Down Expand Up @@ -84,12 +85,37 @@ def gen_ts(
end,
)

# Accumulate per-group totals and per-group time series from the query rows.
# Rows sharing the same group-by values are folded into one keyed series.
grouped_counter: collections.defaultdict[str, int] = collections.defaultdict(int)
grouped_series: dict[str, list[TimeSeries]] = collections.defaultdict(list)
for row in qs:
# Composite key from the group-by columns; "||||" is an unlikely-to-collide separator.
grouping = [row[g] for g in group_by]
key = "||||".join(grouping)
grouped_counter[key] += row["value"]
grouped_series[key].append({"timestamp": row["timestamp"], "value": row["value"]})

# Group the smallest series into the "other" bucket.
if len(grouped_series) > 4:
# NOTE(review): key=lambda i: i[0] ranks the (key, count) items by the group
# *name* (lexicographically), not by the accumulated count. The stated intent
# ("group the smallest series into other") suggests key=lambda i: i[1] — the
# current code keeps the 4 alphabetically-last groups, not the 4 largest.
# The PR's test only passes because every group has an equal count. Confirm.
keys = [v[0] for v in nlargest(4, grouped_counter.items(), key=lambda i: i[0])]

# Keep the selected groups as-is; sum everything else into a single
# timestamp -> value map for the synthetic "other" series.
new_grouped_series: dict[str, list[TimeSeries]] = {}
other_series = collections.defaultdict(int)
for key, series in grouped_series.items():
if key in keys:
new_grouped_series[key] = series
else:
for s in series:
other_series[s["timestamp"]] += s["value"]

# Emit the merged bucket in timestamp order so it lines up with the other series.
# NOTE(review): downstream meta reports isOther=False for this series (see the
# test's expected payload) — presumably the "other" bucket should set isOther=True.
if other_series:
new_grouped_series["other"] = list(
map(
lambda i: {"timestamp": i[0], "value": i[1]},
sorted(list(other_series.items()), key=lambda i: i[0]),
)
)
else:
# 4 or fewer groups (or none): return the grouped series unmodified.
new_grouped_series = grouped_series

return [
make_timeseries_result(
axis=axis,
Expand All @@ -100,7 +126,7 @@ def gen_ts(
order=i,
values=series,
)
for i, (key, series) in enumerate(grouped_series.items())
for i, (key, series) in enumerate(new_grouped_series.items())
]

return Response(
Expand Down
118 changes: 118 additions & 0 deletions tests/sentry/issues/endpoints/test_organization_issue_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,3 +209,121 @@ def test_get_zero_interval(self):
response = self.client.get(self.url + "?interval=0")
assert response.status_code == 400
assert response.json() == {"detail": "Interval must be greater than 1000 milliseconds."}

def test_other_grouping(self):
    """Only four release groups are returned individually; the remaining
    releases are folded into a single trailing "other" series whose values
    are the sum of the folded groups."""
    first_project = self.create_project(teams=[self.team], slug="foo")
    second_project = self.create_project(teams=[self.team], slug="bar")
    releases = [
        self.create_release(first_project, version="1.0.0"),
        self.create_release(second_project, version="1.1.0"),
        self.create_release(second_project, version="1.2.0"),
        self.create_release(second_project, version="1.3.0"),
        self.create_release(second_project, version="1.4.0"),
        self.create_release(second_project, version="1.5.0"),
    ]

    curr = datetime.now(tz=timezone.utc)
    prev = curr - timedelta(hours=1)

    # One new error-type issue per release, all first seen in the current bucket.
    for release in releases:
        self.create_group(
            project=first_project, status=0, first_seen=curr, first_release=release, type=1
        )

    response = self.client.get(
        self.url + f"?start={prev.isoformat()[:-6]}&end={curr.isoformat()[:-6]}&category=error"
    )

    def expected_series(axis, group_by, order, current_value):
        # Build one expected timeseries entry; every entry in this test shares
        # the same interval/meta shape and the same two hourly buckets.
        return {
            "axis": axis,
            "groupBy": group_by,
            "meta": {
                "interval": 3600000,
                "isOther": False,
                "order": order,
                "valueType": "integer",
                "valueUnit": None,
            },
            "values": [
                {"timestamp": int(prev.timestamp()), "value": 0},
                {"timestamp": int(curr.timestamp()), "value": current_value},
            ],
        }

    # Ungrouped total first, then the four retained release groups, then the
    # merged "other" bucket covering the two remaining releases.
    expected = [expected_series("new_issues_count", [], 0, 6)]
    per_release = [("1.2.0", 1), ("1.3.0", 1), ("1.4.0", 1), ("1.5.0", 1), ("other", 2)]
    for order, (group, value) in enumerate(per_release):
        expected.append(expected_series("new_issues_count_by_release", [group], order, value))

    assert response.json()["timeseries"] == expected
Loading