
Commit 853776b

Merge branch 'master' into mia/aci/data-conditions-endpoint
2 parents: 28c394c + b296710

File tree: 185 files changed, +4136 -1423 lines


.github/CODEOWNERS (+1, -1)

@@ -612,7 +612,7 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge
 /static/app/components/events/autofix/ @getsentry/machine-learning-ai
 /static/app/components/modals/autofixSetupModal.spec.tsx @getsentry/machine-learning-ai
 /static/app/components/modals/autofixSetupModal.tsx @getsentry/machine-learning-ai
-/src/sentry/seer/fetch_issues_given_patches.py @getsentry/machine-learning-ai
+/src/sentry/seer/ @getsentry/machine-learning-ai
 ## End of ML & AI

 ## Processing

.github/workflows/scripts/getsentry-dispatch.js (-4)

@@ -7,10 +7,6 @@
  * deleted/renamed in `getsentry`, this will fail
  */
 const DISPATCHES = [
-  {
-    workflow: 'js-build-and-lint.yml',
-    pathFilterName: 'frontend_all',
-  },
   {
     workflow: 'backend.yml',
     pathFilterName: 'backend_all',

migrations_lockfile.txt (+2, -2)

@@ -17,12 +17,12 @@ remote_subscriptions: 0003_drop_remote_subscription

 replays: 0004_index_together

-sentry: 0842_create_organization_member_invite_table
+sentry: 0843_make_groupsearchview_postition_nullable_for_deletion

 social_auth: 0002_default_auto_field

 tempest: 0002_make_message_type_nullable

 uptime: 0031_translate_uptime_object_headers_to_lists_take_three

-workflow_engine: 0036_action_remove_legacy_fields
+workflow_engine: 0037_rm_workflow_name_unique_constraint

requirements-base.txt (+1, -1)

@@ -65,7 +65,7 @@ requests>=2.32.3
 rfc3339-validator>=0.1.2
 rfc3986-validator>=0.1.1
 # [end] jsonschema format validators
-sentry-arroyo>=2.19.9
+sentry-arroyo>=2.20.0
 sentry-kafka-schemas>=1.1.2
 sentry-ophio==1.0.0
 sentry-protos>=0.1.62

requirements-dev-frozen.txt (+2, -2)

@@ -29,7 +29,7 @@ click==8.1.7
 click-didyoumean==0.3.0
 click-plugins==1.1.1
 click-repl==0.3.0
-confluent-kafka==2.3.0
+confluent-kafka==2.8.0
 covdefaults==2.3.0
 coverage==7.6.4
 cronsim==2.6
@@ -180,7 +180,7 @@ rpds-py==0.20.0
 rsa==4.8
 s3transfer==0.10.0
 selenium==4.16.0
-sentry-arroyo==2.19.9
+sentry-arroyo==2.20.0
 sentry-cli==2.16.0
 sentry-covdefaults-disable-branch-coverage==1.0.2
 sentry-devenv==1.16.0

requirements-frozen.txt (+2, -2)

@@ -26,7 +26,7 @@ click==8.1.7
 click-didyoumean==0.3.0
 click-plugins==1.1.1
 click-repl==0.3.0
-confluent-kafka==2.3.0
+confluent-kafka==2.8.0
 cronsim==2.6
 cryptography==43.0.1
 cssselect==1.0.3
@@ -123,7 +123,7 @@ rfc3986-validator==0.1.1
 rpds-py==0.20.0
 rsa==4.8
 s3transfer==0.10.0
-sentry-arroyo==2.19.9
+sentry-arroyo==2.20.0
 sentry-forked-email-reply-parser==0.5.12.post1
 sentry-kafka-schemas==1.1.2
 sentry-ophio==1.0.0

src/sentry/api/endpoints/group_ai_summary.py (+4, -3)

@@ -245,9 +245,10 @@ def post(self, request: Request, group: Group) -> Response:
            group=group, event_id=event.event_id, user=request.user
        )

-        if response.status_code != 202:
-            # If autofix trigger fails, we don't cache to let it error and we can run again, this is only temporary for when we're testing this internally.
-            return response
+        if response.status_code != 202:
+            # If autofix trigger fails, we don't cache to let it error and we can run again
+            # This is only temporary for when we're testing this internally.
+            return response

        summary_dict = issue_summary.dict()
        summary_dict["event_id"] = event.event_id

src/sentry/api/event_search.py (+31, -9)

@@ -6,7 +6,7 @@
 from collections.abc import Callable, Generator, Mapping, Sequence
 from dataclasses import asdict, dataclass, field
 from datetime import datetime
-from typing import TYPE_CHECKING, Any, Literal, NamedTuple, TypeIs, Union
+from typing import TYPE_CHECKING, Any, Literal, NamedTuple, TypeIs, Union, overload

 from django.utils.functional import cached_property
 from parsimonious.exceptions import IncompleteParseError
@@ -574,8 +574,8 @@ def __str__(self) -> str:
         return f"{self.key.name}{self.operator}{self.value.raw_value}"


-@dataclass
-class SearchConfig:
+@dataclass  # pycqa/pycodestyle#1277
+class SearchConfig[TAllowBoolean: (Literal[True], Literal[False]) = Literal[True]]:  # noqa: E251
     """
     Configures how the search parser interprets a search query
     """
@@ -604,7 +604,7 @@ class SearchConfig:
     is_filter_translation: Mapping[str, tuple[str, Any]] = field(default_factory=dict)

     # Enables boolean filtering (AND / OR)
-    allow_boolean = True
+    allow_boolean: TAllowBoolean = True  # type: ignore[assignment]  # python/mypy#18812

     # Allows us to specify an allowlist of keys we will accept for this search.
     # If empty, allow all keys.
@@ -619,8 +619,32 @@ class SearchConfig:
     # Whether to wrap free_text_keys in asterisks
     wildcard_free_text: bool = False

+    @overload
     @classmethod
-    def create_from(cls, search_config: SearchConfig, **overrides):
+    def create_from[
+        TBool: (Literal[True], Literal[False])
+    ](
+        cls: type[SearchConfig[Any]],
+        search_config: SearchConfig[Any],
+        *,
+        allow_boolean: TBool,
+        **overrides: Any,
+    ) -> SearchConfig[TBool]: ...
+
+    @overload
+    @classmethod
+    def create_from[
+        TBool: (Literal[True], Literal[False])
+    ](
+        cls: type[SearchConfig[Any]],
+        search_config: SearchConfig[TBool],
+        **overrides: Any,
+    ) -> SearchConfig[TBool]: ...
+
+    @classmethod
+    def create_from(
+        cls: type[SearchConfig[Any]], search_config: SearchConfig[Any], **overrides: Any
+    ) -> SearchConfig[Any]:
         config = cls(**asdict(search_config))
         for key, val in overrides.items():
             setattr(config, key, val)
@@ -632,15 +656,13 @@ class SearchVisitor(NodeVisitor):

     def __init__(
         self,
-        config: SearchConfig | None = None,
+        config: SearchConfig[Any],
         params: ParamsType | None = None,
         get_field_type: Callable[[str], str | None] | None = None,
         get_function_result_type: Callable[[str], str | None] | None = None,
     ) -> None:
         super().__init__()

-        if config is None:
-            config = SearchConfig()
         self.config = config
         self.params = params if params is not None else {}

@@ -1335,7 +1357,7 @@ def generic_visit(self, node, children):
 def parse_search_query(
     query: str,
     *,
-    config: SearchConfig | None = None,
+    config: SearchConfig[Any] | None = None,
     params=None,
     get_field_type: Callable[[str], str | None] | None = None,
     get_function_result_type: Callable[[str], str | None] | None = None,
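
Note on the SearchConfig change above: the new PEP 695 type parameter plus the create_from overloads let a type checker track whether a given config permits boolean operators (AND / OR). A minimal usage sketch under that assumption follows; the variable names and the example query are illustrative, and only SearchConfig, create_from, and parse_search_query come from this file.

    from typing import Literal

    from sentry.api.event_search import SearchConfig, parse_search_query

    # Constructing with defaults should infer SearchConfig[Literal[True]] (allow_boolean=True).
    default_config = SearchConfig()

    # The keyword overload narrows the parameter: with allow_boolean=False a checker
    # infers SearchConfig[Literal[False]], so call sites that must reject AND/OR
    # queries can require that narrower type in their signatures.
    no_boolean_config = SearchConfig.create_from(default_config, allow_boolean=False)

    # parse_search_query still accepts any SearchConfig (or None), as before.
    parse_search_query("is:unresolved browser:firefox", config=no_boolean_config)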

src/sentry/api/helpers/group_index/index.py (+3, -1)

@@ -20,6 +20,7 @@
 from sentry.models.environment import Environment
 from sentry.models.group import Group, looks_like_short_id
 from sentry.models.groupsearchview import GroupSearchView
+from sentry.models.groupsearchviewstarred import GroupSearchViewStarred
 from sentry.models.organization import Organization
 from sentry.models.project import Project
 from sentry.models.release import Release
@@ -104,11 +105,12 @@ def build_query_params_from_request(
     if selected_view_id:
         default_view = GroupSearchView.objects.filter(id=int(selected_view_id)).first()
     else:
-        default_view = GroupSearchView.objects.filter(
+        first_starred_view = GroupSearchViewStarred.objects.filter(
             organization=organization,
             user_id=request.user.id,
             position=0,
         ).first()
+        default_view = first_starred_view.group_search_view if first_starred_view else None

     if default_view:
         query_kwargs["sort_by"] = default_view.query_sort

src/sentry/api/serializers/models/commit.py (+4, -1)

@@ -50,7 +50,10 @@ def get_attrs(self, item_list, user, **kwargs):
            )
        )

-        pull_request_by_commit = {pr.merge_commit_sha: serialize(pr) for pr in pull_requests}
+        pull_request_by_commit = {
+            pr.merge_commit_sha: serialized_pr
+            for (pr, serialized_pr) in zip(pull_requests, serialize(pull_requests))
+        }

        result = {}
        for item in item_list:
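
The rewritten comprehension above trades one serialize() call per pull request for a single bulk serialize() over the list, then zips the results back to their models; this relies on serialize() returning output in the same order as its input. A hedged sketch of the before/after shape (the pull_requests queryset is assumed to have been built earlier in get_attrs):

    # Before: the serializer ran once per pull request inside the comprehension.
    # pull_request_by_commit = {pr.merge_commit_sha: serialize(pr) for pr in pull_requests}

    # After: serialize the whole list once, then pair each model with its payload,
    # assuming serialize() preserves input order.
    serialized_prs = serialize(pull_requests)
    pull_request_by_commit = {
        pr.merge_commit_sha: data for pr, data in zip(pull_requests, serialized_prs)
    }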

src/sentry/incidents/metric_alert_detector.py (+2)

@@ -51,6 +51,7 @@ def validate(self, attrs):
             raise serializers.ValidationError("Too many conditions")
         return attrs

+    # TODO - @saponifi3d - we can make this more generic and move it into the base Detector
     def update_data_conditions(self, instance: Detector, data_conditions: list[DataConditionType]):
         """
         Update the data condition if it already exists, create one if it does not
@@ -116,6 +117,7 @@ def update_data_source(self, instance: Detector, data_source: SnubaQueryDataSour
             event_types=data_source.get("event_types", [event_type for event_type in event_types]),
         )

+    # TODO - @saponifi3d - we can make this more generic and move it into the base Detector
     def update(self, instance: Detector, validated_data: dict[str, Any]):
         instance.name = validated_data.get("name", instance.name)
         instance.type = validated_data.get("detector_type", instance.group_type).slug

src/sentry/incidents/subscription_processor.py (-24)

@@ -411,18 +411,6 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
         )

         aggregation_value = self.get_aggregation_value(subscription_update)
-        if features.has(
-            "organizations:failure-rate-metric-alert-logging",
-            self.subscription.project.organization,
-        ):
-            logger.info(
-                "Update value in subscription processor",
-                extra={
-                    "result": subscription_update,
-                    "aggregation_value": aggregation_value,
-                    "rule_id": self.alert_rule.id,
-                },
-            )

         has_anomaly_detection = features.has(
             "organizations:anomaly-detection-alerts", self.subscription.project.organization
@@ -444,18 +432,6 @@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None:
                 last_update=self.last_update.timestamp(),
                 aggregation_value=aggregation_value,
             )
-            # XXX (mifu67): log problematic rule, to be deleted later
-            if features.has(
-                "feature.organizations:failure-rate-metric-alert-logging",
-                self.subscription.project.organization,
-            ):
-                logger.info(
-                    "Received this response from Seer",
-                    extra={
-                        "potential_anomalies": potential_anomalies,
-                        "alert_rule_id": self.alert_rule.id,
-                    },
-                )
             if potential_anomalies is None:
                 logger.info(
                     "No potential anomalies found",

src/sentry/incidents/utils/metric_issue_poc.py (+35, -13)

@@ -6,8 +6,9 @@
 from django.utils.translation import gettext as _

 from sentry import features
-from sentry.incidents.models.alert_rule import AlertRule, AlertRuleThresholdType
-from sentry.incidents.models.incident import Incident, IncidentStatus
+from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS
+from sentry.incidents.models.alert_rule import AlertRule, AlertRuleThresholdType, AlertRuleTrigger
+from sentry.incidents.models.incident import INCIDENT_STATUS, Incident, IncidentStatus
 from sentry.incidents.utils.format_duration import format_duration_idiomatic
 from sentry.integrations.metric_alerts import TEXT_COMPARISON_DELTA
 from sentry.issues.grouptype import MetricIssuePOC
@@ -43,16 +44,21 @@ def to_dict(self) -> dict[str, Any]:
     "count_unique(tags[sentry:user])": "Number of users affected",
     "percentage(sessions_crashed, sessions)": "Crash free session rate",
     "percentage(users_crashed, users)": "Crash free user rate",
+    "failure_rate()": "Failure rate",
+    "apdex()": "Apdex score",
 }


-def construct_title(alert_rule: AlertRule) -> str:
+def construct_title(alert_rule: AlertRule, status: int) -> str:
     # Parse the aggregate key from the alert rule
     agg_display_key = alert_rule.snuba_query.aggregate
     if is_mri_field(agg_display_key):
-        agg_text = format_mri_field(agg_display_key)
+        aggregate = format_mri_field(agg_display_key)
+    elif CRASH_RATE_ALERT_AGGREGATE_ALIAS in agg_display_key:
+        agg_display_key = agg_display_key.split(f"AS {CRASH_RATE_ALERT_AGGREGATE_ALIAS}")[0].strip()
+        aggregate = QUERY_AGGREGATION_DISPLAY.get(agg_display_key, agg_display_key)
     else:
-        agg_text = QUERY_AGGREGATION_DISPLAY.get(agg_display_key, alert_rule.snuba_query.aggregate)
+        aggregate = QUERY_AGGREGATION_DISPLAY.get(agg_display_key, alert_rule.snuba_query.aggregate)

     # Determine the higher or lower comparison
     higher_or_lower = ""
@@ -61,19 +67,35 @@ def construct_title(alert_rule: AlertRule) -> str:
     else:
         higher_or_lower = "less than" if alert_rule.comparison_delta else "below"

+    label = INCIDENT_STATUS[IncidentStatus(status)]
+
     # Format the time window for the threshold
-    time_window = alert_rule.snuba_query.time_window // 60
-    title = f"{agg_text} in the last {format_duration_idiomatic(time_window)} {higher_or_lower}"
+    time_window = format_duration_idiomatic(alert_rule.snuba_query.time_window // 60)

     # If the alert rule has a comparison delta, format the comparison string
+    comparison: str | int | float = "threshold"
     if alert_rule.comparison_delta:
         comparison_delta_minutes = alert_rule.comparison_delta // 60
-        comparison_string = TEXT_COMPARISON_DELTA.get(
-            comparison_delta_minutes, f"same time {comparison_delta_minutes} minutes ago"
+        comparison = TEXT_COMPARISON_DELTA.get(
+            comparison_delta_minutes, f"same time {comparison_delta_minutes} minutes ago "
         )
-        return _(f"{title} {comparison_string}")
-
-    return _(f"{title} threshold")
+    else:
+        # Otherwise, check if there is a trigger with a threshold
+        trigger = AlertRuleTrigger.objects.filter(id=alert_rule.id, label=label.lower()).first()
+        if trigger:
+            threshold = trigger.alert_threshold
+            comparison = int(threshold) if threshold % 1 == 0 else threshold
+
+    template = "{label}: {metric} in the last {time_window} {higher_or_lower} {comparison}"
+    return _(
+        template.format(
+            label=label.capitalize(),
+            metric=aggregate,
+            higher_or_lower=higher_or_lower,
+            comparison=comparison,
+            time_window=time_window,
+        )
+    )


 def _build_occurrence_from_incident(
@@ -88,7 +110,7 @@ def _build_occurrence_from_incident(
         else PriorityLevel.MEDIUM
     )
     fingerprint = [str(incident.alert_rule.id)]
-    title = construct_title(incident.alert_rule)
+    title = construct_title(incident.alert_rule, incident.status)
     return IssueOccurrence(
         id=uuid4().hex,
         project_id=project.id,
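
For a concrete sense of what the new construct_title template produces, here is an illustration-only rendering; the rule values are invented, and only the template string and the "Failure rate" display name come from this diff.

    # Hypothetical inputs: a critical failure_rate() alert with a 10 minute window,
    # a static threshold of 80, and no comparison delta.
    template = "{label}: {metric} in the last {time_window} {higher_or_lower} {comparison}"
    title = template.format(
        label="Critical",
        metric="Failure rate",
        time_window="10 minutes",
        higher_or_lower="above",
        comparison=80,
    )
    print(title)  # Critical: Failure rate in the last 10 minutes above 80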

src/sentry/integrations/bitbucket/client.py (+10, -2)

@@ -7,7 +7,6 @@

 from requests import PreparedRequest

-from sentry.integrations.base import IntegrationFeatureNotImplementedError
 from sentry.integrations.client import ApiClient
 from sentry.integrations.models.integration import Integration
 from sentry.integrations.services.integration.model import RpcIntegration
@@ -182,4 +181,13 @@ def check_file(self, repo: Repository, path: str, version: str | None) -> object
     def get_file(
         self, repo: Repository, path: str, ref: str | None, codeowners: bool = False
     ) -> str:
-        raise IntegrationFeatureNotImplementedError
+        response = self.get_cached(
+            path=BitbucketAPIPath.source.format(
+                repo=repo.name,
+                sha=ref,
+                path=path,
+            ),
+            allow_redirects=True,
+            raw_response=True,
+        )
+        return response.text
