Skip to content

Commit cfda88d

Browse files
ref: fix some of the types once api.event_search is typed (#87303)
<!-- Describe your PR here. -->
1 parent 641ea9f commit cfda88d

File tree

11 files changed

+68
-29
lines changed

11 files changed

+68
-29
lines changed

src/sentry/api/event_search.py

+23-3
Original file line number | Diff line number | Diff line change
@@ -1354,16 +1354,36 @@ def generic_visit(self, node, children):
13541354
QueryToken = Union[SearchFilter, AggregateFilter, QueryOp, ParenExpression]
13551355

13561356

1357+
@overload
1358+
def parse_search_query(
1359+
query: str,
1360+
*,
1361+
config: SearchConfig[Literal[False]],
1362+
params=None,
1363+
get_field_type: Callable[[str], str | None] | None = None,
1364+
get_function_result_type: Callable[[str], str | None] | None = None,
1365+
) -> Sequence[SearchFilter | AggregateFilter]: ...
1366+
1367+
1368+
@overload
1369+
def parse_search_query(
1370+
query: str,
1371+
*,
1372+
config: SearchConfig[Literal[True]] | None = None,
1373+
params=None,
1374+
get_field_type: Callable[[str], str | None] | None = None,
1375+
get_function_result_type: Callable[[str], str | None] | None = None,
1376+
) -> Sequence[QueryToken]: ...
1377+
1378+
13571379
def parse_search_query(
13581380
query: str,
13591381
*,
13601382
config: SearchConfig[Any] | None = None,
13611383
params=None,
13621384
get_field_type: Callable[[str], str | None] | None = None,
13631385
get_function_result_type: Callable[[str], str | None] | None = None,
1364-
) -> list[
1365-
SearchFilter
1366-
]: # TODO: use the `Sequence[QueryToken]` type and update the code that fails type checking.
1386+
) -> Sequence[QueryToken]:
13671387
if config is None:
13681388
config = default_config
13691389

src/sentry/api/helpers/group_index/index.py

+4-4
Original file line number | Diff line number | Diff line change
@@ -12,7 +12,7 @@
1212
from rest_framework.response import Response
1313

1414
from sentry import features, search
15-
from sentry.api.event_search import SearchFilter
15+
from sentry.api.event_search import AggregateFilter, SearchFilter
1616
from sentry.api.issue_search import convert_query_values, parse_search_query
1717
from sentry.api.serializers import serialize
1818
from sentry.constants import DEFAULT_SORT_OPTION
@@ -53,7 +53,7 @@ def parse_and_convert_issue_search_query(
5353
projects: Sequence[Project],
5454
environments: Sequence[Environment] | None,
5555
user: User | AnonymousUser,
56-
) -> Sequence[SearchFilter]:
56+
) -> Sequence[SearchFilter | AggregateFilter]:
5757
try:
5858
search_filters = convert_query_values(
5959
parse_search_query(query), projects, user, environments
@@ -160,7 +160,7 @@ def build_query_params_from_request(
160160

161161
def validate_search_filter_permissions(
162162
organization: Organization,
163-
search_filters: Sequence[SearchFilter],
163+
search_filters: Sequence[AggregateFilter | SearchFilter],
164164
user: User | AnonymousUser,
165165
) -> None:
166166
"""
@@ -178,7 +178,7 @@ def validate_search_filter_permissions(
178178

179179
for search_filter in search_filters:
180180
for feature_condition, feature_name in advanced_search_features:
181-
if feature_condition(search_filter):
181+
if isinstance(search_filter, SearchFilter) and feature_condition(search_filter):
182182
advanced_search_feature_gated.send_robust(
183183
user=user, organization=organization, sender=validate_search_filter_permissions
184184
)

src/sentry/api/issue_search.py

+12
Original file line number | Diff line number | Diff line change
@@ -273,6 +273,18 @@ def convert_query_values(
273273
) -> list[SearchFilter]: ...
274274

275275

276+
# maintain a specific subtype of QueryToken union
277+
@overload
278+
def convert_query_values(
279+
search_filters: Sequence[AggregateFilter | SearchFilter],
280+
projects: Sequence[Project],
281+
user: User | RpcUser | AnonymousUser | None,
282+
environments: Sequence[Environment] | None,
283+
value_converters=value_converters,
284+
allow_aggregate_filters=False,
285+
) -> Sequence[AggregateFilter | SearchFilter]: ...
286+
287+
276288
@overload
277289
def convert_query_values(
278290
search_filters: Sequence[QueryToken],

src/sentry/replays/endpoints/organization_replay_selector_index.py

+3-2
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,6 @@
11
from __future__ import annotations
22

3+
from collections.abc import Sequence
34
from datetime import datetime
45
from typing import Any, TypedDict
56

@@ -159,7 +160,7 @@ def query_selector_collection(
159160
limit: str | None,
160161
offset: str | None,
161162
environment: list[str],
162-
search_filters: list[Condition],
163+
search_filters: Sequence[QueryToken],
163164
organization: Organization,
164165
) -> dict:
165166
"""Query aggregated replay collection."""
@@ -187,7 +188,7 @@ def query_selector_dataset(
187188
project_ids: list[int],
188189
start: datetime,
189190
end: datetime,
190-
search_filters: list[QueryToken],
191+
search_filters: Sequence[QueryToken],
191192
environment: list[str],
192193
pagination: Paginators | None,
193194
sort: str | None,

src/sentry/replays/query.py

+2-2
Original file line number | Diff line number | Diff line change
@@ -22,7 +22,7 @@
2222
from snuba_sdk.orderby import Direction, OrderBy
2323

2424
from sentry import options
25-
from sentry.api.event_search import ParenExpression, SearchConfig, SearchFilter
25+
from sentry.api.event_search import ParenExpression, QueryToken, SearchConfig, SearchFilter
2626
from sentry.api.exceptions import BadRequest
2727
from sentry.models.organization import Organization
2828
from sentry.replays.lib.query import all_values_for_tag_key
@@ -57,7 +57,7 @@ def query_replays_collection_paginated(
5757
sort: str | None,
5858
limit: int,
5959
offset: int,
60-
search_filters: Sequence[SearchFilter],
60+
search_filters: Sequence[QueryToken],
6161
preferred_source: PREFERRED_SOURCE,
6262
organization: Organization | None = None,
6363
actor: Any | None = None,

src/sentry/replays/scripts/delete_replays.py

+2-2
Original file line number | Diff line number | Diff line change
@@ -5,7 +5,7 @@
55
from collections.abc import Sequence
66
from datetime import datetime, timezone
77

8-
from sentry.api.event_search import SearchFilter, parse_search_query
8+
from sentry.api.event_search import QueryToken, parse_search_query
99
from sentry.models.organization import Organization
1010
from sentry.replays.lib.kafka import initialize_replays_publisher
1111
from sentry.replays.post_process import generate_normalized_output
@@ -61,7 +61,7 @@ def delete_replays(
6161
delete_replay_ids(project_id, replay_ids=[r["id"] for r in replays])
6262

6363

64-
def translate_cli_tags_param_to_snuba_tag_param(tags: list[str]) -> Sequence[SearchFilter]:
64+
def translate_cli_tags_param_to_snuba_tag_param(tags: list[str]) -> Sequence[QueryToken]:
6565
return parse_search_query(" AND ".join(tags), config=replay_url_parser_config)
6666

6767

src/sentry/search/events/datasets/metrics.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -1804,7 +1804,7 @@ def _resolve_total_score_weights_function(self, column: str, alias: str | None)
18041804
if (isinstance(term, SearchFilter) and term.key.name == "browser.name")
18051805
or (
18061806
isinstance(term, ParenExpression)
1807-
and all( # type: ignore[unreachable]
1807+
and all(
18081808
(isinstance(child_term, SearchFilter) and child_term.key.name == "browser.name")
18091809
or child_term == "OR"
18101810
for child_term in term.children

src/sentry/search/events/filter.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -527,7 +527,7 @@ def parse_semver(version, operator) -> SemverFilter:
527527

528528

529529
def convert_search_filter_to_snuba_query(
530-
search_filter: SearchFilter,
530+
search_filter: AggregateFilter | SearchFilter,
531531
key: str | None = None,
532532
params: FilterConvertParams | None = None,
533533
) -> Sequence[Any] | None:

src/sentry/snuba/metrics/extraction.py

+6-6
Original file line number | Diff line number | Diff line change
@@ -422,7 +422,7 @@ def _transform_search_filter(search_filter: SearchFilter) -> SearchFilter:
422422
return search_filter
423423

424424

425-
def _transform_search_query(query: Sequence[QueryToken]) -> Sequence[QueryToken]:
425+
def _transform_search_query(query: Sequence[QueryToken]) -> list[QueryToken]:
426426
transformed_query: list[QueryToken] = []
427427

428428
for token in query:
@@ -446,7 +446,7 @@ def parse_search_query(
446446
Parses a search query with the discover grammar and performs some transformations on the AST in order to account for
447447
edge cases.
448448
"""
449-
tokens = cast(Sequence[QueryToken], event_search.parse_search_query(query))
449+
tokens = event_search.parse_search_query(query)
450450

451451
# We might want to force the `event.type:transaction` to be in the query, as a validation step.
452452
if force_transaction_event_type:
@@ -462,7 +462,7 @@ def parse_search_query(
462462
return tokens
463463

464464

465-
def cleanup_search_query(tokens: Sequence[QueryToken]) -> Sequence[QueryToken]:
465+
def cleanup_search_query(tokens: Sequence[QueryToken]) -> list[QueryToken]:
466466
"""
467467
Recreates a valid query from an original query that has had on demand search filters removed.
468468
@@ -907,7 +907,7 @@ def to_standard_metrics_query(query: str) -> str:
907907
return query_tokens_to_string(cleaned_query)
908908

909909

910-
def to_standard_metrics_tokens(tokens: Sequence[QueryToken]) -> Sequence[QueryToken]:
910+
def to_standard_metrics_tokens(tokens: Sequence[QueryToken]) -> list[QueryToken]:
911911
"""
912912
Converts a query in token form containing on-demand search fields to a query
913913
that has all on-demand filters removed and can be run using only standard metrics.
@@ -930,7 +930,7 @@ def query_tokens_to_string(tokens: Sequence[QueryToken]) -> str:
930930
return ret_val.strip()
931931

932932

933-
def _remove_on_demand_search_filters(tokens: Sequence[QueryToken]) -> Sequence[QueryToken]:
933+
def _remove_on_demand_search_filters(tokens: Sequence[QueryToken]) -> list[QueryToken]:
934934
"""
935935
Removes tokens that contain filters that can only be handled by on demand metrics.
936936
"""
@@ -947,7 +947,7 @@ def _remove_on_demand_search_filters(tokens: Sequence[QueryToken]) -> Sequence[Q
947947
return ret_val
948948

949949

950-
def _remove_blacklisted_search_filters(tokens: Sequence[QueryToken]) -> Sequence[QueryToken]:
950+
def _remove_blacklisted_search_filters(tokens: Sequence[QueryToken]) -> list[QueryToken]:
951951
"""
952952
Removes tokens that contain filters that are blacklisted.
953953
"""

tests/sentry/api/test_event_search.py

+7-1
Original file line number | Diff line number | Diff line change
@@ -301,7 +301,7 @@ def test_paren_expression_with_bool_disabled(self):
301301
def test_paren_expression_to_query_string(self):
302302
(val,) = parse_search_query("(has:1 random():<5)")
303303
assert isinstance(val, ParenExpression)
304-
assert val.to_query_string() == "(has:=1 random():<5.0)" # type: ignore[unreachable] # will be fixed once parse_search_query is fixed!
304+
assert val.to_query_string() == "(has:=1 random():<5.0)"
305305

306306
def test_bool_operator_with_bool_disabled(self):
307307
config = SearchConfig.create_from(default_config, allow_boolean=False)
@@ -852,6 +852,7 @@ def test_escaping_asterisk(self):
852852
]
853853
search_filter = search_filters[0]
854854
# the slash should be removed in the final value
855+
assert isinstance(search_filter, SearchFilter)
855856
assert search_filter.value.value == "a*b"
856857

857858
# the first and last asterisks arent escaped with a preceding backslash, so they're
@@ -861,6 +862,7 @@ def test_escaping_asterisk(self):
861862
SearchFilter(key=SearchKey(name="title"), operator="=", value=SearchValue(r"*\**"))
862863
]
863864
search_filter = search_filters[0]
865+
assert isinstance(search_filter, SearchFilter)
864866
assert search_filter.value.value == r"^.*\*.*$"
865867

866868
@pytest.mark.xfail(reason="escaping backslashes is not supported yet")
@@ -871,6 +873,7 @@ def test_escaping_backslashes(self):
871873
]
872874
search_filter = search_filters[0]
873875
# the extra slash should be removed in the final value
876+
assert isinstance(search_filter, SearchFilter)
874877
assert search_filter.value.value == r"a\b"
875878

876879
@pytest.mark.xfail(reason="escaping backslashes is not supported yet")
@@ -881,6 +884,7 @@ def test_trailing_escaping_backslashes(self):
881884
]
882885
search_filter = search_filters[0]
883886
# the extra slash should be removed in the final value
887+
assert isinstance(search_filter, SearchFilter)
884888
assert search_filter.value.value == "a\\"
885889

886890
def test_escaping_quotes(self):
@@ -890,6 +894,7 @@ def test_escaping_quotes(self):
890894
]
891895
search_filter = search_filters[0]
892896
# the slash should be removed in the final value
897+
assert isinstance(search_filter, SearchFilter)
893898
assert search_filter.value.value == 'a"b'
894899

895900

@@ -941,6 +946,7 @@ def test_search_filter_to_query_string(query):
941946

942947
filters = parse_search_query(query)
943948
assert len(filters) == 1
949+
assert isinstance(filters[0], SearchFilter)
944950
actual = filters[0].to_query_string()
945951
assert actual == query
946952

tests/sentry/snuba/metrics/test_extraction.py

+7-7
Original file line number | Diff line number | Diff line change
@@ -833,13 +833,13 @@ def test_cleanup_query_with_empty_parens() -> None:
833833
Separate test with empty parens because we can't parse a string with empty parens correctly
834834
"""
835835
paren = ParenExpression
836-
dirty_tokens = (
837-
[paren([paren(["AND", "OR", paren([])])])]
838-
+ parse_search_query("release:initial AND (AND OR) (OR)") # ((AND OR (OR ())))
839-
+ [paren([])]
840-
+ parse_search_query("os.name:android") # ()
841-
+ [paren([paren([paren(["AND", "OR", paren([])])])])] # ((()))
842-
)
836+
dirty_tokens = [
837+
paren([paren(["AND", "OR", paren([])])]),
838+
*parse_search_query("release:initial AND (AND OR) (OR)"), # ((AND OR (OR ())))
839+
paren([]),
840+
*parse_search_query("os.name:android"), # ()
841+
paren([paren([paren(["AND", "OR", paren([])])])]), # ((()))
842+
]
843843
clean_tokens = parse_search_query("release:initial AND os.name:android")
844844
actual_clean = cleanup_search_query(dirty_tokens)
845845
assert actual_clean == clean_tokens

0 commit comments

Comments (0)