Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ref: fix some of the types once api.event_search is typed #87303

Merged
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
26 changes: 23 additions & 3 deletions src/sentry/api/event_search.py
Original file line number Diff line number Diff line change
@@ -1354,16 +1354,36 @@ def generic_visit(self, node, children):
QueryToken = Union[SearchFilter, AggregateFilter, QueryOp, ParenExpression]


# Overload: when the caller passes a SearchConfig parameterized with
# Literal[False] — presumably a config with boolean/paren operators
# disabled (TODO confirm against SearchConfig's type parameter) — the
# parse result is narrowed to plain filters only, with no QueryOp or
# ParenExpression tokens in the returned sequence.
@overload
def parse_search_query(
    query: str,
    *,
    config: SearchConfig[Literal[False]],
    params=None,
    get_field_type: Callable[[str], str | None] | None = None,
    get_function_result_type: Callable[[str], str | None] | None = None,
) -> Sequence[SearchFilter | AggregateFilter]: ...


# Overload: for a SearchConfig parameterized with Literal[True], or when no
# config is supplied (config=None falls back to a default elsewhere), the
# result may contain the full QueryToken union — including QueryOp and
# ParenExpression entries — so callers must handle those token kinds.
@overload
def parse_search_query(
    query: str,
    *,
    config: SearchConfig[Literal[True]] | None = None,
    params=None,
    get_field_type: Callable[[str], str | None] | None = None,
    get_function_result_type: Callable[[str], str | None] | None = None,
) -> Sequence[QueryToken]: ...


def parse_search_query(
query: str,
*,
config: SearchConfig[Any] | None = None,
params=None,
get_field_type: Callable[[str], str | None] | None = None,
get_function_result_type: Callable[[str], str | None] | None = None,
) -> list[
SearchFilter
]: # TODO: use the `Sequence[QueryToken]` type and update the code that fails type checking.
) -> Sequence[QueryToken]:
if config is None:
config = default_config

8 changes: 4 additions & 4 deletions src/sentry/api/helpers/group_index/index.py
Original file line number Diff line number Diff line change
@@ -12,7 +12,7 @@
from rest_framework.response import Response

from sentry import features, search
from sentry.api.event_search import SearchFilter
from sentry.api.event_search import AggregateFilter, SearchFilter
from sentry.api.issue_search import convert_query_values, parse_search_query
from sentry.api.serializers import serialize
from sentry.constants import DEFAULT_SORT_OPTION
@@ -53,7 +53,7 @@ def parse_and_convert_issue_search_query(
projects: Sequence[Project],
environments: Sequence[Environment] | None,
user: User | AnonymousUser,
) -> Sequence[SearchFilter]:
) -> Sequence[SearchFilter | AggregateFilter]:
try:
search_filters = convert_query_values(
parse_search_query(query), projects, user, environments
@@ -160,7 +160,7 @@ def build_query_params_from_request(

def validate_search_filter_permissions(
organization: Organization,
search_filters: Sequence[SearchFilter],
search_filters: Sequence[AggregateFilter | SearchFilter],
user: User | AnonymousUser,
) -> None:
"""
@@ -178,7 +178,7 @@ def validate_search_filter_permissions(

for search_filter in search_filters:
for feature_condition, feature_name in advanced_search_features:
if feature_condition(search_filter):
if isinstance(search_filter, SearchFilter) and feature_condition(search_filter):
advanced_search_feature_gated.send_robust(
user=user, organization=organization, sender=validate_search_filter_permissions
)
12 changes: 12 additions & 0 deletions src/sentry/api/issue_search.py
Original file line number Diff line number Diff line change
@@ -273,6 +273,18 @@ def convert_query_values(
) -> list[SearchFilter]: ...


# maintain a specific subtype of QueryToken union
@overload
def convert_query_values(
search_filters: Sequence[AggregateFilter | SearchFilter],
projects: Sequence[Project],
user: User | RpcUser | AnonymousUser | None,
environments: Sequence[Environment] | None,
value_converters=value_converters,
allow_aggregate_filters=False,
) -> Sequence[AggregateFilter | SearchFilter]: ...


@overload
def convert_query_values(
search_filters: Sequence[QueryToken],
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

from collections.abc import Sequence
from datetime import datetime
from typing import Any, TypedDict

@@ -159,7 +160,7 @@ def query_selector_collection(
limit: str | None,
offset: str | None,
environment: list[str],
search_filters: list[Condition],
search_filters: Sequence[QueryToken],
organization: Organization,
) -> dict:
"""Query aggregated replay collection."""
@@ -187,7 +188,7 @@ def query_selector_dataset(
project_ids: list[int],
start: datetime,
end: datetime,
search_filters: list[QueryToken],
search_filters: Sequence[QueryToken],
environment: list[str],
pagination: Paginators | None,
sort: str | None,
4 changes: 2 additions & 2 deletions src/sentry/replays/query.py
Original file line number Diff line number Diff line change
@@ -22,7 +22,7 @@
from snuba_sdk.orderby import Direction, OrderBy

from sentry import options
from sentry.api.event_search import ParenExpression, SearchConfig, SearchFilter
from sentry.api.event_search import ParenExpression, QueryToken, SearchConfig, SearchFilter
from sentry.api.exceptions import BadRequest
from sentry.models.organization import Organization
from sentry.replays.lib.query import all_values_for_tag_key
@@ -57,7 +57,7 @@ def query_replays_collection_paginated(
sort: str | None,
limit: int,
offset: int,
search_filters: Sequence[SearchFilter],
search_filters: Sequence[QueryToken],
preferred_source: PREFERRED_SOURCE,
organization: Organization | None = None,
actor: Any | None = None,
4 changes: 2 additions & 2 deletions src/sentry/replays/scripts/delete_replays.py
Original file line number Diff line number Diff line change
@@ -5,7 +5,7 @@
from collections.abc import Sequence
from datetime import datetime, timezone

from sentry.api.event_search import SearchFilter, parse_search_query
from sentry.api.event_search import QueryToken, parse_search_query
from sentry.models.organization import Organization
from sentry.replays.lib.kafka import initialize_replays_publisher
from sentry.replays.post_process import generate_normalized_output
@@ -61,7 +61,7 @@ def delete_replays(
delete_replay_ids(project_id, replay_ids=[r["id"] for r in replays])


def translate_cli_tags_param_to_snuba_tag_param(tags: list[str]) -> Sequence[SearchFilter]:
def translate_cli_tags_param_to_snuba_tag_param(tags: list[str]) -> Sequence[QueryToken]:
return parse_search_query(" AND ".join(tags), config=replay_url_parser_config)


2 changes: 1 addition & 1 deletion src/sentry/search/events/datasets/metrics.py
Original file line number Diff line number Diff line change
@@ -1804,7 +1804,7 @@ def _resolve_total_score_weights_function(self, column: str, alias: str | None)
if (isinstance(term, SearchFilter) and term.key.name == "browser.name")
or (
isinstance(term, ParenExpression)
and all( # type: ignore[unreachable]
and all(
(isinstance(child_term, SearchFilter) and child_term.key.name == "browser.name")
or child_term == "OR"
for child_term in term.children
2 changes: 1 addition & 1 deletion src/sentry/search/events/filter.py
Original file line number Diff line number Diff line change
@@ -527,7 +527,7 @@ def parse_semver(version, operator) -> SemverFilter:


def convert_search_filter_to_snuba_query(
search_filter: SearchFilter,
search_filter: AggregateFilter | SearchFilter,
key: str | None = None,
params: FilterConvertParams | None = None,
) -> Sequence[Any] | None:
12 changes: 6 additions & 6 deletions src/sentry/snuba/metrics/extraction.py
Original file line number Diff line number Diff line change
@@ -422,7 +422,7 @@ def _transform_search_filter(search_filter: SearchFilter) -> SearchFilter:
return search_filter


def _transform_search_query(query: Sequence[QueryToken]) -> Sequence[QueryToken]:
def _transform_search_query(query: Sequence[QueryToken]) -> list[QueryToken]:
transformed_query: list[QueryToken] = []

for token in query:
@@ -446,7 +446,7 @@ def parse_search_query(
Parses a search query with the discover grammar and performs some transformations on the AST in order to account for
edge cases.
"""
tokens = cast(Sequence[QueryToken], event_search.parse_search_query(query))
tokens = event_search.parse_search_query(query)

# We might want to force the `event.type:transaction` to be in the query, as a validation step.
if force_transaction_event_type:
@@ -462,7 +462,7 @@ def parse_search_query(
return tokens


def cleanup_search_query(tokens: Sequence[QueryToken]) -> Sequence[QueryToken]:
def cleanup_search_query(tokens: Sequence[QueryToken]) -> list[QueryToken]:
"""
Recreates a valid query from an original query that has had on demand search filters removed.

@@ -907,7 +907,7 @@ def to_standard_metrics_query(query: str) -> str:
return query_tokens_to_string(cleaned_query)


def to_standard_metrics_tokens(tokens: Sequence[QueryToken]) -> Sequence[QueryToken]:
def to_standard_metrics_tokens(tokens: Sequence[QueryToken]) -> list[QueryToken]:
"""
Converts a query in token form containing on-demand search fields to a query
that has all on-demand filters removed and can be run using only standard metrics.
@@ -930,7 +930,7 @@ def query_tokens_to_string(tokens: Sequence[QueryToken]) -> str:
return ret_val.strip()


def _remove_on_demand_search_filters(tokens: Sequence[QueryToken]) -> Sequence[QueryToken]:
def _remove_on_demand_search_filters(tokens: Sequence[QueryToken]) -> list[QueryToken]:
"""
Removes tokens that contain filters that can only be handled by on demand metrics.
"""
@@ -947,7 +947,7 @@ def _remove_on_demand_search_filters(tokens: Sequence[QueryToken]) -> Sequence[Q
return ret_val


def _remove_blacklisted_search_filters(tokens: Sequence[QueryToken]) -> Sequence[QueryToken]:
def _remove_blacklisted_search_filters(tokens: Sequence[QueryToken]) -> list[QueryToken]:
"""
Removes tokens that contain filters that are blacklisted.
"""
8 changes: 7 additions & 1 deletion tests/sentry/api/test_event_search.py
Original file line number Diff line number Diff line change
@@ -301,7 +301,7 @@ def test_paren_expression_with_bool_disabled(self):
def test_paren_expression_to_query_string(self):
(val,) = parse_search_query("(has:1 random():<5)")
assert isinstance(val, ParenExpression)
assert val.to_query_string() == "(has:=1 random():<5.0)" # type: ignore[unreachable] # will be fixed once parse_search_query is fixed!
assert val.to_query_string() == "(has:=1 random():<5.0)"

def test_bool_operator_with_bool_disabled(self):
config = SearchConfig.create_from(default_config, allow_boolean=False)
@@ -852,6 +852,7 @@ def test_escaping_asterisk(self):
]
search_filter = search_filters[0]
# the slash should be removed in the final value
assert isinstance(search_filter, SearchFilter)
assert search_filter.value.value == "a*b"

# the first and last asterisks arent escaped with a preceding backslash, so they're
@@ -861,6 +862,7 @@ def test_escaping_asterisk(self):
SearchFilter(key=SearchKey(name="title"), operator="=", value=SearchValue(r"*\**"))
]
search_filter = search_filters[0]
assert isinstance(search_filter, SearchFilter)
assert search_filter.value.value == r"^.*\*.*$"

@pytest.mark.xfail(reason="escaping backslashes is not supported yet")
@@ -871,6 +873,7 @@ def test_escaping_backslashes(self):
]
search_filter = search_filters[0]
# the extra slash should be removed in the final value
assert isinstance(search_filter, SearchFilter)
assert search_filter.value.value == r"a\b"

@pytest.mark.xfail(reason="escaping backslashes is not supported yet")
@@ -881,6 +884,7 @@ def test_trailing_escaping_backslashes(self):
]
search_filter = search_filters[0]
# the extra slash should be removed in the final value
assert isinstance(search_filter, SearchFilter)
assert search_filter.value.value == "a\\"

def test_escaping_quotes(self):
@@ -890,6 +894,7 @@ def test_escaping_quotes(self):
]
search_filter = search_filters[0]
# the slash should be removed in the final value
assert isinstance(search_filter, SearchFilter)
assert search_filter.value.value == 'a"b'


@@ -941,6 +946,7 @@ def test_search_filter_to_query_string(query):

filters = parse_search_query(query)
assert len(filters) == 1
assert isinstance(filters[0], SearchFilter)
actual = filters[0].to_query_string()
assert actual == query

14 changes: 7 additions & 7 deletions tests/sentry/snuba/metrics/test_extraction.py
Original file line number Diff line number Diff line change
@@ -833,13 +833,13 @@ def test_cleanup_query_with_empty_parens() -> None:
Separate test with empty parens because we can't parse a string with empty parens correctly
"""
paren = ParenExpression
dirty_tokens = (
[paren([paren(["AND", "OR", paren([])])])]
+ parse_search_query("release:initial AND (AND OR) (OR)") # ((AND OR (OR ())))
+ [paren([])]
+ parse_search_query("os.name:android") # ()
+ [paren([paren([paren(["AND", "OR", paren([])])])])] # ((()))
)
dirty_tokens = [
paren([paren(["AND", "OR", paren([])])]),
*parse_search_query("release:initial AND (AND OR) (OR)"), # ((AND OR (OR ())))
paren([]),
*parse_search_query("os.name:android"), # ()
paren([paren([paren(["AND", "OR", paren([])])])]), # ((()))
]
clean_tokens = parse_search_query("release:initial AND os.name:android")
actual_clean = cleanup_search_query(dirty_tokens)
assert actual_clean == clean_tokens