Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(aci): rename DataConditionHandler type and filter_group #87232

Merged
merged 1 commit on Mar 18, 2025
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -11,8 +11,8 @@

@condition_handler_registry.register(Condition.AGE_COMPARISON)
class AgeComparisonConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.ISSUE_ATTRIBUTES
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.ISSUE_ATTRIBUTES

comparison_json_schema = {
"type": "object",
Original file line number Diff line number Diff line change
@@ -7,7 +7,7 @@

@condition_handler_registry.register(Condition.ANOMALY_DETECTION)
class AnomalyDetectionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.DETECTOR_TRIGGER
group = DataConditionHandler.Group.DETECTOR_TRIGGER
comparison_json_schema = {"type": "boolean"}

@staticmethod
Original file line number Diff line number Diff line change
@@ -12,8 +12,8 @@

@condition_handler_registry.register(Condition.ASSIGNED_TO)
class AssignedToConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.ISSUE_ATTRIBUTES
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.ISSUE_ATTRIBUTES

comparison_json_schema = {
"type": "object",
Original file line number Diff line number Diff line change
@@ -13,8 +13,8 @@

@condition_handler_registry.register(Condition.EVENT_ATTRIBUTE)
class EventAttributeConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES

comparison_json_schema = {
"type": "object",
Original file line number Diff line number Diff line change
@@ -7,7 +7,7 @@

@condition_handler_registry.register(Condition.EVENT_CREATED_BY_DETECTOR)
class EventCreatedByDetectorConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
group = DataConditionHandler.Group.ACTION_FILTER

@staticmethod
def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
Original file line number Diff line number Diff line change
@@ -17,8 +17,8 @@
@condition_handler_registry.register(Condition.EVENT_FREQUENCY_COUNT)
@condition_handler_registry.register(Condition.EVENT_UNIQUE_USER_FREQUENCY_COUNT)
class EventFrequencyCountHandler(DataConditionHandler[list[int]]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.FREQUENCY
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.FREQUENCY

comparison_json_schema = {
"type": "object",
@@ -44,8 +44,8 @@ def evaluate_value(value: list[int], comparison: Any) -> DataConditionResult:
@condition_handler_registry.register(Condition.EVENT_FREQUENCY_PERCENT)
@condition_handler_registry.register(Condition.EVENT_UNIQUE_USER_FREQUENCY_PERCENT)
class EventFrequencyPercentHandler(DataConditionHandler[list[int]]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.FREQUENCY
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.FREQUENCY

comparison_json_schema = {
"type": "object",
@@ -72,6 +72,8 @@ def evaluate_value(value: list[int], comparison: Any) -> DataConditionResult:
# Percent sessions values must be between 0-100 (%)
@condition_handler_registry.register(Condition.PERCENT_SESSIONS_COUNT)
class PercentSessionsCountHandler(EventFrequencyCountHandler):
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.FREQUENCY
comparison_json_schema = {
"type": "object",
"properties": {
@@ -90,6 +92,8 @@ class PercentSessionsCountHandler(EventFrequencyCountHandler):
# This percent value can be > 100 (%)
@condition_handler_registry.register(Condition.PERCENT_SESSIONS_PERCENT)
class PercentSessionsPercentHandler(EventFrequencyPercentHandler):
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.FREQUENCY
comparison_json_schema = {
"type": "object",
"properties": {
Original file line number Diff line number Diff line change
@@ -7,7 +7,7 @@

@condition_handler_registry.register(Condition.EVENT_SEEN_COUNT)
class EventSeenCountConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
group = DataConditionHandler.Group.ACTION_FILTER

@staticmethod
def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
Original file line number Diff line number Diff line change
@@ -8,7 +8,7 @@

@condition_handler_registry.register(Condition.EXISTING_HIGH_PRIORITY_ISSUE)
class ExistingHighPriorityIssueConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.WORKFLOW_TRIGGER
group = DataConditionHandler.Group.WORKFLOW_TRIGGER
comparison_json_schema = {"type": "boolean"}

@staticmethod
Original file line number Diff line number Diff line change
@@ -19,7 +19,7 @@ def is_new_event(job: WorkflowJob) -> bool:

@condition_handler_registry.register(Condition.FIRST_SEEN_EVENT)
class FirstSeenEventConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.WORKFLOW_TRIGGER
group = DataConditionHandler.Group.WORKFLOW_TRIGGER
comparison_json_schema = {"type": "boolean"}

@staticmethod
Original file line number Diff line number Diff line change
@@ -8,7 +8,7 @@

@condition_handler_registry.register(Condition.ISSUE_CATEGORY)
class IssueCategoryConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
group = DataConditionHandler.Group.ACTION_FILTER

comparison_json_schema = {
"type": "object",
Original file line number Diff line number Diff line change
@@ -8,8 +8,8 @@

@condition_handler_registry.register(Condition.ISSUE_OCCURRENCES)
class IssueOccurrencesConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.ISSUE_ATTRIBUTES
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.ISSUE_ATTRIBUTES

comparison_json_schema = {
"type": "object",
Original file line number Diff line number Diff line change
@@ -7,8 +7,8 @@

@condition_handler_registry.register(Condition.ISSUE_PRIORITY_EQUALS)
class IssuePriorityCondition(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.ISSUE_ATTRIBUTES
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.ISSUE_ATTRIBUTES

@staticmethod
def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
Original file line number Diff line number Diff line change
@@ -7,7 +7,7 @@

@condition_handler_registry.register(Condition.ISSUE_RESOLUTION_CHANGE)
class IssueResolutionConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.WORKFLOW_TRIGGER
group = DataConditionHandler.Group.WORKFLOW_TRIGGER

@staticmethod
def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
Original file line number Diff line number Diff line change
@@ -18,8 +18,8 @@

@condition_handler_registry.register(Condition.LATEST_ADOPTED_RELEASE)
class LatestAdoptedReleaseConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES

comparison_json_schema = {
"type": "object",
Original file line number Diff line number Diff line change
@@ -41,8 +41,8 @@ def get_latest_release_for_env(

@condition_handler_registry.register(Condition.LATEST_RELEASE)
class LatestReleaseConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES
comparison_json_schema = {"type": "boolean"}

@staticmethod
Original file line number Diff line number Diff line change
@@ -9,8 +9,8 @@

@condition_handler_registry.register(Condition.LEVEL)
class LevelConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES

comparison_json_schema = {
"type": "object",
Original file line number Diff line number Diff line change
@@ -9,7 +9,7 @@

@condition_handler_registry.register(Condition.NEW_HIGH_PRIORITY_ISSUE)
class NewHighPriorityIssueConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.WORKFLOW_TRIGGER
group = DataConditionHandler.Group.WORKFLOW_TRIGGER
comparison_json_schema = {"type": "boolean"}

@staticmethod
Original file line number Diff line number Diff line change
@@ -7,7 +7,7 @@

@condition_handler_registry.register(Condition.REAPPEARED_EVENT)
class ReappearedEventConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.WORKFLOW_TRIGGER
group = DataConditionHandler.Group.WORKFLOW_TRIGGER
comparison_json_schema = {"type": "boolean"}

@staticmethod
Original file line number Diff line number Diff line change
@@ -7,7 +7,7 @@

@condition_handler_registry.register(Condition.REGRESSION_EVENT)
class RegressionEventConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.WORKFLOW_TRIGGER
group = DataConditionHandler.Group.WORKFLOW_TRIGGER
comparison_json_schema = {"type": "boolean"}

@staticmethod
Original file line number Diff line number Diff line change
@@ -9,8 +9,8 @@

@condition_handler_registry.register(Condition.TAGGED_EVENT)
class TaggedEventConditionHandler(DataConditionHandler[WorkflowJob]):
type = DataConditionHandler.Type.ACTION_FILTER
filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
group = DataConditionHandler.Group.ACTION_FILTER
subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES

comparison_json_schema = {
"type": "object",
28 changes: 14 additions & 14 deletions src/sentry/workflow_engine/processors/delayed_workflow.py
Original file line number Diff line number Diff line change
@@ -101,19 +101,19 @@ def fetch_group_to_event_data(


def get_dcg_group_workflow_detector_data(
workflow_event_dcg_data: dict[str, str]
) -> tuple[DataConditionGroupGroups, dict[DataConditionHandler.Type, dict[int, int]]]:
workflow_event_dcg_data: dict[str, str],
) -> tuple[DataConditionGroupGroups, dict[DataConditionHandler.Group, dict[int, int]]]:
"""
Parse the data in the buffer hash, which is in the form of {workflow/detector_id}:{group_id}:{dcg_id, ..., dcg_id}:{dcg_type}
"""

dcg_to_groups: DataConditionGroupGroups = defaultdict(set)
trigger_type_to_dcg_model: dict[DataConditionHandler.Type, dict[int, int]] = defaultdict(dict)
trigger_group_to_dcg_model: dict[DataConditionHandler.Group, dict[int, int]] = defaultdict(dict)

for workflow_group_dcg, _ in workflow_event_dcg_data.items():
data = workflow_group_dcg.split(":")
try:
dcg_type = DataConditionHandler.Type(data[3])
dcg_group = DataConditionHandler.Group(data[3])
except ValueError:
continue

@@ -123,9 +123,9 @@ def get_dcg_group_workflow_detector_data(
for dcg_id in dcg_ids:
dcg_to_groups[dcg_id].add(group_id)

trigger_type_to_dcg_model[dcg_type][dcg_id] = int(data[0])
trigger_group_to_dcg_model[dcg_group][dcg_id] = int(data[0])

return dcg_to_groups, trigger_type_to_dcg_model
return dcg_to_groups, trigger_group_to_dcg_model


def fetch_workflows_envs(
@@ -226,7 +226,7 @@ def get_condition_query_groups(


def get_condition_group_results(
queries_to_groups: dict[UniqueConditionQuery, set[int]]
queries_to_groups: dict[UniqueConditionQuery, set[int]],
) -> dict[UniqueConditionQuery, dict[int, int]]:
condition_group_results = {}
current_time = timezone.now()
@@ -386,7 +386,7 @@ def get_group_to_groupevent(

def fire_actions_for_groups(
groups_to_fire: dict[int, set[DataConditionGroup]],
trigger_type_to_dcg_model: dict[DataConditionHandler.Type, dict[int, int]],
trigger_group_to_dcg_model: dict[DataConditionHandler.Group, dict[int, int]],
group_to_groupevent: dict[Group, GroupEvent],
) -> None:
for group, group_event in group_to_groupevent.items():
@@ -396,9 +396,9 @@ def fire_actions_for_groups(
workflow_triggers: set[DataConditionGroup] = set()
action_filters: set[DataConditionGroup] = set()
for dcg in groups_to_fire[group.id]:
if dcg.id in trigger_type_to_dcg_model[DataConditionHandler.Type.WORKFLOW_TRIGGER]:
if dcg.id in trigger_group_to_dcg_model[DataConditionHandler.Group.WORKFLOW_TRIGGER]:
workflow_triggers.add(dcg)
elif dcg.id in trigger_type_to_dcg_model[DataConditionHandler.Type.ACTION_FILTER]:
elif dcg.id in trigger_group_to_dcg_model[DataConditionHandler.Group.ACTION_FILTER]:
action_filters.add(dcg)

# process action filters
@@ -464,11 +464,11 @@ def process_delayed_workflows(
workflow_event_dcg_data = fetch_group_to_event_data(project_id, Workflow, batch_key)

# Get mappings from DataConditionGroups to other info
dcg_to_groups, trigger_type_to_dcg_model = get_dcg_group_workflow_detector_data(
dcg_to_groups, trigger_group_to_dcg_model = get_dcg_group_workflow_detector_data(
workflow_event_dcg_data
)
dcg_to_workflow = trigger_type_to_dcg_model[DataConditionHandler.Type.WORKFLOW_TRIGGER].copy()
dcg_to_workflow.update(trigger_type_to_dcg_model[DataConditionHandler.Type.ACTION_FILTER])
dcg_to_workflow = trigger_group_to_dcg_model[DataConditionHandler.Group.WORKFLOW_TRIGGER].copy()
dcg_to_workflow.update(trigger_group_to_dcg_model[DataConditionHandler.Group.ACTION_FILTER])

_, workflows_to_envs = fetch_workflows_envs(list(dcg_to_workflow.values()))
data_condition_groups = fetch_data_condition_groups(list(dcg_to_groups.keys()))
@@ -495,7 +495,7 @@ def process_delayed_workflows(
dcg_group_to_event_data, list(groups_to_dcgs.keys()), event_ids, occurrence_ids, project_id
)

fire_actions_for_groups(groups_to_dcgs, trigger_type_to_dcg_model, group_to_groupevent)
fire_actions_for_groups(groups_to_dcgs, trigger_group_to_dcg_model, group_to_groupevent)

cleanup_redis_buffer(project_id, workflow_event_dcg_data, batch_key)

8 changes: 4 additions & 4 deletions src/sentry/workflow_engine/types.py
Original file line number Diff line number Diff line change
@@ -65,18 +65,18 @@ def related_model(instance) -> list[ModelRelation]:


class DataConditionHandler(Generic[T]):
class Type(StrEnum):
class Group(StrEnum):
DETECTOR_TRIGGER = "detector_trigger"
WORKFLOW_TRIGGER = "workflow_trigger"
ACTION_FILTER = "action_filter"

class FilterGroup(StrEnum):
class Subgroup(StrEnum):
ISSUE_ATTRIBUTES = "issue_attributes"
FREQUENCY = "frequency"
EVENT_ATTRIBUTES = "event_attributes"

type: ClassVar[Type]
filter_group: ClassVar[FilterGroup]
group: ClassVar[Group]
subgroup: ClassVar[Subgroup]
comparison_json_schema: ClassVar[dict[str, Any]] = {}

@staticmethod
56 changes: 29 additions & 27 deletions tests/sentry/workflow_engine/processors/test_delayed_workflow.py
Original file line number Diff line number Diff line change
@@ -83,10 +83,10 @@ def setUp(self):
self.create_event(self.project.id, FROZEN_TIME, "group-2", self.environment.name)

self.workflow_group_dcg_mapping = {
f"{self.workflow1.id}:{self.group1.id}:{self.workflow1_dcgs[0].id}:{DataConditionHandler.Type.WORKFLOW_TRIGGER}",
f"{self.workflow1.id}:{self.group1.id}:{self.workflow1_dcgs[1].id}:{DataConditionHandler.Type.ACTION_FILTER}",
f"{self.workflow2.id}:{self.group2.id}:{self.workflow2_dcgs[0].id}:{DataConditionHandler.Type.WORKFLOW_TRIGGER}",
f"{self.workflow2.id}:{self.group2.id}:{self.workflow2_dcgs[1].id}:{DataConditionHandler.Type.ACTION_FILTER}",
f"{self.workflow1.id}:{self.group1.id}:{self.workflow1_dcgs[0].id}:{DataConditionHandler.Group.WORKFLOW_TRIGGER}",
f"{self.workflow1.id}:{self.group1.id}:{self.workflow1_dcgs[1].id}:{DataConditionHandler.Group.ACTION_FILTER}",
f"{self.workflow2.id}:{self.group2.id}:{self.workflow2_dcgs[0].id}:{DataConditionHandler.Group.WORKFLOW_TRIGGER}",
f"{self.workflow2.id}:{self.group2.id}:{self.workflow2_dcgs[1].id}:{DataConditionHandler.Group.ACTION_FILTER}",
}

self.event3, self.group3 = self.setup_event(self.project2, self.environment2, "group-3")
@@ -99,41 +99,41 @@ def setUp(self):
self._make_sessions(60, project=self.project2)

self.workflow_group_dcg_mapping2 = {
f"{self.workflow3.id}:{self.group3.id}:{self.workflow3_dcgs[0].id}:{DataConditionHandler.Type.WORKFLOW_TRIGGER}",
f"{self.workflow3.id}:{self.group3.id}:{self.workflow3_dcgs[1].id}:{DataConditionHandler.Type.ACTION_FILTER}",
f"{self.workflow4.id}:{self.group4.id}:{self.workflow4_dcgs[0].id}:{DataConditionHandler.Type.WORKFLOW_TRIGGER}",
f"{self.workflow4.id}:{self.group4.id}:{self.workflow4_dcgs[1].id}:{DataConditionHandler.Type.ACTION_FILTER}",
f"{self.workflow3.id}:{self.group3.id}:{self.workflow3_dcgs[0].id}:{DataConditionHandler.Group.WORKFLOW_TRIGGER}",
f"{self.workflow3.id}:{self.group3.id}:{self.workflow3_dcgs[1].id}:{DataConditionHandler.Group.ACTION_FILTER}",
f"{self.workflow4.id}:{self.group4.id}:{self.workflow4_dcgs[0].id}:{DataConditionHandler.Group.WORKFLOW_TRIGGER}",
f"{self.workflow4.id}:{self.group4.id}:{self.workflow4_dcgs[1].id}:{DataConditionHandler.Group.ACTION_FILTER}",
}

self.dcg_to_groups: DataConditionGroupGroups = {
dcg.id: {self.group1.id} for dcg in self.workflow1_dcgs
} | {dcg.id: {self.group2.id} for dcg in self.workflow2_dcgs}
self.trigger_type_to_dcg_model: dict[DataConditionHandler.Type, dict[int, int]] = (
self.trigger_group_to_dcg_model: dict[DataConditionHandler.Group, dict[int, int]] = (
defaultdict(dict)
)

self.workflow_dcgs = self.workflow1_dcgs + self.workflow2_dcgs
for i, dcg in enumerate(self.workflow_dcgs):
handler_type = (
DataConditionHandler.Type.WORKFLOW_TRIGGER
DataConditionHandler.Group.WORKFLOW_TRIGGER
if i % 2 == 0
else DataConditionHandler.Type.ACTION_FILTER
else DataConditionHandler.Group.ACTION_FILTER
)
workflow_id = self.workflow1.id if i < len(self.workflow1_dcgs) else self.workflow2.id
self.trigger_type_to_dcg_model[handler_type][dcg.id] = workflow_id
self.trigger_group_to_dcg_model[handler_type][dcg.id] = workflow_id

self.detector = Detector.objects.get(project_id=self.project.id, type=ErrorGroupType.slug)
self.detector_dcg = self.create_data_condition_group()
self.detector.update(workflow_condition_group=self.detector_dcg)
self.trigger_type_to_dcg_model[DataConditionHandler.Type.DETECTOR_TRIGGER][
self.trigger_group_to_dcg_model[DataConditionHandler.Group.DETECTOR_TRIGGER][
self.detector_dcg.id
] = self.detector.id

self.dcg_to_workflow = self.trigger_type_to_dcg_model[
DataConditionHandler.Type.WORKFLOW_TRIGGER
self.dcg_to_workflow = self.trigger_group_to_dcg_model[
DataConditionHandler.Group.WORKFLOW_TRIGGER
].copy()
self.dcg_to_workflow.update(
self.trigger_type_to_dcg_model[DataConditionHandler.Type.ACTION_FILTER]
self.trigger_group_to_dcg_model[DataConditionHandler.Group.ACTION_FILTER]
)

self.mock_redis_buffer = mock_redis_buffer()
@@ -207,10 +207,10 @@ def push_to_hash(
dcg_ids: list[int],
event_id: str | None = None,
occurrence_id: str | None = None,
dcg_type: DataConditionHandler.Type = DataConditionHandler.Type.WORKFLOW_TRIGGER,
dcg_group: DataConditionHandler.Group = DataConditionHandler.Group.WORKFLOW_TRIGGER,
) -> None:
value = json.dumps({"event_id": event_id, "occurrence_id": occurrence_id})
field = f"{workflow_id}:{group_id}:{','.join(map(str, dcg_ids))}:{dcg_type}"
field = f"{workflow_id}:{group_id}:{','.join(map(str, dcg_ids))}:{dcg_group}"
buffer.backend.push_to_hash(
model=Workflow,
filters={"project_id": project_id},
@@ -225,9 +225,9 @@ def _push_base_events(self) -> None:
self.workflow3: (self.project2, self.workflow3_dcgs, self.event3, self.group3),
self.workflow4: (self.project2, self.workflow4_dcgs, self.event4, self.group4),
}
dcg_type = [
DataConditionHandler.Type.WORKFLOW_TRIGGER,
DataConditionHandler.Type.ACTION_FILTER,
dcg_group = [
DataConditionHandler.Group.WORKFLOW_TRIGGER,
DataConditionHandler.Group.ACTION_FILTER,
]

for workflow, (project, dcgs, event, group) in workflow_to_data.items():
@@ -238,7 +238,7 @@ def _push_base_events(self) -> None:
group_id=group.id,
dcg_ids=[dcg.id],
event_id=event.event_id,
dcg_type=dcg_type[i],
dcg_group=dcg_group[i],
)


@@ -269,15 +269,17 @@ def test_get_dcg_group_workflow_detector_data(self):
self.group1.id,
[self.detector_dcg.id],
self.event1.event_id,
dcg_type=DataConditionHandler.Type.DETECTOR_TRIGGER,
dcg_group=DataConditionHandler.Group.DETECTOR_TRIGGER,
)
self.dcg_to_groups[self.detector_dcg.id] = {self.group1.id}

buffer_data = fetch_group_to_event_data(self.project.id, Workflow)
dcg_to_groups, trigger_type_to_dcg_model = get_dcg_group_workflow_detector_data(buffer_data)
dcg_to_groups, trigger_group_to_dcg_model = get_dcg_group_workflow_detector_data(
buffer_data
)

assert dcg_to_groups == self.dcg_to_groups
assert trigger_type_to_dcg_model == self.trigger_type_to_dcg_model
assert trigger_group_to_dcg_model == self.trigger_group_to_dcg_model

def test_fetch_workflows_envs(self):
workflow_ids_to_workflows, workflows_to_envs = fetch_workflows_envs(
@@ -741,7 +743,7 @@ def test_get_group_to_groupevent(self):
@with_feature("organizations:workflow-engine-trigger-actions")
def test_fire_actions_for_groups__fire_actions(self, mock_trigger):
fire_actions_for_groups(
self.groups_to_dcgs, self.trigger_type_to_dcg_model, self.group_to_groupevent
self.groups_to_dcgs, self.trigger_group_to_dcg_model, self.group_to_groupevent
)

assert mock_trigger.call_count == 2
@@ -759,7 +761,7 @@ def test_fire_actions_for_groups__enqueue(self, mock_enqueue):
# enqueue the IF DCGs with slow conditions!

fire_actions_for_groups(
self.groups_to_dcgs, self.trigger_type_to_dcg_model, self.group_to_groupevent
self.groups_to_dcgs, self.trigger_group_to_dcg_model, self.group_to_groupevent
)

assert mock_enqueue.call_count == 2