From f5fff15f76e6011d69440d546b561d24a0b2c7db Mon Sep 17 00:00:00 2001
From: Mia Hsu <ameliahsu@gmail.com>
Date: Mon, 17 Mar 2025 13:52:33 -0700
Subject: [PATCH] fix(aci): rename DataConditionHandler.Type to Group and FilterGroup to Subgroup

---
 .../condition/age_comparison_handler.py       |  4 +-
 .../condition/anomaly_detection_handler.py    |  2 +-
 .../handlers/condition/assigned_to_handler.py |  4 +-
 .../condition/event_attribute_handler.py      |  4 +-
 .../event_created_by_detector_handler.py      |  2 +-
 .../condition/event_frequency_handlers.py     | 12 ++--
 .../condition/event_seen_count_handler.py     |  2 +-
 .../existing_high_priority_issue_handler.py   |  2 +-
 .../condition/first_seen_event_handler.py     |  2 +-
 .../condition/issue_category_handler.py       |  2 +-
 .../condition/issue_occurrences_handler.py    |  4 +-
 .../condition/issue_priority_equals.py        |  4 +-
 .../issue_resolution_condition_handler.py     |  2 +-
 .../latest_adopted_release_handler.py         |  4 +-
 .../condition/latest_release_handler.py       |  4 +-
 .../handlers/condition/level_handler.py       |  4 +-
 .../new_high_priority_issue_handler.py        |  2 +-
 .../condition/reappeared_event_handler.py     |  2 +-
 .../condition/regression_event_handler.py     |  2 +-
 .../condition/tagged_event_handler.py         |  4 +-
 .../processors/delayed_workflow.py            | 28 +++++-----
 src/sentry/workflow_engine/types.py           |  8 +--
 .../processors/test_delayed_workflow.py       | 56 ++++++++++---------
 23 files changed, 83 insertions(+), 77 deletions(-)

diff --git a/src/sentry/workflow_engine/handlers/condition/age_comparison_handler.py b/src/sentry/workflow_engine/handlers/condition/age_comparison_handler.py
index 153540559e6f9c..74bf7f8d91e49f 100644
--- a/src/sentry/workflow_engine/handlers/condition/age_comparison_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/age_comparison_handler.py
@@ -11,8 +11,8 @@
 
 @condition_handler_registry.register(Condition.AGE_COMPARISON)
 class AgeComparisonConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.ISSUE_ATTRIBUTES
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.ISSUE_ATTRIBUTES
 
     comparison_json_schema = {
         "type": "object",
diff --git a/src/sentry/workflow_engine/handlers/condition/anomaly_detection_handler.py b/src/sentry/workflow_engine/handlers/condition/anomaly_detection_handler.py
index 316cd36a703776..e64a542f589404 100644
--- a/src/sentry/workflow_engine/handlers/condition/anomaly_detection_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/anomaly_detection_handler.py
@@ -7,7 +7,7 @@
 
 @condition_handler_registry.register(Condition.ANOMALY_DETECTION)
 class AnomalyDetectionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.DETECTOR_TRIGGER
+    group = DataConditionHandler.Group.DETECTOR_TRIGGER
     comparison_json_schema = {"type": "boolean"}
 
     @staticmethod
diff --git a/src/sentry/workflow_engine/handlers/condition/assigned_to_handler.py b/src/sentry/workflow_engine/handlers/condition/assigned_to_handler.py
index 8d640512060f8c..f9b27bde453e16 100644
--- a/src/sentry/workflow_engine/handlers/condition/assigned_to_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/assigned_to_handler.py
@@ -12,8 +12,8 @@
 
 @condition_handler_registry.register(Condition.ASSIGNED_TO)
 class AssignedToConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.ISSUE_ATTRIBUTES
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.ISSUE_ATTRIBUTES
 
     comparison_json_schema = {
         "type": "object",
diff --git a/src/sentry/workflow_engine/handlers/condition/event_attribute_handler.py b/src/sentry/workflow_engine/handlers/condition/event_attribute_handler.py
index 668240906f59a3..379f4fa6157b27 100644
--- a/src/sentry/workflow_engine/handlers/condition/event_attribute_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/event_attribute_handler.py
@@ -13,8 +13,8 @@
 
 @condition_handler_registry.register(Condition.EVENT_ATTRIBUTE)
 class EventAttributeConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES
 
     comparison_json_schema = {
         "type": "object",
diff --git a/src/sentry/workflow_engine/handlers/condition/event_created_by_detector_handler.py b/src/sentry/workflow_engine/handlers/condition/event_created_by_detector_handler.py
index 08c97c99ed1fc1..0c42a51e91c939 100644
--- a/src/sentry/workflow_engine/handlers/condition/event_created_by_detector_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/event_created_by_detector_handler.py
@@ -7,7 +7,7 @@
 
 @condition_handler_registry.register(Condition.EVENT_CREATED_BY_DETECTOR)
 class EventCreatedByDetectorConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
+    group = DataConditionHandler.Group.ACTION_FILTER
 
     @staticmethod
     def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
diff --git a/src/sentry/workflow_engine/handlers/condition/event_frequency_handlers.py b/src/sentry/workflow_engine/handlers/condition/event_frequency_handlers.py
index 9c854f258c1da7..0a3781259e8457 100644
--- a/src/sentry/workflow_engine/handlers/condition/event_frequency_handlers.py
+++ b/src/sentry/workflow_engine/handlers/condition/event_frequency_handlers.py
@@ -17,8 +17,8 @@
 @condition_handler_registry.register(Condition.EVENT_FREQUENCY_COUNT)
 @condition_handler_registry.register(Condition.EVENT_UNIQUE_USER_FREQUENCY_COUNT)
 class EventFrequencyCountHandler(DataConditionHandler[list[int]]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.FREQUENCY
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.FREQUENCY
 
     comparison_json_schema = {
         "type": "object",
@@ -44,8 +44,8 @@ def evaluate_value(value: list[int], comparison: Any) -> DataConditionResult:
 @condition_handler_registry.register(Condition.EVENT_FREQUENCY_PERCENT)
 @condition_handler_registry.register(Condition.EVENT_UNIQUE_USER_FREQUENCY_PERCENT)
 class EventFrequencyPercentHandler(DataConditionHandler[list[int]]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.FREQUENCY
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.FREQUENCY
 
     comparison_json_schema = {
         "type": "object",
@@ -72,6 +72,8 @@ def evaluate_value(value: list[int], comparison: Any) -> DataConditionResult:
 # Percent sessions values must be between 0-100 (%)
 @condition_handler_registry.register(Condition.PERCENT_SESSIONS_COUNT)
 class PercentSessionsCountHandler(EventFrequencyCountHandler):
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.FREQUENCY
     comparison_json_schema = {
         "type": "object",
         "properties": {
@@ -90,6 +92,8 @@ class PercentSessionsCountHandler(EventFrequencyCountHandler):
 # This percent value can be > 100 (%)
 @condition_handler_registry.register(Condition.PERCENT_SESSIONS_PERCENT)
 class PercentSessionsPercentHandler(EventFrequencyPercentHandler):
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.FREQUENCY
     comparison_json_schema = {
         "type": "object",
         "properties": {
diff --git a/src/sentry/workflow_engine/handlers/condition/event_seen_count_handler.py b/src/sentry/workflow_engine/handlers/condition/event_seen_count_handler.py
index dd81b3c0062b73..3410a9283f642a 100644
--- a/src/sentry/workflow_engine/handlers/condition/event_seen_count_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/event_seen_count_handler.py
@@ -7,7 +7,7 @@
 
 @condition_handler_registry.register(Condition.EVENT_SEEN_COUNT)
 class EventSeenCountConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
+    group = DataConditionHandler.Group.ACTION_FILTER
 
     @staticmethod
     def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
diff --git a/src/sentry/workflow_engine/handlers/condition/existing_high_priority_issue_handler.py b/src/sentry/workflow_engine/handlers/condition/existing_high_priority_issue_handler.py
index 8b2a8f3a306495..ee831cdd8c4c42 100644
--- a/src/sentry/workflow_engine/handlers/condition/existing_high_priority_issue_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/existing_high_priority_issue_handler.py
@@ -8,7 +8,7 @@
 
 @condition_handler_registry.register(Condition.EXISTING_HIGH_PRIORITY_ISSUE)
 class ExistingHighPriorityIssueConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.WORKFLOW_TRIGGER
+    group = DataConditionHandler.Group.WORKFLOW_TRIGGER
     comparison_json_schema = {"type": "boolean"}
 
     @staticmethod
diff --git a/src/sentry/workflow_engine/handlers/condition/first_seen_event_handler.py b/src/sentry/workflow_engine/handlers/condition/first_seen_event_handler.py
index 5f253fc15d3557..9801aca2f06a00 100644
--- a/src/sentry/workflow_engine/handlers/condition/first_seen_event_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/first_seen_event_handler.py
@@ -19,7 +19,7 @@ def is_new_event(job: WorkflowJob) -> bool:
 
 @condition_handler_registry.register(Condition.FIRST_SEEN_EVENT)
 class FirstSeenEventConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.WORKFLOW_TRIGGER
+    group = DataConditionHandler.Group.WORKFLOW_TRIGGER
     comparison_json_schema = {"type": "boolean"}
 
     @staticmethod
diff --git a/src/sentry/workflow_engine/handlers/condition/issue_category_handler.py b/src/sentry/workflow_engine/handlers/condition/issue_category_handler.py
index d657997a9099f4..ad18101703275f 100644
--- a/src/sentry/workflow_engine/handlers/condition/issue_category_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/issue_category_handler.py
@@ -8,7 +8,7 @@
 
 @condition_handler_registry.register(Condition.ISSUE_CATEGORY)
 class IssueCategoryConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
+    group = DataConditionHandler.Group.ACTION_FILTER
 
     comparison_json_schema = {
         "type": "object",
diff --git a/src/sentry/workflow_engine/handlers/condition/issue_occurrences_handler.py b/src/sentry/workflow_engine/handlers/condition/issue_occurrences_handler.py
index a6f0317585fc9e..7ec413e04720e7 100644
--- a/src/sentry/workflow_engine/handlers/condition/issue_occurrences_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/issue_occurrences_handler.py
@@ -8,8 +8,8 @@
 
 @condition_handler_registry.register(Condition.ISSUE_OCCURRENCES)
 class IssueOccurrencesConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.ISSUE_ATTRIBUTES
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.ISSUE_ATTRIBUTES
 
     comparison_json_schema = {
         "type": "object",
diff --git a/src/sentry/workflow_engine/handlers/condition/issue_priority_equals.py b/src/sentry/workflow_engine/handlers/condition/issue_priority_equals.py
index 43b22f5ff23cb8..8b297666179819 100644
--- a/src/sentry/workflow_engine/handlers/condition/issue_priority_equals.py
+++ b/src/sentry/workflow_engine/handlers/condition/issue_priority_equals.py
@@ -7,8 +7,8 @@
 
 @condition_handler_registry.register(Condition.ISSUE_PRIORITY_EQUALS)
 class IssuePriorityCondition(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.ISSUE_ATTRIBUTES
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.ISSUE_ATTRIBUTES
 
     @staticmethod
     def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
diff --git a/src/sentry/workflow_engine/handlers/condition/issue_resolution_condition_handler.py b/src/sentry/workflow_engine/handlers/condition/issue_resolution_condition_handler.py
index 18d6a98430086c..060cb899be4980 100644
--- a/src/sentry/workflow_engine/handlers/condition/issue_resolution_condition_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/issue_resolution_condition_handler.py
@@ -7,7 +7,7 @@
 
 @condition_handler_registry.register(Condition.ISSUE_RESOLUTION_CHANGE)
 class IssueResolutionConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.WORKFLOW_TRIGGER
+    group = DataConditionHandler.Group.WORKFLOW_TRIGGER
 
     @staticmethod
     def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
diff --git a/src/sentry/workflow_engine/handlers/condition/latest_adopted_release_handler.py b/src/sentry/workflow_engine/handlers/condition/latest_adopted_release_handler.py
index 97d1094f5db5da..be65b40eed9b67 100644
--- a/src/sentry/workflow_engine/handlers/condition/latest_adopted_release_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/latest_adopted_release_handler.py
@@ -18,8 +18,8 @@
 
 @condition_handler_registry.register(Condition.LATEST_ADOPTED_RELEASE)
 class LatestAdoptedReleaseConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES
 
     comparison_json_schema = {
         "type": "object",
diff --git a/src/sentry/workflow_engine/handlers/condition/latest_release_handler.py b/src/sentry/workflow_engine/handlers/condition/latest_release_handler.py
index 60aa43f615143e..b2ebe42d81b783 100644
--- a/src/sentry/workflow_engine/handlers/condition/latest_release_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/latest_release_handler.py
@@ -41,8 +41,8 @@ def get_latest_release_for_env(
 
 @condition_handler_registry.register(Condition.LATEST_RELEASE)
 class LatestReleaseConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES
     comparison_json_schema = {"type": "boolean"}
 
     @staticmethod
diff --git a/src/sentry/workflow_engine/handlers/condition/level_handler.py b/src/sentry/workflow_engine/handlers/condition/level_handler.py
index 36fc5fd1072acf..bcacf2c3cc5968 100644
--- a/src/sentry/workflow_engine/handlers/condition/level_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/level_handler.py
@@ -9,8 +9,8 @@
 
 @condition_handler_registry.register(Condition.LEVEL)
 class LevelConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES
 
     comparison_json_schema = {
         "type": "object",
diff --git a/src/sentry/workflow_engine/handlers/condition/new_high_priority_issue_handler.py b/src/sentry/workflow_engine/handlers/condition/new_high_priority_issue_handler.py
index 5483841f2660b1..077073221fa2ae 100644
--- a/src/sentry/workflow_engine/handlers/condition/new_high_priority_issue_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/new_high_priority_issue_handler.py
@@ -9,7 +9,7 @@
 
 @condition_handler_registry.register(Condition.NEW_HIGH_PRIORITY_ISSUE)
 class NewHighPriorityIssueConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.WORKFLOW_TRIGGER
+    group = DataConditionHandler.Group.WORKFLOW_TRIGGER
     comparison_json_schema = {"type": "boolean"}
 
     @staticmethod
diff --git a/src/sentry/workflow_engine/handlers/condition/reappeared_event_handler.py b/src/sentry/workflow_engine/handlers/condition/reappeared_event_handler.py
index b2b5f0df426eed..25b8990f513105 100644
--- a/src/sentry/workflow_engine/handlers/condition/reappeared_event_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/reappeared_event_handler.py
@@ -7,7 +7,7 @@
 
 @condition_handler_registry.register(Condition.REAPPEARED_EVENT)
 class ReappearedEventConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.WORKFLOW_TRIGGER
+    group = DataConditionHandler.Group.WORKFLOW_TRIGGER
     comparison_json_schema = {"type": "boolean"}
 
     @staticmethod
diff --git a/src/sentry/workflow_engine/handlers/condition/regression_event_handler.py b/src/sentry/workflow_engine/handlers/condition/regression_event_handler.py
index 1a9833a5e3414a..cb8c6aa557515e 100644
--- a/src/sentry/workflow_engine/handlers/condition/regression_event_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/regression_event_handler.py
@@ -7,7 +7,7 @@
 
 @condition_handler_registry.register(Condition.REGRESSION_EVENT)
 class RegressionEventConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.WORKFLOW_TRIGGER
+    group = DataConditionHandler.Group.WORKFLOW_TRIGGER
     comparison_json_schema = {"type": "boolean"}
 
     @staticmethod
diff --git a/src/sentry/workflow_engine/handlers/condition/tagged_event_handler.py b/src/sentry/workflow_engine/handlers/condition/tagged_event_handler.py
index 6208f16f7b2bb0..36545f34adf454 100644
--- a/src/sentry/workflow_engine/handlers/condition/tagged_event_handler.py
+++ b/src/sentry/workflow_engine/handlers/condition/tagged_event_handler.py
@@ -9,8 +9,8 @@
 
 @condition_handler_registry.register(Condition.TAGGED_EVENT)
 class TaggedEventConditionHandler(DataConditionHandler[WorkflowJob]):
-    type = DataConditionHandler.Type.ACTION_FILTER
-    filter_group = DataConditionHandler.FilterGroup.EVENT_ATTRIBUTES
+    group = DataConditionHandler.Group.ACTION_FILTER
+    subgroup = DataConditionHandler.Subgroup.EVENT_ATTRIBUTES
 
     comparison_json_schema = {
         "type": "object",
diff --git a/src/sentry/workflow_engine/processors/delayed_workflow.py b/src/sentry/workflow_engine/processors/delayed_workflow.py
index 5d5b1c06a758e4..ea76f74b66b300 100644
--- a/src/sentry/workflow_engine/processors/delayed_workflow.py
+++ b/src/sentry/workflow_engine/processors/delayed_workflow.py
@@ -101,19 +101,19 @@ def fetch_group_to_event_data(
 
 
 def get_dcg_group_workflow_detector_data(
-    workflow_event_dcg_data: dict[str, str]
-) -> tuple[DataConditionGroupGroups, dict[DataConditionHandler.Type, dict[int, int]]]:
+    workflow_event_dcg_data: dict[str, str],
+) -> tuple[DataConditionGroupGroups, dict[DataConditionHandler.Group, dict[int, int]]]:
     """
     Parse the data in the buffer hash, which is in the form of {workflow/detector_id}:{group_id}:{dcg_id, ..., dcg_id}:{dcg_type}
     """
 
     dcg_to_groups: DataConditionGroupGroups = defaultdict(set)
-    trigger_type_to_dcg_model: dict[DataConditionHandler.Type, dict[int, int]] = defaultdict(dict)
+    trigger_group_to_dcg_model: dict[DataConditionHandler.Group, dict[int, int]] = defaultdict(dict)
 
     for workflow_group_dcg, _ in workflow_event_dcg_data.items():
         data = workflow_group_dcg.split(":")
         try:
-            dcg_type = DataConditionHandler.Type(data[3])
+            dcg_group = DataConditionHandler.Group(data[3])
         except ValueError:
             continue
 
@@ -123,9 +123,9 @@ def get_dcg_group_workflow_detector_data(
         for dcg_id in dcg_ids:
             dcg_to_groups[dcg_id].add(group_id)
 
-            trigger_type_to_dcg_model[dcg_type][dcg_id] = int(data[0])
+            trigger_group_to_dcg_model[dcg_group][dcg_id] = int(data[0])
 
-    return dcg_to_groups, trigger_type_to_dcg_model
+    return dcg_to_groups, trigger_group_to_dcg_model
 
 
 def fetch_workflows_envs(
@@ -226,7 +226,7 @@ def get_condition_query_groups(
 
 
 def get_condition_group_results(
-    queries_to_groups: dict[UniqueConditionQuery, set[int]]
+    queries_to_groups: dict[UniqueConditionQuery, set[int]],
 ) -> dict[UniqueConditionQuery, dict[int, int]]:
     condition_group_results = {}
     current_time = timezone.now()
@@ -386,7 +386,7 @@ def get_group_to_groupevent(
 
 def fire_actions_for_groups(
     groups_to_fire: dict[int, set[DataConditionGroup]],
-    trigger_type_to_dcg_model: dict[DataConditionHandler.Type, dict[int, int]],
+    trigger_group_to_dcg_model: dict[DataConditionHandler.Group, dict[int, int]],
     group_to_groupevent: dict[Group, GroupEvent],
 ) -> None:
     for group, group_event in group_to_groupevent.items():
@@ -396,9 +396,9 @@ def fire_actions_for_groups(
         workflow_triggers: set[DataConditionGroup] = set()
         action_filters: set[DataConditionGroup] = set()
         for dcg in groups_to_fire[group.id]:
-            if dcg.id in trigger_type_to_dcg_model[DataConditionHandler.Type.WORKFLOW_TRIGGER]:
+            if dcg.id in trigger_group_to_dcg_model[DataConditionHandler.Group.WORKFLOW_TRIGGER]:
                 workflow_triggers.add(dcg)
-            elif dcg.id in trigger_type_to_dcg_model[DataConditionHandler.Type.ACTION_FILTER]:
+            elif dcg.id in trigger_group_to_dcg_model[DataConditionHandler.Group.ACTION_FILTER]:
                 action_filters.add(dcg)
 
         # process action filters
@@ -464,11 +464,11 @@ def process_delayed_workflows(
     workflow_event_dcg_data = fetch_group_to_event_data(project_id, Workflow, batch_key)
 
     # Get mappings from DataConditionGroups to other info
-    dcg_to_groups, trigger_type_to_dcg_model = get_dcg_group_workflow_detector_data(
+    dcg_to_groups, trigger_group_to_dcg_model = get_dcg_group_workflow_detector_data(
         workflow_event_dcg_data
     )
-    dcg_to_workflow = trigger_type_to_dcg_model[DataConditionHandler.Type.WORKFLOW_TRIGGER].copy()
-    dcg_to_workflow.update(trigger_type_to_dcg_model[DataConditionHandler.Type.ACTION_FILTER])
+    dcg_to_workflow = trigger_group_to_dcg_model[DataConditionHandler.Group.WORKFLOW_TRIGGER].copy()
+    dcg_to_workflow.update(trigger_group_to_dcg_model[DataConditionHandler.Group.ACTION_FILTER])
 
     _, workflows_to_envs = fetch_workflows_envs(list(dcg_to_workflow.values()))
     data_condition_groups = fetch_data_condition_groups(list(dcg_to_groups.keys()))
@@ -495,7 +495,7 @@ def process_delayed_workflows(
         dcg_group_to_event_data, list(groups_to_dcgs.keys()), event_ids, occurrence_ids, project_id
     )
 
-    fire_actions_for_groups(groups_to_dcgs, trigger_type_to_dcg_model, group_to_groupevent)
+    fire_actions_for_groups(groups_to_dcgs, trigger_group_to_dcg_model, group_to_groupevent)
 
     cleanup_redis_buffer(project_id, workflow_event_dcg_data, batch_key)
 
diff --git a/src/sentry/workflow_engine/types.py b/src/sentry/workflow_engine/types.py
index 3e7ef9ad067818..fc8a46967e19c0 100644
--- a/src/sentry/workflow_engine/types.py
+++ b/src/sentry/workflow_engine/types.py
@@ -65,18 +65,18 @@ def related_model(instance) -> list[ModelRelation]:
 
 
 class DataConditionHandler(Generic[T]):
-    class Type(StrEnum):
+    class Group(StrEnum):
         DETECTOR_TRIGGER = "detector_trigger"
         WORKFLOW_TRIGGER = "workflow_trigger"
         ACTION_FILTER = "action_filter"
 
-    class FilterGroup(StrEnum):
+    class Subgroup(StrEnum):
         ISSUE_ATTRIBUTES = "issue_attributes"
         FREQUENCY = "frequency"
         EVENT_ATTRIBUTES = "event_attributes"
 
-    type: ClassVar[Type]
-    filter_group: ClassVar[FilterGroup]
+    group: ClassVar[Group]
+    subgroup: ClassVar[Subgroup]
     comparison_json_schema: ClassVar[dict[str, Any]] = {}
 
     @staticmethod
diff --git a/tests/sentry/workflow_engine/processors/test_delayed_workflow.py b/tests/sentry/workflow_engine/processors/test_delayed_workflow.py
index 48b7df014d0bef..c9b0862a7449f7 100644
--- a/tests/sentry/workflow_engine/processors/test_delayed_workflow.py
+++ b/tests/sentry/workflow_engine/processors/test_delayed_workflow.py
@@ -83,10 +83,10 @@ def setUp(self):
         self.create_event(self.project.id, FROZEN_TIME, "group-2", self.environment.name)
 
         self.workflow_group_dcg_mapping = {
-            f"{self.workflow1.id}:{self.group1.id}:{self.workflow1_dcgs[0].id}:{DataConditionHandler.Type.WORKFLOW_TRIGGER}",
-            f"{self.workflow1.id}:{self.group1.id}:{self.workflow1_dcgs[1].id}:{DataConditionHandler.Type.ACTION_FILTER}",
-            f"{self.workflow2.id}:{self.group2.id}:{self.workflow2_dcgs[0].id}:{DataConditionHandler.Type.WORKFLOW_TRIGGER}",
-            f"{self.workflow2.id}:{self.group2.id}:{self.workflow2_dcgs[1].id}:{DataConditionHandler.Type.ACTION_FILTER}",
+            f"{self.workflow1.id}:{self.group1.id}:{self.workflow1_dcgs[0].id}:{DataConditionHandler.Group.WORKFLOW_TRIGGER}",
+            f"{self.workflow1.id}:{self.group1.id}:{self.workflow1_dcgs[1].id}:{DataConditionHandler.Group.ACTION_FILTER}",
+            f"{self.workflow2.id}:{self.group2.id}:{self.workflow2_dcgs[0].id}:{DataConditionHandler.Group.WORKFLOW_TRIGGER}",
+            f"{self.workflow2.id}:{self.group2.id}:{self.workflow2_dcgs[1].id}:{DataConditionHandler.Group.ACTION_FILTER}",
         }
 
         self.event3, self.group3 = self.setup_event(self.project2, self.environment2, "group-3")
@@ -99,41 +99,41 @@ def setUp(self):
         self._make_sessions(60, project=self.project2)
 
         self.workflow_group_dcg_mapping2 = {
-            f"{self.workflow3.id}:{self.group3.id}:{self.workflow3_dcgs[0].id}:{DataConditionHandler.Type.WORKFLOW_TRIGGER}",
-            f"{self.workflow3.id}:{self.group3.id}:{self.workflow3_dcgs[1].id}:{DataConditionHandler.Type.ACTION_FILTER}",
-            f"{self.workflow4.id}:{self.group4.id}:{self.workflow4_dcgs[0].id}:{DataConditionHandler.Type.WORKFLOW_TRIGGER}",
-            f"{self.workflow4.id}:{self.group4.id}:{self.workflow4_dcgs[1].id}:{DataConditionHandler.Type.ACTION_FILTER}",
+            f"{self.workflow3.id}:{self.group3.id}:{self.workflow3_dcgs[0].id}:{DataConditionHandler.Group.WORKFLOW_TRIGGER}",
+            f"{self.workflow3.id}:{self.group3.id}:{self.workflow3_dcgs[1].id}:{DataConditionHandler.Group.ACTION_FILTER}",
+            f"{self.workflow4.id}:{self.group4.id}:{self.workflow4_dcgs[0].id}:{DataConditionHandler.Group.WORKFLOW_TRIGGER}",
+            f"{self.workflow4.id}:{self.group4.id}:{self.workflow4_dcgs[1].id}:{DataConditionHandler.Group.ACTION_FILTER}",
         }
 
         self.dcg_to_groups: DataConditionGroupGroups = {
             dcg.id: {self.group1.id} for dcg in self.workflow1_dcgs
         } | {dcg.id: {self.group2.id} for dcg in self.workflow2_dcgs}
-        self.trigger_type_to_dcg_model: dict[DataConditionHandler.Type, dict[int, int]] = (
+        self.trigger_group_to_dcg_model: dict[DataConditionHandler.Group, dict[int, int]] = (
             defaultdict(dict)
         )
 
         self.workflow_dcgs = self.workflow1_dcgs + self.workflow2_dcgs
         for i, dcg in enumerate(self.workflow_dcgs):
             handler_type = (
-                DataConditionHandler.Type.WORKFLOW_TRIGGER
+                DataConditionHandler.Group.WORKFLOW_TRIGGER
                 if i % 2 == 0
-                else DataConditionHandler.Type.ACTION_FILTER
+                else DataConditionHandler.Group.ACTION_FILTER
             )
             workflow_id = self.workflow1.id if i < len(self.workflow1_dcgs) else self.workflow2.id
-            self.trigger_type_to_dcg_model[handler_type][dcg.id] = workflow_id
+            self.trigger_group_to_dcg_model[handler_type][dcg.id] = workflow_id
 
         self.detector = Detector.objects.get(project_id=self.project.id, type=ErrorGroupType.slug)
         self.detector_dcg = self.create_data_condition_group()
         self.detector.update(workflow_condition_group=self.detector_dcg)
-        self.trigger_type_to_dcg_model[DataConditionHandler.Type.DETECTOR_TRIGGER][
+        self.trigger_group_to_dcg_model[DataConditionHandler.Group.DETECTOR_TRIGGER][
             self.detector_dcg.id
         ] = self.detector.id
 
-        self.dcg_to_workflow = self.trigger_type_to_dcg_model[
-            DataConditionHandler.Type.WORKFLOW_TRIGGER
+        self.dcg_to_workflow = self.trigger_group_to_dcg_model[
+            DataConditionHandler.Group.WORKFLOW_TRIGGER
         ].copy()
         self.dcg_to_workflow.update(
-            self.trigger_type_to_dcg_model[DataConditionHandler.Type.ACTION_FILTER]
+            self.trigger_group_to_dcg_model[DataConditionHandler.Group.ACTION_FILTER]
         )
 
         self.mock_redis_buffer = mock_redis_buffer()
@@ -207,10 +207,10 @@ def push_to_hash(
         dcg_ids: list[int],
         event_id: str | None = None,
         occurrence_id: str | None = None,
-        dcg_type: DataConditionHandler.Type = DataConditionHandler.Type.WORKFLOW_TRIGGER,
+        dcg_group: DataConditionHandler.Group = DataConditionHandler.Group.WORKFLOW_TRIGGER,
     ) -> None:
         value = json.dumps({"event_id": event_id, "occurrence_id": occurrence_id})
-        field = f"{workflow_id}:{group_id}:{','.join(map(str, dcg_ids))}:{dcg_type}"
+        field = f"{workflow_id}:{group_id}:{','.join(map(str, dcg_ids))}:{dcg_group}"
         buffer.backend.push_to_hash(
             model=Workflow,
             filters={"project_id": project_id},
@@ -225,9 +225,9 @@ def _push_base_events(self) -> None:
             self.workflow3: (self.project2, self.workflow3_dcgs, self.event3, self.group3),
             self.workflow4: (self.project2, self.workflow4_dcgs, self.event4, self.group4),
         }
-        dcg_type = [
-            DataConditionHandler.Type.WORKFLOW_TRIGGER,
-            DataConditionHandler.Type.ACTION_FILTER,
+        dcg_group = [
+            DataConditionHandler.Group.WORKFLOW_TRIGGER,
+            DataConditionHandler.Group.ACTION_FILTER,
         ]
 
         for workflow, (project, dcgs, event, group) in workflow_to_data.items():
@@ -238,7 +238,7 @@ def _push_base_events(self) -> None:
                     group_id=group.id,
                     dcg_ids=[dcg.id],
                     event_id=event.event_id,
-                    dcg_type=dcg_type[i],
+                    dcg_group=dcg_group[i],
                 )
 
 
@@ -269,15 +269,17 @@ def test_get_dcg_group_workflow_detector_data(self):
             self.group1.id,
             [self.detector_dcg.id],
             self.event1.event_id,
-            dcg_type=DataConditionHandler.Type.DETECTOR_TRIGGER,
+            dcg_group=DataConditionHandler.Group.DETECTOR_TRIGGER,
         )
         self.dcg_to_groups[self.detector_dcg.id] = {self.group1.id}
 
         buffer_data = fetch_group_to_event_data(self.project.id, Workflow)
-        dcg_to_groups, trigger_type_to_dcg_model = get_dcg_group_workflow_detector_data(buffer_data)
+        dcg_to_groups, trigger_group_to_dcg_model = get_dcg_group_workflow_detector_data(
+            buffer_data
+        )
 
         assert dcg_to_groups == self.dcg_to_groups
-        assert trigger_type_to_dcg_model == self.trigger_type_to_dcg_model
+        assert trigger_group_to_dcg_model == self.trigger_group_to_dcg_model
 
     def test_fetch_workflows_envs(self):
         workflow_ids_to_workflows, workflows_to_envs = fetch_workflows_envs(
@@ -741,7 +743,7 @@ def test_get_group_to_groupevent(self):
     @with_feature("organizations:workflow-engine-trigger-actions")
     def test_fire_actions_for_groups__fire_actions(self, mock_trigger):
         fire_actions_for_groups(
-            self.groups_to_dcgs, self.trigger_type_to_dcg_model, self.group_to_groupevent
+            self.groups_to_dcgs, self.trigger_group_to_dcg_model, self.group_to_groupevent
         )
 
         assert mock_trigger.call_count == 2
@@ -759,7 +761,7 @@ def test_fire_actions_for_groups__enqueue(self, mock_enqueue):
         # enqueue the IF DCGs with slow conditions!
 
         fire_actions_for_groups(
-            self.groups_to_dcgs, self.trigger_type_to_dcg_model, self.group_to_groupevent
+            self.groups_to_dcgs, self.trigger_group_to_dcg_model, self.group_to_groupevent
         )
 
         assert mock_enqueue.call_count == 2