ref: fix reassignment of key_errors (#75186)
Depending on the part of the code, `key_errors` was either `list[tuple[int, int]]` or
`list[tuple[Group, None, int]]`. This is now explicitly separated into
`key_errors_by_id: list[tuple[int, int]]` and `key_errors_by_group: list[tuple[Group, int]]`.

asottile-sentry committed Jul 29, 2024
1 parent f68aa36 commit 1ae38c5
Showing 6 changed files with 51 additions and 63 deletions.
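
For orientation, here is a runnable sketch of the two shapes and the two-phase flow this commit makes explicit. `FakeGroup`, the ids, and the counts are invented for illustration; only the attribute names and tuple shapes come from the diff below.

from dataclasses import dataclass


@dataclass(frozen=True)
class FakeGroup:
    """Stand-in for sentry.models.group.Group, for illustration only."""

    id: int


# Phase 1 (project_key_errors / build_summary_data): Snuba rows become (group_id, count) pairs.
key_errors_by_id: list[tuple[int, int]] = [(11, 3), (12, 2), (13, 10)]

# Phase 2 (fetch_key_error_groups): ids are resolved to Group objects in one batched pass,
# dropping any id whose group has since been deleted.
id_to_group = {11: FakeGroup(11), 12: FakeGroup(12)}  # pretend group 13 was deleted
key_errors_by_group: list[tuple[FakeGroup, int]] = [
    (group, count)
    for group_id, count in key_errors_by_id
    if (group := id_to_group.get(group_id)) is not None
]
print(key_errors_by_group)  # [(FakeGroup(id=11), 3), (FakeGroup(id=12), 2)]

Previously both phases reused the single `key_errors` attribute, so its element type changed over the lifetime of the context object; the split gives each phase its own, consistently typed attribute.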
@@ -149,10 +149,10 @@ def build(self) -> SlackBlock:

         # Add Top 3 Error/Performance Issues
         top_issue_fields = []
-        if context.key_errors:
+        if context.key_errors_by_group:
             top_errors_text = "*Today's Top 3 Error Issues*\n"
-            for error in context.key_errors:
-                linked_title = self.linkify_error_title(error[0])
+            for group, _ in context.key_errors_by_group:
+                linked_title = self.linkify_error_title(group)
                 top_errors_text += f"• {linked_title}\n"
             top_issue_fields.append(self.make_field(top_errors_text))

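The Slack builder used to index into a 3-tuple (`error[0]`); with `key_errors_by_group` it can unpack `(group, count)` directly. A tiny sketch of the difference, using strings in place of Group objects:

key_errors_by_group = [("group-a", 3), ("group-b", 2)]  # strings stand in for Group objects

# Old style: positional indexing into (group, group_history, count) tuples.
# group = error[0]

# New style: explicit unpacking; the count is unused here, so it is discarded as `_`.
for group, _ in key_errors_by_group:
    print(group)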
4 changes: 3 additions & 1 deletion src/sentry/tasks/summaries/daily_summary.py
@@ -189,7 +189,9 @@ def build_summary_data(
             ctx=ctx, project=project, referrer=Referrer.DAILY_SUMMARY_KEY_ERRORS.value
         )
         if key_errors:
-            project_ctx.key_errors = [(e["events.group_id"], e["count()"]) for e in key_errors]
+            project_ctx.key_errors_by_id = [
+                (e["events.group_id"], e["count()"]) for e in key_errors
+            ]

         # Today's Top 3 Performance Issues
         key_performance_issues = project_key_performance_issues(
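`key_errors` at this point is the raw Snuba result (see `query_result["data"]` in utils.py below); a small sketch of the conversion, with invented row values and only the two column names taken from the diff:

key_errors = [
    {"events.group_id": 11, "count()": 3},
    {"events.group_id": 12, "count()": 2},
]
key_errors_by_id = [(e["events.group_id"], e["count()"]) for e in key_errors]
assert key_errors_by_id == [(11, 3), (12, 2)]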
56 changes: 23 additions & 33 deletions src/sentry/tasks/summaries/utils.py
@@ -75,8 +75,8 @@ class ProjectContext:
     def __init__(self, project):
         self.project = project

-        # Array of (group_id, group_history, count)
-        self.key_errors = []
+        self.key_errors_by_id: list[tuple[int, int]] = []
+        self.key_errors_by_group: list[tuple[Group, int]] = []
         # Array of (transaction_name, count_this_week, p95_this_week, count_last_week, p95_last_week)
         self.key_transactions = []
         # Array of (Group, count)
@@ -94,7 +94,7 @@ def __init__(self, project):
     def __repr__(self):
         return "\n".join(
             [
-                f"{self.key_errors}, ",
+                f"{self.key_errors_by_group}, ",
                 f"Errors: [Accepted {self.accepted_error_count}, Dropped {self.dropped_error_count}]",
                 f"Transactions: [Accepted {self.accepted_transaction_count} Dropped {self.dropped_transaction_count}]",
                 f"Replays: [Accepted {self.accepted_replay_count} Dropped {self.dropped_replay_count}]",
@@ -103,7 +103,7 @@ def __repr__(self):
     def check_if_project_is_empty(self):
         return (
-            not self.key_errors
+            not self.key_errors_by_group
             and not self.key_transactions
             and not self.key_performance_issues
             and not self.accepted_error_count
@@ -116,26 +116,21 @@ def check_if_project_is_empty(self):


 class DailySummaryProjectContext:
-    total_today = 0
-    comparison_period_total = 0
-    comparison_period_avg = 0
-    key_errors: list[tuple[Group, int]] = []
-    key_performance_issues: list[tuple[Group, int]] = []
-    escalated_today: list[Group] = []
-    regressed_today: list[Group] = []
-    new_in_release: dict[int, list[Group]] = {}
-
     def __init__(self, project: Project):
+        self.total_today = 0
+        self.comparison_period_total = 0
+        self.comparison_period_avg = 0
         self.project = project
-        self.key_errors = []
-        self.key_performance_issues = []
-        self.escalated_today = []
-        self.regressed_today = []
-        self.new_in_release = {}
+        self.key_errors_by_id: list[tuple[int, int]] = []
+        self.key_errors_by_group: list[tuple[Group, int]] = []
+        self.key_performance_issues: list[tuple[Group, int]] = []
+        self.escalated_today: list[Group] = []
+        self.regressed_today: list[Group] = []
+        self.new_in_release: dict[int, list[Group]] = {}

     def check_if_project_is_empty(self):
         return (
-            not self.key_errors
+            not self.key_errors_by_group
             and not self.key_performance_issues
             and not self.total_today
             and not self.comparison_period_total
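
The old `DailySummaryProjectContext` declared mutable defaults at class level (and then re-assigned them in `__init__`); defining them only in `__init__` also avoids the shared-mutable-class-attribute pitfall entirely. A minimal illustration with toy classes, not Sentry code:

class Shared:
    items: list[int] = []  # a single list object shared by every instance


class PerInstance:
    def __init__(self) -> None:
        self.items: list[int] = []  # a fresh list per instance


a, b = Shared(), Shared()
a.items.append(1)
print(b.items)  # [1] -- b observes a's mutation

c, d = PerInstance(), PerInstance()
c.items.append(1)
print(d.items)  # []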
@@ -217,7 +212,7 @@ def project_key_errors(
     )
     query_result = raw_snql_query(request, referrer=referrer)
     key_errors = query_result["data"]
-    # Set project_ctx.key_errors to be an array of (group_id, count) for now.
+    # Set project_ctx.key_errors_by_id to be an array of (group_id, count) for now.
     # We will query the group history later on in `fetch_key_error_groups`, batched in a per-organization basis
     return key_errors

@@ -346,7 +341,7 @@ def fetch_key_error_groups(ctx: OrganizationReportContext) -> None:
     # Organization pass. Depends on project_key_errors.
     all_key_error_group_ids = []
     for project_ctx in ctx.projects_context_map.values():
-        all_key_error_group_ids.extend([group_id for group_id, count in project_ctx.key_errors])
+        all_key_error_group_ids.extend([group_id for group_id, _ in project_ctx.key_errors_by_id])

     if len(all_key_error_group_ids) == 0:
         return
@@ -358,19 +353,14 @@ def fetch_key_error_groups(ctx: OrganizationReportContext) -> None:
     for project_ctx in ctx.projects_context_map.values():
         # note Snuba might have groups that have since been deleted
         # we should just ignore those
-        project_ctx.key_errors = list(
-            filter(
-                lambda x: x[0] is not None,
-                [
-                    (
-                        group_id_to_group.get(group_id),
-                        None,
-                        count,
-                    )
-                    for group_id, count in project_ctx.key_errors
-                ],
-            )
-        )
+        project_ctx.key_errors_by_group = [
+            (group, count)
+            for group, count in (
+                (group_id_to_group.get(group_id), count)
+                for group_id, count in project_ctx.key_errors_by_id
+            )
+            if group is not None
+        ]


def fetch_key_performance_issue_groups(ctx: OrganizationReportContext):
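
The rewritten comprehension above is behaviorally equivalent to the old `list(filter(...))` construction, minus the `None` placeholder that used to occupy the dropped `group_history` slot. A self-contained sketch under that reading, with a plain dict standing in for the batched `group_id_to_group` lookup and invented ids/counts:

group_id_to_group = {11: "group-11", 12: "group-12"}  # pretend group 13 was deleted
key_errors_by_id = [(11, 3), (12, 2), (13, 10)]

# Old shape: (group, None, count), with deleted groups filtered out afterwards.
old_style = list(
    filter(
        lambda x: x[0] is not None,
        [(group_id_to_group.get(gid), None, count) for gid, count in key_errors_by_id],
    )
)

# New shape: (group, count), with the None check folded into the comprehension.
new_style = [
    (group, count)
    for group, count in ((group_id_to_group.get(gid), count) for gid, count in key_errors_by_id)
    if group is not None
]

assert old_style == [("group-11", None, 3), ("group-12", None, 2)]
assert new_style == [("group-11", 3), ("group-12", 2)]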
16 changes: 6 additions & 10 deletions src/sentry/tasks/summaries/weekly_reports.py
@@ -177,7 +177,9 @@ def prepare_organization_report(

     project_ctx = cast(ProjectContext, ctx.projects_context_map[project.id])
     if key_errors:
-        project_ctx.key_errors = [(e["events.group_id"], e["count()"]) for e in key_errors]
+        project_ctx.key_errors_by_id = [
+            (e["events.group_id"], e["count()"]) for e in key_errors
+        ]

     if ctx.organization.slug == "sentry":
         logger.info(
@@ -636,7 +638,7 @@ def all_key_errors():
                         "project_id": project_ctx.project.id,
                     },
                 )
-            for group, group_history, count in project_ctx.key_errors:
+            for group, count in project_ctx.key_errors_by_group:
                 if ctx.organization.slug == "sentry":
                     logger.info(
                         "render_template_context.all_key_errors.found_error",
@@ -656,14 +658,8 @@ def all_key_errors():
                 yield {
                     "count": count,
                     "group": group,
-                    "status": (
-                        group_history.get_status_display() if group_history else "Unresolved"
-                    ),
-                    "status_color": (
-                        group_status_to_color[group_history.status]
-                        if group_history
-                        else group_status_to_color[GroupHistoryStatus.NEW]
-                    ),
+                    "status": "Unresolved",
+                    "status_color": (group_status_to_color[GroupHistoryStatus.NEW]),
                     "group_substatus": substatus,
                     "group_substatus_color": substatus_color,
                     "group_substatus_border_color": substatus_border_color,
4 changes: 2 additions & 2 deletions src/sentry/web/frontend/debug/debug_weekly_report.py
@@ -82,8 +82,8 @@ def get_context(self, request):
            project_context.dropped_replay_count = int(
                random.weibullvariate(5, 1) * random.paretovariate(0.2)
            )
-           project_context.key_errors = [
-               (g, None, random.randint(0, 1000)) for g in Group.objects.all()[:3]
+           project_context.key_errors_by_group = [
+               (g, random.randint(0, 1000)) for g in Group.objects.all()[:3]
            ]

            project_context.new_substatus_count = random.randint(5, 200)
28 changes: 14 additions & 14 deletions tests/sentry/tasks/test_daily_summary.py
@@ -261,13 +261,13 @@ def test_build_summary_data(self):
         )
         assert project_context_map.total_today == 17  # total outcomes from today
         assert project_context_map.comparison_period_avg == 1
-        assert len(project_context_map.key_errors) == 3
-        assert (self.group1, None, 3) in project_context_map.key_errors
-        assert (self.group2, None, 2) in project_context_map.key_errors
-        assert (self.group3, None, 10) in project_context_map.key_errors
+        assert len(project_context_map.key_errors_by_group) == 3
+        assert (self.group1, 3) in project_context_map.key_errors_by_group
+        assert (self.group2, 2) in project_context_map.key_errors_by_group
+        assert (self.group3, 10) in project_context_map.key_errors_by_group
         assert len(project_context_map.key_performance_issues) == 2
-        assert (self.perf_event.group, None, 1) in project_context_map.key_performance_issues
-        assert (self.perf_event2.group, None, 1) in project_context_map.key_performance_issues
+        assert (self.perf_event.group, 1) in project_context_map.key_performance_issues
+        assert (self.perf_event2.group, 1) in project_context_map.key_performance_issues
         assert project_context_map.escalated_today == [self.group3]
         assert project_context_map.regressed_today == [self.group2]
         assert len(project_context_map.new_in_release) == 2
@@ -281,7 +281,7 @@ def test_build_summary_data(self):
         )
         assert project_context_map2.total_today == 2
         assert project_context_map2.comparison_period_avg == 0
-        assert project_context_map2.key_errors == [(self.group4, None, 2)]
+        assert project_context_map2.key_errors_by_group == [(self.group4, 2)]
         assert project_context_map2.key_performance_issues == []
         assert project_context_map2.escalated_today == []
         assert project_context_map2.regressed_today == []
@@ -328,9 +328,9 @@ def test_build_summary_data_filter_to_unresolved(self):
         )
         assert project_context_map.total_today == 9  # total outcomes from today
         assert project_context_map.comparison_period_avg == 0
-        assert len(project_context_map.key_errors) == 2
-        assert (group1, None, 3) in project_context_map.key_errors
-        assert (group2, None, 3) in project_context_map.key_errors
+        assert len(project_context_map.key_errors_by_group) == 2
+        assert (group1, 3) in project_context_map.key_errors_by_group
+        assert (group2, 3) in project_context_map.key_errors_by_group

def test_build_summary_data_filter_to_error_level(self):
"""Test that non-error level issues are filtered out of the results"""
@@ -371,10 +371,10 @@ def test_build_summary_data_filter_to_error_level(self):
         )
         assert project_context_map.total_today == 9  # total outcomes from today
         assert project_context_map.comparison_period_avg == 0
-        assert len(project_context_map.key_errors) == 2
-        assert (group1, None, 3) not in project_context_map.key_errors
-        assert (group2, None, 3) in project_context_map.key_errors
-        assert (group3, None, 3) in project_context_map.key_errors
+        assert len(project_context_map.key_errors_by_group) == 2
+        assert (group1, 3) not in project_context_map.key_errors_by_group
+        assert (group2, 3) in project_context_map.key_errors_by_group
+        assert (group3, 3) in project_context_map.key_errors_by_group

def test_build_summary_data_dedupes_groups(self):
"""
