From 918cbcb85054c2da006b7cd46144950f85a84498 Mon Sep 17 00:00:00 2001 From: Matt Duncan <14761+mrduncan@users.noreply.github.com> Date: Tue, 1 Aug 2023 14:39:51 -0700 Subject: [PATCH 01/44] fix(hc): Stabilize OrganizationRepositoryDeleteTest (#53668) This reverts 9a38207, reimplementing the changes in #53563 with the addition of fixes for the `test_put_*` test cases. --- .../organization_repository_details.py | 17 ++++----- .../test_organization_repository_details.py | 38 +++++++++++++------ 2 files changed, 34 insertions(+), 21 deletions(-) diff --git a/src/sentry/api/endpoints/organization_repository_details.py b/src/sentry/api/endpoints/organization_repository_details.py index 74cd2847f513ff..2b55c64e9ecaf4 100644 --- a/src/sentry/api/endpoints/organization_repository_details.py +++ b/src/sentry/api/endpoints/organization_repository_details.py @@ -9,8 +9,9 @@ from sentry.api.fields.empty_integer import EmptyIntegerField from sentry.api.serializers import serialize from sentry.constants import ObjectStatus -from sentry.models import Commit, Integration, Repository, ScheduledDeletion +from sentry.models import Commit, RegionScheduledDeletion, Repository from sentry.services.hybrid_cloud import coerce_id_from +from sentry.services.hybrid_cloud.integration import integration_service from sentry.tasks.repository import repository_cascade_delete_on_hide @@ -59,12 +60,10 @@ def put(self, request: Request, organization, repo_id) -> Response: else: raise NotImplementedError if result.get("integrationId"): - try: - integration = Integration.objects.get( - id=result["integrationId"], - organizationintegration__organization_id=coerce_id_from(organization), - ) - except Integration.DoesNotExist: + integration = integration_service.get_integration( + integration_id=result["integrationId"], organization_id=coerce_id_from(organization) + ) + if integration is None: return Response({"detail": "Invalid integration id"}, status=400) update_kwargs["integration_id"] = integration.id @@ -108,8 +107,8 @@ def delete(self, request: Request, organization, repo_id) -> Response: repo.rename_on_pending_deletion() if has_commits: - ScheduledDeletion.schedule(repo, days=0, hours=1, actor=request.user) + RegionScheduledDeletion.schedule(repo, days=0, hours=1, actor=request.user) else: - ScheduledDeletion.schedule(repo, days=0, actor=request.user) + RegionScheduledDeletion.schedule(repo, days=0, actor=request.user) return Response(serialize(repo, request.user), status=202) diff --git a/tests/sentry/api/endpoints/test_organization_repository_details.py b/tests/sentry/api/endpoints/test_organization_repository_details.py index 9d29d6f96d62f5..10333a1f22899a 100644 --- a/tests/sentry/api/endpoints/test_organization_repository_details.py +++ b/tests/sentry/api/endpoints/test_organization_repository_details.py @@ -4,12 +4,19 @@ from django.utils import timezone from sentry.constants import ObjectStatus -from sentry.models import Commit, Integration, OrganizationOption, Repository, ScheduledDeletion +from sentry.models import ( + Commit, + Integration, + OrganizationOption, + RegionScheduledDeletion, + Repository, +) +from sentry.silo import SiloMode from sentry.testutils.cases import APITestCase -from sentry.testutils.silo import region_silo_test +from sentry.testutils.silo import assume_test_silo_mode, region_silo_test -@region_silo_test +@region_silo_test(stable=True) class OrganizationRepositoryDeleteTest(APITestCase): def assert_rename_pending_delete(self, response, repo, external_id=None): assert 
response.data["status"] == "pending_deletion" @@ -44,7 +51,7 @@ def test_delete_no_commits(self): repo = Repository.objects.get(id=repo.id) assert repo.status == ObjectStatus.PENDING_DELETION - assert ScheduledDeletion.objects.filter( + assert RegionScheduledDeletion.objects.filter( object_id=repo.id, model_name="Repository", date_scheduled__lte=timezone.now() ).exists() self.assert_rename_pending_delete(response, repo) @@ -67,7 +74,7 @@ def test_delete_with_commits(self): repo = Repository.objects.get(id=repo.id) assert repo.status == ObjectStatus.PENDING_DELETION - assert ScheduledDeletion.objects.filter( + assert RegionScheduledDeletion.objects.filter( object_id=repo.id, model_name="Repository", date_scheduled__gt=timezone.now() ).exists() self.assert_rename_pending_delete(response, repo, "abc123") @@ -92,7 +99,9 @@ def test_delete_disabled_no_commits(self): repo = Repository.objects.get(id=repo.id) assert repo.status == ObjectStatus.PENDING_DELETION - assert ScheduledDeletion.objects.filter(object_id=repo.id, model_name="Repository").exists() + assert RegionScheduledDeletion.objects.filter( + object_id=repo.id, model_name="Repository" + ).exists() self.assert_rename_pending_delete(response, repo, "abc12345") def test_delete_disabled_with_commits(self): @@ -114,15 +123,18 @@ def test_delete_disabled_with_commits(self): repo = Repository.objects.get(id=repo.id) assert repo.status == ObjectStatus.PENDING_DELETION - assert ScheduledDeletion.objects.filter(object_id=repo.id, model_name="Repository").exists() + assert RegionScheduledDeletion.objects.filter( + object_id=repo.id, model_name="Repository" + ).exists() self.assert_rename_pending_delete(response, repo) def test_put(self): self.login_as(user=self.user) org = self.create_organization(owner=self.user, name="baz") - integration = Integration.objects.create(provider="example", name="example") - integration.add_organization(org) + integration = self.create_integration( + organization=org, provider="example", name="Example", external_id="example:1" + ) repo = Repository.objects.create( name="example", organization_id=org.id, status=ObjectStatus.DISABLED @@ -141,8 +153,9 @@ def test_put_cancel_deletion(self): self.login_as(user=self.user) org = self.create_organization(owner=self.user, name="baz") - integration = Integration.objects.create(provider="example", name="example") - integration.add_organization(org) + integration = self.create_integration( + organization=org, provider="example", name="Example", external_id="example:1" + ) repo = Repository.objects.create( name="uuid-name", @@ -223,7 +236,8 @@ def test_put_bad_integration_org(self): self.login_as(user=self.user) org = self.create_organization(owner=self.user, name="baz") - integration = Integration.objects.create(provider="example", name="example") + with assume_test_silo_mode(SiloMode.CONTROL): + integration = Integration.objects.create(provider="example", name="example") repo = Repository.objects.create(name="example", organization_id=org.id) From da8e77c3f598f25d189045c8e1191b3b4b77efc9 Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Tue, 1 Aug 2023 15:07:47 -0700 Subject: [PATCH 02/44] ref(alerts): Soft deprecate projectalertruledetails delete method (#53970) Another follow up to https://github.com/getsentry/sentry/pull/53126 but for the `ProjectAlertRuleDetailsEndpoint` DELETE method. It's purpose is duplicated by the `OrganizationAlertRuleDetailsEndpoint`, so this PR removes the duplication by putting code into a shared function. 
Future PRs will address the GET and PUT methods, update any front end usages to use the `OrganizationAlertRuleDetailsEndpoint`, and later on we'll implement the real `@deprecated` decorator. --- .../organization_alert_rule_details.py | 50 ++++--------------- .../endpoints/project_alert_rule_details.py | 20 +++----- .../test_organization_alert_rule_details.py | 8 +-- .../test_project_alert_rule_details.py | 21 -------- 4 files changed, 23 insertions(+), 76 deletions(-) diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_details.py b/src/sentry/incidents/endpoints/organization_alert_rule_details.py index 8992df028b36db..2f52a98a277972 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_details.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_details.py @@ -6,17 +6,23 @@ from sentry.api.base import region_silo_endpoint from sentry.api.serializers import serialize from sentry.api.serializers.models.alert_rule import DetailedAlertRuleSerializer -from sentry.auth.superuser import is_active_superuser from sentry.incidents.endpoints.bases import OrganizationAlertRuleEndpoint from sentry.incidents.logic import AlreadyDeletedError, delete_alert_rule from sentry.incidents.serializers import AlertRuleSerializer as DrfAlertRuleSerializer -from sentry.models import OrganizationMemberTeam, SentryAppComponent, SentryAppInstallation -from sentry.models.actor import ACTOR_TYPES +from sentry.models import SentryAppComponent, SentryAppInstallation from sentry.models.rulesnooze import RuleSnooze from sentry.services.hybrid_cloud.app import app_service from sentry.services.hybrid_cloud.user.service import user_service +def remove_alert_rule(request: Request, organization, alert_rule): + try: + delete_alert_rule(alert_rule, user=request.user, ip_address=request.META.get("REMOTE_ADDR")) + return Response(status=status.HTTP_204_NO_CONTENT) + except AlreadyDeletedError: + return Response("This rule has already been deleted", status=status.HTTP_400_BAD_REQUEST) + + @region_silo_endpoint class OrganizationAlertRuleDetailsEndpoint(OrganizationAlertRuleEndpoint): def get(self, request: Request, organization, alert_rule) -> Response: @@ -93,46 +99,10 @@ def put(self, request: Request, organization, alert_rule) -> Response: ) if serializer.is_valid(): - if not self._verify_user_has_permission(request, alert_rule): - return Response( - { - "detail": [ - "You do not have permission to edit this alert rule because you are not a member of the assigned team." - ] - }, - status=403, - ) alert_rule = serializer.save() return Response(serialize(alert_rule, request.user), status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def delete(self, request: Request, organization, alert_rule) -> Response: - if not self._verify_user_has_permission(request, alert_rule): - return Response( - { - "detail": [ - "You do not have permission to delete this alert rule because you are not a member of the assigned team." 
- ] - }, - status=403, - ) - try: - delete_alert_rule( - alert_rule, user=request.user, ip_address=request.META.get("REMOTE_ADDR") - ) - return Response(status=status.HTTP_204_NO_CONTENT) - except AlreadyDeletedError: - return Response( - "This rule has already been deleted", status=status.HTTP_400_BAD_REQUEST - ) - - def _verify_user_has_permission(self, request: Request, alert_rule): - if not is_active_superuser(request): - if alert_rule.owner and alert_rule.owner.type == ACTOR_TYPES["team"]: - team = alert_rule.owner.resolve() - if not OrganizationMemberTeam.objects.filter( - organizationmember__user_id=request.user.id, team=team, is_active=True - ).exists(): - return False - return True + return remove_alert_rule(request, organization, alert_rule) diff --git a/src/sentry/incidents/endpoints/project_alert_rule_details.py b/src/sentry/incidents/endpoints/project_alert_rule_details.py index d0f8d008dac63d..c557391e5da310 100644 --- a/src/sentry/incidents/endpoints/project_alert_rule_details.py +++ b/src/sentry/incidents/endpoints/project_alert_rule_details.py @@ -6,11 +6,8 @@ from sentry.api.serializers import serialize from sentry.api.serializers.models.alert_rule import AlertRuleSerializer from sentry.incidents.endpoints.bases import ProjectAlertRuleEndpoint -from sentry.incidents.logic import ( - AlreadyDeletedError, - delete_alert_rule, - get_slack_actions_with_async_lookups, -) +from sentry.incidents.endpoints.organization_alert_rule_details import remove_alert_rule +from sentry.incidents.logic import get_slack_actions_with_async_lookups from sentry.incidents.serializers import AlertRuleSerializer as DrfAlertRuleSerializer from sentry.incidents.utils.sentry_apps import trigger_sentry_app_action_creators_for_incidents from sentry.integrations.slack.utils import RedisRuleStatus @@ -63,10 +60,9 @@ def put(self, request: Request, project, alert_rule) -> Response: return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def delete(self, request: Request, project, alert_rule) -> Response: - try: - delete_alert_rule(alert_rule, request.user, ip_address=request.META.get("REMOTE_ADDR")) - return Response(status=status.HTTP_204_NO_CONTENT) - except AlreadyDeletedError: - return Response( - "This rule has already been deleted", status=status.HTTP_400_BAD_REQUEST - ) + """ + Delete a metric alert rule. @deprecated. Use OrganizationAlertRuleDetailsEndpoint instead. + `````````````````` + :auth: required + """ + return remove_alert_rule(request, project.organization, alert_rule) diff --git a/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py b/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py index 2015b79d11e168..b8595ca0433678 100644 --- a/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py +++ b/tests/sentry/incidents/endpoints/test_organization_alert_rule_details.py @@ -521,7 +521,6 @@ def test_no_owner(self): def test_team_permission(self): # Test ensures you can only edit alerts owned by your team or no one. 
- om = self.create_member( user=self.user, organization=self.organization, role="owner", teams=[self.team] ) @@ -537,7 +536,7 @@ def test_team_permission(self): ).delete() with self.feature("organizations:incidents"): resp = self.get_response(self.organization.slug, alert_rule.id, **serialized_alert_rule) - assert resp.status_code == 403 + assert resp.status_code == 200 self.create_team_membership(team=self.team, member=om) with self.feature("organizations:incidents"): resp = self.get_success_response( @@ -624,7 +623,10 @@ def test_team_permission(self): ).delete() with self.feature("organizations:incidents"): resp = self.get_response(self.organization.slug, alert_rule.id) - assert resp.status_code == 403 + assert resp.status_code == 204 + another_alert_rule = self.alert_rule + alert_rule.owner = self.team.actor + another_alert_rule.save() self.create_team_membership(team=self.team, member=om) with self.feature("organizations:incidents"): resp = self.get_success_response(self.organization.slug, alert_rule.id, status_code=204) diff --git a/tests/sentry/incidents/endpoints/test_project_alert_rule_details.py b/tests/sentry/incidents/endpoints/test_project_alert_rule_details.py index 8b81f02bdabd65..2abed41ebedc07 100644 --- a/tests/sentry/incidents/endpoints/test_project_alert_rule_details.py +++ b/tests/sentry/incidents/endpoints/test_project_alert_rule_details.py @@ -14,8 +14,6 @@ AlertRuleStatus, AlertRuleTrigger, AlertRuleTriggerAction, - Incident, - IncidentStatus, ) from sentry.integrations.slack.client import SlackClient from sentry.models import AuditLogEntry, Integration @@ -699,22 +697,3 @@ def test_simple(self): event=audit_log.get_event_id("ALERT_RULE_REMOVE"), target_object=self.alert_rule.id ) assert len(audit_log_entry) == 1 - - def test_snapshot_and_create_new_with_same_name(self): - with self.tasks(): - # We attach the rule to an incident so the rule is snapshotted instead of deleted. - incident = self.create_incident(alert_rule=self.alert_rule) - - with self.feature("organizations:incidents"): - self.get_success_response( - self.organization.slug, self.project.slug, self.alert_rule.id, status_code=204 - ) - - alert_rule = AlertRule.objects_with_snapshots.get(id=self.alert_rule.id) - - assert not AlertRule.objects.filter(id=alert_rule.id).exists() - assert AlertRule.objects_with_snapshots.filter(id=alert_rule.id).exists() - assert alert_rule.status == AlertRuleStatus.SNAPSHOT.value - - # We also confirm that the incident is automatically resolved. 
- assert Incident.objects.get(id=incident.id).status == IncidentStatus.CLOSED.value From 93a9d8d2790a4fdd1c82ae0ead654cee546d9add Mon Sep 17 00:00:00 2001 From: Chad Whitacre Date: Tue, 1 Aug 2023 18:12:11 -0400 Subject: [PATCH 03/44] Stop publishing to PyPI (#53980) --- .craft.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.craft.yml b/.craft.yml index 3f554c3c48a23b..ebf71f1f835856 100644 --- a/.craft.yml +++ b/.craft.yml @@ -23,5 +23,4 @@ targets: source: us.gcr.io/sentryio/sentry target: getsentry/sentry targetFormat: '{{{target}}}:latest' - - name: pypi - name: github From 005f4dc772a3a0e4a07c565a201903e592620d39 Mon Sep 17 00:00:00 2001 From: David Wang Date: Tue, 1 Aug 2023 15:12:22 -0700 Subject: [PATCH 04/44] ref(crons): Match environment label color to icon color in timeline row (#53977) Before: image After: image --- .../overviewTimeline/timelineTableRow.tsx | 11 +++--- static/app/views/monitors/utils/constants.tsx | 36 +++++++++++++++---- 2 files changed, 35 insertions(+), 12 deletions(-) diff --git a/static/app/views/monitors/components/overviewTimeline/timelineTableRow.tsx b/static/app/views/monitors/components/overviewTimeline/timelineTableRow.tsx index c683b02a66209a..a05418b584ce51 100644 --- a/static/app/views/monitors/components/overviewTimeline/timelineTableRow.tsx +++ b/static/app/views/monitors/components/overviewTimeline/timelineTableRow.tsx @@ -7,9 +7,9 @@ import {tct} from 'sentry/locale'; import {fadeIn} from 'sentry/styles/animations'; import {space} from 'sentry/styles/space'; import useOrganization from 'sentry/utils/useOrganization'; -import {Monitor} from 'sentry/views/monitors/types'; +import {Monitor, MonitorStatus} from 'sentry/views/monitors/types'; import {scheduleAsText} from 'sentry/views/monitors/utils'; -import {statusIconMap} from 'sentry/views/monitors/utils/constants'; +import {statusIconColorMap} from 'sentry/views/monitors/utils/constants'; import {CheckInTimeline, CheckInTimelineProps} from './checkInTimeline'; import {TimelinePlaceholder} from './timelinePlaceholder'; @@ -37,8 +37,8 @@ export function TimelineTableRow({monitor, bucketedData, ...timelineProps}: Prop {environments.map(({name, status}) => ( - {name} - {statusIconMap[status]} + {name} + {statusIconColorMap[status].icon} ))} {!isExpanded && ( @@ -135,11 +135,12 @@ const EnvWithStatus = styled('div')` align-items: center; `; -const MonitorEnvLabel = styled('div')` +const MonitorEnvLabel = styled('div')<{status: MonitorStatus}>` text-overflow: ellipsis; overflow: hidden; white-space: nowrap; flex: 1; + color: ${p => p.theme[statusIconColorMap[p.status].color]}; `; const TimelineContainer = styled('div')` diff --git a/static/app/views/monitors/utils/constants.tsx b/static/app/views/monitors/utils/constants.tsx index 701ff7ed2b44a3..ed7226a7ed6757 100644 --- a/static/app/views/monitors/utils/constants.tsx +++ b/static/app/views/monitors/utils/constants.tsx @@ -1,4 +1,5 @@ import {IconCheckmark, IconFire, IconPause, IconTimer, IconWarning} from 'sentry/icons'; +import {Aliases} from 'sentry/utils/theme'; import {StatsBucket} from 'sentry/views/monitors/components/overviewTimeline/types'; import {CheckInStatus, MonitorStatus} from 'sentry/views/monitors/types'; @@ -10,11 +11,32 @@ export const CHECKIN_STATUS_PRECEDENT = [ CheckInStatus.ERROR, ] satisfies Array; -export const statusIconMap: Record = { - ok: , - error: , - timeout: , - missed_checkin: , - active: , - disabled: , +export const statusIconColorMap: Record< + MonitorStatus, + {color: keyof Aliases; icon: React.ReactNode} 
+> = { + ok: { + icon: , + color: 'successText', + }, + error: { + icon: , + color: 'errorText', + }, + timeout: { + icon: , + color: 'errorText', + }, + missed_checkin: { + icon: , + color: 'warningText', + }, + active: { + icon: , + color: 'subText', + }, + disabled: { + icon: , + color: 'subText', + }, }; From 498f3cdc7d6c1fb7b5e0a2c35801f172aecd7e13 Mon Sep 17 00:00:00 2001 From: Richard Ortenberg Date: Tue, 1 Aug 2023 15:47:09 -0700 Subject: [PATCH 05/44] migration(crons): Backfill next check-in latest (#53984) Backfills `next_checkin_latest` with `next_checkin` if null --- migrations_lockfile.txt | 2 +- .../0527_backfill_next_checkin_latest.py | 49 +++++++++++++++++++ 2 files changed, 50 insertions(+), 1 deletion(-) create mode 100644 src/sentry/migrations/0527_backfill_next_checkin_latest.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index d3ba8380937f57..d5c3d51404b6ac 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -7,5 +7,5 @@ will then be regenerated, and you should be able to merge without conflicts. nodestore: 0002_nodestore_no_dictfield replays: 0003_add_size_to_recording_segment -sentry: 0526_pr_comment_type_column +sentry: 0527_backfill_next_checkin_latest social_auth: 0002_default_auto_field diff --git a/src/sentry/migrations/0527_backfill_next_checkin_latest.py b/src/sentry/migrations/0527_backfill_next_checkin_latest.py new file mode 100644 index 00000000000000..599cc088de93fb --- /dev/null +++ b/src/sentry/migrations/0527_backfill_next_checkin_latest.py @@ -0,0 +1,49 @@ +# Generated by Django 3.2.20 on 2023-08-01 20:51 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.utils.query import RangeQuerySetWrapperWithProgressBar + + +def backfill_next_checkin_latest(apps, schema_editor): + MonitorEnvironment = apps.get_model("sentry", "MonitorEnvironment") + + for monitor_environment in RangeQuerySetWrapperWithProgressBar( + MonitorEnvironment.objects.all() + ): + # skip backfill if next_checkin_latest is set or next_checkin is not set + if ( + monitor_environment.next_checkin_latest is not None + or monitor_environment.next_checkin is None + ): + continue + + monitor_environment.next_checkin_latest = monitor_environment.next_checkin + monitor_environment.save(update_fields=["next_checkin_latest"]) + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. 
+ is_dangerous = False + + dependencies = [ + ("sentry", "0526_pr_comment_type_column"), + ] + + operations = [ + migrations.RunPython( + backfill_next_checkin_latest, + migrations.RunPython.noop, + hints={"tables": ["sentry_monitorenvironment"]}, + ), + ] From b8b42cd932a4182d78c05f8a58411d1f2c635be9 Mon Sep 17 00:00:00 2001 From: Matt Duncan <14761+mrduncan@users.noreply.github.com> Date: Tue, 1 Aug 2023 16:41:46 -0700 Subject: [PATCH 06/44] fix(hc): Fix split DB test failures (#53907) This resolves the `AssertionError: Database queries to 'control' are not allowed in this test.` errors when running tests in split DB mode since these tests access more than just the `default` database. --------- Co-authored-by: Mark Story Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- tests/relay_integration/lang/javascript/test_plugin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/relay_integration/lang/javascript/test_plugin.py b/tests/relay_integration/lang/javascript/test_plugin.py index 41b5f39e9c793b..dc85bf9afbd121 100644 --- a/tests/relay_integration/lang/javascript/test_plugin.py +++ b/tests/relay_integration/lang/javascript/test_plugin.py @@ -27,6 +27,7 @@ from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.helpers.features import with_feature from sentry.testutils.helpers.options import override_options +from sentry.testutils.pytest.fixtures import django_db_all from sentry.testutils.relay import RelayStoreHelper from sentry.testutils.skips import requires_symbolicator from sentry.utils import json @@ -103,7 +104,7 @@ def upload_bundle(bundle_file, project, release=None, dist=None, upload_as_artif ) -@pytest.mark.django_db(transaction=True) +@django_db_all(transaction=True) class TestJavascriptIntegration(RelayStoreHelper): @pytest.fixture(autouse=True) def initialize(self, default_projectkey, default_project, set_sentry_option, live_server): From 21200987c75306c8be4986f43cc4008f617d5c4e Mon Sep 17 00:00:00 2001 From: Aniket Das <85517732+AniketDas-Tekky@users.noreply.github.com> Date: Tue, 1 Aug 2023 18:30:08 -0700 Subject: [PATCH 07/44] ref(merged-pr-comments): Add a couple of metrics to track successes (#53981) Also remove the rate limit metric as we were saving each rate limit value as a unique tag --- .../tasks/integrations/github/pr_comment.py | 24 +++++++++---------- .../integrations/github/test_pr_comment.py | 16 ++++++------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/src/sentry/tasks/integrations/github/pr_comment.py b/src/sentry/tasks/integrations/github/pr_comment.py index 885b65daa17f92..2e8069885bcb98 100644 --- a/src/sentry/tasks/integrations/github/pr_comment.py +++ b/src/sentry/tasks/integrations/github/pr_comment.py @@ -31,6 +31,8 @@ logger = logging.getLogger(__name__) +METRICS_BASE = "github_pr_comment.{key}" + @dataclass class PullRequestIssue: @@ -153,19 +155,17 @@ def create_or_update_comment( group_ids=issue_list, comment_type=comment_type, ) + metrics.incr(METRICS_BASE.format(key="comment_created")) else: resp = client.update_comment( repo=repo.name, comment_id=pr_comment.external_id, data={"body": comment_body} ) - + metrics.incr(METRICS_BASE.format(key="comment_updated")) pr_comment.updated_at = timezone.now() pr_comment.group_ids = issue_list pr_comment.save() - metrics.incr( - "github_pr_comment.rate_limit_remaining", - tags={"remaining": int(resp.headers["X-Ratelimit-Remaining"])}, - ) + # TODO(adas): Figure out a way to track average 
rate limit left for GH client logger.info( "github.pr_comment.create_or_update_comment", @@ -184,7 +184,7 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): except Organization.DoesNotExist: cache.delete(cache_key) logger.error("github.pr_comment.org_missing") - metrics.incr("github_pr_comment.error", tags={"type": "missing_org"}) + metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "missing_org"}) return if not ( @@ -210,7 +210,7 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): except Project.DoesNotExist: cache.delete(cache_key) logger.error("github.pr_comment.project_missing", extra={"organization_id": org_id}) - metrics.incr("github_pr_comment.error", tags={"type": "missing_project"}) + metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "missing_project"}) return top_5_issues = get_top_5_issues_by_count(issue_list, project) @@ -222,14 +222,14 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): except Repository.DoesNotExist: cache.delete(cache_key) logger.error("github.pr_comment.repo_missing", extra={"organization_id": org_id}) - metrics.incr("github_pr_comment.error", tags={"type": "missing_repo"}) + metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "missing_repo"}) return integration = integration_service.get_integration(integration_id=repo.integration_id) if not integration: cache.delete(cache_key) logger.error("github.pr_comment.integration_missing", extra={"organization_id": org_id}) - metrics.incr("github_pr_comment.error", tags={"type": "missing_integration"}) + metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "missing_integration"}) return installation = integration.get_installation(organization_id=org_id) @@ -258,14 +258,14 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): if e.json: if ISSUE_LOCKED_ERROR_MESSAGE in e.json.get("message", ""): - metrics.incr("github_pr_comment.issue_locked_error") + metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "issue_locked_error"}) return elif RATE_LIMITED_MESSAGE in e.json.get("message", ""): - metrics.incr("github_pr_comment.rate_limited_error") + metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "rate_limited_error"}) return - metrics.incr("github_pr_comment.api_error") + metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "api_error"}) raise e diff --git a/tests/sentry/tasks/integrations/github/test_pr_comment.py b/tests/sentry/tasks/integrations/github/test_pr_comment.py index 4a2591d915cf17..34a13e2066d5e4 100644 --- a/tests/sentry/tasks/integrations/github/test_pr_comment.py +++ b/tests/sentry/tasks/integrations/github/test_pr_comment.py @@ -365,9 +365,7 @@ def test_comment_workflow(self, mock_metrics, get_jwt, mock_issues): assert len(pull_request_comment_query) == 1 assert pull_request_comment_query[0].external_id == 1 assert pull_request_comment_query[0].comment_type == CommentType.MERGED_PR - mock_metrics.incr.assert_called_with( - "github_pr_comment.rate_limit_remaining", tags={"remaining": 59} - ) + mock_metrics.incr.assert_called_with("github_pr_comment.comment_created") @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @@ -417,9 +415,7 @@ def test_comment_workflow_updates_comment(self, mock_metrics, get_jwt, mock_issu pull_request_comment.refresh_from_db() assert pull_request_comment.group_ids == [g.id for g in Group.objects.all()] assert 
pull_request_comment.updated_at == timezone.now() - mock_metrics.incr.assert_called_with( - "github_pr_comment.rate_limit_remaining", tags={"remaining": 59} - ) + mock_metrics.incr.assert_called_with("github_pr_comment.comment_updated") @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @@ -477,7 +473,9 @@ def test_comment_workflow_api_error_locked_issue(self, mock_metrics, get_jwt, mo github_comment_workflow(self.pr.id, self.project.id) assert cache.get(self.cache_key) is None - mock_metrics.incr.assert_called_with("github_pr_comment.issue_locked_error") + mock_metrics.incr.assert_called_with( + "github_pr_comment.error", tags={"type": "issue_locked_error"} + ) @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @@ -507,7 +505,9 @@ def test_comment_workflow_api_error_rate_limited(self, mock_metrics, get_jwt, mo github_comment_workflow(self.pr.id, self.project.id) assert cache.get(self.cache_key) is None - mock_metrics.incr.assert_called_with("github_pr_comment.rate_limited_error") + mock_metrics.incr.assert_called_with( + "github_pr_comment.error", tags={"type": "rate_limited_error"} + ) @patch( "sentry.tasks.integrations.github.pr_comment.pr_to_issue_query", From cda3dac76a88698a2f946af75fe40fa2b38bb346 Mon Sep 17 00:00:00 2001 From: Alberto Leal Date: Wed, 2 Aug 2023 02:28:38 -0400 Subject: [PATCH 08/44] chore(test): Update tests/js/sentry-test/charts.tsx to TypeScript (#54010) --- tests/js/sentry-test/{charts.jsx => charts.tsx} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename tests/js/sentry-test/{charts.jsx => charts.tsx} (83%) diff --git a/tests/js/sentry-test/charts.jsx b/tests/js/sentry-test/charts.tsx similarity index 83% rename from tests/js/sentry-test/charts.jsx rename to tests/js/sentry-test/charts.tsx index 8d71cc1fd31f71..421980f8621c77 100644 --- a/tests/js/sentry-test/charts.jsx +++ b/tests/js/sentry-test/charts.tsx @@ -25,10 +25,10 @@ const model = { }; export const chart = { - getModel: jest.fn(() => ({option: model})), + getModel: jest.fn(() => ({...model})), }; -export const mockZoomRange = (startValue, endValue) => { +export const mockZoomRange = (startValue: number, endValue: number) => { chart.getModel.mockImplementation(() => ({ ...model, _payload: { From fc1059d97d22b9fd5b8c9f02e9f27802f42c497b Mon Sep 17 00:00:00 2001 From: Michal Kuffa Date: Wed, 2 Aug 2023 11:26:13 +0200 Subject: [PATCH 09/44] ref(backpressure): Bump rabbit http request timeout (#54014) --- src/sentry/processing/backpressure/memory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/processing/backpressure/memory.py b/src/sentry/processing/backpressure/memory.py index 91da0aa77fcc2d..c5543276d710f5 100644 --- a/src/sentry/processing/backpressure/memory.py +++ b/src/sentry/processing/backpressure/memory.py @@ -27,7 +27,7 @@ def query_rabbitmq_memory_usage(host: str) -> ServiceMemory: host += "/" url = f"{host}api/nodes" - response = requests.get(url, timeout=1) + response = requests.get(url, timeout=3) response.raise_for_status() json = response.json() return ServiceMemory(json[0]["mem_used"], json[0]["mem_limit"]) From 6652fa27f94c076303559be106e618072127a54e Mon Sep 17 00:00:00 2001 From: Joris Bayer Date: Wed, 2 Aug 2023 11:52:03 +0200 Subject: [PATCH 10/44] chore(spans): Add feature flag for span extraction (#53925) 
https://github.com/getsentry/relay/pull/2350 introduced feature-flagged extraction of spans from transactions and publishing them on a Kafka topic in Relay. This PR registers the corresponding feature and forwards it to Relay. --- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + src/sentry/relay/config/__init__.py | 1 + 3 files changed, 4 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index ffe112d6b961a4..f1a0e1055de056 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1690,6 +1690,8 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "projects:data-forwarding": True, # Enable functionality to discard groups. "projects:discard-groups": False, + # Extract spans from transactions in Relay, and forward them via Kafka. + "projects:extract-standalone-spans": False, # Enable functionality for attaching minidumps to events and displaying # then in the group UI. "projects:minidump": True, diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 8246643923505f..7ced4712884f85 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -280,6 +280,7 @@ default_manager.add("projects:similarity-view", ProjectFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("projects:suspect-resolutions", ProjectFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("projects:span-metrics-extraction", ProjectFeature, FeatureHandlerStrategy.INTERNAL) +default_manager.add("projects:extract-standalone-spans", ProjectFeature, FeatureHandlerStrategy.INTERNAL) # Project plugin features default_manager.add("projects:plugins", ProjectPluginFeature, FeatureHandlerStrategy.INTERNAL) diff --git a/src/sentry/relay/config/__init__.py b/src/sentry/relay/config/__init__.py index 85a6e82c49e458..04ab5e0d2eea8e 100644 --- a/src/sentry/relay/config/__init__.py +++ b/src/sentry/relay/config/__init__.py @@ -52,6 +52,7 @@ #: These features will be listed in the project config EXPOSABLE_FEATURES = [ + "projects:extract-standalone-spans", "projects:span-metrics-extraction", "organizations:transaction-name-mark-scrubbed-as-sanitized", "organizations:transaction-name-normalize", From ffee3797bd48ee7d4f390192a5d7a32dab450f00 Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Wed, 2 Aug 2023 14:33:06 +0200 Subject: [PATCH 11/44] ref(getting-started-docs): Make project deletion onBack enabled by default (#54019) --- static/app/views/onboarding/onboarding.spec.tsx | 6 ------ static/app/views/onboarding/onboarding.tsx | 5 ----- static/app/views/projectInstall/platformDocHeader.tsx | 5 ----- 3 files changed, 16 deletions(-) diff --git a/static/app/views/onboarding/onboarding.spec.tsx b/static/app/views/onboarding/onboarding.spec.tsx index d2735383ed834e..ca13169f3454d1 100644 --- a/static/app/views/onboarding/onboarding.spec.tsx +++ b/static/app/views/onboarding/onboarding.spec.tsx @@ -165,9 +165,6 @@ describe('Onboarding', function () { }; const {routerProps, routerContext, organization} = initializeOrg({ - organization: { - features: ['onboarding-project-deletion-on-back-click'], - }, router: { params: routeParams, }, @@ -258,9 +255,6 @@ describe('Onboarding', function () { }; const {routerProps, routerContext, organization} = initializeOrg({ - organization: { - features: ['onboarding-project-deletion-on-back-click'], - }, router: { params: routeParams, }, diff --git a/static/app/views/onboarding/onboarding.tsx b/static/app/views/onboarding/onboarding.tsx index 47b50516f4ffdb..221094d1db1680 100644 
--- a/static/app/views/onboarding/onboarding.tsx +++ b/static/app/views/onboarding/onboarding.tsx @@ -138,12 +138,7 @@ function Onboarding(props: Props) { props.location.pathname, ]); - const projectDeletionOnBackClick = !!organization?.features.includes( - 'onboarding-project-deletion-on-back-click' - ); - const shallProjectBeDeleted = - projectDeletionOnBackClick && onboardingSteps[stepIndex].id === 'setup-docs' && recentCreatedProject && // if the project has received a first error, we don't delete it diff --git a/static/app/views/projectInstall/platformDocHeader.tsx b/static/app/views/projectInstall/platformDocHeader.tsx index fcc86188dcc2d1..17fb3cc537621e 100644 --- a/static/app/views/projectInstall/platformDocHeader.tsx +++ b/static/app/views/projectInstall/platformDocHeader.tsx @@ -28,17 +28,12 @@ export function PlatformDocHeader({platform, projectSlug}: Props) { const api = useApi(); const router = useRouter(); - const projectDeletionOnBackClick = !!organization?.features.includes( - 'onboarding-project-deletion-on-back-click' - ); - const recentCreatedProject = useRecentCreatedProject({ orgSlug: organization.slug, projectSlug, }); const shallProjectBeDeleted = - projectDeletionOnBackClick && recentCreatedProject && // if the project has received a first error, we don't delete it recentCreatedProject.firstError === false && From 8fbc7d53f4ea18057f008b2bc15729e74728670d Mon Sep 17 00:00:00 2001 From: Priscila Oliveira Date: Wed, 2 Aug 2023 15:16:32 +0200 Subject: [PATCH 12/44] ref(getting-started-docs): Make project deletion onBack enabled by default (#54020) --- src/sentry/conf/server.py | 3 --- src/sentry/features/__init__.py | 1 - 2 files changed, 4 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index f1a0e1055de056..3e9f4abfb8f437 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1639,9 +1639,6 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "organizations:device-class-synthesis": False, # Enable the product selection feature in the getting started docs, regardless of the organization's strategy "organizations:getting-started-doc-with-product-selection": False, - # Enable a new behavior for deleting the freshly created project, - # if the user clicks on the back button in the onboarding for new orgs - "organizations:onboarding-project-deletion-on-back-click": False, # Enable the SDK selection feature in the onboarding "organizations:onboarding-sdk-selection": False, # Enable OpenAI suggestions in the issue details page diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 7ced4712884f85..2f0073b9527112 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -246,7 +246,6 @@ default_manager.add("organizations:integrations-issue-sync", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:integrations-stacktrace-link", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:integrations-ticket-rules", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:onboarding-project-deletion-on-back-click", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:onboarding-sdk-selection", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:open-ai-suggestion", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-view", OrganizationFeature, 
FeatureHandlerStrategy.INTERNAL) From 7812ca5d333a8434995b5991ba7d622e541c984c Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Wed, 2 Aug 2023 10:07:38 -0400 Subject: [PATCH 13/44] fix(starfish): Remove unnecessary wrapper around span description (#53986) Mockup parity stuff. --- .../starfish/views/spanSummaryPage/index.tsx | 45 +++++-------------- 1 file changed, 11 insertions(+), 34 deletions(-) diff --git a/static/app/views/starfish/views/spanSummaryPage/index.tsx b/static/app/views/starfish/views/spanSummaryPage/index.tsx index 8e096978a144ba..34bb2f145ca2cd 100644 --- a/static/app/views/starfish/views/spanSummaryPage/index.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/index.tsx @@ -5,8 +5,6 @@ import * as qs from 'query-string'; import Breadcrumbs, {Crumb} from 'sentry/components/breadcrumbs'; import * as Layout from 'sentry/components/layouts/thirds'; -import Panel from 'sentry/components/panels/panel'; -import PanelBody from 'sentry/components/panels/panelBody'; import QuestionTooltip from 'sentry/components/questionTooltip'; import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle'; import {t, tct} from 'sentry/locale'; @@ -230,27 +228,16 @@ function SpanSummaryPage({params, location}: Props) { {span?.[SpanMetricsFields.SPAN_DESCRIPTION] && ( - - - - - - - {spanDescriptionCardTitle} - - - - - - - + + + )} @@ -395,15 +382,11 @@ export const BlockContainer = styled('div')` const DescriptionContainer = styled('div')` width: 100%; - padding: ${space(1)}; + margin-bottom: ${space(4)}; font-size: 1rem; line-height: 1.2; `; -const DescriptionPanelBody = styled(PanelBody)` - padding: ${space(2)}; -`; - const BlockWrapper = styled('div')` padding-right: ${space(4)}; flex: 1; @@ -411,12 +394,6 @@ const BlockWrapper = styled('div')` word-break: break-word; `; -const DescriptionTitle = styled('h4')` - font-size: 1rem; - font-weight: 600; - line-height: 1.2; -`; - export default SpanSummaryPage; const getDescriptionLabel = (spanOp: string, title?: boolean) => { From d716da41b0846b7d6b30962a22b7b91cc1c3e840 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Wed, 2 Aug 2023 10:08:03 -0400 Subject: [PATCH 14/44] fix(starfish): Improve span table columns (#53988) - Average --> AVG - Queries --> Queries Per Min - Database Query --> Query Description --- .../views/starfish/views/spans/spanTimeCharts.tsx | 3 ++- static/app/views/starfish/views/spans/spansTable.tsx | 2 +- static/app/views/starfish/views/spans/types.tsx | 12 ++++++++++-- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/static/app/views/starfish/views/spans/spanTimeCharts.tsx b/static/app/views/starfish/views/spans/spanTimeCharts.tsx index 454ddb4dd6d12d..6a2c747e968c99 100644 --- a/static/app/views/starfish/views/spans/spanTimeCharts.tsx +++ b/static/app/views/starfish/views/spans/spanTimeCharts.tsx @@ -18,6 +18,7 @@ import {useSpansQuery} from 'sentry/views/starfish/utils/useSpansQuery'; import {useErrorRateQuery as useErrorCountQuery} from 'sentry/views/starfish/views/spans/queries'; import { DataTitles, + getDurationChartTitle, getThroughputChartTitle, } from 'sentry/views/starfish/views/spans/types'; import {NULL_SPAN_CATEGORY} from 'sentry/views/starfish/views/webServiceView/spanGroupBreakdownContainer'; @@ -65,7 +66,7 @@ export function SpanTimeCharts({moduleName, appliedFilters, spanCategory}: Props > = { [ModuleName.ALL]: [ {title: getThroughputChartTitle(moduleName), Comp: ThroughputChart}, - 
{title: DataTitles.avg, Comp: DurationChart}, + {title: getDurationChartTitle(moduleName), Comp: DurationChart}, ], [ModuleName.DB]: [], [ModuleName.HTTP]: [{title: DataTitles.errorCount, Comp: ErrorChart}], diff --git a/static/app/views/starfish/views/spans/spansTable.tsx b/static/app/views/starfish/views/spans/spansTable.tsx index c7e48e626aaba2..4ba48ab004cf22 100644 --- a/static/app/views/starfish/views/spans/spansTable.tsx +++ b/static/app/views/starfish/views/spans/spansTable.tsx @@ -225,7 +225,7 @@ function getDescriptionHeader(moduleName: ModuleName, spanCategory?: string) { return 'URL Request'; } if (moduleName === ModuleName.DB) { - return 'Database Query'; + return 'Query Description'; } if (spanCategory === 'cache') { return 'Cache Query'; diff --git a/static/app/views/starfish/views/spans/types.tsx b/static/app/views/starfish/views/spans/types.tsx index e301d12cf15bae..1198c057ce52cb 100644 --- a/static/app/views/starfish/views/spans/types.tsx +++ b/static/app/views/starfish/views/spans/types.tsx @@ -24,7 +24,7 @@ export const DataTitles: Record = { p50p95: t('Duration (P50, P95)'), p50: t('Duration (P50)'), p95: t('Duration (P95)'), - avg: t('Average Duration'), + avg: t('Avg Duration'), duration: t('Duration'), errorCount: t('5XX Responses'), throughput: t('Throughput'), @@ -51,7 +51,7 @@ export const getTooltip = ( export const getThroughputTitle = (spanOp?: string) => { if (spanOp?.startsWith('db')) { - return t('Queries'); + return t('Queries Per Min'); } if (defined(spanOp)) { return t('Requests'); @@ -59,6 +59,14 @@ export const getThroughputTitle = (spanOp?: string) => { return '--'; }; +export const getDurationChartTitle = (spanOp?: string) => { + if (spanOp) { + return t('Average Duration'); + } + + return '--'; +}; + export const getThroughputChartTitle = (spanOp?: string) => { if (spanOp?.startsWith('db')) { return t('Queries Per Minute'); From fe0856e83879f6fdff976402a31535670bd30353 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Wed, 2 Aug 2023 10:08:42 -0400 Subject: [PATCH 15/44] fix(starfish): Improve time spent column formatting (#53989) - Use extra-short abbreviations - Show 2 decimal places - Align rate units with short duration units - De-emphasize the time spent percentage --- static/app/utils/discover/fields.tsx | 2 +- .../components/tableCells/timeSpentCell.tsx | 16 ++++++++++++---- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/static/app/utils/discover/fields.tsx b/static/app/utils/discover/fields.tsx index e7cb9e1f1720f1..a36a8f6a101fde 100644 --- a/static/app/utils/discover/fields.tsx +++ b/static/app/utils/discover/fields.tsx @@ -127,7 +127,7 @@ export enum RateUnits { export const RATE_UNIT_LABELS = { [RateUnits.PER_SECOND]: '/s', [RateUnits.PER_MINUTE]: '/min', - [RateUnits.PER_HOUR]: '/h', + [RateUnits.PER_HOUR]: '/hr', }; const CONDITIONS_ARGUMENTS: SelectValue[] = [ diff --git a/static/app/views/starfish/components/tableCells/timeSpentCell.tsx b/static/app/views/starfish/components/tableCells/timeSpentCell.tsx index 5b71ebbd24fd1c..a0d65aa0ab22d5 100644 --- a/static/app/views/starfish/components/tableCells/timeSpentCell.tsx +++ b/static/app/views/starfish/components/tableCells/timeSpentCell.tsx @@ -1,3 +1,4 @@ +import styled from '@emotion/styled'; import clamp from 'lodash/clamp'; import {Tooltip} from 'sentry/components/tooltip'; @@ -13,7 +14,7 @@ interface Props { export function TimeSpentCell({percentage, total}: Props) { const formattedPercentage = 
formatPercentage(clamp(percentage ?? 0, 0, 1)); - const formattedTotal = getDuration((total ?? 0) / 1000, 1); + const formattedTotal = getDuration((total ?? 0) / 1000, 2, true); const tooltip = tct( 'The application spent [percentage] of its total time on this span.', { @@ -24,10 +25,17 @@ export function TimeSpentCell({percentage, total}: Props) { return ( - {defined(total) ? formattedTotal : '--'} {'('} - {defined(percentage) ? formattedPercentage : '--%'} - {')'} + {defined(total) ? formattedTotal : '--'} + + {' ('} + {defined(percentage) ? formattedPercentage : '--%'} + {')'} + ); } + +const Deemphasized = styled('span')` + color: ${p => p.theme.gray300}; +`; From 2532ad8c3c845e895fe0aea2d06e6c31a87cb396 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Wed, 2 Aug 2023 10:36:46 -0400 Subject: [PATCH 16/44] fix(hybridcloud) Fix startup warnings for typing related issues. (#53969) These changes will resolve the following warnings during startup. ``` Error on parameter model for NotificationsService.get_many: You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2. Without it, there is no way to differentiate required and optional fields when subclassed. Error on parameter model for UserSocialAuthService.revoke_token: Type annotations on RPC methods must be actual type tokens, not strings. ``` --- src/sentry/services/hybrid_cloud/notifications/model.py | 4 +++- src/sentry/services/hybrid_cloud/usersocialauth/impl.py | 7 +++++-- src/sentry/services/hybrid_cloud/usersocialauth/model.py | 4 +++- .../services/hybrid_cloud/usersocialauth/service.py | 9 ++++++--- 4 files changed, 17 insertions(+), 7 deletions(-) diff --git a/src/sentry/services/hybrid_cloud/notifications/model.py b/src/sentry/services/hybrid_cloud/notifications/model.py index 655009f6eb7e29..0b58d19a6e33d6 100644 --- a/src/sentry/services/hybrid_cloud/notifications/model.py +++ b/src/sentry/services/hybrid_cloud/notifications/model.py @@ -3,7 +3,9 @@ # in modules such as this one where hybrid cloud data models or service classes are # defined, because we want to reflect on type annotations and avoid forward references. -from typing import List, Optional, TypedDict +from typing import List, Optional + +from typing_extensions import TypedDict from sentry.notifications.types import ( NotificationScopeType, diff --git a/src/sentry/services/hybrid_cloud/usersocialauth/impl.py b/src/sentry/services/hybrid_cloud/usersocialauth/impl.py index 174c5b5a2c7b43..a6d7e04c3eef66 100644 --- a/src/sentry/services/hybrid_cloud/usersocialauth/impl.py +++ b/src/sentry/services/hybrid_cloud/usersocialauth/impl.py @@ -1,4 +1,7 @@ -from __future__ import annotations +# Please do not use +# from __future__ import annotations +# in modules such as this one where hybrid cloud data models or service classes are +# defined, because we want to reflect on type annotations and avoid forward references. 
from typing import Callable, List, Optional @@ -22,7 +25,7 @@ class DatabaseBackedUserSocialAuthService(UserSocialAuthService): def get_many(self, *, filter: UserSocialAuthFilterArgs) -> List[RpcUserSocialAuth]: return self._FQ.get_many(filter=filter) - def get_one_or_none(self, *, filter: UserSocialAuthFilterArgs) -> RpcUserSocialAuth | None: + def get_one_or_none(self, *, filter: UserSocialAuthFilterArgs) -> Optional[RpcUserSocialAuth]: auths = self.get_many(filter=filter) if len(auths) == 0: return None diff --git a/src/sentry/services/hybrid_cloud/usersocialauth/model.py b/src/sentry/services/hybrid_cloud/usersocialauth/model.py index 45af45ee3af408..214f2087f1c874 100644 --- a/src/sentry/services/hybrid_cloud/usersocialauth/model.py +++ b/src/sentry/services/hybrid_cloud/usersocialauth/model.py @@ -3,7 +3,9 @@ # in modules such as this one where hybrid cloud data models or service classes are # defined, because we want to reflect on type annotations and avoid forward references. -from typing import Any, Dict, TypedDict +from typing import Any, Dict + +from typing_extensions import TypedDict from sentry.services.hybrid_cloud import RpcModel from social_auth.utils import expiration_datetime, get_backend, tokens diff --git a/src/sentry/services/hybrid_cloud/usersocialauth/service.py b/src/sentry/services/hybrid_cloud/usersocialauth/service.py index 30f713dcf40ca9..f59f83ed58f9d6 100644 --- a/src/sentry/services/hybrid_cloud/usersocialauth/service.py +++ b/src/sentry/services/hybrid_cloud/usersocialauth/service.py @@ -1,7 +1,10 @@ -from __future__ import annotations +# Please do not use +# from __future__ import annotations +# in modules such as this one where hybrid cloud data models or service classes are +# defined, because we want to reflect on type annotations and avoid forward references. from abc import abstractmethod -from typing import List, cast +from typing import List, Optional, cast from sentry.services.hybrid_cloud.organization.model import RpcOrganization from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method @@ -34,7 +37,7 @@ def get_many(self, *, filter: UserSocialAuthFilterArgs) -> List[RpcUserSocialAut @rpc_method @abstractmethod - def get_one_or_none(self, *, filter: UserSocialAuthFilterArgs) -> RpcUserSocialAuth | None: + def get_one_or_none(self, *, filter: UserSocialAuthFilterArgs) -> Optional[RpcUserSocialAuth]: """ Returns the first RpcUserSocialAuth based on the given filters. """ From a17e4455dd8a360a5111cab535d953fc9f9b3fc3 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Wed, 2 Aug 2023 10:45:01 -0400 Subject: [PATCH 17/44] feat(starfish): Show full query on database module row hover (#53976) - Extract span description logic to `SpanDescriptionCell` component - Show formatted query on `SpanDescriptionCell` hover, using an `Overlay` - Generalize `useFullSpanDescription` to `useFullSpanFromTrace`. Returns the entire span, not just the description. 
Also teach it some new tricks, like explicit `enabled` behaviour, and merging the `isLoading`, `isFetching`, and `isError` states from both dependent queries - Load full span description on `SpanDescriptionCell` hover --- .../tableCells/spanDescriptionCell.tsx | 155 ++++++++++++++++++ .../queries/useFullSpanDescription.tsx | 31 ---- .../starfish/queries/useFullSpanFromTrace.tsx | 36 ++++ .../starfish/views/spanSummaryPage/index.tsx | 6 +- .../views/starfish/views/spans/spansTable.tsx | 50 +----- 5 files changed, 203 insertions(+), 75 deletions(-) create mode 100644 static/app/views/starfish/components/tableCells/spanDescriptionCell.tsx delete mode 100644 static/app/views/starfish/queries/useFullSpanDescription.tsx create mode 100644 static/app/views/starfish/queries/useFullSpanFromTrace.tsx diff --git a/static/app/views/starfish/components/tableCells/spanDescriptionCell.tsx b/static/app/views/starfish/components/tableCells/spanDescriptionCell.tsx new file mode 100644 index 00000000000000..59a9cae32991c9 --- /dev/null +++ b/static/app/views/starfish/components/tableCells/spanDescriptionCell.tsx @@ -0,0 +1,155 @@ +import {createPortal} from 'react-dom'; +import {Link} from 'react-router'; +import {useTheme} from '@emotion/react'; +import styled from '@emotion/styled'; +import {AnimatePresence} from 'framer-motion'; +import * as qs from 'query-string'; + +import {CodeSnippet} from 'sentry/components/codeSnippet'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; +import {Overlay, PositionWrapper} from 'sentry/components/overlay'; +import {space} from 'sentry/styles/space'; +import {useHoverOverlay, UseHoverOverlayProps} from 'sentry/utils/useHoverOverlay'; +import {useLocation} from 'sentry/utils/useLocation'; +import {OverflowEllipsisTextContainer} from 'sentry/views/starfish/components/textAlign'; +import {useFullSpanFromTrace} from 'sentry/views/starfish/queries/useFullSpanFromTrace'; +import {ModuleName, StarfishFunctions} from 'sentry/views/starfish/types'; +import {extractRoute} from 'sentry/views/starfish/utils/extractRoute'; +import {SQLishFormatter} from 'sentry/views/starfish/utils/sqlish/SQLishFormatter'; +import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; + +interface Props { + moduleName: ModuleName; + description?: string; + endpoint?: string; + endpointMethod?: string; + group?: string; +} + +const formatter = new SQLishFormatter(); + +export function SpanDescriptionCell({ + description, + group, + moduleName, + endpoint, + endpointMethod, +}: Props) { + const location = useLocation(); + + const hoverOverlayProps = useHoverOverlay('overlay', OVERLAY_OPTIONS); + + if (!description) { + return NULL_DESCRIPTION; + } + + const queryString = { + ...location.query, + endpoint, + endpointMethod, + }; + + const sort: string | undefined = queryString?.[QueryParameterNames.SORT]; + + // the spans page uses time_spent_percentage(local), so to persist the sort upon navigation we need to replace + if (sort?.includes(`${StarfishFunctions.TIME_SPENT_PERCENTAGE}()`)) { + queryString[QueryParameterNames.SORT] = sort.replace( + `${StarfishFunctions.TIME_SPENT_PERCENTAGE}()`, + `${StarfishFunctions.TIME_SPENT_PERCENTAGE}(local)` + ); + } + + const formattedDescription = + moduleName === ModuleName.DB ? formatter.toSimpleMarkup(description) : description; + + const overlayContent = moduleName === ModuleName.DB && hoverOverlayProps.isOpen && ( + + ); + + return ( + + {hoverOverlayProps.wrapTrigger( + + {group ? 
( + + {formattedDescription} + + ) : ( + formattedDescription + )} + + )} + {createPortal({overlayContent}, document.body)} + + ); +} + +const DescriptionWrapper = styled('div')` + display: inline-flex; +`; + +const OVERLAY_OPTIONS: UseHoverOverlayProps = { + position: 'right', + isHoverable: true, + skipWrapper: true, +}; + +const NULL_DESCRIPTION = <null>; + +interface QueryDescriptionOverlayProps { + hoverOverlayProps: ReturnType; + group?: string; + shortDescription?: string; +} +function QueryDescriptionOverlay({ + group, + shortDescription, + hoverOverlayProps, +}: QueryDescriptionOverlayProps) { + const theme = useTheme(); + + const { + data: fullSpan, + isLoading, + isFetching, + } = useFullSpanFromTrace(group, Boolean(group)); + + const description = fullSpan?.description ?? shortDescription; + + return description ? ( + + + {/* N.B. A `disabled` query still returns `isLoading: true`, so we also + check the fetching status explicitly. */} + {isLoading && isFetching ? ( + + + + ) : ( + {formatter.toString(description)} + )} + + + ) : null; +} + +const OverlayContent = styled(Overlay)` + max-width: 500px; +`; + +const PaddedSpinner = styled('div')` + padding: ${space(1)}; +`; diff --git a/static/app/views/starfish/queries/useFullSpanDescription.tsx b/static/app/views/starfish/queries/useFullSpanDescription.tsx deleted file mode 100644 index 98c7fa7554f2dd..00000000000000 --- a/static/app/views/starfish/queries/useFullSpanDescription.tsx +++ /dev/null @@ -1,31 +0,0 @@ -import {useEventJSON} from 'sentry/views/starfish/queries/useEventJSON'; -import {useIndexedSpans} from 'sentry/views/starfish/queries/useIndexedSpans'; -import {SpanIndexedFields} from 'sentry/views/starfish/types'; - -// NOTE: Fetching the top one is a bit naive, but works for now. A better -// approach might be to fetch several at a time, and let the hook consumer -// decide how to display them -export function useFullSpanDescription(group: string) { - const {data: indexedSpans} = useIndexedSpans( - { - [SpanIndexedFields.SPAN_GROUP]: group, - }, - 1 - ); - - const firstIndexedSpan = indexedSpans?.[0]; - - const response = useEventJSON( - firstIndexedSpan ? firstIndexedSpan[SpanIndexedFields.TRANSACTION_ID] : undefined, - firstIndexedSpan ? firstIndexedSpan[SpanIndexedFields.PROJECT] : undefined - ); - - const fullSpanDescription = response?.data?.spans?.find( - span => span.span_id === firstIndexedSpan?.[SpanIndexedFields.ID] - )?.description; - - return { - ...response, - data: fullSpanDescription, - }; -} diff --git a/static/app/views/starfish/queries/useFullSpanFromTrace.tsx b/static/app/views/starfish/queries/useFullSpanFromTrace.tsx new file mode 100644 index 00000000000000..6e67aef4112bca --- /dev/null +++ b/static/app/views/starfish/queries/useFullSpanFromTrace.tsx @@ -0,0 +1,36 @@ +import {useEventJSON} from 'sentry/views/starfish/queries/useEventJSON'; +import {useIndexedSpans} from 'sentry/views/starfish/queries/useIndexedSpans'; +import {SpanIndexedFields} from 'sentry/views/starfish/types'; + +// NOTE: Fetching the top one is a bit naive, but works for now. 
A better +// approach might be to fetch several at a time, and let the hook consumer +// decide how to display them +export function useFullSpanFromTrace(group?: string, enabled: boolean = true) { + const filters: {[key: string]: string} = {}; + + if (group) { + filters[SpanIndexedFields.SPAN_GROUP] = group; + } + + const indexedSpansResponse = useIndexedSpans(filters, 1, enabled); + + const firstIndexedSpan = indexedSpansResponse.data?.[0]; + + const eventJSONResponse = useEventJSON( + firstIndexedSpan ? firstIndexedSpan[SpanIndexedFields.TRANSACTION_ID] : undefined, + firstIndexedSpan ? firstIndexedSpan[SpanIndexedFields.PROJECT] : undefined + ); + + const fullSpan = eventJSONResponse?.data?.spans?.find( + span => span.span_id === firstIndexedSpan?.[SpanIndexedFields.ID] + ); + + // N.B. There isn't a great pattern for us to merge the responses together, + // so we're only merging the three most important properties + return { + isLoading: indexedSpansResponse.isLoading || eventJSONResponse.isLoading, + isFetching: indexedSpansResponse.isFetching || eventJSONResponse.isFetching, + isError: indexedSpansResponse.isError || eventJSONResponse.isError, + data: fullSpan, + }; +} diff --git a/static/app/views/starfish/views/spanSummaryPage/index.tsx b/static/app/views/starfish/views/spanSummaryPage/index.tsx index 34bb2f145ca2cd..711b1d9dcedc55 100644 --- a/static/app/views/starfish/views/spanSummaryPage/index.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/index.tsx @@ -28,7 +28,7 @@ import {CountCell} from 'sentry/views/starfish/components/tableCells/countCell'; import DurationCell from 'sentry/views/starfish/components/tableCells/durationCell'; import ThroughputCell from 'sentry/views/starfish/components/tableCells/throughputCell'; import {TimeSpentCell} from 'sentry/views/starfish/components/tableCells/timeSpentCell'; -import {useFullSpanDescription} from 'sentry/views/starfish/queries/useFullSpanDescription'; +import {useFullSpanFromTrace} from 'sentry/views/starfish/queries/useFullSpanFromTrace'; import { SpanSummaryQueryFilters, useSpanMetrics, @@ -63,7 +63,7 @@ function SpanSummaryPage({params, location}: Props) { const {groupId} = params; const {transaction, transactionMethod, endpoint, endpointMethod} = location.query; - const {data: fullSpanDescription} = useFullSpanDescription(groupId); + const {data: fullSpan} = useFullSpanFromTrace(groupId); const queryFilter: SpanSummaryQueryFilters = endpoint ? {transactionName: endpoint, 'transaction.method': endpointMethod} @@ -233,7 +233,7 @@ function SpanSummaryPage({params, location}: Props) { span={{ ...span, [SpanMetricsFields.SPAN_DESCRIPTION]: - fullSpanDescription ?? + fullSpan?.description ?? 
spanMetrics?.[SpanMetricsFields.SPAN_DESCRIPTION], }} /> diff --git a/static/app/views/starfish/views/spans/spansTable.tsx b/static/app/views/starfish/views/spans/spansTable.tsx index 4ba48ab004cf22..ed27822ae11784 100644 --- a/static/app/views/starfish/views/spans/spansTable.tsx +++ b/static/app/views/starfish/views/spans/spansTable.tsx @@ -1,13 +1,11 @@ import {Fragment} from 'react'; import {browserHistory} from 'react-router'; import {Location} from 'history'; -import * as qs from 'query-string'; import GridEditable, { COL_WIDTH_UNDEFINED, GridColumnHeader, } from 'sentry/components/gridEditable'; -import Link from 'sentry/components/links/link'; import Pagination, {CursorHandler} from 'sentry/components/pagination'; import {Organization} from 'sentry/types'; import {defined} from 'sentry/utils'; @@ -19,20 +17,16 @@ import {decodeScalar} from 'sentry/utils/queryString'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import {renderHeadCell} from 'sentry/views/starfish/components/tableCells/renderHeadCell'; -import {OverflowEllipsisTextContainer} from 'sentry/views/starfish/components/textAlign'; +import {SpanDescriptionCell} from 'sentry/views/starfish/components/tableCells/spanDescriptionCell'; import {useSpanList} from 'sentry/views/starfish/queries/useSpanList'; import { ModuleName, SpanMetricsFields, StarfishFunctions, } from 'sentry/views/starfish/types'; -import {extractRoute} from 'sentry/views/starfish/utils/extractRoute'; -import {SQLishFormatter} from 'sentry/views/starfish/utils/sqlish/SQLishFormatter'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types'; -const formatter = new SQLishFormatter(); - type Row = { 'avg(span.self_time)': number; 'http_error_count()': number; @@ -157,42 +151,16 @@ function renderBodyCell( organization: Organization, endpoint?: string, endpointMethod?: string -): React.ReactNode { +) { if (column.key === SPAN_DESCRIPTION) { - const queryString = { - ...location.query, - endpoint, - endpointMethod, - }; - const sort: string | undefined = queryString?.[QueryParameterNames.SORT]; - - // the spans page uses time_spent_percentage(local), so to persist the sort upon navigation we need to replace - if (sort?.includes(`${TIME_SPENT_PERCENTAGE}()`)) { - queryString[QueryParameterNames.SORT] = sort.replace( - `${TIME_SPENT_PERCENTAGE}()`, - `${TIME_SPENT_PERCENTAGE}(local)` - ); - } - - const description = - moduleName === ModuleName.DB - ? formatter.toSimpleMarkup(row[SPAN_DESCRIPTION]) - : row[SPAN_DESCRIPTION]; - return ( - - {row[SPAN_GROUP] ? ( - - {description || ''} - - ) : ( - description || '' - )} - + ); } From d71b7c7b4c0a5c34f593787df6c7394f106ed1ee Mon Sep 17 00:00:00 2001 From: Filippo Pacifici Date: Wed, 2 Aug 2023 08:00:28 -0700 Subject: [PATCH 18/44] feat(automator) Avoid crashing on invalid options (#53814) The original idea would be that the automator would be strict and refuse to do anything if any of the options in the file was invalid (not updatable, non existing, bad type). That turns out to be too restrictive as it breaks too often for drift or bad PRs. This makes the automator less restrictive. Those three types of errors: invalid option, invalid type or option not enabled will be logged as errors and visible in sentry while the valid ones will still be updated. 
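For reference, a minimal, self-contained sketch (not taken from this patch) of the lenient validate-then-apply flow described above. The option registry, the writability set, and the helper names in it are invented for illustration; the real behaviour is implemented in configoptions.py in the diff below.

    # Illustrative sketch only -- the registry and helpers here are hypothetical.
    import logging
    from typing import Any, Dict, Set, Tuple

    logger = logging.getLogger("options_automator")

    EXPECTED_TYPES: Dict[str, type] = {"int_option": int, "list_option": list}  # assumed registry
    NOT_WRITABLE: Set[str] = {"readonly_option"}  # assumed channel restrictions


    def partition_options(payload: Dict[str, Any]) -> Tuple[Dict[str, Any], Set[str]]:
        """Split a patch payload into applicable updates and rejected keys."""
        invalid: Set[str] = set()
        for key, value in payload.items():
            if key not in EXPECTED_TYPES:
                logger.error("Option %s is not registered and cannot be updated.", key)
                invalid.add(key)
            elif key in NOT_WRITABLE:
                logger.error("Option %s cannot be updated: not writable on this channel.", key)
                invalid.add(key)
            elif not isinstance(value, EXPECTED_TYPES[key]):
                logger.error(
                    "Option %s has invalid type: got %s, expected %s.",
                    key,
                    type(value).__name__,
                    EXPECTED_TYPES[key].__name__,
                )
                invalid.add(key)
        valid = {k: v for k, v in payload.items() if k not in invalid}
        return valid, invalid


    if __name__ == "__main__":
        updates, rejected = partition_options(
            {"int_option": 50, "readonly_option": 30, "invalid_type": [1, 2, 3]}
        )
        # Only the valid subset would be applied; the run status still reports the rejects.
        status = "update_failed" if rejected else "success"
        print(updates, rejected, status)

Keeping the rejected keys in a separate set also makes it cheap to report an overall update_failed / drift / success status at the end of the run, which is what the metrics changes in this patch do.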
--- src/sentry/runner/commands/configoptions.py | 151 +++++++++++------- tests/sentry/runner/commands/badpatch.yaml | 1 + .../runner/commands/test_configoptions.py | 16 +- 3 files changed, 108 insertions(+), 60 deletions(-) diff --git a/src/sentry/runner/commands/configoptions.py b/src/sentry/runner/commands/configoptions.py index 51a8d3eb77b622..a4ec1ece445b87 100644 --- a/src/sentry/runner/commands/configoptions.py +++ b/src/sentry/runner/commands/configoptions.py @@ -124,8 +124,11 @@ def configoptions(ctx, dry_run: bool, file: Optional[str], hide_drift: bool) -> to apply changes is UpdateChannel.AUTOMATOR. """ + import logging + from sentry import options - from sentry.utils import metrics + + logger = logging.getLogger("sentry.options_automator") ctx.obj["dry_run"] = dry_run @@ -136,20 +139,38 @@ def configoptions(ctx, dry_run: bool, file: Optional[str], hide_drift: bool) -> ctx.obj["options_to_update"] = options_to_update drifted_options = set() + invalid_options = set() for key, value in options_to_update.items(): - not_writable_reason = options.can_update(key, value, options.UpdateChannel.AUTOMATOR) + try: + not_writable_reason = options.can_update(key, value, options.UpdateChannel.AUTOMATOR) - if not_writable_reason and not_writable_reason != options.NotWritableReason.DRIFTED: - click.echo( - f"Invalid option. {key} cannot be updated. Reason {not_writable_reason.value}" + if not_writable_reason and not_writable_reason != options.NotWritableReason.DRIFTED: + logger.error( + "Option %s is invalid. and cannot be updated. Reason: %s", + key, + not_writable_reason.value, + ) + click.echo( + f"Invalid option. {key} cannot be updated. Reason {not_writable_reason.value}" + ) + invalid_options.add(key) + elif not_writable_reason == options.NotWritableReason.DRIFTED: + drifted_options.add(key) + except options.UnknownOption: + invalid_options.add(key) + logger.error( + "Option %s is not registered. and cannot be updated.", + key, ) - metrics.incr( - "options_automator.run", tags={"status": "invalid_option"}, sample_rate=1.0 + + opt = options.lookup_key(key) + if not opt.type.test(value): + invalid_options.add(key) + logger.error( + "Option %s has invalid type. got %s, expected %s.", key, type(value), opt.type ) - exit(-1) - elif not_writable_reason == options.NotWritableReason.DRIFTED: - drifted_options.add(key) + ctx.obj["invalid_options"] = invalid_options ctx.obj["drifted_options"] = drifted_options ctx.obj["hide_drift"] = hide_drift @@ -169,22 +190,31 @@ def patch(ctx) -> None: if dry_run: click.echo("!!! Dry-run flag on. 
No update will be performed.") + invalid_options = ctx.obj["invalid_options"] for key, value in ctx.obj["options_to_update"].items(): - try: - _attempt_update( - key, value, ctx.obj["drifted_options"], dry_run, bool(ctx.obj["hide_drift"]) - ) - except Exception: - metrics.incr( - "options_automator.run", - tags={"status": "update_failed"}, - sample_rate=1.0, - ) - raise + if key not in invalid_options: + try: + _attempt_update( + key, value, ctx.obj["drifted_options"], dry_run, bool(ctx.obj["hide_drift"]) + ) + except Exception: + metrics.incr( + "options_automator.run", + tags={"status": "update_failed"}, + sample_rate=1.0, + ) + raise + + if invalid_options: + status = "update_failed" + elif ctx.obj["drifted_options"]: + status = "drift" + else: + status = "success" metrics.incr( "options_automator.run", - tags={"status": "drift" if not ctx.obj["drifted_options"] else "success"}, + tags={"status": status}, sample_rate=1.0, ) @@ -209,44 +239,53 @@ def sync(ctx): all_options = options.filter(options.FLAG_AUTOMATOR_MODIFIABLE) options_to_update = ctx.obj["options_to_update"] + invalid_options = ctx.obj["invalid_options"] drift_found = bool(ctx.obj["drifted_options"]) for opt in all_options: - if opt.name in options_to_update: - try: - _attempt_update( - opt.name, - options_to_update[opt.name], - ctx.obj["drifted_options"], - dry_run, - bool(ctx.obj["hide_drift"]), - ) - except Exception: - metrics.incr( - "options_automator.run", - tags={"status": "update_failed"}, - sample_rate=1.0, - ) - raise - else: - if options.isset(opt.name): - if options.get_last_update_channel(opt.name) == options.UpdateChannel.AUTOMATOR: - if not dry_run: - try: - options.delete(opt.name) - except Exception: - metrics.incr( - "options_automator.run", - tags={"status": "update_failed"}, - sample_rate=1.0, - ) - raise - click.echo(UNSET_MSG % opt.name) - else: - click.echo(DRIFT_MSG % opt.name) - drift_found = True + if opt.name not in invalid_options: + if opt.name in options_to_update: + try: + _attempt_update( + opt.name, + options_to_update[opt.name], + ctx.obj["drifted_options"], + dry_run, + bool(ctx.obj["hide_drift"]), + ) + except Exception: + metrics.incr( + "options_automator.run", + tags={"status": "update_failed"}, + sample_rate=1.0, + ) + raise + else: + if options.isset(opt.name): + if options.get_last_update_channel(opt.name) == options.UpdateChannel.AUTOMATOR: + if not dry_run: + try: + options.delete(opt.name) + except Exception: + metrics.incr( + "options_automator.run", + tags={"status": "update_failed"}, + sample_rate=1.0, + ) + raise + click.echo(UNSET_MSG % opt.name) + else: + click.echo(DRIFT_MSG % opt.name) + drift_found = True + + if invalid_options: + status = "update_failed" + elif drift_found: + status = "drift" + else: + status = "success" metrics.incr( "options_automator.run", - tags={"status": "drift" if not drift_found else "success"}, + tags={"status": status}, sample_rate=1.0, ) diff --git a/tests/sentry/runner/commands/badpatch.yaml b/tests/sentry/runner/commands/badpatch.yaml index f5b8af65c1cb0d..b20d558f5fa793 100644 --- a/tests/sentry/runner/commands/badpatch.yaml +++ b/tests/sentry/runner/commands/badpatch.yaml @@ -1,3 +1,4 @@ options: int_option: 50 readonly_option: 30 + invalid_type: [1,2,3] diff --git a/tests/sentry/runner/commands/test_configoptions.py b/tests/sentry/runner/commands/test_configoptions.py index 8322dc8c94a823..acf8af61916cc6 100644 --- a/tests/sentry/runner/commands/test_configoptions.py +++ b/tests/sentry/runner/commands/test_configoptions.py @@ -30,6 
+30,7 @@ def register_options(self) -> Generator[None, None, None]: options.register("drifted_option", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) options.register("change_channel_option", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) options.register("to_unset_option", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) + options.register("invalid_type", default=15, flags=FLAG_AUTOMATOR_MODIFIABLE) yield @@ -41,6 +42,7 @@ def register_options(self) -> Generator[None, None, None]: options.unregister("drifted_option") options.unregister("change_channel_option") options.unregister("to_unset_option") + options.unregister("invalid_type") @pytest.fixture(autouse=True) def set_options(self) -> None: @@ -76,6 +78,7 @@ def _clean_cache(self) -> None: options.default_store.delete_cache(options.lookup_key("list_option")) options.default_store.delete_cache(options.lookup_key("drifted_option")) options.default_store.delete_cache(options.lookup_key("change_channel_option")) + options.default_store.delete_cache(options.lookup_key("invalid_type")) def test_patch(self): def assert_not_set() -> None: @@ -207,7 +210,12 @@ def test_bad_patch(self): "--file=tests/sentry/runner/commands/badpatch.yaml", "patch", ) - assert rv.exit_code == -1 - assert "Invalid option. readonly_option cannot be updated. Reason readonly" in rv.output - # Verify this was not updated - assert options.get("int_option") == 20 + + assert rv.exit_code == 0, rv.output + + assert SET_MSG % ("int_option", 50) in rv.output + assert "Option invalid_type has invalid type." in rv.output + + assert not options.isset("readonly_option") + assert not options.isset("invalid_type") + assert options.get("int_option") == 50 From d6724dc0091c9df809026e70289646b0680fe6c7 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Wed, 2 Aug 2023 11:05:26 -0400 Subject: [PATCH 19/44] fix(starfish): Parse slashes in SQL (#54024) - Allow parsing slashes - Add spec for type casts (while I'm at it) Closes JAVASCRIPT-2NG3 Closes JAVASCRIPT-2NGK --- static/app/views/starfish/utils/sqlish/SQLishParser.spec.tsx | 2 ++ static/app/views/starfish/utils/sqlish/sqlish.pegjs | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/static/app/views/starfish/utils/sqlish/SQLishParser.spec.tsx b/static/app/views/starfish/utils/sqlish/SQLishParser.spec.tsx index 1c41567fd66ef5..f1ad16aedb4190 100644 --- a/static/app/views/starfish/utils/sqlish/SQLishParser.spec.tsx +++ b/static/app/views/starfish/utils/sqlish/SQLishParser.spec.tsx @@ -10,6 +10,8 @@ describe('SQLishParser', function () { 'SELECT *;', // Wildcards 'WHERE age = 10;', // Equality 'WHERE age != 10;', // Inequality + 'total / time', // Division + 'sum(age)::numeric(0, 5)', // Type casting 'WHERE age > 10 AND age < 20;', // Comparison "WHERE$1 ILIKE ' % ' || 'text'", // Conditionals 'SELECT id, name;', // Column lists diff --git a/static/app/views/starfish/utils/sqlish/sqlish.pegjs b/static/app/views/starfish/utils/sqlish/sqlish.pegjs index ac4ce862d2ebc6..bd2e2709ad29ef 100644 --- a/static/app/views/starfish/utils/sqlish/sqlish.pegjs +++ b/static/app/views/starfish/utils/sqlish/sqlish.pegjs @@ -30,4 +30,4 @@ Whitespace = Whitespace:[\n\t ]+ { return { type: 'Whitespace', content: Whitespace.join("") } } GenericToken - = GenericToken:[a-zA-Z0-9"'`_\-.()=><:,*;!\[\]?$%|]+ { return { type: 'GenericToken', content: GenericToken.join('') } } + = GenericToken:[a-zA-Z0-9"'`_\-.()=><:,*;!\[\]?$%|/]+ { return { type: 'GenericToken', content: GenericToken.join('') } } From 
3f64a0595900a07a468fab3e9708ba31c56aa470 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Wed, 2 Aug 2023 11:48:13 -0400 Subject: [PATCH 20/44] =?UTF-8?q?chore(statistical-detectors):=20Add=20opt?= =?UTF-8?q?ions=20to=20enable=20statistical=20detec=E2=80=A6=20(#54025)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit …tors Options to allow us to control if statistical detectors are enabled or not and which projects to run it on. --- src/sentry/options/defaults.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 8211d1b7c529e0..ef40f0a83a4805 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -1422,3 +1422,22 @@ # The flag activates whether to send group attributes messages to kafka register("issues.group_attributes.send_kafka", default=False, flags=FLAG_MODIFIABLE_BOOL) + +# Enables statistical detectors for a project +register( + "statistical_detectors.enable", + default=False, + flags=FLAG_MODIFIABLE_BOOL | FLAG_AUTOMATOR_MODIFIABLE, +) +register( + "statistical_detectors.enable.projects.performance", + type=Sequence, + default=[], + flags=FLAG_AUTOMATOR_MODIFIABLE, +) +register( + "statistical_detectors.enable.projects.profiling", + type=Sequence, + default=[], + flags=FLAG_AUTOMATOR_MODIFIABLE, +) From c840864a3aefbd3c1bd6efa712f865bffe78bc40 Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Wed, 2 Aug 2023 09:12:29 -0700 Subject: [PATCH 21/44] ref(hybrid-cloud): Switches organization monitor deletion to use region scheduled deletions (#53971) --- .../endpoints/organization_monitor_details.py | 12 ++++-------- .../test_organization_monitor_details.py | 16 +++++++--------- 2 files changed, 11 insertions(+), 17 deletions(-) diff --git a/src/sentry/monitors/endpoints/organization_monitor_details.py b/src/sentry/monitors/endpoints/organization_monitor_details.py index dd5af353a183e8..31a734b0cf8f3d 100644 --- a/src/sentry/monitors/endpoints/organization_monitor_details.py +++ b/src/sentry/monitors/endpoints/organization_monitor_details.py @@ -21,13 +21,7 @@ from sentry.apidocs.parameters import GlobalParams, MonitorParams from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.constants import ObjectStatus -from sentry.models import ( - RegionScheduledDeletion, - Rule, - RuleActivity, - RuleActivityType, - ScheduledDeletion, -) +from sentry.models import RegionScheduledDeletion, Rule, RuleActivity, RuleActivityType from sentry.monitors.models import Monitor, MonitorEnvironment, MonitorStatus from sentry.monitors.serializers import MonitorSerializer, MonitorSerializerResponse from sentry.monitors.utils import create_alert_rule, update_alert_rule @@ -243,7 +237,9 @@ def delete(self, request: Request, organization, project, monitor) -> Response: if type(monitor_object) == Monitor: monitor_object.update(slug=get_random_string(length=24)) - schedule = ScheduledDeletion.schedule(monitor_object, days=0, actor=request.user) + schedule = RegionScheduledDeletion.schedule( + monitor_object, days=0, actor=request.user + ) self.create_audit_entry( request=request, organization=project.organization, diff --git a/tests/sentry/monitors/endpoints/test_organization_monitor_details.py b/tests/sentry/monitors/endpoints/test_organization_monitor_details.py index eeffc4103adf34..6957d8ad82a7fb 100644 --- a/tests/sentry/monitors/endpoints/test_organization_monitor_details.py +++ 
b/tests/sentry/monitors/endpoints/test_organization_monitor_details.py @@ -1,7 +1,7 @@ import pytest from sentry.constants import ObjectStatus -from sentry.models import Environment, Rule, RuleActivity, RuleActivityType, ScheduledDeletion +from sentry.models import Environment, RegionScheduledDeletion, Rule, RuleActivity, RuleActivityType from sentry.monitors.models import Monitor, MonitorEnvironment, ScheduleType from sentry.testutils.cases import MonitorTestCase from sentry.testutils.silo import region_silo_test @@ -431,8 +431,9 @@ def test_simple(self): assert monitor.status == ObjectStatus.PENDING_DELETION # Slug should update on deletion assert monitor.slug != old_slug - # ScheduledDeletion only available in control silo - assert ScheduledDeletion.objects.filter(object_id=monitor.id, model_name="Monitor").exists() + assert RegionScheduledDeletion.objects.filter( + object_id=monitor.id, model_name="Monitor" + ).exists() def test_mismatched_org_slugs(self): monitor = self._create_monitor() @@ -455,8 +456,7 @@ def test_environment(self): monitor_environment = MonitorEnvironment.objects.get(id=monitor_environment.id) assert monitor_environment.status == ObjectStatus.PENDING_DELETION - # ScheduledDeletion only available in control silo - assert ScheduledDeletion.objects.filter( + assert RegionScheduledDeletion.objects.filter( object_id=monitor_environment.id, model_name="MonitorEnvironment" ).exists() @@ -478,15 +478,13 @@ def test_multiple_environments(self): monitor_environment_a = MonitorEnvironment.objects.get(id=monitor_environment_a.id) assert monitor_environment_a.status == ObjectStatus.PENDING_DELETION - # ScheduledDeletion only available in control silo - assert ScheduledDeletion.objects.filter( + assert RegionScheduledDeletion.objects.filter( object_id=monitor_environment_a.id, model_name="MonitorEnvironment" ).exists() monitor_environment_b = MonitorEnvironment.objects.get(id=monitor_environment_b.id) assert monitor_environment_b.status == ObjectStatus.PENDING_DELETION - # ScheduledDeletion only available in control silo - assert ScheduledDeletion.objects.filter( + assert RegionScheduledDeletion.objects.filter( object_id=monitor_environment_b.id, model_name="MonitorEnvironment" ).exists() From d60486610efac0168a078309e5141f1b0e7a5f33 Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Wed, 2 Aug 2023 10:13:14 -0700 Subject: [PATCH 22/44] chore(github-comments): clean up feature flag (#53712) --- src/sentry/conf/server.py | 2 -- src/sentry/features/__init__.py | 1 - src/sentry/tasks/commit_context.py | 6 ++--- .../tasks/integrations/github/pr_comment.py | 12 +++------ .../api/serializers/test_organization.py | 1 - .../integrations/github/test_pr_comment.py | 10 ------- tests/sentry/tasks/test_commit_context.py | 27 ------------------- 7 files changed, 6 insertions(+), 53 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 3e9f4abfb8f437..6858eb505d0df9 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1671,8 +1671,6 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "organizations:org-auth-tokens": False, # Enable detecting SDK crashes during event processing "organizations:sdk-crash-detection": False, - # Enables commenting on PRs from the Sentry comment bot. - "organizations:pr-comment-bot": True, # Enables slack channel lookup via schedule message "organizations:slack-use-new-lookup": False, # Enable functionality for recap server polling. 
diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 2f0073b9527112..13b6ed1dfcfba5 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -258,7 +258,6 @@ default_manager.add("organizations:codecov-commit-sha-from-git-blame", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:ds-sliding-window", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:ds-sliding-window-org", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) -default_manager.add("organizations:pr-comment-bot", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:ds-org-recalibration", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:slack-use-new-lookup", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:slack-disable-on-broken", OrganizationFeature, FeatureHandlerStrategy.REMOTE) diff --git a/src/sentry/tasks/commit_context.py b/src/sentry/tasks/commit_context.py index c74a853f961ac4..c5a40b6c063277 100644 --- a/src/sentry/tasks/commit_context.py +++ b/src/sentry/tasks/commit_context.py @@ -6,7 +6,7 @@ from django.utils import timezone from sentry_sdk import set_tag -from sentry import analytics, features +from sentry import analytics from sentry.api.serializers.models.release import get_users_for_authors from sentry.integrations.base import IntegrationInstallation from sentry.integrations.utils.commit_context import find_commit_context_for_event @@ -361,9 +361,7 @@ def process_commit_context( }, # Updates date of an existing owner, since we just matched them with this new event ) - if features.has( - "organizations:pr-comment-bot", project.organization - ) and OrganizationOption.objects.get_value( + if OrganizationOption.objects.get_value( organization=project.organization, key="sentry:github_pr_bot", default=True, diff --git a/src/sentry/tasks/integrations/github/pr_comment.py b/src/sentry/tasks/integrations/github/pr_comment.py index 2e8069885bcb98..a27f483fb3d50d 100644 --- a/src/sentry/tasks/integrations/github/pr_comment.py +++ b/src/sentry/tasks/integrations/github/pr_comment.py @@ -11,7 +11,6 @@ from snuba_sdk import Column, Condition, Direction, Entity, Function, Op, OrderBy, Query from snuba_sdk import Request as SnubaRequest -from sentry import features from sentry.integrations.github.client import GitHubAppsClient from sentry.models import Group, GroupOwnerType, Project from sentry.models.options.organization_option import OrganizationOption @@ -187,13 +186,10 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): metrics.incr(METRICS_BASE.format(key="error"), tags={"type": "missing_org"}) return - if not ( - features.has("organizations:pr-comment-bot", organization) - and OrganizationOption.objects.get_value( - organization=organization, - key="sentry:github_pr_bot", - default=True, - ) + if not OrganizationOption.objects.get_value( + organization=organization, + key="sentry:github_pr_bot", + default=True, ): logger.error("github.pr_comment.option_missing", extra={"organization_id": org_id}) return diff --git a/tests/sentry/api/serializers/test_organization.py b/tests/sentry/api/serializers/test_organization.py index f81b334b42215d..17f55978c48a74 100644 --- a/tests/sentry/api/serializers/test_organization.py +++ b/tests/sentry/api/serializers/test_organization.py @@ -87,7 +87,6 @@ def test_simple(self): "symbol-sources", "team-insights", "team-roles", 
- "pr-comment-bot", "performance-issues-search", "transaction-name-normalize", "transaction-name-mark-scrubbed-as-sanitized", diff --git a/tests/sentry/tasks/integrations/github/test_pr_comment.py b/tests/sentry/tasks/integrations/github/test_pr_comment.py index 34a13e2066d5e4..d8fc77c031624f 100644 --- a/tests/sentry/tasks/integrations/github/test_pr_comment.py +++ b/tests/sentry/tasks/integrations/github/test_pr_comment.py @@ -25,7 +25,6 @@ pr_to_issue_query, ) from sentry.testutils.cases import IntegrationTestCase, SnubaTestCase, TestCase -from sentry.testutils.helpers import with_feature from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.silo import region_silo_test from sentry.utils.cache import cache @@ -337,7 +336,6 @@ def setUp(self): @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @with_feature("organizations:pr-comment-bot") @responses.activate def test_comment_workflow(self, mock_metrics, get_jwt, mock_issues): groups = [g.id for g in Group.objects.all()] @@ -370,7 +368,6 @@ def test_comment_workflow(self, mock_metrics, get_jwt, mock_issues): @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @with_feature("organizations:pr-comment-bot") @responses.activate @freeze_time(datetime(2023, 6, 8, 0, 0, 0, tzinfo=timezone.utc)) def test_comment_workflow_updates_comment(self, mock_metrics, get_jwt, mock_issues): @@ -420,7 +417,6 @@ def test_comment_workflow_updates_comment(self, mock_metrics, get_jwt, mock_issu @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @with_feature("organizations:pr-comment-bot") @responses.activate def test_comment_workflow_api_error(self, mock_metrics, get_jwt, mock_issues): cache.set(self.cache_key, True, timedelta(minutes=5).total_seconds()) @@ -448,7 +444,6 @@ def test_comment_workflow_api_error(self, mock_metrics, get_jwt, mock_issues): @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @with_feature("organizations:pr-comment-bot") @responses.activate def test_comment_workflow_api_error_locked_issue(self, mock_metrics, get_jwt, mock_issues): cache.set(self.cache_key, True, timedelta(minutes=5).total_seconds()) @@ -480,7 +475,6 @@ def test_comment_workflow_api_error_locked_issue(self, mock_metrics, get_jwt, mo @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @with_feature("organizations:pr-comment-bot") @responses.activate def test_comment_workflow_api_error_rate_limited(self, mock_metrics, get_jwt, mock_issues): cache.set(self.cache_key, True, timedelta(minutes=5).total_seconds()) @@ -526,7 +520,6 @@ def test_comment_workflow_missing_org(self, mock_metrics, mock_issues, mock_issu "github_pr_comment.error", tags={"type": "missing_org"} ) - 
@with_feature("organizations:pr-comment-bot") @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") def test_comment_workflow_missing_org_option(self, mock_issues): OrganizationOption.objects.set_value( @@ -539,7 +532,6 @@ def test_comment_workflow_missing_org_option(self, mock_issues): @patch("sentry.tasks.integrations.github.pr_comment.get_top_5_issues_by_count") @patch("sentry.models.Project.objects.get_from_cache") @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @with_feature("organizations:pr-comment-bot") def test_comment_workflow_missing_project(self, mock_metrics, mock_project, mock_issues): # Project.DoesNotExist should trigger the cache to release the key cache.set(self.cache_key, True, timedelta(minutes=5).total_seconds()) @@ -560,7 +552,6 @@ def test_comment_workflow_missing_project(self, mock_metrics, mock_project, mock @patch("sentry.models.Repository.objects") @patch("sentry.tasks.integrations.github.pr_comment.format_comment") @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @with_feature("organizations:pr-comment-bot") def test_comment_workflow_missing_repo( self, mock_metrics, mock_format_comment, mock_repository, mock_issues ): @@ -586,7 +577,6 @@ def test_comment_workflow_missing_repo( ) @patch("sentry.tasks.integrations.github.pr_comment.format_comment") @patch("sentry.tasks.integrations.github.pr_comment.metrics") - @with_feature("organizations:pr-comment-bot") def test_comment_workflow_missing_integration( self, mock_metrics, mock_format_comment, mock_issues ): diff --git a/tests/sentry/tasks/test_commit_context.py b/tests/sentry/tasks/test_commit_context.py index 12311fa20d0f91..641633127c3b5a 100644 --- a/tests/sentry/tasks/test_commit_context.py +++ b/tests/sentry/tasks/test_commit_context.py @@ -16,7 +16,6 @@ from sentry.snuba.sessions_v2 import isoformat_z from sentry.tasks.commit_context import process_commit_context from sentry.testutils.cases import IntegrationTestCase, TestCase -from sentry.testutils.helpers import with_feature from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.silo import region_silo_test from sentry.utils.committers import get_frame_paths @@ -476,7 +475,6 @@ def add_responses(self): json=[{"merge_commit_sha": self.pull_request.merge_commit_sha}], ) - @with_feature("organizations:pr-comment-bot") def test_gh_comment_not_github(self, mock_comment_workflow): """Non github repos shouldn't be commented on""" self.repo.provider = "integrations:gitlab" @@ -492,20 +490,6 @@ def test_gh_comment_not_github(self, mock_comment_workflow): ) assert not mock_comment_workflow.called - def test_gh_comment_feature_flag(self, mock_comment_workflow): - """No comments on org with feature flag disabled""" - with self.tasks(): - event_frames = get_frame_paths(self.event) - process_commit_context( - event_id=self.event.event_id, - event_platform=self.event.platform, - event_frames=event_frames, - group_id=self.event.group_id, - project_id=self.event.project_id, - ) - assert not mock_comment_workflow.called - - @with_feature("organizations:pr-comment-bot") def test_gh_comment_org_option(self, mock_comment_workflow): """No comments on org with organization option disabled""" OrganizationOption.objects.set_value( @@ -523,7 +507,6 @@ def test_gh_comment_org_option(self, mock_comment_workflow): ) assert not mock_comment_workflow.called - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") 
@responses.activate def test_gh_comment_no_pr_from_api(self, get_jwt, mock_comment_workflow): @@ -553,7 +536,6 @@ def test_gh_comment_no_pr_from_api(self, get_jwt, mock_comment_workflow): ) assert not mock_comment_workflow.called - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @patch("sentry_sdk.capture_exception") @responses.activate @@ -584,7 +566,6 @@ def test_gh_comment_api_error(self, mock_capture_exception, get_jwt, mock_commen assert mock_capture_exception.called assert not mock_comment_workflow.called - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate def test_gh_comment_commit_not_in_default_branch(self, get_jwt, mock_comment_workflow): @@ -613,7 +594,6 @@ def test_gh_comment_commit_not_in_default_branch(self, get_jwt, mock_comment_wor ) assert not mock_comment_workflow.called - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate def test_gh_comment_no_pr_from_query(self, get_jwt, mock_comment_workflow): @@ -633,7 +613,6 @@ def test_gh_comment_no_pr_from_query(self, get_jwt, mock_comment_workflow): ) assert not mock_comment_workflow.called - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate def test_gh_comment_pr_too_old(self, get_jwt, mock_comment_workflow): @@ -655,7 +634,6 @@ def test_gh_comment_pr_too_old(self, get_jwt, mock_comment_workflow): assert not mock_comment_workflow.called assert len(PullRequestCommit.objects.all()) == 0 - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate def test_gh_comment_repeat_issue(self, get_jwt, mock_comment_workflow): @@ -677,7 +655,6 @@ def test_gh_comment_repeat_issue(self, get_jwt, mock_comment_workflow): assert not mock_comment_workflow.called assert len(PullRequestCommit.objects.all()) == 0 - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate def test_gh_comment_create_queued(self, get_jwt, mock_comment_workflow): @@ -701,7 +678,6 @@ def test_gh_comment_create_queued(self, get_jwt, mock_comment_workflow): assert len(pr_commits) == 1 assert pr_commits[0].commit == self.commit - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate def test_gh_comment_create_queued_existing_pr_commit(self, get_jwt, mock_comment_workflow): @@ -728,7 +704,6 @@ def test_gh_comment_create_queued_existing_pr_commit(self, get_jwt, mock_comment assert len(pr_commits) == 1 assert pr_commits[0] == pr_commit - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate def test_gh_comment_update_queue(self, get_jwt, mock_comment_workflow): @@ -752,7 +727,6 @@ def test_gh_comment_update_queue(self, get_jwt, mock_comment_workflow): assert len(pr_commits) == 1 assert pr_commits[0].commit == self.commit - @with_feature("organizations:pr-comment-bot") def test_gh_comment_no_repo(self, mock_comment_workflow): """No comments on suspect commit if no repo row exists""" self.repo.delete() @@ -768,7 +742,6 @@ def test_gh_comment_no_repo(self, 
mock_comment_workflow): assert not mock_comment_workflow.called assert len(PullRequestCommit.objects.all()) == 0 - @with_feature("organizations:pr-comment-bot") @patch("sentry.integrations.github.client.get_jwt", return_value=b"jwt_token_1") @responses.activate def test_gh_comment_debounces(self, get_jwt, mock_comment_workflow): From 8a919ca685e0179c57a71815acd66ab5d2a9f60e Mon Sep 17 00:00:00 2001 From: Dameli Ushbayeva Date: Wed, 2 Aug 2023 13:27:09 -0400 Subject: [PATCH 23/44] feat(perf): Set default values for perf detection to 1.0 (#53985) Set defaults to `1.0` so all detectors and general perf-issue detection options are enabled by default. That way detection will work for s4s and self-hosted. --- src/sentry/options/defaults.py | 36 +++++++++---------- .../test_performance_detection.py | 8 ++--- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index ef40f0a83a4805..6237bb2c93f30c 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -1088,12 +1088,12 @@ ) # Performance issue option for *all* performance issues detection -register("performance.issues.all.problem-detection", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) +register("performance.issues.all.problem-detection", default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE) # Individual system-wide options in case we need to turn off specific detectors for load concerns, ignoring the set project options. register( "performance.issues.compressed_assets.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( @@ -1103,11 +1103,11 @@ "performance.issues.compressed_assets.ea-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE ) register( - "performance.issues.compressed_assets.ga-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE + "performance.issues.compressed_assets.ga-rollout", default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE ) register( "performance.issues.consecutive_db.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( @@ -1117,36 +1117,36 @@ "performance.issues.consecutive_db.ea-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE ) register( - "performance.issues.consecutive_db.ga-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE + "performance.issues.consecutive_db.ga-rollout", default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE ) register( "performance.issues.n_plus_one_db.problem-detection", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( "performance.issues.n_plus_one_db.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( "performance.issues.n_plus_one_db_ext.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( "performance.issues.file_io_main_thread.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( "performance.issues.db_main_thread.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( "performance.issues.n_plus_one_api_calls.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( @@ -1161,12 +1161,12 @@ ) register( "performance.issues.n_plus_one_api_calls.ga-rollout", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( "performance.issues.slow_db_query.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( @@ -1176,11 +1176,11 @@ 
"performance.issues.slow_db_query.ea-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE ) register( - "performance.issues.slow_db_query.ga-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE + "performance.issues.slow_db_query.ga-rollout", default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE ) register( "performance.issues.render_blocking_assets.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( @@ -1195,12 +1195,12 @@ ) register( "performance.issues.render_blocking_assets.ga-rollout", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( "performance.issues.m_n_plus_one_db.problem-creation", - default=0.0, + default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE, ) register( @@ -1210,7 +1210,7 @@ "performance.issues.m_n_plus_one_db.ea-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE ) register( - "performance.issues.m_n_plus_one_db.ga-rollout", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE + "performance.issues.m_n_plus_one_db.ga-rollout", default=1.0, flags=FLAG_AUTOMATOR_MODIFIABLE ) register( "performance.issues.http_overhead.problem-creation", diff --git a/tests/sentry/utils/performance_issues/test_performance_detection.py b/tests/sentry/utils/performance_issues/test_performance_detection.py index bfe38332054de3..bce90f1adb2817 100644 --- a/tests/sentry/utils/performance_issues/test_performance_detection.py +++ b/tests/sentry/utils/performance_issues/test_performance_detection.py @@ -109,13 +109,13 @@ def setUp(self): @patch("sentry.utils.performance_issues.performance_detection._detect_performance_problems") def test_options_disabled(self, mock): - detect_performance_problems({}, self.project) - assert mock.call_count == 0 + with override_options({"performance.issues.all.problem-detection": 0.0}): + detect_performance_problems({}, self.project) + assert mock.call_count == 0 @patch("sentry.utils.performance_issues.performance_detection._detect_performance_problems") def test_options_enabled(self, mock): - with override_options({"performance.issues.all.problem-detection": 1.0}): - detect_performance_problems({}, self.project) + detect_performance_problems({}, self.project) assert mock.call_count == 1 @override_options(BASE_DETECTOR_OPTIONS) From a72a32fd8b26e250636b92f29c713d653e318836 Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Wed, 2 Aug 2023 13:46:19 -0400 Subject: [PATCH 24/44] ref(starfish): Remove definitions views (#54034) As per design review, remove definitions views. --- static/app/routes.tsx | 4 --- .../starfish/views/definitionsView/index.tsx | 35 ------------------- .../app/views/starfish/views/spans/types.tsx | 21 ----------- 3 files changed, 60 deletions(-) delete mode 100644 static/app/views/starfish/views/definitionsView/index.tsx diff --git a/static/app/routes.tsx b/static/app/routes.tsx index e3dd4f214b45f5..23d74e8c8607f9 100644 --- a/static/app/routes.tsx +++ b/static/app/routes.tsx @@ -1690,10 +1690,6 @@ function buildRoutes() { component={make(() => import('sentry/views/starfish/views/spanSummaryPage'))} /> - import('sentry/views/starfish/views/definitionsView'))} - /> import('sentry/views/starfish/views/spans'))} /> - - - - - Defintions - - - - - -
-
-              Time Spent - time spent is calculated by dividing the total span time by
-              the total app time.
-
-
-
-
-
- - ); -} - -export default DefinitionsView; diff --git a/static/app/views/starfish/views/spans/types.tsx b/static/app/views/starfish/views/spans/types.tsx index 1198c057ce52cb..0f3822d563859a 100644 --- a/static/app/views/starfish/views/spans/types.tsx +++ b/static/app/views/starfish/views/spans/types.tsx @@ -1,9 +1,5 @@ -import {Fragment} from 'react'; - -import Link from 'sentry/components/links/link'; import {t} from 'sentry/locale'; import {defined} from 'sentry/utils'; -import DurationCell from 'sentry/views/starfish/components/tableCells/durationCell'; export type DataKey = | 'change' @@ -32,23 +28,6 @@ export const DataTitles: Record = { ttid: t('Time To Initial Display'), }; -export const getTooltip = ( - key: DataKey, - ...options: (string | number)[] -): React.ReactNode => { - if (key === 'timeSpent') { - return ( - -
-
-
-            {t('How was this calculated?')}
- ); - } - return ''; -}; - export const getThroughputTitle = (spanOp?: string) => { if (spanOp?.startsWith('db')) { return t('Queries Per Min'); From cc6e75f118a8cac5f2818043f19cfbad9faecf4b Mon Sep 17 00:00:00 2001 From: Alex Zaslavsky Date: Wed, 2 Aug 2023 10:49:15 -0700 Subject: [PATCH 25/44] meta(gitignore): Ignore user-specific VSCode settings (#54036) VSCode has two mechanisms for pulling in repository-local settings, which are applied on top of the global user settings.json: a settings.json file in the /.vscode directory, and a .code-workspace file at the root of the repository. We already track the first of these in git, to enforce some team-scope settings for all users of the repository. This change adds the second method, .code-workspace, to the .gitignore list, allowing users to change their settings if they have a very compelling reason (ex, VSCode running the formatter very slowly, which is what prompted this change). --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 5f3cf7146a1d6f..3186d2769492a8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ .env .cache/ +.code-workspace .coverage* .DS_Store .venv From 0e4236c28113e1eb94796b5ab74ee7558eac851c Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 2 Aug 2023 14:20:37 -0400 Subject: [PATCH 26/44] ref: fix more mypy issues (#54029) --- pyproject.toml | 22 --- .../integrations/utils/commit_context.py | 5 +- src/sentry/shared_integrations/client/base.py | 2 + src/sentry/utils/email/list_resolver.py | 9 +- src/sentry/utils/event_frames.py | 8 +- src/sentry/utils/locking/backends/redis.py | 13 +- src/sentry/utils/meta.py | 8 +- src/sentry/utils/outcomes.py | 8 +- src/sentry/utils/safe.py | 2 +- tests/sentry/roles/test_manager.py | 3 +- tests/sentry/runner/test_initializer.py | 21 ++- tests/sentry/services/test_http.py | 6 +- .../shared_integrations/client/test_base.py | 14 +- tests/sentry/spans/grouping/test_strategy.py | 4 +- tests/sentry/test_killswitches.py | 145 ++++++++++-------- .../sentry/testutils/helpers/test_features.py | 20 +-- tests/sentry/tsdb/test_redissnuba.py | 2 +- .../sentry/utils/email/test_list_resolver.py | 9 +- .../utils/locking/backends/test_redis.py | 9 +- .../test_metric_correlation.py | 4 + tests/sentry/utils/test_audit.py | 16 +- tests/sentry/utils/test_cursors.py | 10 +- tests/sentry/utils/test_event_frames.py | 14 +- tests/sentry/utils/test_functional.py | 4 +- tests/sentry/utils/test_meta.py | 25 +-- tests/sentry/utils/test_outcomes.py | 42 ++--- tests/sentry/utils/test_safe.py | 13 +- tests/sentry/utils/test_services.py | 47 +++--- tests/sentry/utils/test_time_window.py | 58 +++---- 29 files changed, 292 insertions(+), 251 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index eaf211fbe6f5f2..e9ea7416f52897 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -814,7 +814,6 @@ module = [ "sentry.utils.concurrent", "sentry.utils.distutils.commands.base", "sentry.utils.distutils.commands.build_assets", - "sentry.utils.email.list_resolver", "sentry.utils.email.signer", "sentry.utils.http", "sentry.utils.locking.backends.migration", @@ -1059,7 +1058,6 @@ module = [ "tests.sentry.notifications.test_utils", "tests.sentry.notifications.utils.test_tasks", "tests.sentry.options.test_store", - "tests.sentry.ownership.test_grammar", "tests.sentry.pipeline.test_pipeline", "tests.sentry.plugins.bases.test_issue2", "tests.sentry.processing.realtime_metrics.test_redis", @@ -1083,14 +1081,12 @@ 
module = [ "tests.sentry.replays.test_project_replay_recording_segment_index", "tests.sentry.replays.unit.test_dead_click_issue", "tests.sentry.replays.unit.test_ingest_dom_index", - "tests.sentry.roles.test_manager", "tests.sentry.rules.conditions.test_event_attribute", "tests.sentry.rules.conditions.test_level_event", "tests.sentry.rules.conditions.test_tagged_event", "tests.sentry.rules.filters.test_issue_category", "tests.sentry.rules.history.endpoints.test_project_rule_preview", "tests.sentry.rules.test_processor", - "tests.sentry.runner.test_initializer", "tests.sentry.search.events.builder.test_discover", "tests.sentry.search.events.builder.test_metrics", "tests.sentry.search.events.test_fields", @@ -1109,8 +1105,6 @@ module = [ "tests.sentry.sentry_metrics.test_multiprocess_steps", "tests.sentry.sentry_metrics.test_postgres_indexer", "tests.sentry.sentry_metrics.test_strings", - "tests.sentry.services.test_http", - "tests.sentry.shared_integrations.client.test_base", "tests.sentry.snuba.metrics.test_metrics_layer.test_release_health", "tests.sentry.snuba.metrics.test_mqb_query_transformer", "tests.sentry.snuba.metrics.test_query_builder", @@ -1122,7 +1116,6 @@ module = [ "tests.sentry.snuba.test_profiles", "tests.sentry.snuba.test_query_subscription_consumer", "tests.sentry.snuba.test_tasks", - "tests.sentry.spans.grouping.test_strategy", "tests.sentry.tagstore.test_types", "tests.sentry.tasks.deletion.test_groups", "tests.sentry.tasks.deletion.test_scheduled", @@ -1136,22 +1129,7 @@ module = [ "tests.sentry.tasks.test_servicehooks", "tests.sentry.tasks.test_store", "tests.sentry.templatetags.test_sentry_assets", - "tests.sentry.test_killswitches", "tests.sentry.test_stacktraces", - "tests.sentry.testutils.helpers.test_features", - "tests.sentry.tsdb.test_redissnuba", - "tests.sentry.utils.email.test_list_resolver", - "tests.sentry.utils.locking.backends.test_redis", - "tests.sentry.utils.suspect_resolutions.test_metric_correlation", - "tests.sentry.utils.test_audit", - "tests.sentry.utils.test_cursors", - "tests.sentry.utils.test_event_frames", - "tests.sentry.utils.test_functional", - "tests.sentry.utils.test_meta", - "tests.sentry.utils.test_outcomes", - "tests.sentry.utils.test_safe", - "tests.sentry.utils.test_services", - "tests.sentry.utils.test_time_window", "tests.sentry.web.test_client_config", "tests.snuba.rules.conditions.test_event_frequency", "tests.snuba.sessions.test_sessions", diff --git a/src/sentry/integrations/utils/commit_context.py b/src/sentry/integrations/utils/commit_context.py index e71987755e6246..0c9da764c7b033 100644 --- a/src/sentry/integrations/utils/commit_context.py +++ b/src/sentry/integrations/utils/commit_context.py @@ -1,9 +1,12 @@ +from __future__ import annotations + import logging from typing import Any, List, Mapping, Sequence, Tuple import sentry_sdk from sentry import analytics +from sentry.integrations.base import IntegrationInstallation from sentry.models.integrations.repository_project_path_config import RepositoryProjectPathConfig from sentry.ownership.grammar import get_source_code_path_from_stacktrace_path from sentry.services.hybrid_cloud.integration import integration_service @@ -17,7 +20,7 @@ def find_commit_context_for_event( code_mappings: Sequence[RepositoryProjectPathConfig], frame: Mapping[str, Any], extra: Mapping[str, Any], -) -> List[Tuple[Mapping[str, Any], RepositoryProjectPathConfig]]: +) -> tuple[List[Tuple[Mapping[str, Any], RepositoryProjectPathConfig]], IntegrationInstallation]: """ Get all the Commit Context for an 
event frame using a source code integration for all the matching code mappings diff --git a/src/sentry/shared_integrations/client/base.py b/src/sentry/shared_integrations/client/base.py index d0ab4274ae119b..6dd990182b886e 100644 --- a/src/sentry/shared_integrations/client/base.py +++ b/src/sentry/shared_integrations/client/base.py @@ -48,6 +48,8 @@ class BaseApiClient(TrackResponseMixin): page_number_limit = 10 + integration_name: str + def __init__( self, integration_id: int | None = None, diff --git a/src/sentry/utils/email/list_resolver.py b/src/sentry/utils/email/list_resolver.py index 40c23a84f74a3b..c6571bb5fdc798 100644 --- a/src/sentry/utils/email/list_resolver.py +++ b/src/sentry/utils/email/list_resolver.py @@ -1,13 +1,12 @@ from __future__ import annotations -from typing import Callable, Generic, Iterable, Mapping +from typing import Callable, Iterable, Mapping from sentry.db.models import Model -from sentry.db.models.manager import M from sentry.utils.strings import is_valid_dot_atom -class ListResolver(Generic[M]): +class ListResolver: """ Manages the generation of RFC 2919 compliant list-id strings from varying objects types. @@ -19,7 +18,7 @@ class UnregisteredTypeError(Exception): """ def __init__( - self, namespace: str, type_handlers: Mapping[type[Model], Callable[[M], Iterable[str]]] + self, namespace: str, type_handlers: Mapping[type[Model], Callable[[Model], Iterable[str]]] ) -> None: assert is_valid_dot_atom(namespace) @@ -34,7 +33,7 @@ def __init__( # values. self.__type_handlers = type_handlers - def __call__(self, instance: M) -> str: + def __call__(self, instance: Model) -> str: """ Build a list-id string from an instance. diff --git a/src/sentry/utils/event_frames.py b/src/sentry/utils/event_frames.py index 791a5482445397..0a02bd16e87299 100644 --- a/src/sentry/utils/event_frames.py +++ b/src/sentry/utils/event_frames.py @@ -49,7 +49,7 @@ def cocoa_frame_munger(key: str, frame: MutableMapping[str, Any]) -> bool: if not frame.get("package") or not frame.get("abs_path"): return False - rel_path = package_relative_path(str(frame.get("abs_path")), str(frame.get("package"))) + rel_path = package_relative_path(frame.get("abs_path"), frame.get("package")) if rel_path: frame[key] = rel_path return True @@ -79,7 +79,7 @@ def flutter_frame_munger(key: str, frame: MutableMapping[str, Any]) -> bool: return False -def package_relative_path(abs_path: str, package: str) -> str | None: +def package_relative_path(abs_path: str | None, package: str | None) -> str | None: """ returns the left-biased shortened path relative to the package directory """ @@ -131,7 +131,9 @@ def munged_filename_and_frames( return (key, copy_frames) if frames_updated else None -def get_crashing_thread(thread_frames: Sequence[Mapping[str, Any]]) -> Mapping[str, Any] | None: +def get_crashing_thread( + thread_frames: Sequence[Mapping[str, Any]] | None +) -> Mapping[str, Any] | None: if not thread_frames: return None if len(thread_frames) == 1: diff --git a/src/sentry/utils/locking/backends/redis.py b/src/sentry/utils/locking/backends/redis.py index fce4eb4e20a064..94ef7e4fea04ec 100644 --- a/src/sentry/utils/locking/backends/redis.py +++ b/src/sentry/utils/locking/backends/redis.py @@ -13,6 +13,7 @@ def __init__(self, cluster, prefix="l:", uuid=None): uuid = uuid4().hex self.prefix = prefix self.uuid = uuid + self.cluster = cluster def get_client(self, key, routing_key=None): raise NotImplementedError @@ -37,11 +38,9 @@ def locked(self, key, routing_key=None): class 
RedisBlasterLockBackend(BaseRedisLockBackend): def __init__(self, cluster, prefix="l:", uuid=None): - super().__init__(cluster, prefix=prefix, uuid=uuid) if isinstance(cluster, str): - self.cluster = redis.clusters.get(cluster) - else: - self.cluster = cluster + cluster = redis.clusters.get(cluster) + super().__init__(cluster, prefix=prefix, uuid=uuid) def get_client(self, key, routing_key=None): # This is a bit of an abstraction leak, but if an integer is provided @@ -68,11 +67,9 @@ def get_client(self, key, routing_key=None): class RedisClusterLockBackend(BaseRedisLockBackend): def __init__(self, cluster, prefix="l:", uuid=None): - super().__init__(cluster, prefix=prefix, uuid=uuid) if isinstance(cluster, str): - self.cluster = redis.redis_clusters.get(cluster) - else: - self.cluster = cluster + cluster = redis.redis_clusters.get(cluster) + super().__init__(cluster, prefix=prefix, uuid=uuid) def get_client(self, key, routing_key=None): return self.cluster diff --git a/src/sentry/utils/meta.py b/src/sentry/utils/meta.py index faceab01c7ebbf..708d27687e8dc1 100644 --- a/src/sentry/utils/meta.py +++ b/src/sentry/utils/meta.py @@ -1,5 +1,7 @@ from collections.abc import Mapping -from typing import List, Literal, Optional, TypedDict, Union +from typing import List, Literal, TypedDict, Union + +from typing_extensions import NotRequired RemarkType = Literal["a", "x", "s", "m", "p", "e"] @@ -8,9 +10,9 @@ class Remark(TypedDict): rule_id: str type: RemarkType # Range start is a byte offset - range_start: Optional[int] + range_start: NotRequired[int] # Range end is a byte offset - range_end: Optional[int] + range_end: NotRequired[int] class Meta: diff --git a/src/sentry/utils/outcomes.py b/src/sentry/utils/outcomes.py index c98102041bfeb7..7a1fac8de4f650 100644 --- a/src/sentry/utils/outcomes.py +++ b/src/sentry/utils/outcomes.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import time from datetime import datetime from enum import IntEnum @@ -25,15 +27,15 @@ def api_name(self) -> str: return self.name.lower() @classmethod - def parse(cls, name: str) -> "Outcome": + def parse(cls, name: str) -> Outcome: return Outcome[name.upper()] def is_billing(self) -> bool: return self in (Outcome.ACCEPTED, Outcome.RATE_LIMITED) -outcomes_publisher = None -billing_publisher = None +outcomes_publisher: KafkaPublisher | None = None +billing_publisher: KafkaPublisher | None = None def track_outcome( diff --git a/src/sentry/utils/safe.py b/src/sentry/utils/safe.py index 3b7ac85b5cffd1..bb8d25004b5be4 100644 --- a/src/sentry/utils/safe.py +++ b/src/sentry/utils/safe.py @@ -11,7 +11,7 @@ from sentry.utils import json from sentry.utils.strings import truncatechars -PathSearchable = Union[Mapping[str, Any], Sequence[Any]] +PathSearchable = Union[Mapping[str, Any], Sequence[Any], None] def safe_execute(func, *args, **kwargs): diff --git a/tests/sentry/roles/test_manager.py b/tests/sentry/roles/test_manager.py index 3308d327917cbb..358a7a0a211177 100644 --- a/tests/sentry/roles/test_manager.py +++ b/tests/sentry/roles/test_manager.py @@ -1,6 +1,7 @@ from unittest import mock -from sentry.roles import RoleManager, default_manager +from sentry.roles import default_manager +from sentry.roles.manager import RoleManager from sentry.testutils.cases import TestCase diff --git a/tests/sentry/runner/test_initializer.py b/tests/sentry/runner/test_initializer.py index 32a6a3932889bc..810bb7748f0333 100644 --- a/tests/sentry/runner/test_initializer.py +++ b/tests/sentry/runner/test_initializer.py @@ -1,3 +1,5 @@ +import 
types + import pytest from sentry.runner.importer import ConfigurationError @@ -7,17 +9,14 @@ @pytest.fixture def settings(): - class Settings: - pass - - s = Settings() - s.TIME_ZONE = "UTC" - s.ALLOWED_HOSTS = [] - s.SENTRY_FEATURES = {} - s.SENTRY_OPTIONS = {} - s.SENTRY_DEFAULT_OPTIONS = {} - s.SENTRY_EMAIL_BACKEND_ALIASES = {"dummy": "alias-for-dummy"} - return s + return types.SimpleNamespace( + TIME_ZONE="UTC", + ALLOWED_HOSTS=[], + SENTRY_FEATURES={}, + SENTRY_OPTIONS={}, + SENTRY_DEFAULT_OPTIONS={}, + SENTRY_EMAIL_BACKEND_ALIASES={"dummy": "alias-for-dummy"}, + ) @pytest.fixture diff --git a/tests/sentry/services/test_http.py b/tests/sentry/services/test_http.py index ee5af1e6be7c02..a1cd316e4ebb92 100644 --- a/tests/sentry/services/test_http.py +++ b/tests/sentry/services/test_http.py @@ -1,3 +1,7 @@ +from __future__ import annotations + +from typing import Any + from django.test.utils import override_settings from sentry.services.http import SentryHTTPServer, convert_options_to_env @@ -36,7 +40,7 @@ def test_options(self): assert server.options["workers"] == 10 # Make sure that changing `protocol` to uwsgi sets the right socket - options = {"protocol": "uwsgi"} + options: dict[str, Any] = {"protocol": "uwsgi"} with override_settings(SENTRY_WEB_OPTIONS=options): server = cls() assert "http-socket" not in server.options diff --git a/tests/sentry/shared_integrations/client/test_base.py b/tests/sentry/shared_integrations/client/test_base.py index fd87b48f905e94..0d111180d1848e 100644 --- a/tests/sentry/shared_integrations/client/test_base.py +++ b/tests/sentry/shared_integrations/client/test_base.py @@ -19,9 +19,11 @@ class BaseApiClientTest(TestCase): """ def setUp(self): - self.client = BaseApiClient() - self.client.integration_type = "integration" - self.client.integration_name = "base" + class Client(BaseApiClient): + integration_type = "integration" + integration_name = "base" + + self.api_client = Client() @responses.activate @patch.object(BaseApiClient, "finalize_request", side_effect=lambda req: req) @@ -30,7 +32,7 @@ def test_finalize_request(self, mock_finalize_request): get_response = responses.add(responses.GET, "https://example.com/get", json={}) assert not mock_finalize_request.called assert get_response.call_count == 0 - self.client.get("https://example.com/get") + self.api_client.get("https://example.com/get") assert mock_finalize_request.called assert get_response.call_count == 1 @@ -58,7 +60,7 @@ def test__request_prepared_request(self): prepared_request = Request(method="PUT", url="https://example.com/put").prepare() # Client should use prepared request instead of using other params assert put_response.call_count == 0 - self.client.get("https://example.com/get", prepared_request=prepared_request) + self.api_client.get("https://example.com/get", prepared_request=prepared_request) assert put_response.call_count == 1 @responses.activate @@ -68,5 +70,5 @@ def test__request_prepared_request(self): def test_restricted_ip_address(self, mock_finalize_request, mock_session_send): assert not mock_finalize_request.called with raises(ApiHostError): - self.client.get("https://172.31.255.255") + self.api_client.get("https://172.31.255.255") assert mock_finalize_request.called diff --git a/tests/sentry/spans/grouping/test_strategy.py b/tests/sentry/spans/grouping/test_strategy.py index 04f4a809f8bbc4..a075f0cc4598e0 100644 --- a/tests/sentry/spans/grouping/test_strategy.py +++ b/tests/sentry/spans/grouping/test_strategy.py @@ -331,13 +331,13 @@ def 
test_reuse_existing_grouping_results() -> None: SpanBuilder() .with_span_id("b" * 16) .with_description("hi") - .with_fingerprint("a") + .with_fingerprint(["a"]) .build(), SpanBuilder().with_span_id("c" * 16).with_description("hi").build(), SpanBuilder() .with_span_id("d" * 16) .with_description("bye") - .with_fingerprint("a") + .with_fingerprint(["a"]) .build(), ], {"b" * 16: "a", "c" * 16: "hi", "d" * 16: "a"}, diff --git a/tests/sentry/test_killswitches.py b/tests/sentry/test_killswitches.py index d7bf23b29d5e56..3543b74f7b43d0 100644 --- a/tests/sentry/test_killswitches.py +++ b/tests/sentry/test_killswitches.py @@ -1,3 +1,7 @@ +from __future__ import annotations + +import pytest + from sentry.killswitches import _value_matches, normalize_value @@ -13,72 +17,83 @@ def test_normalize_value(): ] -def test_value_matches(): - assert _value_matches( - "store.load-shed-group-creation-projects", - [ - {"project_id": "1"}, - {"project_id": "2"}, - {"project_id": "3"}, - ], - {"project_id": 2}, - ) - - assert _value_matches( - "store.load-shed-group-creation-projects", - [ - {"project_id": 1}, +@pytest.mark.parametrize( + ("cfg", "value"), + ( + ( + [ + {"project_id": "1"}, + {"project_id": "2"}, + {"project_id": "3"}, + ], + {"project_id": 2}, + ), + ( + [ + {"project_id": 1}, + {"project_id": 2}, + {"project_id": 3}, + ], {"project_id": 2}, + ), + ( + [{}], # [{}] corresponds to any([all([])]), which is True {"project_id": 3}, - ], - {"project_id": 2}, - ) - - assert not _value_matches( - "store.load-shed-group-creation-projects", - [ - {"project_id": "1"}, - {"project_id": "2"}, - {"project_id": "3"}, - ], - {"project_id": 4}, - ) - - assert not _value_matches("store.load-shed-group-creation-projects", [], {"project_id": 3}) - assert not _value_matches( - "store.load-shed-group-creation-projects", - [{"project_id": 2, "platform": None}], - {"project_id": 3}, - ) - - assert not _value_matches( - "store.load-shed-group-creation-projects", - [ - {"project_id": "1"}, - {"project_id": "2"}, - {"project_id": "3"}, - ], - {}, - ) - - for conditions in [ - [{}], # [{}] corresponds to any([all([])]), which is True - [{"project_id": None}], - [{"project_id": None, "platform": None}], - [{"project_id": 3, "platform": None, "event_type": None}], - ]: - assert _value_matches( - "store.load-shed-group-creation-projects", conditions, {"project_id": 3} - ) + ), + ( + [{"project_id": None}], + {"project_id": 3}, + ), + ( + [{"project_id": None, "platform": None}], + {"project_id": 3}, + ), + ( + [{"project_id": 3, "platform": None, "event_type": None}], + {"project_id": 3}, + ), + ( + [{"project_id": 3, "platform": None}], + {"project_id": 3}, + ), + ( + [{"event_type": "transaction"}], + {"project_id": 3, "event_type": "transaction"}, + ), + ), +) +def test_value_matches_positive(cfg, value): + assert _value_matches("store.load-shed-group-creation-projects", cfg, value) - assert _value_matches( - "store.load-shed-group-creation-projects", - [{"project_id": 3, "platform": None}], - {"project_id": 3}, - ) - assert _value_matches( - "store.load-shed-group-creation-projects", - [{"event_type": "transaction"}], - {"project_id": 3, "event_type": "transaction"}, - ) +@pytest.mark.parametrize( + ("cfg", "value"), + ( + ( + [ + {"project_id": "1"}, + {"project_id": "2"}, + {"project_id": "3"}, + ], + {"project_id": 4}, + ), + ( + [], + {"project_id": 4}, + ), + ( + [{"project_id": 2, "platform": None}], + {"project_id": 3}, + ), + ( + [ + {"project_id": "1"}, + {"project_id": "2"}, + {"project_id": "3"}, + ], + {}, 
+ ), + ), +) +def test_value_matches_negative(cfg, value): + assert not _value_matches("store.load-shed-group-creation-projects", cfg, value) diff --git a/tests/sentry/testutils/helpers/test_features.py b/tests/sentry/testutils/helpers/test_features.py index a1ac167242c2fe..5877464da6684c 100644 --- a/tests/sentry/testutils/helpers/test_features.py +++ b/tests/sentry/testutils/helpers/test_features.py @@ -27,16 +27,16 @@ def test_batch_has(self): with self.feature("organizations:customer-domains"): # Make sure this check returns True for features that are defaulted to True and aren't # mocked - results = list( - features.batch_has( - [ - "organizations:customer-domains", - "organizations:advanced-search", - "organizations:api-keys", - ], - organization=self.org, - ).values() - )[0] + ret = features.batch_has( + [ + "organizations:customer-domains", + "organizations:advanced-search", + "organizations:api-keys", + ], + organization=self.org, + ) + assert ret is not None + results = list(ret.values())[0] assert results["organizations:customer-domains"] assert results["organizations:advanced-search"] assert not results["organizations:api-keys"] diff --git a/tests/sentry/tsdb/test_redissnuba.py b/tests/sentry/tsdb/test_redissnuba.py index db56453e6b2c3b..c39d0f316ac442 100644 --- a/tests/sentry/tsdb/test_redissnuba.py +++ b/tests/sentry/tsdb/test_redissnuba.py @@ -30,7 +30,7 @@ def test_redissnuba_connects_to_correct_backend(): assert "redis" == selector_func(method, get_callargs(model)) for model in should_resolve_to_snuba: - read_or_write, _ = method_specifications.get(method) + read_or_write, _ = method_specifications[method] if read_or_write == READ: assert "snuba" == selector_func(method, get_callargs(model)) diff --git a/tests/sentry/utils/email/test_list_resolver.py b/tests/sentry/utils/email/test_list_resolver.py index 8768d1518f1b46..942b8aba2e59b1 100644 --- a/tests/sentry/utils/email/test_list_resolver.py +++ b/tests/sentry/utils/email/test_list_resolver.py @@ -1,5 +1,6 @@ import pytest +from sentry.models.project import Project from sentry.testutils.cases import TestCase from sentry.utils.email import ListResolver from sentry.utils.email.message_builder import default_list_type_handlers @@ -14,15 +15,15 @@ def test_rejects_invalid_namespace(self): def test_rejects_invalid_types(self): with pytest.raises(ListResolver.UnregisteredTypeError): - self.resolver(object()) + self.resolver(self.user) def test_generates_list_ids(self): - expected = "<{0.project.slug}.{0.organization.slug}.namespace>".format(self.event) + expected = f"<{self.event.project.slug}.{self.event.organization.slug}.namespace>" assert self.resolver(self.event.group) == expected assert self.resolver(self.event.project) == expected def test_rejects_invalid_objects(self): - resolver = ListResolver("namespace", {object: lambda value: ("\x00",)}) + resolver = ListResolver("namespace", {Project: lambda value: ("\x00",)}) with pytest.raises(AssertionError): - resolver(object()) + resolver(self.project) diff --git a/tests/sentry/utils/locking/backends/test_redis.py b/tests/sentry/utils/locking/backends/test_redis.py index 77b0f444d1ac0c..2be02ad6627012 100644 --- a/tests/sentry/utils/locking/backends/test_redis.py +++ b/tests/sentry/utils/locking/backends/test_redis.py @@ -5,13 +5,16 @@ import pytest -from sentry.utils.locking.backends import LockBackend -from sentry.utils.locking.backends.redis import RedisClusterLockBackend, RedisLockBackend +from sentry.utils.locking.backends.redis import ( + BaseRedisLockBackend, + 
RedisClusterLockBackend, + RedisLockBackend, +) from sentry.utils.redis import clusters, redis_clusters class RedisBackendTestCaseBase: - backend_class: type[LockBackend] + backend_class: type[BaseRedisLockBackend] @property def cluster(self): diff --git a/tests/sentry/utils/suspect_resolutions/test_metric_correlation.py b/tests/sentry/utils/suspect_resolutions/test_metric_correlation.py index cac8d53c0fadea..2cd626c3dbfebf 100644 --- a/tests/sentry/utils/suspect_resolutions/test_metric_correlation.py +++ b/tests/sentry/utils/suspect_resolutions/test_metric_correlation.py @@ -66,6 +66,7 @@ def test_correlated_issues(self, mock_get_range): ) result = is_issue_error_rate_correlated(group1, [group2]) + assert result is not None assert result.candidate_metric_correlations == [ CandidateMetricCorrResult( @@ -100,6 +101,7 @@ def test_uncorrelated_issues(self, mock_get_range): ) result = is_issue_error_rate_correlated(group1, [group2]) + assert result is not None assert result.candidate_metric_correlations == [ CandidateMetricCorrResult( @@ -133,6 +135,7 @@ def test_perfect_correlation(self, mock_get_range): ) result = is_issue_error_rate_correlated(group1, [group2]) + assert result is not None assert result.candidate_metric_correlations == [ CandidateMetricCorrResult( @@ -191,6 +194,7 @@ def test_multiple_groups(self, mock_get_range): ) result = is_issue_error_rate_correlated(group1, [group2, group3, group4]) + assert result is not None assert result.candidate_metric_correlations == [ CandidateMetricCorrResult( diff --git a/tests/sentry/utils/test_audit.py b/tests/sentry/utils/test_audit.py index 53b9ba7f07e770..bf037c6a1bbaad 100644 --- a/tests/sentry/utils/test_audit.py +++ b/tests/sentry/utils/test_audit.py @@ -1,5 +1,6 @@ from django.contrib.auth.models import AnonymousUser from django.db import router +from django.http.request import HttpRequest from sentry import audit_log from sentry.models import ( @@ -23,17 +24,18 @@ username = "hello" * 20 -class FakeHttpRequest: - def __init__(self, user): - self.user = user - self.META = {"REMOTE_ADDR": "127.0.0.1"} +def fake_http_request(user): + request = HttpRequest() + request.user = user + request.META["REMOTE_ADDR"] = "127.0.0.1" + return request @all_silo_test(stable=True) class CreateAuditEntryTest(TestCase): def setUp(self): self.user = self.create_user(username=username) - self.req = FakeHttpRequest(self.user) + self.req = fake_http_request(self.user) self.org = self.create_organization(owner=self.user) self.team = self.create_team(organization=self.org) self.project = self.create_project(teams=[self.team], platform="java") @@ -48,7 +50,7 @@ def test_audit_entry_api(self): org = self.create_organization() apikey = self.create_api_key(org, allowed_origins="*") - req = FakeHttpRequest(AnonymousUser()) + req = fake_http_request(AnonymousUser()) req.auth = apikey entry = create_audit_entry(req) @@ -59,7 +61,7 @@ def test_audit_entry_api(self): self.assert_no_delete_log_created() def test_audit_entry_frontend(self): - req = FakeHttpRequest(self.create_user()) + req = fake_http_request(self.create_user()) entry = create_audit_entry(req) assert entry.actor == req.user diff --git a/tests/sentry/utils/test_cursors.py b/tests/sentry/utils/test_cursors.py index d7578027995f55..5bc7b42fe8f2c6 100644 --- a/tests/sentry/utils/test_cursors.py +++ b/tests/sentry/utils/test_cursors.py @@ -1,7 +1,13 @@ import math from types import SimpleNamespace +from typing import TypedDict -from sentry.utils.cursors import Cursor, build_cursor +from sentry.utils.cursors 
import Cursor, KeyCallable, build_cursor + + +class CursorKwargs(TypedDict): + key: KeyCallable + limit: int def test_build_cursor(): @@ -14,7 +20,7 @@ def test_build_cursor(): def item_key(key, for_prev=False): return int(math.floor(key.id)) - cursor_kwargs = {"key": item_key, "limit": 1} + cursor_kwargs: CursorKwargs = {"key": item_key, "limit": 1} cursor = build_cursor(results, **cursor_kwargs) assert isinstance(cursor.next, Cursor) diff --git a/tests/sentry/utils/test_event_frames.py b/tests/sentry/utils/test_event_frames.py index 12708647cd7557..6326c15d0b5f19 100644 --- a/tests/sentry/utils/test_event_frames.py +++ b/tests/sentry/utils/test_event_frames.py @@ -1,5 +1,4 @@ import unittest -from typing import Any, Mapping from sentry.testutils.cases import TestCase from sentry.utils.event_frames import ( @@ -102,7 +101,9 @@ def test_platform_java(self): "filename": "Application.java", }, ] - key, munged_frames = munged_filename_and_frames("java", frames, "munged_filename") + ret = munged_filename_and_frames("java", frames, "munged_filename") + assert ret is not None + key, munged_frames = ret assert len(munged_frames) == 3 assert munged_frames[0][key] == "jdk/internal/reflect/NativeMethodAccessorImpl.java" assert munged_frames[1][key] == "io/sentry/example/Application.java" @@ -250,14 +251,16 @@ def test_platform_android_kotlin(self): "in_app": True, }, ] - key, munged_frames = munged_filename_and_frames("java", exception_frames, "munged_filename") + ret = munged_filename_and_frames("java", exception_frames, "munged_filename") + assert ret is not None + key, munged_frames = ret assert len(munged_frames) == 16 for z in zip(exception_frames, munged_frames): assert z[0].items() <= z[1].items() has_munged = list(filter(lambda f: f.get("filename") and f.get("module"), munged_frames)) assert len(has_munged) == 14 - assert all(str(x.get("munged_filename")).endswith(x.get("filename")) for x in has_munged) + assert all(x["munged_filename"].endswith(x["filename"]) for x in has_munged) class CocoaFilenameMungingTestCase(unittest.TestCase): @@ -406,7 +409,8 @@ def test_flutter_munger_supported(self): munged_frames = munged_filename_and_frames( "other", frames, "munged_filename", "sentry.dart.flutter" ) - munged_first_frame: Mapping[str, Any] = munged_frames[1][0] + assert munged_frames is not None + munged_first_frame = munged_frames[1][0] assert munged_first_frame.items() > frames[0].items() assert munged_first_frame["munged_filename"] == "a/b/test.dart" diff --git a/tests/sentry/utils/test_functional.py b/tests/sentry/utils/test_functional.py index 3df498bed50b84..378c1ba26503f5 100644 --- a/tests/sentry/utils/test_functional.py +++ b/tests/sentry/utils/test_functional.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest import TestCase, mock from sentry.utils.functional import cached, compact @@ -22,7 +24,7 @@ def test_cached_with_kwargs(): foo = mock.Mock() - cache = {} + cache: dict[object, object] = {} cached(cache, foo, kw1=1, kw2=2) assert foo.call_count == 1 diff --git a/tests/sentry/utils/test_meta.py b/tests/sentry/utils/test_meta.py index 1d0e2b2f5130c3..0f3bf7146476ea 100644 --- a/tests/sentry/utils/test_meta.py +++ b/tests/sentry/utils/test_meta.py @@ -1,4 +1,7 @@ +from __future__ import annotations + from copy import deepcopy +from typing import Any from unittest import TestCase from sentry.utils.meta import Meta @@ -39,19 +42,19 @@ def test_get_missing(self): assert Meta({}).get_event_errors() == [] def test_create_missing(self): - data = {} + data: dict[str, Any] 
= {} meta = Meta(data) assert meta.create() == {} assert data == {"": {}} def test_merge_missing(self): - data = {} + data: dict[str, Any] = {} meta = Meta(data) assert meta.merge(Meta(other_meta)) == other_meta[""] assert data == other_meta def test_add_error_missing(self): - data = {} + data: dict[str, Any] = {} meta = Meta(data) meta.add_error("additional", "changed") assert data == {"": {"err": ["additional"], "val": "changed"}} @@ -87,19 +90,19 @@ def test_get_empty(self): assert Meta({"": {}}).get_event_errors() == [] def test_create_empty(self): - data = {"": {}} + data: dict[str, Any] = {"": {}} meta = Meta(data) assert meta.create() == {} assert data == {"": {}} def test_merge_empty(self): - data = {"": {}} + data: dict[str, Any] = {"": {}} meta = Meta(data) assert meta.merge(Meta(other_meta)) == other_meta[""] assert data == other_meta def test_add_error_empty(self): - data = {"": {}} + data: dict[str, Any] = {"": {}} meta = Meta(data) meta.add_error("additional", "changed") assert data == {"": {"err": ["additional"], "val": "changed"}} @@ -134,26 +137,26 @@ def test_add_error_root(self): } def test_get_nested_missing(self): - data = {} + data: dict[str, Any] = {} assert Meta(data).enter("field").raw() == {} assert Meta(data).enter("field").get() == {} assert list(Meta(data).enter("field").iter_errors()) == [] assert Meta(data).enter("field").get_event_errors() == [] def test_create_nested_missing(self): - data = {} + data: dict[str, Any] = {} meta = Meta(data) assert meta.enter("field").create() == {} assert data == {"field": {"": {}}} def test_merge_nested_missing(self): - data = {} + data: dict[str, Any] = {} meta = Meta(data) assert meta.enter("field").merge(Meta(other_meta)) == other_meta[""] assert data == {"field": other_meta} def test_add_error_nested_missing(self): - data = {} + data: dict[str, Any] = {} meta = Meta(data) meta.enter("field").add_error("additional", "changed") assert meta.enter("field").get() == {"err": ["additional"], "val": "changed"} @@ -199,7 +202,7 @@ def test_get_nested_index(self): assert list(Meta(data).enter(0).iter_errors()) == [["existing", {}]] def test_create_nested_index(self): - data = {} + data: dict[str, Any] = {} meta = Meta(data) assert meta.enter(0).create() == {} assert data == {"0": {"": {}}} diff --git a/tests/sentry/utils/test_outcomes.py b/tests/sentry/utils/test_outcomes.py index a18f31312ff9cd..27a6d7ab22cec3 100644 --- a/tests/sentry/utils/test_outcomes.py +++ b/tests/sentry/utils/test_outcomes.py @@ -1,3 +1,4 @@ +import types from unittest import mock import pytest @@ -8,14 +9,17 @@ @pytest.fixture(autouse=True) -def setup(monkeypatch): +def setup(): # Rely on the fact that the publisher is initialized lazily - with mock.patch.object(kafka_config, "get_kafka_producer_cluster_options"): - with mock.patch.object(outcomes, "KafkaPublisher"): + with mock.patch.object(kafka_config, "get_kafka_producer_cluster_options") as mck_get_options: + with mock.patch.object(outcomes, "KafkaPublisher") as mck_publisher: # Reset internals of the outcomes module with mock.patch.object(outcomes, "outcomes_publisher", None): with mock.patch.object(outcomes, "billing_publisher", None): - yield + yield types.SimpleNamespace( + mock_get_kafka_producer_cluster_options=mck_get_options, + mock_publisher=mck_publisher, + ) @pytest.mark.parametrize( @@ -53,7 +57,7 @@ def test_parse_outcome(name, outcome): assert Outcome.parse(name) == outcome -def test_track_outcome_default(): +def test_track_outcome_default(setup): """ Asserts an outcomes serialization 
roundtrip with defaults. @@ -74,13 +78,13 @@ def test_track_outcome_default(): reason="project_id", ) - cluster_args, _ = kafka_config.get_kafka_producer_cluster_options.call_args + cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args assert cluster_args == ( kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"], ) assert outcomes.outcomes_publisher - (topic_name, payload), _ = outcomes.outcomes_publisher.publish.call_args + (topic_name, payload), _ = setup.mock_publisher.return_value.publish.call_args assert topic_name == settings.KAFKA_OUTCOMES data = json.loads(payload) @@ -99,7 +103,7 @@ def test_track_outcome_default(): assert outcomes.billing_publisher is None -def test_track_outcome_billing(): +def test_track_outcome_billing(setup): """ Checks that outcomes are routed to the SHARED topic within the same cluster in default configuration. @@ -112,17 +116,17 @@ def test_track_outcome_billing(): outcome=Outcome.ACCEPTED, ) - cluster_args, _ = kafka_config.get_kafka_producer_cluster_options.call_args + cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args assert cluster_args == (kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"],) assert outcomes.outcomes_publisher - (topic_name, _), _ = outcomes.outcomes_publisher.publish.call_args + (topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args assert topic_name == settings.KAFKA_OUTCOMES assert outcomes.billing_publisher is None -def test_track_outcome_billing_topic(): +def test_track_outcome_billing_topic(setup): """ Checks that outcomes are routed to the DEDICATED billing topic within the same cluster in default configuration. @@ -132,7 +136,7 @@ def test_track_outcome_billing_topic(): settings.KAFKA_TOPICS, { settings.KAFKA_OUTCOMES_BILLING: { - "cluster": settings.KAFKA_TOPICS[settings.KAFKA_OUTCOMES]["cluster"], + "cluster": kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"], } }, ): @@ -143,17 +147,19 @@ def test_track_outcome_billing_topic(): outcome=Outcome.ACCEPTED, ) - cluster_args, _ = kafka_config.get_kafka_producer_cluster_options.call_args - assert cluster_args == (settings.KAFKA_TOPICS[settings.KAFKA_OUTCOMES]["cluster"],) + cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args + assert cluster_args == ( + kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"], + ) assert outcomes.outcomes_publisher - (topic_name, _), _ = outcomes.outcomes_publisher.publish.call_args + (topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args assert topic_name == settings.KAFKA_OUTCOMES_BILLING assert outcomes.billing_publisher is None -def test_track_outcome_billing_cluster(settings): +def test_track_outcome_billing_cluster(settings, setup): """ Checks that outcomes are routed to the dedicated cluster and topic. 
""" @@ -168,11 +174,11 @@ def test_track_outcome_billing_cluster(settings): outcome=Outcome.ACCEPTED, ) - cluster_args, _ = kafka_config.get_kafka_producer_cluster_options.call_args + cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args assert cluster_args == ("different",) assert outcomes.billing_publisher - (topic_name, _), _ = outcomes.billing_publisher.publish.call_args + (topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args assert topic_name == settings.KAFKA_OUTCOMES_BILLING assert outcomes.outcomes_publisher is None diff --git a/tests/sentry/utils/test_safe.py b/tests/sentry/utils/test_safe.py index 0df53e41993ce2..9e07db535a45c3 100644 --- a/tests/sentry/utils/test_safe.py +++ b/tests/sentry/utils/test_safe.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import unittest from functools import partial +from typing import Any, MutableMapping from unittest.mock import Mock, patch import pytest @@ -48,7 +51,7 @@ def test_sorted_trim(self): def test_max_depth(self): trm = partial(trim, max_depth=2) - a = {"a": {"b": {"c": "d"}}} + a: dict[str, Any] = {"a": {"b": {"c": "d"}}} assert trm(a) == a a = {"a": {"b": {"c": "d"}}} @@ -109,9 +112,9 @@ class GetPathTest(unittest.TestCase): def test_get_none(self): assert get_path(None, "foo") is None assert get_path("foo", "foo") is None - assert get_path(42, "foo") is None - assert get_path(ValueError(), "foo") is None - assert get_path(True, "foo") is None + assert get_path(42, "foo") is None # type: ignore[arg-type] + assert get_path(ValueError(), "foo") is None # type: ignore[arg-type] + assert get_path(True, "foo") is None # type: ignore[arg-type] def test_get_path_dict(self): assert get_path({}, "a") is None @@ -169,7 +172,7 @@ def test_set_none(self): assert not set_path(True, "foo", value=42) def test_set_dict(self): - data = {} + data: MutableMapping[str, Any] = {} assert set_path(data, "a", value=42) assert data == {"a": 42} diff --git a/tests/sentry/utils/test_services.py b/tests/sentry/utils/test_services.py index 227412a316a829..97bb9987c3ced5 100644 --- a/tests/sentry/utils/test_services.py +++ b/tests/sentry/utils/test_services.py @@ -1,13 +1,15 @@ +from __future__ import annotations + from abc import ABC, abstractmethod from unittest.mock import Mock import pytest from sentry.utils.concurrent import SynchronousExecutor -from sentry.utils.services import Delegator +from sentry.utils.services import Delegator, Service -class Operation(ABC): +class Operation(Service, ABC): @abstractmethod def apply(self, x: int, y: int) -> int: raise NotImplementedError @@ -29,38 +31,43 @@ def apply(self, x: int, y: int) -> int: @pytest.fixture -def delegator() -> Delegator: +def delegator_fixture() -> tuple[Delegator, Mock, Mock]: executor = SynchronousExecutor() - return Delegator( + selector = Mock() + callback = Mock() + delegator = Delegator( Operation, {"add": (Add(), executor), "sub": (Sub(), executor), "error": (Error(), executor)}, - Mock(), - Mock(), + selector, + callback, ) + return (delegator, selector, callback) -def test_single_backend(delegator: Delegator) -> None: - delegator.selector.return_value = ["add"] +def test_single_backend(delegator_fixture: tuple[Delegator, Mock, Mock]) -> None: + (delegator, selector, callback) = delegator_fixture + selector.return_value = ["add"] assert delegator.apply(1, 1) == 2 - (_, method, kwargs), _ = delegator.selector.call_args + (_, method, kwargs), _ = selector.call_args assert method == "apply" assert kwargs.items() >= {"x": 1, "y": 1}.items() - (_, 
method, kwargs, backends, futures), _ = delegator.callback.call_args + (_, method, kwargs, backends, futures), _ = callback.call_args assert method == "apply" assert kwargs.items() >= {"x": 1, "y": 1}.items() assert backends == ["add"] assert [f.result() for f in futures] == [2] -def test_multiple_backends(delegator: Delegator) -> None: - delegator.selector.return_value = ["add", "sub", "error"] +def test_multiple_backends(delegator_fixture: tuple[Delegator, Mock, Mock]) -> None: + (delegator, selector, callback) = delegator_fixture + selector.return_value = ["add", "sub", "error"] assert delegator.apply(1, 1) == 2 - (_, _, _, backends, futures), _ = delegator.callback.call_args + (_, _, _, backends, futures), _ = callback.call_args results = dict(zip(backends, futures)) assert results["add"].result() == 2 @@ -69,21 +76,23 @@ def test_multiple_backends(delegator: Delegator) -> None: results["error"].result() -def test_invalid_primary_backend(delegator: Delegator) -> None: - delegator.selector.return_value = ["invalid", "add"] +def test_invalid_primary_backend(delegator_fixture: tuple[Delegator, Mock, Mock]) -> None: + (delegator, selector, callback) = delegator_fixture + selector.return_value = ["invalid", "add"] with pytest.raises(Delegator.InvalidBackend): assert delegator.apply(1, 1) - assert delegator.callback.called is False + assert callback.called is False -def test_invalid_secondary_backend(delegator: Delegator) -> None: - delegator.selector.return_value = ["add", "invalid"] +def test_invalid_secondary_backend(delegator_fixture: tuple[Delegator, Mock, Mock]) -> None: + (delegator, selector, callback) = delegator_fixture + selector.return_value = ["add", "invalid"] assert delegator.apply(1, 1) == 2 - (_, _, _, backends, futures), _ = delegator.callback.call_args + (_, _, _, backends, futures), _ = callback.call_args assert backends == ["add", "invalid"] primary_future, secondary_future = futures diff --git a/tests/sentry/utils/test_time_window.py b/tests/sentry/utils/test_time_window.py index 5e441dd3e7c0b1..f971673e86b93d 100644 --- a/tests/sentry/utils/test_time_window.py +++ b/tests/sentry/utils/test_time_window.py @@ -19,37 +19,31 @@ def test_time_window_duration(start, end, expected): union_time_windows_test_cases = [ - pytest.param( + ( [(0, 1), (2, 3), (4, 5), (6, 7), (8, 9)], [(0, 1), (2, 3), (4, 5), (6, 7), (8, 9)], - id="non_overlapping", - ), - pytest.param([(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)], [(0, 5)], id="all_edges_overlapping"), - pytest.param( - [(0, 2), (1, 3), (2, 4), (3, 5), (4, 6)], [(0, 6)], id="all_intervals_overlapping" - ), - pytest.param([(0, 1), (1, 2), (3, 4), (4, 5)], [(0, 2), (3, 5)], id="some_edges_overlapping"), - pytest.param( - [(0, 2), (1, 3), (4, 6), (5, 7)], [(0, 3), (4, 7)], id="some_intervals_overlapping" - ), - pytest.param( - [(0, 1), (1, 2), (3, 5), (4, 6), (6, 7)], [(0, 2), (3, 7)], id="mixed_of_different_overlaps" + "non_overlapping", ), + ([(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)], [(0, 5)], "all_edges_overlapping"), + ([(0, 2), (1, 3), (2, 4), (3, 5), (4, 6)], [(0, 6)], "all_intervals_overlapping"), + ([(0, 1), (1, 2), (3, 4), (4, 5)], [(0, 2), (3, 5)], "some_edges_overlapping"), + ([(0, 2), (1, 3), (4, 6), (5, 7)], [(0, 3), (4, 7)], "some_intervals_overlapping"), + ([(0, 1), (1, 2), (3, 5), (4, 6), (6, 7)], [(0, 2), (3, 7)], "mixed_of_different_overlaps"), ] @pytest.mark.parametrize( "time_windows, expected", - union_time_windows_test_cases + [pytest.param(*case, id=test_id) for *case, test_id in union_time_windows_test_cases] + [ # 
the order of the time windows shouldn't matter, # give it a shuffle to generate additional test cases pytest.param( - random.sample(test_case.values[0], len(test_case.values[0])), - test_case.values[1], - id=f"shuffled_{test_case.id}", + random.sample(inputs, len(inputs)), + outputs, + id=f"shuffled_{test_case_id}", ) - for test_case in union_time_windows_test_cases + for inputs, outputs, test_case_id in union_time_windows_test_cases ], ) def test_union_time_windows(time_windows, expected): @@ -59,35 +53,33 @@ def test_union_time_windows(time_windows, expected): remove_time_windows_test_cases = [ - pytest.param( - (4, 5), [(0, 1), (1, 2), (3, 4), (6, 7), (7, 8), (8, 9)], [(4, 5)], id="non_overlapping" - ), - pytest.param((0, 1), [(0, 1)], [], id="is_source_time_window"), - pytest.param((1, 3), [(0, 2), (2, 4)], [], id="covers_source_time_window"), - pytest.param((4, 7), [(3, 5), (6, 8)], [(5, 6)], id="leaves_source_time_window_center"), - pytest.param((4, 7), [(5, 6)], [(4, 5), (6, 7)], id="leaves_source_time_window_ends"), - pytest.param( + ((4, 5), [(0, 1), (1, 2), (3, 4), (6, 7), (7, 8), (8, 9)], [(4, 5)], "non_overlapping"), + ((0, 1), [(0, 1)], [], "is_source_time_window"), + ((1, 3), [(0, 2), (2, 4)], [], "covers_source_time_window"), + ((4, 7), [(3, 5), (6, 8)], [(5, 6)], "leaves_source_time_window_center"), + ((4, 7), [(5, 6)], [(4, 5), (6, 7)], "leaves_source_time_window_ends"), + ( (2, 7), [(0, 3), (1, 4), (5, 8), (6, 9)], [(4, 5)], - id="covers_source_time_window_ends_multiple_times", + "covers_source_time_window_ends_multiple_times", ), ] @pytest.mark.parametrize( "source_time_window, time_windows, expected", - remove_time_windows_test_cases + [pytest.param(*case, id=test_id) for *case, test_id in remove_time_windows_test_cases] + [ # the order of the time windows shouldn't matter, # give it a shuffle to generate additional test cases pytest.param( - test_case.values[0], - random.sample(test_case.values[1], len(test_case.values[1])), - test_case.values[2], - id=f"shuffled_{test_case.id}", + src, + random.sample(inputs, len(inputs)), + outputs, + id=f"shuffled_{test_case_id}", ) - for test_case in remove_time_windows_test_cases + for src, inputs, outputs, test_case_id in remove_time_windows_test_cases ], ) def test_remove_time_windows(source_time_window, time_windows, expected): From e8a472824442a0063450c41c24f0a51168496e6a Mon Sep 17 00:00:00 2001 From: Cathy Teng <70817427+cathteng@users.noreply.github.com> Date: Wed, 2 Aug 2023 11:33:48 -0700 Subject: [PATCH 27/44] fix(github-comments): specify comment task minute (#54042) --- src/sentry/conf/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 6858eb505d0df9..dd33ffe78e2a87 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1110,7 +1110,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: }, "github_comment_reactions": { "task": "sentry.tasks.integrations.github_comment_reactions", - "schedule": crontab(hour=16), # 9:00 PDT, 12:00 EDT, 16:00 UTC + "schedule": crontab(minute=0, hour=16), # 9:00 PDT, 12:00 EDT, 16:00 UTC }, "poll_recap_servers": { "task": "sentry.tasks.poll_recap_servers", From cd8005754a67dd6503d9a61d82f80bf8cf7b0b8b Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 2 Aug 2023 11:43:22 -0700 Subject: [PATCH 28/44] fix(grouping): Restore default value for `Variant.type` (#53990) In https://github.com/getsentry/sentry/pull/53787, the default value for `Variant.type` was accidentally removed, causing 
`AttributeError`s. This restores it. --- src/sentry/grouping/variants.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/sentry/grouping/variants.py b/src/sentry/grouping/variants.py index dc84b22e1c1673..2c08d75fee0291 100644 --- a/src/sentry/grouping/variants.py +++ b/src/sentry/grouping/variants.py @@ -1,9 +1,11 @@ +from __future__ import annotations + from sentry.grouping.utils import hash_from_values, is_default_fingerprint_var class BaseVariant: # The type of the variant that is reported to the UI. - type: str + type: str | None = None # This is true if `get_hash` does not return `None`. contributes = True From f04fd28a1c9c249130880fff0a937dd57ca7dcf9 Mon Sep 17 00:00:00 2001 From: Julia Hoge Date: Wed, 2 Aug 2023 11:46:00 -0700 Subject: [PATCH 29/44] style(most-helpful-event): Style and text updates (#53943) Closes https://github.com/getsentry/sentry/issues/53738 ## Before Screenshot 2023-08-01 at 10 05 08 AM ## After Screenshot 2023-08-01 at 9 39 47 AM ## Before Screenshot 2023-08-01 at 1 20 43 PM ## After Screenshot 2023-08-01 at 1 21 41 PM --------- Co-authored-by: Malachi Willey --- static/app/icons/iconJson.tsx | 16 ++++ static/app/icons/index.tsx | 1 + .../issueDetails/groupEventCarousel.spec.tsx | 74 ++++++++++----- .../views/issueDetails/groupEventCarousel.tsx | 95 +++++++++++++++---- 4 files changed, 144 insertions(+), 42 deletions(-) create mode 100644 static/app/icons/iconJson.tsx diff --git a/static/app/icons/iconJson.tsx b/static/app/icons/iconJson.tsx new file mode 100644 index 00000000000000..cd7af518944a10 --- /dev/null +++ b/static/app/icons/iconJson.tsx @@ -0,0 +1,16 @@ +import {forwardRef} from 'react'; + +import {SvgIcon, SVGIconProps} from './svgIcon'; + +const IconJson = forwardRef((props, ref) => { + return ( + + + + + ); +}); + +IconJson.displayName = 'IconJson'; + +export {IconJson}; diff --git a/static/app/icons/index.tsx b/static/app/icons/index.tsx index 8257af3e4e0109..ea604779cbe6a5 100644 --- a/static/app/icons/index.tsx +++ b/static/app/icons/index.tsx @@ -49,6 +49,7 @@ export {IconInfo} from './iconInfo'; export {IconInput} from './iconInput'; export {IconIssues} from './iconIssues'; export {IconJira} from './iconJira'; +export {IconJson} from './iconJson'; export {IconLab} from './iconLab'; export {IconLaptop} from './iconLaptop'; export {IconLightning} from './iconLightning'; diff --git a/static/app/views/issueDetails/groupEventCarousel.spec.tsx b/static/app/views/issueDetails/groupEventCarousel.spec.tsx index a723ee174f5dad..dc1d7cb819550b 100644 --- a/static/app/views/issueDetails/groupEventCarousel.spec.tsx +++ b/static/app/views/issueDetails/groupEventCarousel.spec.tsx @@ -33,41 +33,63 @@ describe('GroupEventCarousel', () => { window.open = jest.fn(); }); - it('can use event dropdown to navigate events', async () => { - // Because it isn't rendered on smaller screens - jest.spyOn(useMedia, 'default').mockReturnValue(true); - - render(, { - organization: TestStubs.Organization({ - features: [ - 'issue-details-most-helpful-event', - 'issue-details-most-helpful-event-ui', - ], - }), + describe('recommended event ui', () => { + const orgWithRecommendedEvent = TestStubs.Organization({ + features: [ + 'issue-details-most-helpful-event', + 'issue-details-most-helpful-event-ui', + ], }); - await userEvent.click(screen.getByRole('button', {name: /recommended event/i})); - await userEvent.click(screen.getByRole('option', {name: /oldest event/i})); + it('can navigate to the oldest event', async () => { + jest.spyOn(useMedia, 
'default').mockReturnValue(true); + + render(, { + organization: orgWithRecommendedEvent, + }); + + await userEvent.click(screen.getByRole('button', {name: /recommended/i})); + await userEvent.click(screen.getByRole('option', {name: /oldest/i})); - expect(browserHistory.push).toHaveBeenCalledWith({ - pathname: '/organizations/org-slug/issues/group-id/events/oldest/', - query: {referrer: 'oldest-event'}, + expect(browserHistory.push).toHaveBeenCalledWith({ + pathname: '/organizations/org-slug/issues/group-id/events/oldest/', + query: {referrer: 'oldest-event'}, + }); }); - await userEvent.click(screen.getByRole('button', {name: /oldest event/i})); - await userEvent.click(screen.getByRole('option', {name: /latest event/i})); + it('can navigate to the latest event', async () => { + jest.spyOn(useMedia, 'default').mockReturnValue(true); - expect(browserHistory.push).toHaveBeenCalledWith({ - pathname: '/organizations/org-slug/issues/group-id/events/oldest/', - query: {referrer: 'oldest-event'}, + render(, { + organization: orgWithRecommendedEvent, + }); + + await userEvent.click(screen.getByRole('button', {name: /recommended/i})); + await userEvent.click(screen.getByRole('option', {name: /latest/i})); + + expect(browserHistory.push).toHaveBeenCalledWith({ + pathname: '/organizations/org-slug/issues/group-id/events/latest/', + query: {referrer: 'latest-event'}, + }); }); - await userEvent.click(screen.getByRole('button', {name: /latest event/i})); - await userEvent.click(screen.getByRole('option', {name: /recommended event/i})); + it('can navigate to the recommended event', async () => { + jest.spyOn(useMedia, 'default').mockReturnValue(true); + + render(, { + organization: orgWithRecommendedEvent, + router: { + params: {eventId: 'latest'}, + }, + }); + + await userEvent.click(screen.getByRole('button', {name: /latest/i})); + await userEvent.click(screen.getByRole('option', {name: /recommended/i})); - expect(browserHistory.push).toHaveBeenCalledWith({ - pathname: '/organizations/org-slug/issues/group-id/events/recommended/', - query: {referrer: 'recommended-event'}, + expect(browserHistory.push).toHaveBeenCalledWith({ + pathname: '/organizations/org-slug/issues/group-id/events/recommended/', + query: {referrer: 'recommended-event'}, + }); }); }); diff --git a/static/app/views/issueDetails/groupEventCarousel.tsx b/static/app/views/issueDetails/groupEventCarousel.tsx index 4ff4be4871f6f5..4a54c662d9c9aa 100644 --- a/static/app/views/issueDetails/groupEventCarousel.tsx +++ b/static/app/views/issueDetails/groupEventCarousel.tsx @@ -8,11 +8,15 @@ import {Button, ButtonProps} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import DateTime from 'sentry/components/dateTime'; import {DropdownMenu} from 'sentry/components/dropdownMenu'; +import FeatureBadge from 'sentry/components/featureBadge'; +import TimeSince from 'sentry/components/timeSince'; import {Tooltip} from 'sentry/components/tooltip'; import { IconChevron, IconCopy, IconEllipsis, + IconJson, + IconLink, IconNext, IconOpen, IconPrevious, @@ -46,6 +50,11 @@ type GroupEventCarouselProps = { projectSlug: string; }; +type GroupEventNavigationProps = { + group: Group; + relativeTime: string; +}; + type EventNavigationButtonProps = { disabled: boolean; group: Group; @@ -59,19 +68,13 @@ enum EventNavDropdownOption { RECOMMENDED = 'recommended', LATEST = 'latest', OLDEST = 'oldest', + CUSTOM = 'custom', ALL = 'all', } const BUTTON_SIZE = 'sm'; const BUTTON_ICON_SIZE = 'sm'; -const 
EVENT_NAV_DROPDOWN_OPTIONS = [ - {value: EventNavDropdownOption.RECOMMENDED, label: 'Recommended Event'}, - {value: EventNavDropdownOption.LATEST, label: 'Latest Event'}, - {value: EventNavDropdownOption.OLDEST, label: 'Oldest Event'}, - {options: [{value: EventNavDropdownOption.ALL, label: 'View All Events'}]}, -]; - const makeBaseEventsPath = ({ organization, group, @@ -112,7 +115,7 @@ function EventNavigationButton({ ); } -function EventNavigationDropdown({group}: {group: Group}) { +function EventNavigationDropdown({group, relativeTime}: GroupEventNavigationProps) { const location = useLocation(); const params = useParams<{eventId?: string}>(); const theme = useTheme(); @@ -141,13 +144,54 @@ function EventNavigationDropdown({group}: {group: Group}) { }; const selectedValue = getSelectedOption(); + const eventNavDropdownOptions = [ + { + value: EventNavDropdownOption.RECOMMENDED, + label: ( +
+ {t('Recommended')} + +
+ ), + textValue: t('Recommended'), + details: t('Event with the most context'), + }, + { + value: EventNavDropdownOption.LATEST, + label: t('Latest'), + details: t('Last seen event in this issue'), + }, + { + value: EventNavDropdownOption.OLDEST, + label: t('Oldest'), + details: t('First seen event in this issue'), + }, + ...(!selectedValue + ? [ + { + value: EventNavDropdownOption.CUSTOM, + label: t('Custom Selection'), + }, + ] + : []), + { + options: [{value: EventNavDropdownOption.ALL, label: 'View All Events'}], + }, + ]; return ( + ) : selectedValue === EventNavDropdownOption.RECOMMENDED ? ( + t('Recommended') + ) : undefined + } + menuWidth={232} onChange={selectedOption => { switch (selectedOption.value) { case EventNavDropdownOption.RECOMMENDED: @@ -337,20 +381,39 @@ export function GroupEventCarousel({event, group, projectSlug}: GroupEventCarous ]} /> {xlargeViewport && ( - )} {xlargeViewport && ( )} - + {!isHelpfulEventUiEnabled && ( Date: Wed, 2 Aug 2023 14:53:23 -0400 Subject: [PATCH 30/44] fix(starfish): Database module chart improvements (#54030) - Increase chart height. 100 and 140 is very squashy, let's give them some breathing room - Increase metrics fidelity! --- static/app/components/charts/utils.tsx | 28 +++++++++++++++---- .../starfish/views/spanSummaryPage/index.tsx | 8 ++++-- .../starfish/views/spans/spanTimeCharts.tsx | 8 ++++-- 3 files changed, 32 insertions(+), 12 deletions(-) diff --git a/static/app/components/charts/utils.tsx b/static/app/components/charts/utils.tsx index affdd961dc4c4b..3a5ae7c3f6d198 100644 --- a/static/app/components/charts/utils.tsx +++ b/static/app/components/charts/utils.tsx @@ -78,7 +78,7 @@ export function getInterval(datetimeObj: DateTimeObject, fidelity: Fidelity = 'm return '4h'; } if (fidelity === 'metrics') { - return '1d'; + return '12h'; } return '1d'; } @@ -91,13 +91,13 @@ export function getInterval(datetimeObj: DateTimeObject, fidelity: Fidelity = 'm return '1h'; } if (fidelity === 'metrics') { - return '1h'; + return '4h'; } return '12h'; } if (diffInMinutes > TWENTY_FOUR_HOURS) { - // Greater than 24 hours + // Between 24 hours and 14 days if (fidelity === 'high') { return '30m'; } @@ -105,13 +105,29 @@ export function getInterval(datetimeObj: DateTimeObject, fidelity: Fidelity = 'm return '1h'; } if (fidelity === 'metrics') { - return '1h'; + return '30m'; } return '6h'; } + if (diffInMinutes > SIX_HOURS) { + // Between six hours and 24 hours + if (fidelity === 'high') { + return '5m'; + } + + if (fidelity === 'medium') { + return '15m'; + } + + if (fidelity === 'metrics') { + return '5m'; + } + return '1h'; + } + if (diffInMinutes > ONE_HOUR) { - // Between 1 hour and 24 hours + // Between 1 hour and 6 hours if (fidelity === 'high') { return '5m'; } @@ -119,7 +135,7 @@ export function getInterval(datetimeObj: DateTimeObject, fidelity: Fidelity = 'm return '15m'; } if (fidelity === 'metrics') { - return '1h'; + return '1m'; } return '1h'; } diff --git a/static/app/views/starfish/views/spanSummaryPage/index.tsx b/static/app/views/starfish/views/spanSummaryPage/index.tsx index 711b1d9dcedc55..6e5f1835d1db22 100644 --- a/static/app/views/starfish/views/spanSummaryPage/index.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/index.tsx @@ -54,6 +54,8 @@ const DEFAULT_SORT: Sort = { field: 'time_spent_percentage(local)', }; +const CHART_HEIGHT = 160; + type Props = { location: Location; } & RouteComponentProps<{groupId: string}, {transaction: string}>; @@ -246,7 +248,7 @@ function SpanSummaryPage({params, location}: 
Props) { title={getThroughputChartTitle(span?.[SpanMetricsFields.SPAN_OP])} > Date: Wed, 2 Aug 2023 12:03:04 -0700 Subject: [PATCH 31/44] fix(integrations) removing redis watch and changing key to a daily redis hash key (#54001) Co-authored-by: Gilbert Szeto --- src/sentry/integrations/request_buffer.py | 163 +++++++----------- .../sentry/integrations/slack/test_disable.py | 22 ++- 2 files changed, 86 insertions(+), 99 deletions(-) diff --git a/src/sentry/integrations/request_buffer.py b/src/sentry/integrations/request_buffer.py index 4b79bfab8b52c3..bf337de0dc3d58 100644 --- a/src/sentry/integrations/request_buffer.py +++ b/src/sentry/integrations/request_buffer.py @@ -1,14 +1,15 @@ -from datetime import datetime +from datetime import datetime, timedelta from django.conf import settings -from redis.exceptions import WatchError -from sentry.utils import json, redis +from sentry.utils import redis BUFFER_SIZE = 30 # 30 days KEY_EXPIRY = 60 * 60 * 24 * 30 # 30 days -IS_BROKEN_RANGE = 7 # 7 days +BROKEN_RANGE_DAYS = 7 # 7 days + +VALID_KEYS = ["success", "error", "fatal"] class IntegrationRequestBuffer: @@ -17,126 +18,96 @@ class IntegrationRequestBuffer: This should store the aggregate counts of each type for last 30 days for each integration """ - def __init__(self, key): - self.integrationkey = key - + def __init__(self, key, expiration_seconds=KEY_EXPIRY): cluster_id = settings.SENTRY_INTEGRATION_ERROR_LOG_REDIS_CLUSTER self.client = redis.redis_clusters.get(cluster_id) + self.integration_key = key + self.key_expiration_seconds = expiration_seconds - def _convert_obj_to_dict(self, redis_object): - """ - Convert the request string stored in Redis to a python dict - """ - - return json.loads(redis_object) - - def _get_all_from_buffer(self, buffer_key): - """ - Get the list at the buffer key. - """ - - return self.client.lrange(buffer_key, 0, BUFFER_SIZE - 1) - - def _get_broken_range_from_buffer(self, buffer_key): - """ - Get the list at the buffer key in the broken range. - """ + def record_error(self): + self._add("error") - return self.client.lrange(buffer_key, 0, IS_BROKEN_RANGE - 1) + def record_success(self): + self._add("success") - def _get(self): - """ - Returns the list of daily aggregate error counts. 
- """ - return [ - self._convert_obj_to_dict(obj) - for obj in self._get_broken_range_from_buffer(self.integrationkey) - ] + def record_fatal(self): + self._add("fatal") def is_integration_broken(self): """ Integration is broken if we have 7 consecutive days of errors and no successes OR have a fatal error """ - items = self._get() + broken_range_days_counts = self._get_broken_range_from_buffer() - data = [ - datetime.strptime(item.get("date"), "%Y-%m-%d").date() - for item in items - if item.get("fatal_count", 0) > 0 and item.get("date") - ] + days_fatal = [] + days_error = [] - if len(data) > 0: - return True + for day_count in broken_range_days_counts: + if int(day_count.get("fatal_count", 0)) > 0: + days_fatal.append(day_count) + elif ( + int(day_count.get("error_count", 0)) > 0 + and int(day_count.get("success_count", 0)) == 0 + ): + days_error.append(day_count) - data = [ - datetime.strptime(item.get("date"), "%Y-%m-%d").date() - for item in items - if item.get("error_count", 0) > 0 - and item.get("success_count", 0) == 0 - and item.get("date") - ] + if len(days_fatal) > 0: + return True - if not len(data): + if not len(days_error): return False - if len(data) < IS_BROKEN_RANGE: + if len(days_error) < BROKEN_RANGE_DAYS: return False return True - def add(self, count: str): - VALID_KEYS = ["success", "error", "fatal"] + def _add(self, count: str): if count not in VALID_KEYS: raise Exception("Requires a valid key param.") - other_count1, other_count2 = list(set(VALID_KEYS).difference([count]))[0:2] now = datetime.now().strftime("%Y-%m-%d") + buffer_key = f"{self.integration_key}:{now}" - buffer_key = self.integrationkey pipe = self.client.pipeline() + pipe.hincrby(buffer_key, count + "_count", 1) + pipe.expire(buffer_key, self.key_expiration_seconds) + pipe.execute() - while True: - try: - pipe.watch(buffer_key) - recent_item_array = pipe.lrange(buffer_key, 0, 1) # get first element from array - pipe.multi() - if len(recent_item_array): - recent_item = self._convert_obj_to_dict(recent_item_array[0]) - if recent_item.get("date") == now: - recent_item[f"{count}_count"] += 1 - pipe.lset(buffer_key, 0, json.dumps(recent_item)) - else: - data = { - "date": now, - f"{count}_count": 1, - f"{other_count1}_count": 0, - f"{other_count2}_count": 0, - } - pipe.lpush(buffer_key, json.dumps(data)) - - else: - data = { - "date": now, - f"{count}_count": 1, - f"{other_count1}_count": 0, - f"{other_count2}_count": 0, - } - pipe.lpush(buffer_key, json.dumps(data)) - pipe.ltrim(buffer_key, 0, BUFFER_SIZE - 1) - pipe.expire(buffer_key, KEY_EXPIRY) - pipe.execute() - break - except WatchError: - continue - finally: - pipe.reset() + def _get_all_from_buffer(self): + """ + Get the list at the buffer key. + """ - def record_error(self): - self.add("error") + now = datetime.now() + all_range = [ + f"{self.integration_key}:{(now - timedelta(days=i)).strftime('%Y-%m-%d')}" + for i in range(BUFFER_SIZE) + ] - def record_success(self): - self.add("success") + return self._get_range_buffers(all_range) - def record_fatal(self): - self.add("fatal") + def _get_broken_range_from_buffer(self): + """ + Get the list at the buffer key in the broken range. 
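# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of this patch): the storage
# layout the rewritten IntegrationRequestBuffer uses. Each calendar day gets
# its own Redis hash, keyed "<integration_key>:<YYYY-MM-DD>", written with
# HINCRBY and expired after KEY_EXPIRY (30 days). The integration key value
# below is a made-up example.
#
#   "discord:123:2023-08-02"  ->  {"error_count": "4"}
#   "discord:123:2023-08-01"  ->  {"error_count": "2", "success_count": "1"}
#
# Counts come back from Redis as strings, hence the int(...) casts in
# is_integration_broken(). A day with no activity has no key at all, so its
# HGETALL contributes an empty dict:
#
#   buf = IntegrationRequestBuffer("discord:123")
#   buf.record_error()
#   buf._get_broken_range_from_buffer()
#   # -> [{"error_count": "1"}, {}, {}, {}, {}, {}, {}]  (today first, 7 days)
# ---------------------------------------------------------------------------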
+ """ + + now = datetime.now() + broken_range_keys = [ + f"{self.integration_key}:{(now - timedelta(days=i)).strftime('%Y-%m-%d')}" + for i in range(BROKEN_RANGE_DAYS) + ] + + return self._get_range_buffers(broken_range_keys) + + def _get_range_buffers(self, keys): + pipe = self.client.pipeline() + ret = [] + for key in keys: + pipe.hgetall(key) + response = pipe.execute() + for item in response: + ret.append(item) + + return ret diff --git a/tests/sentry/integrations/slack/test_disable.py b/tests/sentry/integrations/slack/test_disable.py index a1a74876f7bd32..3b31999afcaf00 100644 --- a/tests/sentry/integrations/slack/test_disable.py +++ b/tests/sentry/integrations/slack/test_disable.py @@ -1,3 +1,4 @@ +import time from datetime import datetime, timedelta import pytest @@ -128,7 +129,7 @@ def test_error_integration(self): with pytest.raises(ApiError): client.post("/chat.postMessage", data=self.payload) buffer = IntegrationRequestBuffer(client._get_redis_key()) - assert (buffer._get()[0]["error_count"]) == 2 + assert int(buffer._get_all_from_buffer()[0]["error_count"]) == 2 assert buffer.is_integration_broken() is False @responses.activate @@ -199,9 +200,24 @@ def test_expiry(self): client = SlackClient(integration_id=self.integration.id) buffer = IntegrationRequestBuffer(client._get_redis_key()) now = datetime.now() - timedelta(hours=1) - for i in reversed(range(32)): + for i in reversed(range(30)): with freeze_time(now - timedelta(days=i)): buffer.record_error() + + buffer_expired = IntegrationRequestBuffer(client._get_redis_key(), 1) + with freeze_time(now - timedelta(days=30)): + buffer_expired.record_error() + with freeze_time(now - timedelta(days=31)): + buffer_expired.record_error() + with pytest.raises(ApiError): client.post("/chat.postMessage", data=self.payload) - assert len(buffer._get_all_from_buffer(buffer.integrationkey)) == 30 + time.sleep(1) + resp = buffer._get_range_buffers( + [ + f"{client._get_redis_key()}:{(now - timedelta(days=i)).strftime('%Y-%m-%d')}" + for i in range(32) + ] + ) + assert len(resp) == 32 + assert len([item for item in resp if item]) == 30 From d178af71c12de22293ce1798cf09075199465e1b Mon Sep 17 00:00:00 2001 From: Spencer Murray <62224025+spalmurray@users.noreply.github.com> Date: Wed, 2 Aug 2023 19:03:17 +0000 Subject: [PATCH 32/44] feat(discord): Implement issue alerts (#53785) --- src/sentry/api/helpers/group_index/update.py | 2 +- src/sentry/integrations/discord/__init__.py | 9 + .../integrations/discord/actions/__init__.py | 2 + .../integrations/discord/actions/form.py | 57 ++++ .../discord/actions/notification.py | 79 +++++ src/sentry/integrations/discord/client.py | 28 +- .../discord/message_builder/__init__.py | 2 + .../discord/message_builder/base/base.py | 20 +- .../base/component/__init__.py | 18 + .../message_builder/base/component/base.py | 3 - .../message_builder/base/component/button.py | 2 +- .../base/component/select_menu.py | 54 +++ .../message_builder/base/embed/base.py | 6 + .../discord/message_builder/issues.py | 148 ++++++++ .../integrations/discord/requests/base.py | 56 ++- .../integrations/discord/utils/__init__.py | 1 + .../integrations/discord/utils/channel.py | 42 +++ .../integrations/discord/webhooks/__init__.py | 8 +- .../integrations/discord/webhooks/base.py | 119 +------ .../integrations/discord/webhooks/command.py | 88 +++++ .../integrations/discord/webhooks/handler.py | 41 +++ .../discord/webhooks/message_component.py | 233 +++++++++++++ .../integrations/discord/webhooks/types.py | 13 + 
src/sentry/integrations/message_builder.py | 11 +- .../slack/actions/notification.py | 6 +- .../slack/message_builder/issues.py | 25 +- src/sentry/models/activity.py | 1 + .../notifications/notifications/base.py | 1 + .../test_project_rules_configuration.py | 8 +- .../message_builder/test_action_row.py | 62 ++++ .../discord/message_builder/test_builder.py | 144 ++++++++ .../discord/message_builder/test_button.py | 40 +++ .../discord/message_builder/test_embed.py | 97 ++++++ .../discord/message_builder/test_flags.py | 36 ++ .../message_builder/test_select_menu.py | 61 ++++ .../integrations/discord/test_integration.py | 6 +- .../integrations/discord/test_issue_alert.py | 283 ++++++++++++++++ .../discord/test_message_builder.py | 319 ------------------ .../integrations/discord/test_requests.py | 17 +- .../sentry/integrations/discord/test_utils.py | 43 +++ .../test_command.py} | 74 +--- .../discord/webhooks/test_endpoint.py | 80 +++++ .../webhooks/test_message_component.py | 210 ++++++++++++ 43 files changed, 1969 insertions(+), 586 deletions(-) create mode 100644 src/sentry/integrations/discord/actions/__init__.py create mode 100644 src/sentry/integrations/discord/actions/form.py create mode 100644 src/sentry/integrations/discord/actions/notification.py create mode 100644 src/sentry/integrations/discord/message_builder/base/component/select_menu.py create mode 100644 src/sentry/integrations/discord/message_builder/issues.py create mode 100644 src/sentry/integrations/discord/utils/channel.py create mode 100644 src/sentry/integrations/discord/webhooks/command.py create mode 100644 src/sentry/integrations/discord/webhooks/handler.py create mode 100644 src/sentry/integrations/discord/webhooks/message_component.py create mode 100644 src/sentry/integrations/discord/webhooks/types.py create mode 100644 tests/sentry/integrations/discord/message_builder/test_action_row.py create mode 100644 tests/sentry/integrations/discord/message_builder/test_builder.py create mode 100644 tests/sentry/integrations/discord/message_builder/test_button.py create mode 100644 tests/sentry/integrations/discord/message_builder/test_embed.py create mode 100644 tests/sentry/integrations/discord/message_builder/test_flags.py create mode 100644 tests/sentry/integrations/discord/message_builder/test_select_menu.py create mode 100644 tests/sentry/integrations/discord/test_issue_alert.py delete mode 100644 tests/sentry/integrations/discord/test_message_builder.py rename tests/sentry/integrations/discord/{test_webhook.py => webhooks/test_command.py} (76%) create mode 100644 tests/sentry/integrations/discord/webhooks/test_endpoint.py create mode 100644 tests/sentry/integrations/discord/webhooks/test_message_component.py diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py index c406f1416f3d95..0ebd570785e073 100644 --- a/src/sentry/api/helpers/group_index/update.py +++ b/src/sentry/api/helpers/group_index/update.py @@ -173,7 +173,7 @@ def get_current_release_version_of_group( def update_groups( request: Request, - group_ids: Sequence[Group], + group_ids: Sequence[int], projects: Sequence[Project], organization_id: int, search_fn: SearchFunction | None, diff --git a/src/sentry/integrations/discord/__init__.py b/src/sentry/integrations/discord/__init__.py index 984e00885a5e11..1a6baa7f850df2 100644 --- a/src/sentry/integrations/discord/__init__.py +++ b/src/sentry/integrations/discord/__init__.py @@ -1,5 +1,14 @@ +from sentry.rules import rules + +from .actions import * # noqa: 
F401,F403 +from .actions import DiscordNotifyServiceAction +from .client import * # noqa: F401,F403 from .commands import * # noqa: F401,F403 from .integration import * # noqa: F401,F403 +from .message_builder.base import * # noqa: F401,F403 +from .message_builder.issues import * # noqa: F401,F403 from .urls import * # noqa: F401,F403 from .utils import * # noqa: F401,F403 from .views import * # noqa: F401,F403 + +rules.add(DiscordNotifyServiceAction) # type: ignore diff --git a/src/sentry/integrations/discord/actions/__init__.py b/src/sentry/integrations/discord/actions/__init__.py new file mode 100644 index 00000000000000..79d2ad18c6d1d0 --- /dev/null +++ b/src/sentry/integrations/discord/actions/__init__.py @@ -0,0 +1,2 @@ +from .form import * # noqa: F401, F403 +from .notification import * # noqa: F401, F403 diff --git a/src/sentry/integrations/discord/actions/form.py b/src/sentry/integrations/discord/actions/form.py new file mode 100644 index 00000000000000..5f3d610aca4f56 --- /dev/null +++ b/src/sentry/integrations/discord/actions/form.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from typing import Any + +from django import forms +from django.core.exceptions import ValidationError +from django.forms.fields import ChoiceField + +from sentry.integrations.discord.utils.channel import validate_channel_id +from sentry.services.hybrid_cloud.integration import integration_service + + +class DiscordNotifyServiceForm(forms.Form): + server = forms.ChoiceField(choices=(), widget=forms.Select()) + channel_id = forms.CharField(widget=forms.TextInput()) + tags = forms.CharField(required=False, widget=forms.TextInput()) + + def __init__(self, *args: Any, **kwargs: Any) -> None: + server_list = [(i.id, i.name) for i in kwargs.pop("integrations")] + + super().__init__(*args, **kwargs) + + if server_list: + assert isinstance(self.fields["server"], ChoiceField) + self.fields["server"].initial = server_list[0][0] + self.fields["server"].choices = server_list + self.fields["server"].widget.choices = server_list + + def _format_discord_error_message(self, message: str) -> str: + return f"Discord: {message}" + + def clean(self) -> dict[str, object] | None: + cleaned_data: dict[str, object] = super().clean() or {} + channel_id = cleaned_data.get("channel_id") + server = cleaned_data.get("server") + integration = integration_service.get_integration(integration_id=server) + + if not server or not integration: + raise forms.ValidationError( + self._format_discord_error_message("Server is a required field."), + code="invalid", + ) + + if channel_id and isinstance(channel_id, str): + try: + validate_channel_id( + channel_id=channel_id, + guild_id=integration.external_id, + integration_id=integration.id, + ) + except ValidationError as e: + raise forms.ValidationError( + self._format_discord_error_message("; ".join(e.messages)), + code="invalid", + ) + + return cleaned_data diff --git a/src/sentry/integrations/discord/actions/notification.py b/src/sentry/integrations/discord/actions/notification.py new file mode 100644 index 00000000000000..e8017ced39796f --- /dev/null +++ b/src/sentry/integrations/discord/actions/notification.py @@ -0,0 +1,79 @@ +from typing import Any, Generator, Sequence + +from sentry.eventstore.models import GroupEvent +from sentry.integrations.discord.actions.form import DiscordNotifyServiceForm +from sentry.integrations.discord.client import DiscordClient +from sentry.integrations.discord.message_builder.issues import DiscordIssuesMessageBuilder +from sentry.rules.actions 
import IntegrationEventAction +from sentry.rules.base import CallbackFuture, EventState +from sentry.shared_integrations.exceptions.base import ApiError +from sentry.types.rules import RuleFuture +from sentry.utils import metrics + + +class DiscordNotifyServiceAction(IntegrationEventAction): + id = "sentry.integrations.discord.notify_action.DiscordNotifyServiceAction" + form_cls = DiscordNotifyServiceForm + label = "Send a notification to the {server} Discord server in the channel with ID: {channel_id} and show tags {tags} in the notification." + prompt = "Send a Discord notification" + provider = "discord" + integration_key = "server" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.form_fields = { + "server": { + "type": "choice", + "choices": [(i.id, i.name) for i in self.get_integrations()], + }, + "channel_id": {"type": "string", "placeholder": "e.g., 1134274732116676679"}, + "tags": {"type": "string", "placeholder": "e.g., environment,user,my_tag"}, + } + + def after(self, event: GroupEvent, state: EventState) -> Generator[CallbackFuture, None, None]: + channel_id = self.get_option("channel_id") + tags = set(self.get_tags_list()) + + integration = self.get_integration() + if not integration: + # Integration removed, but rule still active + return + + def send_notification(event: GroupEvent, futures: Sequence[RuleFuture]) -> None: + rules = [f.rule for f in futures] + message = DiscordIssuesMessageBuilder(event.group, event=event, tags=tags, rules=rules) + + client = DiscordClient(integration_id=integration.id) + try: + client.send_message(channel_id, message) + except ApiError as e: + self.logger.info( + "rule.fail.discord_post", + extra={ + "error": str(e), + "project_id": event.project_id, + "event_id": event.event_id, + "guild_id": integration.external_id, + "channel_id": channel_id, + }, + ) + + key = f"discord:{integration.id}:{channel_id}" + + metrics.incr("notifications.sent", instance="discord.notifications", skip_internal=False) + yield self.future(send_notification, key=key) + + def render_label(self) -> str: + tags = self.get_tags_list() + + return self.label.format( + server=self.get_integration_name(), + channel_id=self.get_option("channel_id"), + tags="[{}]".format(", ".join(tags)), + ) + + def get_tags_list(self) -> Sequence[str]: + return [s.strip() for s in self.get_option("tags", "").split(",")] + + def get_form_instance(self) -> Any: + return self.form_cls(self.data, integrations=self.get_integrations()) diff --git a/src/sentry/integrations/discord/client.py b/src/sentry/integrations/discord/client.py index 6a8053c2efd9c2..33cd6dc4656e43 100644 --- a/src/sentry/integrations/discord/client.py +++ b/src/sentry/integrations/discord/client.py @@ -1,13 +1,17 @@ from __future__ import annotations +# to avoid a circular import +import logging + from requests import PreparedRequest from sentry import options +from sentry.integrations.discord.message_builder.base.base import DiscordMessageBuilder from sentry.services.hybrid_cloud.util import control_silo_function from sentry.shared_integrations.client.proxy import IntegrationProxyClient, infer_org_integration from sentry.utils.json import JSONData -from .utils import logger +logger = logging.getLogger("sentry.integrations.discord") class DiscordClient(IntegrationProxyClient): @@ -21,7 +25,13 @@ class DiscordClient(IntegrationProxyClient): USERS_GUILD_URL = "/users/@me/guilds/{guild_id}" # 
https://discord.com/developers/docs/interactions/application-commands#get-global-application-commands - APPLICATION_COMMANDS = "/applications/{application_id}/commands" + APPLICATION_COMMANDS_URL = "/applications/{application_id}/commands" + + # https://discord.com/developers/docs/resources/channel#get-channel + CHANNEL_URL = "/channels/{channel_id}" + + # https://discord.com/developers/docs/resources/channel#create-message + MESSAGE_URL = "/channels/{channel_id}/messages" def __init__( self, @@ -58,6 +68,18 @@ def leave_guild(self, guild_id: str) -> None: def overwrite_application_commands(self, commands: list[object]) -> None: self.put( - self.APPLICATION_COMMANDS.format(application_id=self.application_id), + self.APPLICATION_COMMANDS_URL.format(application_id=self.application_id), data=commands, ) + + def get_channel(self, channel_id: str) -> object | None: + """ + Get a channel by id. + """ + return self.get(self.CHANNEL_URL.format(channel_id=channel_id)) + + def send_message(self, channel_id: str, message: DiscordMessageBuilder) -> None: + """ + Send a message to the specified channel. + """ + self.post(self.MESSAGE_URL.format(channel_id=channel_id), data=message.build()) diff --git a/src/sentry/integrations/discord/message_builder/__init__.py b/src/sentry/integrations/discord/message_builder/__init__.py index 019dfa3ffed4db..20e26d83c3e896 100644 --- a/src/sentry/integrations/discord/message_builder/__init__.py +++ b/src/sentry/integrations/discord/message_builder/__init__.py @@ -16,3 +16,5 @@ "Warning": "warning", "Critical": "fatal", } + +DISCORD_URL_FORMAT = "[{text}]({url})" diff --git a/src/sentry/integrations/discord/message_builder/base/base.py b/src/sentry/integrations/discord/message_builder/base/base.py index 9a28a1d9e6780e..02c2b2b28e3636 100644 --- a/src/sentry/integrations/discord/message_builder/base/base.py +++ b/src/sentry/integrations/discord/message_builder/base/base.py @@ -17,7 +17,7 @@ class DiscordMessageBuilder(AbstractMessageBuilder): def __init__( self, - content: str | None = None, + content: str = "", embeds: list[DiscordMessageEmbed] | None = None, components: list[DiscordMessageComponent] | None = None, flags: DiscordMessageFlags | None = None, @@ -37,7 +37,7 @@ def build(self) -> dict[str, object]: def _build( self, - content: str | None = None, + content: str = "", embeds: list[DiscordMessageEmbed] | None = None, components: list[DiscordMessageComponent] | None = None, flags: DiscordMessageFlags | None = None, @@ -46,17 +46,11 @@ def _build( Helper method for building arbitrary Discord messages. 
""" message: dict[str, object] = {} - - if content is not None: - message["content"] = content - - if embeds is not None: - message["embeds"] = [embed.build() for embed in embeds] - - if components is not None: - message["components"] = [component.build() for component in components] - + message["content"] = content + message["embeds"] = [] if embeds is None else [embed.build() for embed in embeds] + message["components"] = ( + [] if components is None else [component.build() for component in components] + ) if flags is not None: message["flags"] = flags.value - return message diff --git a/src/sentry/integrations/discord/message_builder/base/component/__init__.py b/src/sentry/integrations/discord/message_builder/base/component/__init__.py index b8d4601c2a04de..6bcf57ccbabe9d 100644 --- a/src/sentry/integrations/discord/message_builder/base/component/__init__.py +++ b/src/sentry/integrations/discord/message_builder/base/component/__init__.py @@ -1,3 +1,21 @@ from .action_row import DiscordActionRow # noqa: F401,F403 from .base import * # noqa: F401,F403 from .button import * # noqa: F401,F403 + + +class DiscordComponentCustomIds: + """ + Constant to track these ids across modules + + A custom_id must have ':{group_id}' appended to it, so we can track the + group across interactions. This may need to be changed once we extend to + other notification types. + """ + + ARCHIVE = "archive" + ASSIGN_DIALOG = "assign_dialog" + RESOLVE_DIALOG = "resolve_dialog" + ASSIGN = "assign" + RESOLVE = "resolve" + UNRESOLVE = "unresolve" + MARK_ONGOING = "mark_ongoing" diff --git a/src/sentry/integrations/discord/message_builder/base/component/base.py b/src/sentry/integrations/discord/message_builder/base/component/base.py index b939cdb7f9e60d..a83272e0902a87 100644 --- a/src/sentry/integrations/discord/message_builder/base/component/base.py +++ b/src/sentry/integrations/discord/message_builder/base/component/base.py @@ -8,9 +8,6 @@ class DiscordMessageComponent: Child classes should override the constructor with necessary fields for the component type. - This class' build method should work for children also, The one exception - to this being the ActionRow component, which needs to build its children. - https://discord.com/developers/docs/interactions/message-components#component-object """ diff --git a/src/sentry/integrations/discord/message_builder/base/component/button.py b/src/sentry/integrations/discord/message_builder/base/component/button.py index 9786c1ea2b3044..17794c5d85c64a 100644 --- a/src/sentry/integrations/discord/message_builder/base/component/button.py +++ b/src/sentry/integrations/discord/message_builder/base/component/button.py @@ -15,8 +15,8 @@ class DiscordButton(DiscordMessageComponent): # Note that buttons must be contained in an ActionRow! 
def __init__( self, - style: int, custom_id: str, + style: int = DiscordButtonStyle.SECONDARY, label: str | None = None, url: str | None = None, disabled: bool = False, diff --git a/src/sentry/integrations/discord/message_builder/base/component/select_menu.py b/src/sentry/integrations/discord/message_builder/base/component/select_menu.py new file mode 100644 index 00000000000000..afb31d00ae4f0d --- /dev/null +++ b/src/sentry/integrations/discord/message_builder/base/component/select_menu.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +from collections.abc import Iterable + +from sentry.integrations.discord.message_builder.base.component.base import DiscordMessageComponent + + +class DiscordSelectMenuOption: + """ + An option for a DiscordSelectMenu. + """ + + def __init__( + self, label: str, value: str, description: str | None = None, default: bool = False + ) -> None: + self.label = label + self.value = value + self.description = description + self.default = default + + def build(self) -> dict[str, object]: + attributes = vars(self).items() + return {k: v for k, v in attributes if v is not None} + + +class DiscordSelectMenu(DiscordMessageComponent): + """ + A Discord select menu message component. We are only implementing the + string select variation because the other types are not currently required. + + https://discord.com/developers/docs/interactions/message-components#select-menu-object + """ + + def __init__( + self, + custom_id: str, + options: Iterable[DiscordSelectMenuOption], + placeholder: str | None = None, + min_values: int = 1, + max_values: int = 1, + disabled: bool = False, + ) -> None: + super().__init__(type=3) + self.custom_id = custom_id + self.options = options + self.placeholder = placeholder + self.min_values = min_values + self.max_values = max_values + self.disabled = disabled + + def build(self) -> dict[str, object]: + select_menu = super().build() + select_menu["options"] = [o.build() for o in self.options] + return select_menu diff --git a/src/sentry/integrations/discord/message_builder/base/embed/base.py b/src/sentry/integrations/discord/message_builder/base/embed/base.py index 2ea456248563cb..64c98cac812a72 100644 --- a/src/sentry/integrations/discord/message_builder/base/embed/base.py +++ b/src/sentry/integrations/discord/message_builder/base/embed/base.py @@ -1,6 +1,7 @@ from __future__ import annotations from collections.abc import Iterable +from datetime import datetime from sentry.integrations.discord.message_builder.base.embed.field import DiscordMessageEmbedField from sentry.integrations.discord.message_builder.base.embed.footer import DiscordMessageEmbedFooter @@ -23,6 +24,7 @@ def __init__( color: int | None = None, footer: DiscordMessageEmbedFooter | None = None, fields: Iterable[DiscordMessageEmbedField] | None = None, + timestamp: datetime | None = None, ) -> None: self.title = title self.description = description @@ -30,6 +32,7 @@ def __init__( self.color = color self.footer = footer self.fields = fields + self.timestamp = timestamp def build(self) -> dict[str, object]: attributes = vars(self).items() @@ -41,4 +44,7 @@ def build(self) -> dict[str, object]: if self.fields is not None: embed["fields"] = [field.build() for field in self.fields] + if self.timestamp is not None: + embed["timestamp"] = self.timestamp.isoformat() + return embed diff --git a/src/sentry/integrations/discord/message_builder/issues.py b/src/sentry/integrations/discord/message_builder/issues.py new file mode 100644 index 00000000000000..40040a72769e36 --- 
/dev/null +++ b/src/sentry/integrations/discord/message_builder/issues.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +from sentry import tagstore +from sentry.eventstore.models import GroupEvent +from sentry.integrations.discord.message_builder import LEVEL_TO_COLOR +from sentry.integrations.discord.message_builder.base.base import DiscordMessageBuilder +from sentry.integrations.discord.message_builder.base.component.action_row import DiscordActionRow +from sentry.integrations.discord.message_builder.base.component.base import DiscordMessageComponent +from sentry.integrations.discord.message_builder.base.component.button import DiscordButton +from sentry.integrations.discord.message_builder.base.embed.base import DiscordMessageEmbed +from sentry.integrations.discord.message_builder.base.embed.field import DiscordMessageEmbedField +from sentry.integrations.discord.message_builder.base.embed.footer import DiscordMessageEmbedFooter +from sentry.integrations.message_builder import ( + build_attachment_text, + build_attachment_title, + build_footer, + get_title_link, +) +from sentry.models.group import Group, GroupStatus +from sentry.models.project import Project +from sentry.models.rule import Rule +from sentry.notifications.notifications.base import ProjectNotification +from sentry.types.integrations import ExternalProviders + +from ..message_builder.base.component import DiscordComponentCustomIds as CustomIds + + +class DiscordIssuesMessageBuilder(DiscordMessageBuilder): + def __init__( + self, + group: Group, + event: GroupEvent | None = None, + tags: set[str] | None = None, + rules: list[Rule] | None = None, + link_to_event: bool = False, + issue_details: bool = False, + notification: ProjectNotification | None = None, + ) -> None: + self.group = group + self.event = event + self.tags = tags + self.rules = rules + self.link_to_event = link_to_event + self.issue_details = issue_details + self.notification = notification + + def build(self) -> dict[str, object]: + project = Project.objects.get_from_cache(id=self.group.project_id) + event_for_tags = self.event or self.group.get_latest_event() + timestamp = ( + max(self.group.last_seen, self.event.datetime) if self.event else self.group.last_seen + ) + obj: Group | GroupEvent = self.event if self.event is not None else self.group + rule_id = None + if self.rules: + rule_id = self.rules[0].id + + embeds = [ + DiscordMessageEmbed( + title=build_attachment_title(obj), + description=build_attachment_text(self.group, self.event) or None, + url=get_title_link( + self.group, + self.event, + self.link_to_event, + self.issue_details, + self.notification, + ExternalProviders.DISCORD, + rule_id, + ), + color=LEVEL_TO_COLOR["info"], + # We can't embed urls in Discord embed footers. 
+ footer=DiscordMessageEmbedFooter( + build_footer(self.group, project, self.rules, "{text}") + ), + fields=build_tag_fields(event_for_tags, self.tags), + timestamp=timestamp, + ) + ] + + components = build_components(self.group, project) + + return self._build(embeds=embeds, components=components) + + +def build_tag_fields( + event_for_tags: GroupEvent | None, tags: set[str] | None = None +) -> list[DiscordMessageEmbedField]: + fields: list[DiscordMessageEmbedField] = [] + if tags: + event_tags = event_for_tags.tags if event_for_tags else [] + for key, value in event_tags: + std_key = tagstore.get_standardized_key(key) # type: ignore + if std_key not in tags: + continue + + labeled_value = tagstore.get_tag_value_label(key, value) # type: ignore + fields.append( + DiscordMessageEmbedField( + std_key, + labeled_value, + inline=True, + ) + ) + return fields + + +def build_components( + group: Group, + project: Project, +) -> list[DiscordMessageComponent]: + + archive_button = DiscordButton( + custom_id=f"{CustomIds.ARCHIVE}:{group.id}", + label="Archive", + ) + + resolve_button = DiscordButton( + custom_id=f"{CustomIds.RESOLVE_DIALOG}:{group.id}", label="Resolve..." + ) + + assign_button = DiscordButton( + custom_id=f"{CustomIds.ASSIGN_DIALOG}:{group.id}", label="Assign..." + ) + + status = group.get_status() + + if not project.flags.has_releases: + resolve_button = DiscordButton( + custom_id=f"{CustomIds.RESOLVE}:{group.id}", + label="Resolve", + ) + + if status == GroupStatus.RESOLVED: + resolve_button = DiscordButton( + custom_id=f"{CustomIds.UNRESOLVE}:{group.id}", + label="Unresolve", + ) + + if status == GroupStatus.IGNORED: + archive_button = DiscordButton( + custom_id=f"{CustomIds.MARK_ONGOING}:{group.id}", + label="Mark as Ongoing", + ) + + return [ + DiscordActionRow(components=[resolve_button, archive_button, assign_button]), + ] diff --git a/src/sentry/integrations/discord/requests/base.py b/src/sentry/integrations/discord/requests/base.py index 68fd2ca812ef36..32f3f373b0c982 100644 --- a/src/sentry/integrations/discord/requests/base.py +++ b/src/sentry/integrations/discord/requests/base.py @@ -29,6 +29,15 @@ class DiscordRequestError(Exception): class DiscordRequestTypes: PING = 1 COMMAND = 2 + MESSAGE_COMPONENT = 3 + MODAL_SUBMIT = 5 + + +class DiscordMessageComponentTypes: + ACTION_ROW = 1 + BUTTON = 2 + SELECT = 3 + TEXT_INPUT = 4 class DiscordRequest: @@ -44,7 +53,7 @@ class DiscordRequest: def __init__(self, request: Request): self.request = request self._integration: RpcIntegration | None = None - self._data: Mapping[str, object] = {} + self._data: Mapping[str, object] = self.request.data self._identity: RpcIdentity | None = None self.user: RpcUser | None = None @@ -54,24 +63,23 @@ def integration(self) -> RpcIntegration | None: @property def data(self) -> Mapping[str, object]: - if not self._data: - self._validate_data() - return self._data + """This is the data object nested within request.data""" + return self._data.get("data") or {} # type: ignore @property def guild_id(self) -> str | None: - guild_id = self.data.get("guild_id") + guild_id = self._data.get("guild_id") return str(guild_id) if guild_id else None @property def channel_id(self) -> str | None: - channel_id = self.data.get("channel_id") + channel_id = self._data.get("channel_id") return str(channel_id) if channel_id else None @property def user_id(self) -> str | None: try: - return self.data.get("member")["user"]["id"] # type: ignore + return self._data.get("member")["user"]["id"] # type: ignore except 
(AttributeError, TypeError): return None @@ -87,11 +95,16 @@ def logging_data(self) -> Mapping[str, str | int]: data["integration_id"] = self.integration.id if self.user_id: data["discord_user_id"] = self.user_id + if self.has_identity(): + data["identity"] = self.get_identity_str() + if self.is_command(): + data["command"] = self.get_command_name() + if self.is_message_component(): + data["component_custom_id"] = self.get_component_custom_id() return {k: v for k, v in data.items() if v} def validate(self) -> None: - self._validate_data() self._log_request() self.authorize() self.validate_integration() @@ -108,12 +121,6 @@ def authorize(self) -> None: raise DiscordRequestError(status=status.HTTP_401_UNAUTHORIZED) - def _validate_data(self) -> None: - try: - self._data = self.request.data - except (ValueError, TypeError): - raise DiscordRequestError(status=status.HTTP_400_BAD_REQUEST) - def _validate_identity(self) -> None: self.user = self.get_identity_user() @@ -163,7 +170,26 @@ def is_ping(self) -> bool: def is_command(self) -> bool: return self._data.get("type", 0) == DiscordRequestTypes.COMMAND + def is_message_component(self) -> bool: + return self._data.get("type", 0) == DiscordRequestTypes.MESSAGE_COMPONENT + + def is_modal_submit(self) -> bool: + return self._data.get("type", 0) == DiscordRequestTypes.MODAL_SUBMIT + def get_command_name(self) -> str: if not self.is_command(): return "" - return self._data.get("data")["name"] # type: ignore + return self.data["name"] # type: ignore + + def get_component_custom_id(self) -> str: + if not self.is_message_component(): + return "" + return self.data["custom_id"] # type: ignore + + def is_select_component(self) -> bool: + return self.data["component_type"] == DiscordMessageComponentTypes.SELECT + + def get_selected_options(self) -> list[str]: + if not self.is_select_component(): + return [] + return self.data["values"] # type: ignore diff --git a/src/sentry/integrations/discord/utils/__init__.py b/src/sentry/integrations/discord/utils/__init__.py index a506edaebbba7b..5ada2cc6f9f37c 100644 --- a/src/sentry/integrations/discord/utils/__init__.py +++ b/src/sentry/integrations/discord/utils/__init__.py @@ -3,3 +3,4 @@ logger = logging.getLogger("sentry.integrations.discord") from .auth import * # noqa: F401,F403 +from .channel import * # noqa: F401,F403 diff --git a/src/sentry/integrations/discord/utils/channel.py b/src/sentry/integrations/discord/utils/channel.py new file mode 100644 index 00000000000000..79620a2717ac28 --- /dev/null +++ b/src/sentry/integrations/discord/utils/channel.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +from django.core.exceptions import ValidationError + +from sentry.integrations.discord.client import DiscordClient +from sentry.shared_integrations.exceptions import IntegrationError +from sentry.shared_integrations.exceptions.base import ApiError + +from . import logger + +NO_CHANNEL_MESSAGE = ( + "We couldn't find a channel with that ID. Make sure you have the correct server selected." +) + + +def validate_channel_id(channel_id: str, guild_id: str, integration_id: int | None) -> None: + """ + Make sure that for this integration, the channel exists, belongs to this + integration, and our bot has access to it. 
+ """ + client = DiscordClient(integration_id=integration_id) + try: + result = client.get_channel(channel_id) + except ApiError as e: + if e.code == 404: + logger.info("rule.discord.channel_info_failed", extra={"error": str(e)}) + raise ValidationError(NO_CHANNEL_MESSAGE) + logger.error("rule.discord.channel_info_failed", extra={"error": str(e)}) + raise IntegrationError("Could not retrieve Discord channel information.") + + if not isinstance(result, dict): + raise IntegrationError("Bad response from Discord channel lookup.") + + if result["guild_id"] != guild_id: + # The channel exists and we have access to it, but it does not belong + # to the specified guild! We'll use the same message as generic 404, + # so we don't expose other guilds' channel IDs. + logger.info( + "rule.discord.wrong_guild_for_channel", + extra={"guild_id": guild_id, "channel_belongs_to": result["guild_id"]}, + ) + raise ValidationError(NO_CHANNEL_MESSAGE) diff --git a/src/sentry/integrations/discord/webhooks/__init__.py b/src/sentry/integrations/discord/webhooks/__init__.py index dcc1898e6c7b49..66f93df248690c 100644 --- a/src/sentry/integrations/discord/webhooks/__init__.py +++ b/src/sentry/integrations/discord/webhooks/__init__.py @@ -1,3 +1,5 @@ -from .base import DiscordInteractionsEndpoint - -__all__ = ("DiscordInteractionsEndpoint",) +from .base import * # noqa: F401,F403 +from .command import * # noqa: F401,F403 +from .handler import * # noqa: F401,F403 +from .message_component import * # noqa: F401,F403 +from .types import * # noqa: F401, F403 diff --git a/src/sentry/integrations/discord/webhooks/base.py b/src/sentry/integrations/discord/webhooks/base.py index b6024932103e51..1422f0c7ae4207 100644 --- a/src/sentry/integrations/discord/webhooks/base.py +++ b/src/sentry/integrations/discord/webhooks/base.py @@ -1,33 +1,16 @@ from __future__ import annotations from django.views.decorators.csrf import csrf_exempt -from rest_framework import status from rest_framework.request import Request from rest_framework.response import Response from sentry.api.base import Endpoint, region_silo_endpoint -from sentry.integrations.discord.message_builder.base import ( - DiscordMessageBuilder, - DiscordMessageFlags, -) from sentry.integrations.discord.requests.base import DiscordRequest, DiscordRequestError -from sentry.integrations.discord.views.link_identity import build_linking_url -from sentry.integrations.discord.views.unlink_identity import build_unlinking_url +from sentry.integrations.discord.webhooks.command import DiscordCommandHandler +from sentry.integrations.discord.webhooks.message_component import DiscordMessageComponentHandler from sentry.web.decorators import transaction_start -from ..utils import logger - -LINK_USER_MESSAGE = "[Click here]({url}) to link your Discord account to your Sentry account." -ALREADY_LINKED_MESSAGE = "You are already linked to the Sentry account with email: `{email}`." -UNLINK_USER_MESSAGE = "[Click here]({url}) to unlink your Discord account from your Sentry Account." -NOT_LINKED_MESSAGE = ( - "Your Discord account is not linked to a Sentry account. Use `/link` to link your accounts." -) -HELP_MESSAGE = """ -`/help`: View this message. -`/link`: Link your Discord account to your Sentry account to perform actions on Sentry notifications. -`/unlink`: Unlink your Discord account from your Sentry account. 
-""" +from .types import DiscordResponseTypes @region_silo_endpoint @@ -45,103 +28,27 @@ class DiscordInteractionsEndpoint(Endpoint): def __init__(self) -> None: super().__init__() - self.discord_request: DiscordRequest @csrf_exempt @transaction_start("DiscordInteractionsEndpoint") def post(self, request: Request) -> Response: try: - self.discord_request = self.discord_request_class(request) - self.discord_request.validate() + discord_request = self.discord_request_class(request) + discord_request.validate() - if self.discord_request.is_ping(): + if discord_request.is_ping(): # https://discord.com/developers/docs/tutorials/upgrading-to-application-commands#adding-an-interactions-endpoint-url - return self.respond({"type": 1}, status=200) + return self.respond({"type": DiscordResponseTypes.PONG}, status=200) + + elif discord_request.is_command(): + return DiscordCommandHandler(discord_request).handle() - elif self.discord_request.is_command(): - return self.handle_command() + elif discord_request.is_message_component(): + return DiscordMessageComponentHandler(discord_request).handle() except DiscordRequestError as e: return self.respond(status=e.status) # This isn't an interaction type that we need to worry about, so we'll - # just return 200 + # just return 200. return self.respond(status=200) - - def reply(self, message: DiscordMessageBuilder) -> Response: - return self.respond( - {"type": 4, "data": message.build()}, - headers={"Content-Type": "application/json"}, - status=200, - ) - - def handle_command(self) -> Response: - command_name = self.discord_request.get_command_name() - logging_data = self.discord_request.logging_data - - if command_name == "link": - logger.info("discord.interaction.command.link", extra={**logging_data}) - return self.link_user() - elif command_name == "unlink": - logger.info("discord.interaction.command.unlink", extra={**logging_data}) - return self.unlink_user() - elif command_name == "help": - logger.info("discord.interaction.command.help", extra={**logging_data}) - return self.help() - - logger.info( - "discord.interaction.command.unknown", extra={"command": command_name, **logging_data} - ) - return self.help() - - def link_user(self) -> Response: - if self.discord_request.has_identity(): - message = DiscordMessageBuilder( - content=ALREADY_LINKED_MESSAGE.format( - email=self.discord_request.get_identity_str() - ), - flags=DiscordMessageFlags().set_ephemeral(), - ) - return self.reply(message) - - if not self.discord_request.integration or not self.discord_request.user_id: - raise DiscordRequestError(status=status.HTTP_400_BAD_REQUEST) - - link_url = build_linking_url( - integration=self.discord_request.integration, - discord_id=self.discord_request.user_id, - ) - - message = DiscordMessageBuilder( - content=LINK_USER_MESSAGE.format(url=link_url), - flags=DiscordMessageFlags().set_ephemeral(), - ) - return self.reply(message) - - def unlink_user(self) -> Response: - if not self.discord_request.has_identity(): - message = DiscordMessageBuilder( - content=NOT_LINKED_MESSAGE, flags=DiscordMessageFlags().set_ephemeral() - ) - return self.reply(message) - - # if self.discord_request.has_identity() then these must not be None - assert self.discord_request.integration is not None - assert self.discord_request.user_id is not None - - unlink_url = build_unlinking_url( - integration=self.discord_request.integration, - discord_id=self.discord_request.user_id, - ) - - message = DiscordMessageBuilder( - content=UNLINK_USER_MESSAGE.format(url=unlink_url), - 
flags=DiscordMessageFlags().set_ephemeral(), - ) - return self.reply(message) - - def help(self) -> Response: - message = DiscordMessageBuilder( - content=HELP_MESSAGE, flags=DiscordMessageFlags().set_ephemeral() - ) - return self.reply(message) diff --git a/src/sentry/integrations/discord/webhooks/command.py b/src/sentry/integrations/discord/webhooks/command.py new file mode 100644 index 00000000000000..b4ff6b2744dc88 --- /dev/null +++ b/src/sentry/integrations/discord/webhooks/command.py @@ -0,0 +1,88 @@ +from rest_framework import status +from rest_framework.response import Response + +from sentry.integrations.discord.requests.base import DiscordRequestError +from sentry.integrations.discord.views.link_identity import build_linking_url +from sentry.integrations.discord.views.unlink_identity import build_unlinking_url +from sentry.integrations.discord.webhooks.handler import DiscordInteractionHandler + +from ..utils import logger + +LINK_USER_MESSAGE = "[Click here]({url}) to link your Discord account to your Sentry account." +ALREADY_LINKED_MESSAGE = "You are already linked to the Sentry account with email: `{email}`." +UNLINK_USER_MESSAGE = "[Click here]({url}) to unlink your Discord account from your Sentry Account." +NOT_LINKED_MESSAGE = ( + "Your Discord account is not linked to a Sentry account. Use `/link` to link your accounts." +) +HELP_MESSAGE = """ +`/help`: View this message. +`/link`: Link your Discord account to your Sentry account to perform actions on Sentry notifications. +`/unlink`: Unlink your Discord account from your Sentry account. +""" + + +class DiscordCommandNames: + LINK = "link" + UNLINK = "unlink" + HELP = "help" + + +class DiscordCommandHandler(DiscordInteractionHandler): + """ + Handles logic for Discord Command interactions. + + Request passed in constructor must be command interaction. 
+ """ + + def handle(self) -> Response: + command_name = self.request.get_command_name() + logging_data = self.request.logging_data + + if command_name == DiscordCommandNames.LINK: + logger.info("discord.interaction.command.link", extra={**logging_data}) + return self.link_user() + elif command_name == DiscordCommandNames.UNLINK: + logger.info("discord.interaction.command.unlink", extra={**logging_data}) + return self.unlink_user() + elif command_name == DiscordCommandNames.HELP: + logger.info("discord.interaction.command.help", extra={**logging_data}) + return self.help() + + logger.info( + "discord.interaction.command.unknown", extra={"command": command_name, **logging_data} + ) + return self.help() + + def link_user(self) -> Response: + if self.request.has_identity(): + return self.send_message( + ALREADY_LINKED_MESSAGE.format(email=self.request.get_identity_str()) + ) + + if not self.request.integration or not self.request.user_id: + raise DiscordRequestError(status=status.HTTP_400_BAD_REQUEST) + + link_url = build_linking_url( + integration=self.request.integration, + discord_id=self.request.user_id, + ) + + return self.send_message(LINK_USER_MESSAGE.format(url=link_url)) + + def unlink_user(self) -> Response: + if not self.request.has_identity(): + return self.send_message(NOT_LINKED_MESSAGE) + + # if self.request.has_identity() then these must not be None + assert self.request.integration is not None + assert self.request.user_id is not None + + unlink_url = build_unlinking_url( + integration=self.request.integration, + discord_id=self.request.user_id, + ) + + return self.send_message(UNLINK_USER_MESSAGE.format(url=unlink_url)) + + def help(self) -> Response: + return self.send_message(HELP_MESSAGE) diff --git a/src/sentry/integrations/discord/webhooks/handler.py b/src/sentry/integrations/discord/webhooks/handler.py new file mode 100644 index 00000000000000..4387a4b178f23f --- /dev/null +++ b/src/sentry/integrations/discord/webhooks/handler.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +from rest_framework.response import Response + +from sentry.integrations.discord.message_builder.base.base import DiscordMessageBuilder +from sentry.integrations.discord.message_builder.base.flags import DiscordMessageFlags +from sentry.integrations.discord.requests.base import DiscordRequest + +from .types import DiscordResponseTypes + + +class DiscordInteractionHandler: + """ + Abstract class defining the shared interface of interaction handlers, + along with some helper methods. + """ + + def __init__(self, request: DiscordRequest) -> None: + """ + Request must be *verified*. 
+ """ + self.request: DiscordRequest = request + + def send_message(self, message: str | DiscordMessageBuilder, update: bool = False) -> Response: + """Sends a new follow up message.""" + response_type = DiscordResponseTypes.UPDATE if update else DiscordResponseTypes.MESSAGE + + if isinstance(message, str): + message = DiscordMessageBuilder( + content=message, flags=DiscordMessageFlags().set_ephemeral() + ) + return Response( + { + "type": response_type, + "data": message.build(), + }, + status=200, + ) + + def handle(self) -> Response: + raise NotImplementedError diff --git a/src/sentry/integrations/discord/webhooks/message_component.py b/src/sentry/integrations/discord/webhooks/message_component.py new file mode 100644 index 00000000000000..21ac921d45ee86 --- /dev/null +++ b/src/sentry/integrations/discord/webhooks/message_component.py @@ -0,0 +1,233 @@ +from __future__ import annotations + +from collections.abc import Mapping + +from rest_framework.response import Response + +from sentry.api.helpers.group_index.update import update_groups +from sentry.integrations.discord.message_builder.base.base import DiscordMessageBuilder +from sentry.integrations.discord.message_builder.base.component import ( + DiscordComponentCustomIds as CustomIds, +) +from sentry.integrations.discord.message_builder.base.component.action_row import DiscordActionRow +from sentry.integrations.discord.message_builder.base.component.select_menu import ( + DiscordSelectMenu, + DiscordSelectMenuOption, +) +from sentry.integrations.discord.message_builder.base.flags import DiscordMessageFlags +from sentry.integrations.discord.requests.base import DiscordRequest +from sentry.integrations.discord.webhooks.handler import DiscordInteractionHandler +from sentry.models.activity import ActivityIntegration +from sentry.models.group import Group +from sentry.models.grouphistory import STATUS_TO_STRING_LOOKUP, GroupHistoryStatus +from sentry.services.hybrid_cloud.user.model import RpcUser +from sentry.types.group import SUBSTATUS_TO_STR, GroupSubStatus + +from ..utils import logger + +NO_IDENTITY = "You need to link your Discord account to your Sentry account to do that. You can do this with `/link`!" +NOT_IN_ORG = "You must be a member of the org this issue belongs to in order to act on it." +ASSIGNEE_UPDATED = "Assignee has been updated." +RESOLVE_DIALOG_OPTIONS = [ + DiscordSelectMenuOption("Immediately", ""), + DiscordSelectMenuOption("In the next release", "inNextRelease"), + DiscordSelectMenuOption("In the current release", "inCurrentRelease"), +] +RESOLVED = "The issue has been resolved." +RESOLVED_IN_NEXT_RELEASE = "The issue will be resolved in the next release." +RESOLVED_IN_CURRENT_RELEASE = "The issue will be resolved in the current release." +UNRESOLVED = "The issue has been unresolved." +MARKED_ONGOING = "The issue has been marked as ongoing." +IGNORE_UNTIL_ESCALATES = "The issue will be ignored until it escalates." + + +class DiscordMessageComponentHandler(DiscordInteractionHandler): + """ + Handles logic for Discord Message Component interactions. + + Request passed in constructor must be a Message Component interaction. 
+ """ + + def __init__(self, request: DiscordRequest) -> None: + super().__init__(request) + self.custom_id: str = request.get_component_custom_id() + self.user: RpcUser + # Everything after the colon is the group id in a custom_id + self.group_id: str = self.custom_id.split(":")[1] + self.group: Group = Group.objects.get(id=self.group_id) + + def handle(self) -> Response: + logging_data = self.request.logging_data + + if self.request.user is None: + logger.warning("discord.interaction.component.not_linked", extra={**logging_data}) + return self.send_message(NO_IDENTITY) + self.user = self.request.user + + if not self.group.organization.has_access(self.user): + logger.warning( + "discord.interaction.component.not_in_org", + extra={"org_slug": self.group.organization.slug, **logging_data}, + ) + return self.send_message(NOT_IN_ORG) + + if self.custom_id.startswith(CustomIds.ASSIGN_DIALOG): + logger.info("discord.interaction.component.assign_dialog", extra={**logging_data}) + return self.assign_dialog() + + elif self.custom_id.startswith(CustomIds.ASSIGN): + logger.info( + "discord.interaction.component.assign", + extra={**logging_data, "assign_to": self.request.get_selected_options()[0]}, + ) + return self.assign() + + elif self.custom_id.startswith(CustomIds.RESOLVE_DIALOG): + logger.info("discord.interaction.component.resolve_dialog", extra={**logging_data}) + return self.resolve_dialog() + + elif self.custom_id.startswith(CustomIds.RESOLVE): + logger.info("discord.interaction.component.resolve", extra={**logging_data}) + return self.resolve() + + elif self.custom_id.startswith(CustomIds.UNRESOLVE): + logger.info("discord.interaction.component.unresolve", extra={**logging_data}) + return self.unresolve() + + elif self.custom_id.startswith(CustomIds.MARK_ONGOING): + logger.info("discord.interaction.component.mark_ongoing", extra={**logging_data}) + return self.unresolve(from_mark_ongoing=True) + + elif self.custom_id.startswith(CustomIds.ARCHIVE): + logger.info("discord.interaction.component.archive", extra={**logging_data}) + return self.archive() + + logger.warning("discord.interaction.component.unknown_custom_id", extra={**logging_data}) + return Response(status=404) + + def assign_dialog(self) -> Response: + assign_selector = DiscordSelectMenu( + custom_id=f"{CustomIds.ASSIGN}:{self.group_id}", + placeholder="Select Assignee...", + options=get_assign_selector_options(self.group), + ) + message = DiscordMessageBuilder( + components=[DiscordActionRow([assign_selector])], + flags=DiscordMessageFlags().set_ephemeral(), + ) + return self.send_message(message) + + def assign(self) -> Response: + assignee = self.request.get_selected_options()[0] + + self.update_group( + { + "assignedTo": assignee, + "integration": ActivityIntegration.DISCORD.value, + } + ) + + message = DiscordMessageBuilder( + content=ASSIGNEE_UPDATED, + flags=DiscordMessageFlags().set_ephemeral(), + ) + return self.send_message(message, update=True) + + def resolve_dialog(self) -> Response: + resolve_selector = DiscordSelectMenu( + custom_id=f"{CustomIds.RESOLVE}:{self.group_id}", + placeholder="Select the resolution target", + options=RESOLVE_DIALOG_OPTIONS, + ) + message = DiscordMessageBuilder( + components=[DiscordActionRow([resolve_selector])], + flags=DiscordMessageFlags().set_ephemeral(), + ) + return self.send_message(message) + + def resolve(self) -> Response: + status: dict[str, object] = { + "status": STATUS_TO_STRING_LOOKUP[GroupHistoryStatus.RESOLVED], + } + message = RESOLVED + + selected_option = "" + if 
self.request.is_select_component(): + selected_option = self.request.get_selected_options()[0] + + if selected_option == "inNextRelease": + status["statusDetails"] = {"inNextRelease": True} + message = RESOLVED_IN_NEXT_RELEASE + elif selected_option == "inCurrentRelease": + status["statusDetails"] = {"inRelease": "latest"} + message = RESOLVED_IN_CURRENT_RELEASE + + self.update_group(status) + return self.send_message(message, update=self.request.is_select_component()) + + def unresolve(self, from_mark_ongoing: bool = False) -> Response: + self.update_group( + { + "status": STATUS_TO_STRING_LOOKUP[GroupHistoryStatus.UNRESOLVED], + "substatus": SUBSTATUS_TO_STR[GroupSubStatus.ONGOING], + } + ) + + if from_mark_ongoing: + return self.send_message(MARKED_ONGOING) + return self.send_message(UNRESOLVED) + + def archive(self) -> Response: + self.update_group( + { + "status": STATUS_TO_STRING_LOOKUP[GroupHistoryStatus.IGNORED], + "substatus": SUBSTATUS_TO_STR[GroupSubStatus.UNTIL_ESCALATING], + } + ) + return self.send_message(IGNORE_UNTIL_ESCALATES) + + def update_group(self, data: Mapping[str, object]) -> None: + update_groups( + request=self.request.request, + group_ids=[self.group.id], + projects=[self.group.project], + organization_id=self.group.organization.id, + search_fn=None, + user=self.user, # type: ignore + data=data, + ) + + +def get_assign_selector_options(group: Group) -> list[DiscordSelectMenuOption]: + """ + Helper function for building the new assignee dropdown. + """ + all_members = group.project.get_members_as_rpc_users() + members = list({m.id: m for m in all_members}.values()) + teams = group.project.teams.all() + + assignee = group.get_assignee() + + options = [] + # We don't have the luxury of option groups like Slack has, so we will just + # list all the teams and then all the members. 
+ if teams: + team_options = [ + DiscordSelectMenuOption( + label=f"#{team.slug}", value=f"team:{team.id}", default=(team == assignee) + ) + for team in teams + ] + options.extend(sorted(team_options, key=lambda t: t.label)) + if members: + member_options = [ + DiscordSelectMenuOption( + label=member.get_display_name(), + value=f"user:{member.id}", + default=(member == assignee), + ) + for member in members + ] + options.extend(sorted(member_options, key=lambda m: m.label)) + + return options diff --git a/src/sentry/integrations/discord/webhooks/types.py b/src/sentry/integrations/discord/webhooks/types.py new file mode 100644 index 00000000000000..bd75d87ec97ee4 --- /dev/null +++ b/src/sentry/integrations/discord/webhooks/types.py @@ -0,0 +1,13 @@ +class DiscordResponseTypes: + """ + For more description of these types see + https://discord.com/developers/docs/interactions/receiving-and-responding#interaction-response-object-interaction-callback-type + """ + + PONG = 1 + MESSAGE = 4 + DEFERRED_MESSAGE = 5 + DEFERRED_UPDATE = 6 + UPDATE = 7 + AUTOCOMPLETE_RESULT = 8 + MODAL = 9 diff --git a/src/sentry/integrations/message_builder.py b/src/sentry/integrations/message_builder.py index 7008574eaffb41..aac06d1a78f334 100644 --- a/src/sentry/integrations/message_builder.py +++ b/src/sentry/integrations/message_builder.py @@ -10,6 +10,7 @@ from sentry.notifications.notifications.base import BaseNotification from sentry.services.hybrid_cloud.user import RpcUser from sentry.types.integrations import EXTERNAL_PROVIDERS, ExternalProviders +from sentry.utils.dates import to_timestamp from sentry.utils.http import absolute_uri @@ -139,9 +140,17 @@ def build_footer( footer = f"{group.qualified_short_id}" if rules: rule_url = build_rule_url(rules[0], group, project) - footer += f" via {url_format.format(text=rules[0].label, url=rule_url)}" + # If this notification is triggered via the "Send Test Notification" + # button then the label is not defined, but the url works. + text = rules[0].label if rules[0].label else "Test Alert" + footer += f" via {url_format.format(text=text, url=rule_url)}" if len(rules) > 1: footer += f" (+{len(rules) - 1} other)" return footer + + +def get_timestamp(group: Group, event: GroupEvent | None) -> float: + ts = group.last_seen + return to_timestamp(max(ts, event.datetime) if event else ts) diff --git a/src/sentry/integrations/slack/actions/notification.py b/src/sentry/integrations/slack/actions/notification.py index f6416d8f713cd7..002cd4fc1d3828 100644 --- a/src/sentry/integrations/slack/actions/notification.py +++ b/src/sentry/integrations/slack/actions/notification.py @@ -33,9 +33,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: "type": "choice", "choices": [(i.id, i.name) for i in self.get_integrations()], }, - "channel": {"type": "string", "placeholder": "i.e #critical, Jane Schmidt"}, - "channel_id": {"type": "string", "placeholder": "i.e. 
CA2FRA079 or UA1J9RTE1"}, - "tags": {"type": "string", "placeholder": "i.e environment,user,my_tag"}, + "channel": {"type": "string", "placeholder": "e.g., #critical, Jane Schmidt"}, + "channel_id": {"type": "string", "placeholder": "e.g., CA2FRA079 or UA1J9RTE1"}, + "tags": {"type": "string", "placeholder": "e.g., environment,user,my_tag"}, } def after(self, event: GroupEvent, state: EventState) -> Generator[CallbackFuture, None, None]: diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py index dc9ffcbc451a7c..1d6f50f353553e 100644 --- a/src/sentry/integrations/slack/message_builder/issues.py +++ b/src/sentry/integrations/slack/message_builder/issues.py @@ -2,8 +2,6 @@ from typing import Any, Mapping, Sequence -from django.core.cache import cache - from sentry import features, tagstore from sentry.eventstore.models import GroupEvent from sentry.integrations.message_builder import ( @@ -12,13 +10,14 @@ build_attachment_title, build_footer, format_actor_options, + get_timestamp, get_title_link, ) from sentry.integrations.slack.message_builder import LEVEL_TO_COLOR, SLACK_URL_FORMAT, SlackBody from sentry.integrations.slack.message_builder.base.base import SlackMessageBuilder from sentry.integrations.slack.utils.escape import escape_slack_text from sentry.issues.grouptype import GroupCategory -from sentry.models import ActorTuple, Group, GroupStatus, Project, ReleaseProject, Rule, Team, User +from sentry.models import ActorTuple, Group, GroupStatus, Project, Rule, Team, User from sentry.notifications.notifications.base import BaseNotification, ProjectNotification from sentry.notifications.notifications.rules import AlertRuleNotification from sentry.notifications.utils.actions import MessageAction @@ -26,7 +25,6 @@ from sentry.services.hybrid_cloud.identity import RpcIdentity, identity_service from sentry.types.integrations import ExternalProviders from sentry.utils import json -from sentry.utils.dates import to_timestamp STATUSES = {"resolved": "resolved", "ignored": "ignored", "unresolved": "re-opened"} @@ -116,18 +114,6 @@ def get_option_groups(group: Group) -> Sequence[Mapping[str, Any]]: return option_groups -def has_releases(project: Project) -> bool: - cache_key = f"has_releases:2:{project.id}" - has_releases_option: bool | None = cache.get(cache_key) - if has_releases_option is None: - has_releases_option = ReleaseProject.objects.filter(project_id=project.id).exists() - if has_releases_option: - cache.set(cache_key, True, 3600) - else: - cache.set(cache_key, False, 60) - return has_releases_option - - def get_action_text( text: str, actions: Sequence[Any], identity: RpcIdentity, has_escalating: bool = False ) -> str: @@ -175,7 +161,7 @@ def build_actions( status = group.get_status() - if not has_releases(project): + if not project.flags.has_releases: resolve_button = MessageAction( name="status", label="Resolve", @@ -208,11 +194,6 @@ def build_actions( return [resolve_button, ignore_button, assign_button], text, color -def get_timestamp(group: Group, event: GroupEvent | None) -> float: - ts = group.last_seen - return to_timestamp(max(ts, event.datetime) if event else ts) - - def get_color( event_for_tags: GroupEvent | None, notification: BaseNotification | None, group: Group ) -> str: diff --git a/src/sentry/models/activity.py b/src/sentry/models/activity.py index 32fb889e95c0a4..83e4e7009c2637 100644 --- a/src/sentry/models/activity.py +++ b/src/sentry/models/activity.py @@ -163,4 +163,5 @@ class 
ActivityIntegration(Enum): PROJECT_OWNERSHIP = "projectOwnership" SLACK = "slack" MSTEAMS = "msteams" + DISCORD = "discord" SUSPECT_COMMITTER = "suspectCommitter" diff --git a/src/sentry/notifications/notifications/base.py b/src/sentry/notifications/notifications/base.py index 04517c0c258d60..b07a1de5e864fe 100644 --- a/src/sentry/notifications/notifications/base.py +++ b/src/sentry/notifications/notifications/base.py @@ -23,6 +23,7 @@ class BaseNotification(abc.ABC): provider_to_url_format = { ExternalProviders.SLACK: "<{url}|{text}>", ExternalProviders.MSTEAMS: "[{text}]({url})", + ExternalProviders.DISCORD: "[{text}]({url})", } message_builder = "SlackNotificationsMessageBuilder" # some notifications have no settings for it diff --git a/tests/sentry/api/endpoints/test_project_rules_configuration.py b/tests/sentry/api/endpoints/test_project_rules_configuration.py index 4be4d599554e64..8b9a0a61b5a245 100644 --- a/tests/sentry/api/endpoints/test_project_rules_configuration.py +++ b/tests/sentry/api/endpoints/test_project_rules_configuration.py @@ -25,7 +25,7 @@ def test_simple(self): self.create_project(teams=[team], name="baz") response = self.get_success_response(self.organization.slug, project1.slug) - assert len(response.data["actions"]) == 8 + assert len(response.data["actions"]) == 9 assert len(response.data["conditions"]) == 7 assert len(response.data["filters"]) == 8 @@ -117,7 +117,7 @@ def test_sentry_app_alertable_webhook(self): response = self.get_success_response(self.organization.slug, project1.slug) - assert len(response.data["actions"]) == 9 + assert len(response.data["actions"]) == 10 assert { "id": "sentry.rules.actions.notify_event_service.NotifyEventServiceAction", "label": "Send a notification via {service}", @@ -147,7 +147,7 @@ def test_sentry_app_alert_rules(self, mock_sentry_app_components_preparer): ) response = self.get_success_response(self.organization.slug, project1.slug) - assert len(response.data["actions"]) == 9 + assert len(response.data["actions"]) == 10 assert { "id": SENTRY_APP_ALERT_ACTION, "service": sentry_app.slug, @@ -163,7 +163,7 @@ def test_sentry_app_alert_rules(self, mock_sentry_app_components_preparer): def test_issue_type_and_category_filter_feature(self): response = self.get_success_response(self.organization.slug, self.project.slug) - assert len(response.data["actions"]) == 8 + assert len(response.data["actions"]) == 9 assert len(response.data["conditions"]) == 7 assert len(response.data["filters"]) == 8 diff --git a/tests/sentry/integrations/discord/message_builder/test_action_row.py b/tests/sentry/integrations/discord/message_builder/test_action_row.py new file mode 100644 index 00000000000000..41d3cc18dac98a --- /dev/null +++ b/tests/sentry/integrations/discord/message_builder/test_action_row.py @@ -0,0 +1,62 @@ +import pytest + +from sentry.integrations.discord.message_builder.base.component.action_row import ( + DiscordActionRow, + DiscordActionRowError, +) +from sentry.integrations.discord.message_builder.base.component.base import DiscordMessageComponent +from sentry.integrations.discord.message_builder.base.component.button import ( + DiscordButton, + DiscordButtonStyle, +) +from sentry.testutils.cases import TestCase + + +class TestDiscordActionRow(TestCase): + def test_empty(self): + action_row = DiscordActionRow([]) + result = action_row.build() + assert result == { + "type": 1, + "components": [], + } + + def test_non_empty(self): + button = DiscordButton( + style=DiscordButtonStyle.PRIMARY, + custom_id="test_button", + 
label="button label", + url="https://sentry.io", + disabled=True, + ) + custom_component = DiscordMessageComponent( + type=9 + ) # not a real type number, just testing custom component + action_row = DiscordActionRow( + [ + button, + custom_component, + ] + ) + result = action_row.build() + assert result == { + "type": 1, + "components": [ + { + "type": 2, + "style": 1, + "custom_id": "test_button", + "label": "button label", + "url": "https://sentry.io", + "disabled": True, + }, + { + "type": 9, + }, + ], + } + + def test_action_row_error(self): + nested_row = DiscordActionRow([]) + with pytest.raises(DiscordActionRowError): + DiscordActionRow([nested_row]) diff --git a/tests/sentry/integrations/discord/message_builder/test_builder.py b/tests/sentry/integrations/discord/message_builder/test_builder.py new file mode 100644 index 00000000000000..16f3d961290b07 --- /dev/null +++ b/tests/sentry/integrations/discord/message_builder/test_builder.py @@ -0,0 +1,144 @@ +from sentry.integrations.discord.message_builder import LEVEL_TO_COLOR +from sentry.integrations.discord.message_builder.base.base import DiscordMessageBuilder +from sentry.integrations.discord.message_builder.base.component.action_row import DiscordActionRow +from sentry.integrations.discord.message_builder.base.component.button import ( + DiscordButton, + DiscordButtonStyle, +) +from sentry.integrations.discord.message_builder.base.component.select_menu import ( + DiscordSelectMenu, + DiscordSelectMenuOption, +) +from sentry.integrations.discord.message_builder.base.embed.base import DiscordMessageEmbed +from sentry.integrations.discord.message_builder.base.flags import DiscordMessageFlags +from sentry.integrations.discord.requests.base import DiscordMessageComponentTypes +from sentry.testutils.cases import TestCase + + +class TestDiscordMessageBuilder(TestCase): + def test_empty(self): + message = DiscordMessageBuilder() + result = message.build() + assert result == { + "content": "", + "components": [], + "embeds": [], + } + + def test_some(self): + flags = DiscordMessageFlags().set_ephemeral() + message = DiscordMessageBuilder( + content="message content", + flags=flags, + ) + result = message.build() + assert result == { + "content": "message content", + "components": [], + "embeds": [], + "flags": 1 << 6, + } + + def test_all(self): + embed = DiscordMessageEmbed( + title="Title", + description="description", + url="https://sentry.io", + color=LEVEL_TO_COLOR["warning"], + ) + other_embed = DiscordMessageEmbed( + title="Other title", + description="other description", + color=LEVEL_TO_COLOR["info"], + ) + button = DiscordButton( + style=DiscordButtonStyle.PRIMARY, + custom_id="test_button", + label="button label", + ) + other_button = DiscordButton( + style=DiscordButtonStyle.DANGER, + custom_id="danger_button", + label="delete", + ) + component = DiscordActionRow([button, other_button]) + option = DiscordSelectMenuOption("option", "first", "descriptionnn", True) + other_option = DiscordSelectMenuOption("other", "second") + menu = DiscordSelectMenu( + "custom-id", [option, other_option], placeholder="place being held" + ) + other_component = DiscordActionRow([menu]) + + flags = DiscordMessageFlags().set_ephemeral() + + message = DiscordMessageBuilder( + content="message content", + embeds=[embed, other_embed], + components=[component, other_component], + flags=flags, + ) + result = message.build() + assert result == { + "content": "message content", + "embeds": [ + { + "title": "Title", + "description": "description", + "url": 
"https://sentry.io", + "color": LEVEL_TO_COLOR["warning"], + }, + { + "title": "Other title", + "description": "other description", + "color": LEVEL_TO_COLOR["info"], + }, + ], + "components": [ + { + "type": DiscordMessageComponentTypes.ACTION_ROW, + "components": [ + { + "type": DiscordMessageComponentTypes.BUTTON, + "style": DiscordButtonStyle.PRIMARY, + "custom_id": "test_button", + "label": "button label", + "disabled": False, + }, + { + "type": DiscordMessageComponentTypes.BUTTON, + "style": DiscordButtonStyle.DANGER, + "custom_id": "danger_button", + "label": "delete", + "disabled": False, + }, + ], + }, + { + "type": DiscordMessageComponentTypes.ACTION_ROW, + "components": [ + { + "type": DiscordMessageComponentTypes.SELECT, + "custom_id": "custom-id", + "options": [ + { + "label": "option", + "value": "first", + "description": "descriptionnn", + "default": True, + }, + { + "label": "other", + "value": "second", + "default": False, + }, + ], + "placeholder": "place being held", + "min_values": 1, + "max_values": 1, + "disabled": False, + } + ], + }, + ], + "flags": 1 << 6, + } diff --git a/tests/sentry/integrations/discord/message_builder/test_button.py b/tests/sentry/integrations/discord/message_builder/test_button.py new file mode 100644 index 00000000000000..615f0186031a53 --- /dev/null +++ b/tests/sentry/integrations/discord/message_builder/test_button.py @@ -0,0 +1,40 @@ +from sentry.integrations.discord.message_builder.base.component.button import ( + DiscordButton, + DiscordButtonStyle, +) +from sentry.testutils.cases import TestCase + + +class TestDiscordButton(TestCase): + def test_some(self): + button = DiscordButton( + style=DiscordButtonStyle.PRIMARY, + custom_id="test_button", + label="button label", + ) + result = button.build() + assert result == { + "type": 2, + "style": 1, + "custom_id": "test_button", + "label": "button label", + "disabled": False, + } + + def test_all(self): + button = DiscordButton( + style=DiscordButtonStyle.PRIMARY, + custom_id="test_button", + label="button label", + url="https://sentry.io", + disabled=True, + ) + result = button.build() + assert result == { + "type": 2, + "style": 1, + "custom_id": "test_button", + "label": "button label", + "url": "https://sentry.io", + "disabled": True, + } diff --git a/tests/sentry/integrations/discord/message_builder/test_embed.py b/tests/sentry/integrations/discord/message_builder/test_embed.py new file mode 100644 index 00000000000000..182018b916c551 --- /dev/null +++ b/tests/sentry/integrations/discord/message_builder/test_embed.py @@ -0,0 +1,97 @@ +from sentry.integrations.discord.message_builder import LEVEL_TO_COLOR +from sentry.integrations.discord.message_builder.base.embed.base import DiscordMessageEmbed +from sentry.integrations.discord.message_builder.base.embed.field import DiscordMessageEmbedField +from sentry.integrations.discord.message_builder.base.embed.footer import DiscordMessageEmbedFooter +from sentry.testutils.cases import TestCase + + +class TestDiscordMessageEmbed(TestCase): + def test_empty(self): + embed = DiscordMessageEmbed() + result = embed.build() + assert result == {} + + def test_some(self): + embed = DiscordMessageEmbed( + title="Title", + url="https://sentry.io", + color=LEVEL_TO_COLOR["warning"], + ) + result = embed.build() + assert result == { + "title": "Title", + "url": "https://sentry.io", + "color": 16761383, + } + + def test_footer(self): + footer = DiscordMessageEmbedFooter(text="footer text", icon_url="https://sentry.io") + embed = 
DiscordMessageEmbed(footer=footer) + result = embed.build() + assert result == { + "footer": { + "text": "footer text", + "icon_url": "https://sentry.io", + } + } + + def test_fields(self): + field = DiscordMessageEmbedField( + "field name", + "field value", + True, + ) + other_field = DiscordMessageEmbedField( + "other field name", + "other field value", + ) + embed = DiscordMessageEmbed(fields=[field, other_field]) + result = embed.build() + assert result == { + "fields": [ + { + "name": "field name", + "value": "field value", + "inline": True, + }, + { + "name": "other field name", + "value": "other field value", + "inline": False, + }, + ] + } + + def test_all(self): + footer = DiscordMessageEmbedFooter(text="footer text", icon_url="https://sentry.io") + field = DiscordMessageEmbedField( + "field name", + "field value", + True, + ) + embed = DiscordMessageEmbed( + title="Title", + description="description", + url="https://sentry.io", + color=LEVEL_TO_COLOR["warning"], + footer=footer, + fields=[field], + ) + result = embed.build() + assert result == { + "title": "Title", + "description": "description", + "url": "https://sentry.io", + "color": 16761383, + "footer": { + "text": "footer text", + "icon_url": "https://sentry.io", + }, + "fields": [ + { + "name": "field name", + "value": "field value", + "inline": True, + } + ], + } diff --git a/tests/sentry/integrations/discord/message_builder/test_flags.py b/tests/sentry/integrations/discord/message_builder/test_flags.py new file mode 100644 index 00000000000000..19c3bc458dada7 --- /dev/null +++ b/tests/sentry/integrations/discord/message_builder/test_flags.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from sentry.integrations.discord.message_builder.base.flags import DiscordMessageFlags +from sentry.testutils.cases import TestCase + + +class TestDiscordMessageFlags(TestCase): + def assert_bits_are_set(self, value: int, bits: list[int]) -> None: + expected = 0 + for bit in bits: + expected = expected | 1 << bit + assert (value & 1 << bit) == 1 << bit + assert expected == value + + def test_none(self): + flags = DiscordMessageFlags() + assert flags.value == 0 + + def test_ephemeral(self): + flags = DiscordMessageFlags().set_ephemeral() + self.assert_bits_are_set(flags.value, [6]) + + def test_loading(self): + flags = DiscordMessageFlags().set_loading() + self.assert_bits_are_set(flags.value, [7]) + + def test_suppress_notifications(self): + flags = DiscordMessageFlags().set_suppress_notifications() + self.assert_bits_are_set(flags.value, [12]) + + def test_all(self): + flags = DiscordMessageFlags() + flags.set_ephemeral() + flags.set_loading() + flags.set_suppress_notifications() + self.assert_bits_are_set(flags.value, [6, 7, 12]) diff --git a/tests/sentry/integrations/discord/message_builder/test_select_menu.py b/tests/sentry/integrations/discord/message_builder/test_select_menu.py new file mode 100644 index 00000000000000..8b97ab478d733f --- /dev/null +++ b/tests/sentry/integrations/discord/message_builder/test_select_menu.py @@ -0,0 +1,61 @@ +from sentry.integrations.discord.message_builder.base.component.select_menu import ( + DiscordSelectMenu, + DiscordSelectMenuOption, +) +from sentry.integrations.discord.requests.base import DiscordMessageComponentTypes +from sentry.testutils.cases import TestCase + + +class TestDiscordSelectMenu(TestCase): + def test_empty(self): + menu = DiscordSelectMenu("custom-id", []) + result = menu.build() + assert result == { + "type": DiscordMessageComponentTypes.SELECT, + "custom_id": 
"custom-id", + "options": [], + "min_values": 1, + "max_values": 1, + "disabled": False, + } + + def test_disabled(self): + menu = DiscordSelectMenu("custom-id", [], disabled=True) + result = menu.build() + assert result == { + "type": DiscordMessageComponentTypes.SELECT, + "custom_id": "custom-id", + "options": [], + "min_values": 1, + "max_values": 1, + "disabled": True, + } + + def test_non_empty(self): + option = DiscordSelectMenuOption("option", "first", "descriptionnn", True) + other_option = DiscordSelectMenuOption("other", "second") + menu = DiscordSelectMenu( + "custom-id", [option, other_option], placeholder="place being held" + ) + result = menu.build() + assert result == { + "type": DiscordMessageComponentTypes.SELECT, + "custom_id": "custom-id", + "options": [ + { + "label": "option", + "value": "first", + "description": "descriptionnn", + "default": True, + }, + { + "label": "other", + "value": "second", + "default": False, + }, + ], + "placeholder": "place being held", + "min_values": 1, + "max_values": 1, + "disabled": False, + } diff --git a/tests/sentry/integrations/discord/test_integration.py b/tests/sentry/integrations/discord/test_integration.py index b2a2ce41e6bf4d..92d425517ac9b8 100644 --- a/tests/sentry/integrations/discord/test_integration.py +++ b/tests/sentry/integrations/discord/test_integration.py @@ -153,7 +153,7 @@ def test_get_guild_name_failure(self): def test_setup(self): provider = self.provider() - url = f"{DiscordClient.base_url}{DiscordClient.APPLICATION_COMMANDS.format(application_id=self.application_id)}" + url = f"{DiscordClient.base_url}{DiscordClient.APPLICATION_COMMANDS_URL.format(application_id=self.application_id)}" responses.add( responses.PUT, url=url, @@ -170,7 +170,7 @@ def test_setup_failure(self, mock_log_error): mock_log_error.return_value = None provider = self.provider() - url = f"{DiscordClient.base_url}{DiscordClient.APPLICATION_COMMANDS.format(application_id=self.application_id)}" + url = f"{DiscordClient.base_url}{DiscordClient.APPLICATION_COMMANDS_URL.format(application_id=self.application_id)}" responses.add( responses.PUT, url=url, @@ -186,7 +186,7 @@ def test_setup_failure(self, mock_log_error): def test_setup_cache(self): provider = self.provider() - url = f"{DiscordClient.base_url}{DiscordClient.APPLICATION_COMMANDS.format(application_id=self.application_id)}" + url = f"{DiscordClient.base_url}{DiscordClient.APPLICATION_COMMANDS_URL.format(application_id=self.application_id)}" responses.add( responses.PUT, url=url, diff --git a/tests/sentry/integrations/discord/test_issue_alert.py b/tests/sentry/integrations/discord/test_issue_alert.py new file mode 100644 index 00000000000000..fd61b61a814bff --- /dev/null +++ b/tests/sentry/integrations/discord/test_issue_alert.py @@ -0,0 +1,283 @@ +from unittest import mock + +import responses +from django.core.exceptions import ValidationError + +from sentry.integrations.discord.actions.form import DiscordNotifyServiceForm +from sentry.integrations.discord.actions.notification import DiscordNotifyServiceAction +from sentry.integrations.discord.client import DiscordClient +from sentry.integrations.discord.message_builder import LEVEL_TO_COLOR +from sentry.integrations.discord.message_builder.base.component import DiscordComponentCustomIds +from sentry.integrations.message_builder import build_attachment_title, build_footer, get_title_link +from sentry.models.group import GroupStatus +from sentry.models.release import Release +from sentry.services.hybrid_cloud.integration import 
integration_service +from sentry.testutils.cases import RuleTestCase, TestCase +from sentry.testutils.helpers.datetime import before_now, iso_format +from sentry.types.integrations import ExternalProviders +from sentry.utils import json + + +class DiscordIssueAlertTest(RuleTestCase): + rule_cls = DiscordNotifyServiceAction + + def setUp(self): + self.guild_id = "guild-id" + self.channel_id = "channel-id" + self.discord_user_id = "user1234" + self.discord_integration = self.create_integration( + provider="discord", + name="Cool server", + external_id=self.guild_id, + organization=self.organization, + ) + self.provider = self.create_identity_provider(integration=self.discord_integration) + self.identity = self.create_identity( + user=self.user, identity_provider=self.provider, external_id=self.discord_user_id + ) + self.event = self.store_event( + data={ + "event_id": "a" * 32, + "message": "Event message", + "timestamp": iso_format(before_now(seconds=1)), + }, + project_id=self.project.id, + ) + self.tags = "environment, user" + self.rule = self.get_rule( + data={ + "server": self.discord_integration.id, + "channel_id": self.channel_id, + "tags": self.tags, + } + ) + + responses.add( + method=responses.POST, + url=f"{DiscordClient.MESSAGE_URL.format(channel_id=self.channel_id)}", + status=200, + ) + + @responses.activate + def test_basic(self): + results = list(self.rule.after(self.event, self.get_state())) + assert len(results) == 1 + + results[0].callback(self.event, futures=[]) + body = responses.calls[0].request.body + data = json.loads(bytes.decode(body, "utf-8")) + + embed = data["embeds"][0] + assert embed == { + "title": build_attachment_title(self.event.group), + "url": get_title_link( + self.event.group, + self.event, + False, + False, + None, + ExternalProviders.DISCORD, + ), + "color": LEVEL_TO_COLOR["info"], + "footer": {"text": build_footer(self.event.group, self.event.project, None, "{text}")}, + "fields": [], + "timestamp": self.event.timestamp, + } + + buttons = data["components"][0]["components"] + assert ( + buttons[0]["custom_id"] == f"{DiscordComponentCustomIds.RESOLVE}:{self.event.group.id}" + ) + assert ( + buttons[1]["custom_id"] == f"{DiscordComponentCustomIds.ARCHIVE}:{self.event.group.id}" + ) + assert ( + buttons[2]["custom_id"] + == f"{DiscordComponentCustomIds.ASSIGN_DIALOG}:{self.event.group.id}" + ) + + @responses.activate + def test_has_releases(self): + release = Release.objects.create( + organization_id=self.organization.id, + version="1.0", + ) + release.add_project(self.project) + + results = list(self.rule.after(self.event, self.get_state())) + assert len(results) == 1 + + results[0].callback(self.event, futures=[]) + body = responses.calls[0].request.body + data = json.loads(bytes.decode(body, "utf-8")) + + buttons = data["components"][0]["components"] + assert ( + buttons[0]["custom_id"] + == f"{DiscordComponentCustomIds.RESOLVE_DIALOG}:{self.event.group.id}" + ) + assert ( + buttons[1]["custom_id"] == f"{DiscordComponentCustomIds.ARCHIVE}:{self.event.group.id}" + ) + assert ( + buttons[2]["custom_id"] + == f"{DiscordComponentCustomIds.ASSIGN_DIALOG}:{self.event.group.id}" + ) + + @responses.activate + @mock.patch( + "sentry.integrations.discord.message_builder.issues.Group.get_status", + return_value=GroupStatus.RESOLVED, + ) + def test_resolved(self, mock_get_status): + results = list(self.rule.after(self.event, self.get_state())) + assert len(results) == 1 + + results[0].callback(self.event, futures=[]) + body = responses.calls[0].request.body + 
data = json.loads(bytes.decode(body, "utf-8")) + + buttons = data["components"][0]["components"] + assert ( + buttons[0]["custom_id"] + == f"{DiscordComponentCustomIds.UNRESOLVE}:{self.event.group.id}" + ) + assert ( + buttons[1]["custom_id"] == f"{DiscordComponentCustomIds.ARCHIVE}:{self.event.group.id}" + ) + assert ( + buttons[2]["custom_id"] + == f"{DiscordComponentCustomIds.ASSIGN_DIALOG}:{self.event.group.id}" + ) + + @responses.activate + @mock.patch( + "sentry.integrations.discord.message_builder.issues.Group.get_status", + return_value=GroupStatus.IGNORED, + ) + def test_ignored(self, mock_get_status): + results = list(self.rule.after(self.event, self.get_state())) + assert len(results) == 1 + + results[0].callback(self.event, futures=[]) + body = responses.calls[0].request.body + data = json.loads(bytes.decode(body, "utf-8")) + + buttons = data["components"][0]["components"] + assert ( + buttons[0]["custom_id"] == f"{DiscordComponentCustomIds.RESOLVE}:{self.event.group.id}" + ) + assert ( + buttons[1]["custom_id"] + == f"{DiscordComponentCustomIds.MARK_ONGOING}:{self.event.group.id}" + ) + assert ( + buttons[2]["custom_id"] + == f"{DiscordComponentCustomIds.ASSIGN_DIALOG}:{self.event.group.id}" + ) + + def test_integration_removed(self): + integration_service.delete_integration(integration_id=self.discord_integration.id) + results = list(self.rule.after(self.event, self.get_state())) + assert len(results) == 0 + + @mock.patch("sentry.integrations.discord.actions.form.validate_channel_id") + def test_get_form_instance(self, mock_validate_channel_id): + form = self.rule.get_form_instance() + form.full_clean() + assert form.is_valid() + assert int(form.cleaned_data["server"]) == self.discord_integration.id + assert form.cleaned_data["channel_id"] == self.channel_id + assert form.cleaned_data["tags"] == self.tags + + def test_label(self): + label = self.rule.render_label() + assert ( + label + == "Send a notification to the Cool server Discord server in the channel with ID: channel-id and show tags [environment, user] in the notification." 
+ ) + + +class DiscordNotifyServiceFormTest(TestCase): + def setUp(self): + self.guild_id = "guild-id" + self.channel_id = "channel-id" + self.discord_integration = self.create_integration( + provider="discord", + name="Cool server", + external_id=self.guild_id, + organization=self.organization, + ) + self.other_integration = self.create_integration( + provider="discord", + name="Uncool server", + external_id="different-guild-id", + organization=self.organization, + ) + self.integrations = [self.discord_integration, self.other_integration] + + def test_has_choices(self): + form = DiscordNotifyServiceForm(integrations=self.integrations) + assert form.fields["server"].choices == [ # type: ignore + (self.discord_integration.id, self.discord_integration.name), + (self.other_integration.id, self.other_integration.name), + ] + + @mock.patch("sentry.integrations.discord.actions.form.validate_channel_id", return_value=None) + def test_valid(self, mock_validate_channel_id): + form = DiscordNotifyServiceForm( + data={ + "server": self.discord_integration.id, + "channel_id": self.channel_id, + "tags": "environment", + }, + integrations=self.integrations, + ) + + form.full_clean() + assert form.is_valid() + assert mock_validate_channel_id.call_count == 1 + + def test_no_channel_id(self): + form = DiscordNotifyServiceForm( + data={"server": self.discord_integration.id}, + integrations=self.integrations, + ) + form.full_clean() + assert not form.is_valid() + + def test_no_server(self): + form = DiscordNotifyServiceForm(integrations=self.integrations) + form.full_clean() + assert not form.is_valid() + + @mock.patch("sentry.integrations.discord.actions.form.validate_channel_id", return_value=None) + def test_no_tags(self, mock_validate_channel_id): + form = DiscordNotifyServiceForm( + data={ + "server": self.discord_integration.id, + "channel_id": self.channel_id, + }, + integrations=self.integrations, + ) + + form.full_clean() + assert form.is_valid() + assert mock_validate_channel_id.call_count == 1 + + @mock.patch( + "sentry.integrations.discord.actions.form.validate_channel_id", + side_effect=ValidationError("bad"), + ) + def test_invalid_channel_id(self, mock_validate_channel_id): + form = DiscordNotifyServiceForm( + data={ + "server": self.discord_integration.id, + "channel_id": self.channel_id, + }, + integrations=self.integrations, + ) + + form.full_clean() + assert not form.is_valid() + assert mock_validate_channel_id.call_count == 1 diff --git a/tests/sentry/integrations/discord/test_message_builder.py b/tests/sentry/integrations/discord/test_message_builder.py deleted file mode 100644 index 8fa51e9bb36474..00000000000000 --- a/tests/sentry/integrations/discord/test_message_builder.py +++ /dev/null @@ -1,319 +0,0 @@ -from __future__ import annotations - -import pytest - -from sentry.integrations.discord.message_builder import LEVEL_TO_COLOR -from sentry.integrations.discord.message_builder.base.base import DiscordMessageBuilder -from sentry.integrations.discord.message_builder.base.component.action_row import ( - DiscordActionRow, - DiscordActionRowError, -) -from sentry.integrations.discord.message_builder.base.component.base import DiscordMessageComponent -from sentry.integrations.discord.message_builder.base.component.button import ( - DiscordButton, - DiscordButtonStyle, -) -from sentry.integrations.discord.message_builder.base.embed.base import DiscordMessageEmbed -from sentry.integrations.discord.message_builder.base.embed.field import DiscordMessageEmbedField -from 
sentry.integrations.discord.message_builder.base.embed.footer import DiscordMessageEmbedFooter -from sentry.integrations.discord.message_builder.base.flags import DiscordMessageFlags -from sentry.testutils.cases import TestCase - - -class TestDiscordMessageBuilder(TestCase): - def test_empty(self): - message = DiscordMessageBuilder() - result = message.build() - assert result == {} - - def test_some(self): - flags = DiscordMessageFlags().set_ephemeral() - message = DiscordMessageBuilder( - content="message content", - flags=flags, - ) - result = message.build() - assert result == { - "content": "message content", - "flags": 1 << 6, - } - - def test_all(self): - embed = DiscordMessageEmbed( - title="Title", - description="description", - url="https://sentry.io", - color=LEVEL_TO_COLOR["warning"], - ) - other_embed = DiscordMessageEmbed( - title="Other title", - description="other description", - color=LEVEL_TO_COLOR["info"], - ) - button = DiscordButton( - style=DiscordButtonStyle.PRIMARY, - custom_id="test_button", - label="button label", - ) - other_button = DiscordButton( - style=DiscordButtonStyle.DANGER, - custom_id="danger_button", - label="delete", - ) - component = DiscordActionRow([button, other_button]) - flags = DiscordMessageFlags().set_ephemeral() - - message = DiscordMessageBuilder( - content="message content", - embeds=[embed, other_embed], - components=[component], - flags=flags, - ) - result = message.build() - assert result == { - "content": "message content", - "embeds": [ - { - "title": "Title", - "description": "description", - "url": "https://sentry.io", - "color": LEVEL_TO_COLOR["warning"], - }, - { - "title": "Other title", - "description": "other description", - "color": LEVEL_TO_COLOR["info"], - }, - ], - "components": [ - { - "type": 1, - "components": [ - { - "type": 2, - "style": DiscordButtonStyle.PRIMARY, - "custom_id": "test_button", - "label": "button label", - "disabled": False, - }, - { - "type": 2, - "style": DiscordButtonStyle.DANGER, - "custom_id": "danger_button", - "label": "delete", - "disabled": False, - }, - ], - } - ], - "flags": 1 << 6, - } - - -class TestDiscordMessageFlags(TestCase): - def assert_bits_are_set(self, value: int, bits: list[int]) -> None: - expected = 0 - for bit in bits: - expected = expected | 1 << bit - assert (value & 1 << bit) == 1 << bit - assert expected == value - - def test_none(self): - flags = DiscordMessageFlags() - assert flags.value == 0 - - def test_ephemeral(self): - flags = DiscordMessageFlags().set_ephemeral() - self.assert_bits_are_set(flags.value, [6]) - - def test_loading(self): - flags = DiscordMessageFlags().set_loading() - self.assert_bits_are_set(flags.value, [7]) - - def test_suppress_notifications(self): - flags = DiscordMessageFlags().set_suppress_notifications() - self.assert_bits_are_set(flags.value, [12]) - - def test_all(self): - flags = DiscordMessageFlags() - flags.set_ephemeral() - flags.set_loading() - flags.set_suppress_notifications() - self.assert_bits_are_set(flags.value, [6, 7, 12]) - - -class TestDiscordButton(TestCase): - def test_some(self): - button = DiscordButton( - style=DiscordButtonStyle.PRIMARY, - custom_id="test_button", - label="button label", - ) - result = button.build() - assert result == { - "type": 2, - "style": 1, - "custom_id": "test_button", - "label": "button label", - "disabled": False, - } - - def test_all(self): - button = DiscordButton( - style=DiscordButtonStyle.PRIMARY, - custom_id="test_button", - label="button label", - url="https://sentry.io", - disabled=True, 
- ) - result = button.build() - assert result == { - "type": 2, - "style": 1, - "custom_id": "test_button", - "label": "button label", - "url": "https://sentry.io", - "disabled": True, - } - - -class TestDiscordActionRow(TestCase): - def test_empty(self): - action_row = DiscordActionRow([]) - result = action_row.build() - assert result == { - "type": 1, - "components": [], - } - - def test_non_empty(self): - button = DiscordButton( - style=DiscordButtonStyle.PRIMARY, - custom_id="test_button", - label="button label", - url="https://sentry.io", - disabled=True, - ) - custom_component = DiscordMessageComponent( - type=9 - ) # not a real type number, just testing custom component - action_row = DiscordActionRow( - [ - button, - custom_component, - ] - ) - result = action_row.build() - assert result == { - "type": 1, - "components": [ - { - "type": 2, - "style": 1, - "custom_id": "test_button", - "label": "button label", - "url": "https://sentry.io", - "disabled": True, - }, - { - "type": 9, - }, - ], - } - - def test_action_row_error(self): - nested_row = DiscordActionRow([]) - with pytest.raises(DiscordActionRowError): - DiscordActionRow([nested_row]) - - -class TestDiscordMessageEmbed(TestCase): - def test_empty(self): - embed = DiscordMessageEmbed() - result = embed.build() - assert result == {} - - def test_some(self): - embed = DiscordMessageEmbed( - title="Title", - url="https://sentry.io", - color=LEVEL_TO_COLOR["warning"], - ) - result = embed.build() - assert result == { - "title": "Title", - "url": "https://sentry.io", - "color": 16761383, - } - - def test_footer(self): - footer = DiscordMessageEmbedFooter(text="footer text", icon_url="https://sentry.io") - embed = DiscordMessageEmbed(footer=footer) - result = embed.build() - assert result == { - "footer": { - "text": "footer text", - "icon_url": "https://sentry.io", - } - } - - def test_fields(self): - field = DiscordMessageEmbedField( - "field name", - "field value", - True, - ) - other_field = DiscordMessageEmbedField( - "other field name", - "other field value", - ) - embed = DiscordMessageEmbed(fields=[field, other_field]) - result = embed.build() - assert result == { - "fields": [ - { - "name": "field name", - "value": "field value", - "inline": True, - }, - { - "name": "other field name", - "value": "other field value", - "inline": False, - }, - ] - } - - def test_all(self): - footer = DiscordMessageEmbedFooter(text="footer text", icon_url="https://sentry.io") - field = DiscordMessageEmbedField( - "field name", - "field value", - True, - ) - embed = DiscordMessageEmbed( - title="Title", - description="description", - url="https://sentry.io", - color=LEVEL_TO_COLOR["warning"], - footer=footer, - fields=[field], - ) - result = embed.build() - assert result == { - "title": "Title", - "description": "description", - "url": "https://sentry.io", - "color": 16761383, - "footer": { - "text": "footer text", - "icon_url": "https://sentry.io", - }, - "fields": [ - { - "name": "field name", - "value": "field value", - "inline": True, - } - ], - } diff --git a/tests/sentry/integrations/discord/test_requests.py b/tests/sentry/integrations/discord/test_requests.py index dd7f216b83759e..1c1b6a43df545c 100644 --- a/tests/sentry/integrations/discord/test_requests.py +++ b/tests/sentry/integrations/discord/test_requests.py @@ -3,9 +3,7 @@ from unittest import mock from urllib.parse import urlencode -import pytest - -from sentry.integrations.discord.requests.base import DiscordRequest, DiscordRequestError +from 
sentry.integrations.discord.requests.base import DiscordRequest from sentry.services.hybrid_cloud.integration.model import RpcIntegration from sentry.testutils.cases import TestCase from sentry.testutils.silo import control_silo_test @@ -36,21 +34,10 @@ def mock_request(self, request_data: dict | None = None) -> DiscordRequest: } return DiscordRequest(self.request) - def test_exposes_data(self): - discord_request = self.mock_request() - assert discord_request.data["type"] == 1 - def test_exposes_guild_id(self): discord_request = self.mock_request() assert discord_request.guild_id == "guild-id" - def test_validate_data_returns_400(self): - discord_request = self.mock_request() - type(self.request).data = mock.PropertyMock(side_effect=ValueError()) - with pytest.raises(DiscordRequestError) as e: - discord_request.validate() - assert e.value.status == 400 - def test_collects_logging_data(self): discord_request = self.mock_request() assert discord_request.logging_data == { @@ -112,13 +99,11 @@ def test_get_command_name(self): }, } ) - discord_request._validate_data() res = discord_request.get_command_name() assert res == "test_command" def test_get_command_name_not_command(self): discord_request = self.mock_request() - discord_request._validate_data() res = discord_request.get_command_name() assert res == "" diff --git a/tests/sentry/integrations/discord/test_utils.py b/tests/sentry/integrations/discord/test_utils.py index f9cf7f0a213e68..cee5192676a298 100644 --- a/tests/sentry/integrations/discord/test_utils.py +++ b/tests/sentry/integrations/discord/test_utils.py @@ -1,4 +1,12 @@ +from unittest import mock + +from django.core.exceptions import ValidationError +from pytest import raises + from sentry.integrations.discord.utils.auth import verify_signature +from sentry.integrations.discord.utils.channel import validate_channel_id +from sentry.shared_integrations.exceptions import IntegrationError +from sentry.shared_integrations.exceptions.base import ApiError from sentry.testutils.cases import TestCase @@ -24,3 +32,38 @@ def test_verify_signature_invalid(self): result = verify_signature(public_key_string, signature, message) assert not result + + +class ValidateChannelTest(TestCase): + guild_id = "guild-id" + channel_id = "channel-id" + integration_id = 1234 + + @mock.patch("sentry.integrations.discord.utils.channel.DiscordClient.get_channel") + def test_happy_path(self, mock_get_channel): + mock_get_channel.return_value = {"guild_id": self.guild_id} + validate_channel_id(self.channel_id, self.guild_id, self.integration_id) + + @mock.patch("sentry.integrations.discord.utils.channel.DiscordClient.get_channel") + def test_404(self, mock_get_channel): + mock_get_channel.side_effect = ApiError(code=404, text="") + with raises(ValidationError): + validate_channel_id(self.channel_id, self.guild_id, self.integration_id) + + @mock.patch("sentry.integrations.discord.utils.channel.DiscordClient.get_channel") + def test_api_error(self, mock_get_channel): + mock_get_channel.side_effect = ApiError(code=401, text="") + with raises(IntegrationError): + validate_channel_id(self.channel_id, self.guild_id, self.integration_id) + + @mock.patch("sentry.integrations.discord.utils.channel.DiscordClient.get_channel") + def test_bad_response(self, mock_get_channel): + mock_get_channel.return_value = "" + with raises(IntegrationError): + validate_channel_id(self.channel_id, self.guild_id, self.integration_id) + + @mock.patch("sentry.integrations.discord.utils.channel.DiscordClient.get_channel") + def 
test_not_guild_member(self, mock_get_channel): + mock_get_channel.return_value = {"guild_id": "not-my-guild"} + with raises(ValidationError): + validate_channel_id(self.channel_id, self.guild_id, self.integration_id) diff --git a/tests/sentry/integrations/discord/test_webhook.py b/tests/sentry/integrations/discord/webhooks/test_command.py similarity index 76% rename from tests/sentry/integrations/discord/test_webhook.py rename to tests/sentry/integrations/discord/webhooks/test_command.py index f4d2a4cca024cc..dd8effa3a5fe54 100644 --- a/tests/sentry/integrations/discord/test_webhook.py +++ b/tests/sentry/integrations/discord/webhooks/test_command.py @@ -8,24 +8,7 @@ @region_silo_test(stable=True) -class DiscordWebhookTest(APITestCase): - @mock.patch("sentry.integrations.discord.requests.base.verify_signature") - def test_ping_interaction(self, mock_verify_signature): - mock_verify_signature.return_value = True - resp = self.client.post( - path=WEBHOOK_URL, - data={ - "type": 1, - }, - format="json", - HTTP_X_SIGNATURE_ED25519="signature", - HTTP_X_SIGNATURE_TIMESTAMP="timestamp", - ) - - assert resp.status_code == 200 - assert resp.json()["type"] == 1 - assert mock_verify_signature.call_count == 1 - +class DiscordCommandInteractionTest(APITestCase): @mock.patch("sentry.integrations.discord.requests.base.verify_signature") def test_command_interaction(self, mock_verify_signature): mock_verify_signature.return_value = True @@ -40,60 +23,6 @@ def test_command_interaction(self, mock_verify_signature): assert resp.status_code == 200 assert resp.json()["type"] == 4 - @mock.patch("sentry.integrations.discord.requests.base.verify_signature") - def test_unknown_interaction(self, mock_verify_signature): - mock_verify_signature.return_value = True - resp = self.client.post( - path=WEBHOOK_URL, - data={ - "type": -1, - }, - format="json", - HTTP_X_SIGNATURE_ED25519="signature", - HTTP_X_SIGNATURE_TIMESTAMP="timestamp", - ) - - assert resp.status_code == 200 - - @mock.patch("sentry.integrations.discord.requests.base.verify_signature") - def test_unauthorized_interaction(self, mock_verify_signature): - mock_verify_signature.return_value = False - resp = self.client.post( - path=WEBHOOK_URL, - data={ - "type": -1, - }, - format="json", - HTTP_X_SIGNATURE_ED25519="signature", - HTTP_X_SIGNATURE_TIMESTAMP="timestamp", - ) - - assert resp.status_code == 401 - - def test_missing_signature(self): - resp = self.client.post( - path=WEBHOOK_URL, - data={ - "type": -1, - }, - format="json", - HTTP_X_SIGNATURE_TIMESTAMP="timestamp", - ) - - assert resp.status_code == 401 - - def test_missing_timestamp(self): - resp = self.client.post( - path=WEBHOOK_URL, - data={ - "type": -1, - }, - format="json", - HTTP_X_SIGNATURE_ED25519="signature", - ) - - assert resp.status_code == 401 - def test_link_no_integration(self): with mock.patch( "sentry.integrations.discord.requests.base.verify_signature", return_value=True @@ -279,5 +208,4 @@ def test_help(self): HTTP_X_SIGNATURE_ED25519="signature", HTTP_X_SIGNATURE_TIMESTAMP="timestamp", ) - assert resp.status_code == 200 diff --git a/tests/sentry/integrations/discord/webhooks/test_endpoint.py b/tests/sentry/integrations/discord/webhooks/test_endpoint.py new file mode 100644 index 00000000000000..11097d7fae924f --- /dev/null +++ b/tests/sentry/integrations/discord/webhooks/test_endpoint.py @@ -0,0 +1,80 @@ +from unittest import mock + +from sentry.testutils.cases import APITestCase +from sentry.testutils.silo import region_silo_test + +WEBHOOK_URL = 
"/extensions/discord/interactions/" + + +@region_silo_test(stable=True) +class DiscordWebhookTest(APITestCase): + @mock.patch("sentry.integrations.discord.requests.base.verify_signature") + def test_ping_interaction(self, mock_verify_signature): + mock_verify_signature.return_value = True + resp = self.client.post( + path=WEBHOOK_URL, + data={ + "type": 1, + }, + format="json", + HTTP_X_SIGNATURE_ED25519="signature", + HTTP_X_SIGNATURE_TIMESTAMP="timestamp", + ) + + assert resp.status_code == 200 + assert resp.json()["type"] == 1 + assert mock_verify_signature.call_count == 1 + + @mock.patch("sentry.integrations.discord.requests.base.verify_signature") + def test_unknown_interaction(self, mock_verify_signature): + mock_verify_signature.return_value = True + resp = self.client.post( + path=WEBHOOK_URL, + data={ + "type": -1, + }, + format="json", + HTTP_X_SIGNATURE_ED25519="signature", + HTTP_X_SIGNATURE_TIMESTAMP="timestamp", + ) + + assert resp.status_code == 200 + + @mock.patch("sentry.integrations.discord.requests.base.verify_signature") + def test_unauthorized_interaction(self, mock_verify_signature): + mock_verify_signature.return_value = False + resp = self.client.post( + path=WEBHOOK_URL, + data={ + "type": -1, + }, + format="json", + HTTP_X_SIGNATURE_ED25519="signature", + HTTP_X_SIGNATURE_TIMESTAMP="timestamp", + ) + + assert resp.status_code == 401 + + def test_missing_signature(self): + resp = self.client.post( + path=WEBHOOK_URL, + data={ + "type": -1, + }, + format="json", + HTTP_X_SIGNATURE_TIMESTAMP="timestamp", + ) + + assert resp.status_code == 401 + + def test_missing_timestamp(self): + resp = self.client.post( + path=WEBHOOK_URL, + data={ + "type": -1, + }, + format="json", + HTTP_X_SIGNATURE_ED25519="signature", + ) + + assert resp.status_code == 401 diff --git a/tests/sentry/integrations/discord/webhooks/test_message_component.py b/tests/sentry/integrations/discord/webhooks/test_message_component.py new file mode 100644 index 00000000000000..351ee827355a30 --- /dev/null +++ b/tests/sentry/integrations/discord/webhooks/test_message_component.py @@ -0,0 +1,210 @@ +from __future__ import annotations + +from typing import Any +from unittest import mock + +from sentry.integrations.discord.message_builder.base.component import ( + DiscordComponentCustomIds as CustomIds, +) +from sentry.integrations.discord.requests.base import ( + DiscordMessageComponentTypes, + DiscordRequestTypes, +) +from sentry.integrations.discord.webhooks.message_component import ( + ASSIGNEE_UPDATED, + IGNORE_UNTIL_ESCALATES, + MARKED_ONGOING, + NO_IDENTITY, + NOT_IN_ORG, + RESOLVE_DIALOG_OPTIONS, + RESOLVED, + RESOLVED_IN_CURRENT_RELEASE, + RESOLVED_IN_NEXT_RELEASE, + UNRESOLVED, +) +from sentry.models.release import Release +from sentry.testutils.cases import APITestCase + +WEBHOOK_URL = "/extensions/discord/interactions/" + + +class DiscordMessageComponentInteractionTest(APITestCase): + def setUp(self): + patcher = mock.patch( + "sentry.integrations.discord.requests.base.verify_signature", return_value=True + ) + patcher.start() + + self.guild_id = "guild-id" + self.channel_id = "channel-id" + self.discord_user_id = "user1234" + + self.discord_integration = self.create_integration( + provider="discord", + name="Cool server", + external_id=self.guild_id, + organization=self.organization, + ) + self.provider = self.create_identity_provider(integration=self.discord_integration) + self.create_identity( + user=self.user, identity_provider=self.provider, external_id=self.discord_user_id + ) + + def 
send_interaction(self, data: Any | None = None, member: Any | None = None): + if data is None: + data = {"custom_id": f"unknown:{self.group.id}"} + if member is None: + member = {"user": {"id": self.discord_user_id}} + + return self.client.post( + path=WEBHOOK_URL, + data={ + "type": DiscordRequestTypes.MESSAGE_COMPONENT, + "guild_id": self.guild_id, + "channel_id": self.channel_id, + "data": data, + "member": member, + }, + format="json", + HTTP_X_SIGNATURE_ED25519="signature", + HTTP_X_SIGNATURE_TIMESTAMP="timestamp", + ) + + def get_message_content(self, response: Any) -> str: + return response.json()["data"]["content"] + + def get_message_components(self, response: Any) -> Any: + return response.json()["data"]["components"] + + def get_select_options(self, response: Any) -> Any: + return self.get_message_components(response)[0]["components"][0]["options"] + + def test_unknown_id_interaction(self): + response = self.send_interaction({"custom_id": f"unknown:{self.group.id}"}) + assert response.status_code == 404 + + def test_no_user(self): + response = self.send_interaction(member={"user": {"id": "not-our-user"}}) + assert self.get_message_content(response) == NO_IDENTITY + + def test_not_in_org(self): + other_user = self.create_user() + other_user_discord_id = "other-user1234" + other_org = self.create_organization() + self.discord_integration.add_organization(other_org) + self.create_identity( + user=other_user, identity_provider=self.provider, external_id=other_user_discord_id + ) + + response = self.send_interaction(member={"user": {"id": other_user_discord_id}}) + + assert self.get_message_content(response) == NOT_IN_ORG + + def test_assign_dialog(self): + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.BUTTON, + "custom_id": f"{CustomIds.ASSIGN_DIALOG}:{self.group.id}", + } + ) + assert self.get_select_options(response) == [ + {"label": f"#{self.team.slug}", "value": f"team:{self.team.id}", "default": False}, + {"label": self.user.email, "value": f"user:{self.user.id}", "default": False}, + ] + + def test_assign(self): + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.SELECT, + "custom_id": f"{CustomIds.ASSIGN}:{self.group.id}", + "values": [f"user:{self.user.id}"], + } + ) + assert self.get_message_content(response) == ASSIGNEE_UPDATED + + def test_resolve_dialog(self): + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.BUTTON, + "custom_id": f"{CustomIds.RESOLVE_DIALOG}:{self.group.id}", + } + ) + assert self.get_select_options(response) == [ + option.build() for option in RESOLVE_DIALOG_OPTIONS + ] + + def test_resolve_non_dialog(self): + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.BUTTON, + "custom_id": f"{CustomIds.RESOLVE}:{self.group.id}", + } + ) + assert self.get_message_content(response) == RESOLVED + + def test_resolve_now_from_dialog(self): + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.SELECT, + "custom_id": f"{CustomIds.RESOLVE}:{self.group.id}", + "values": [""], + } + ) + assert self.get_message_content(response) == RESOLVED + + def test_resolve_in_next_release(self): + release = Release.objects.create( + organization_id=self.organization.id, + version="1.0", + ) + release.add_project(self.project) + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.SELECT, + "custom_id": f"{CustomIds.RESOLVE}:{self.group.id}", + "values": 
["inNextRelease"], + } + ) + assert self.get_message_content(response) == RESOLVED_IN_NEXT_RELEASE + + def test_resolve_in_current_release(self): + release = Release.objects.create( + organization_id=self.organization.id, + version="1.0", + ) + release.add_project(self.project) + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.SELECT, + "custom_id": f"{CustomIds.RESOLVE}:{self.group.id}", + "values": ["inCurrentRelease"], + } + ) + assert self.get_message_content(response) == RESOLVED_IN_CURRENT_RELEASE + + def test_unresolve(self): + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.BUTTON, + "custom_id": f"{CustomIds.UNRESOLVE}:{self.group.id}", + } + ) + assert self.get_message_content(response) == UNRESOLVED + + def test_mark_ongoing(self): + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.BUTTON, + "custom_id": f"{CustomIds.MARK_ONGOING}:{self.group.id}", + } + ) + assert self.get_message_content(response) == MARKED_ONGOING + + def test_archive(self): + response = self.send_interaction( + { + "component_type": DiscordMessageComponentTypes.BUTTON, + "custom_id": f"{CustomIds.ARCHIVE}:{self.group.id}", + } + ) + assert self.get_message_content(response) == IGNORE_UNTIL_ESCALATES From b58dfa1680b71a3d8b666d3ca04323142bd340f8 Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Wed, 2 Aug 2023 15:13:33 -0400 Subject: [PATCH 33/44] fix(starfish): Remove http method if already on transaction (#54037) Some SDKs send `http.method` as part of the description. To prevent rendering duplicate methods (for ex. `GET GET /route`), this patch updates the body cell renderer to only set an http method if it has not been defined already. --- .../starfish/views/webServiceView/endpointList.tsx | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/static/app/views/starfish/views/webServiceView/endpointList.tsx b/static/app/views/starfish/views/webServiceView/endpointList.tsx index 4405f46619c844..90b361cc641cba 100644 --- a/static/app/views/starfish/views/webServiceView/endpointList.tsx +++ b/static/app/views/starfish/views/webServiceView/endpointList.tsx @@ -80,10 +80,11 @@ function EndpointList({eventView, location, organization, setError}: Props) { const rendered = fieldRenderer(dataRow, {organization, location}); if (field === 'transaction') { - let prefix = ''; - if (dataRow['http.method']) { - prefix = `${dataRow['http.method']} `; - } + const method = dataRow['http.method']; + const endpointName = + method && !dataRow.transaction.toString().startsWith(method.toString()) + ? `${method} ${dataRow.transaction}` + : dataRow.transaction; return ( - {prefix} - {dataRow.transaction} + {endpointName} ); } From 673dd0f3a609e87abb10e9fa1d42664a3127d423 Mon Sep 17 00:00:00 2001 From: Evan Hicks Date: Wed, 2 Aug 2023 15:18:57 -0400 Subject: [PATCH 34/44] ref(metrics) Add the transactions http_error_rate function to metrics layer (#53104) This adds the transactions version of `http_error_rate()` to the metrics layer, so it can be queried with the metrics builder. Also update the CODEOWNERS so the search team gets notified of these PRs and can also comment on them. 
--- .github/CODEOWNERS | 5 +-- .../search/events/datasets/metrics_layer.py | 14 ++++++++ src/sentry/snuba/metrics/fields/base.py | 20 +++++++++++ src/sentry/snuba/metrics/fields/snql.py | 36 +++++++++++++++++++ src/sentry/snuba/metrics/naming_layer/mri.py | 2 ++ .../snuba/metrics/naming_layer/public.py | 2 ++ .../endpoints/test_organization_events_mep.py | 6 +--- 7 files changed, 78 insertions(+), 7 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f925b3b77e193f..e98e11f2718922 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -24,6 +24,9 @@ /src/sentry/tagstore/snuba/ @getsentry/owners-snuba /src/sentry/sentry_metrics/ @getsentry/owners-snuba /tests/sentry/sentry_metrics/ @getsentry/owners-snuba +/src/sentry/snuba/metrics/ @getsentry/owners-snuba @getsentry/telemetry-experience +/src/sentry/snuba/metrics/query.py @getsentry/owners-snuba @getsentry/telemetry-experience +/src/sentry/search/events/datasets/metrics_layer.py @getsentry/owners-snuba ## Event Ingestion /src/sentry/attachments/ @getsentry/owners-ingest @@ -397,13 +400,11 @@ yarn.lock @getsentry/owners-js-de ## Telemetry Experience -/src/sentry/snuba/metrics/ @getsentry/telemetry-experience /src/sentry/api/endpoints/organization_metrics.py @getsentry/telemetry-experience /src/sentry/api/endpoints/organization_sessions.py @getsentry/telemetry-experience /src/sentry/api/endpoints/project_dynamic_sampling.py @getsentry/telemetry-experience /src/sentry/api/endpoints/organization_dynamic_sampling_sdk_versions.py @getsentry/telemetry-experience /src/sentry/dynamic_sampling/ @getsentry/telemetry-experience -/src/sentry/snuba/metrics/query.py @getsentry/telemetry-experience /src/sentry/release_health/metrics_sessions_v2.py @getsentry/telemetry-experience /tests/sentry/api/endpoints/test_organization_metric_data.py @getsentry/telemetry-experience /tests/sentry/api/endpoints/test_organization_metric_details.py @getsentry/telemetry-experience diff --git a/src/sentry/search/events/datasets/metrics_layer.py b/src/sentry/search/events/datasets/metrics_layer.py index b75e64ae2c4b54..5d8bc88c3edd95 100644 --- a/src/sentry/search/events/datasets/metrics_layer.py +++ b/src/sentry/search/events/datasets/metrics_layer.py @@ -396,6 +396,20 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]: ), default_result_type="percentage", ), + fields.MetricsFunction( + "http_error_count", + snql_metric_layer=lambda args, alias: AliasedExpression( + Column(TransactionMRI.HTTP_ERROR_COUNT.value), alias + ), + default_result_type="integer", + ), + fields.MetricsFunction( + "http_error_rate", + snql_metric_layer=lambda args, alias: AliasedExpression( + Column(TransactionMRI.HTTP_ERROR_RATE.value), alias + ), + default_result_type="percentage", + ), fields.MetricsFunction( "histogram", required_args=[fields.MetricArg("column")], diff --git a/src/sentry/snuba/metrics/fields/base.py b/src/sentry/snuba/metrics/fields/base.py index 9444f56e9a368e..724d506d1653df 100644 --- a/src/sentry/snuba/metrics/fields/base.py +++ b/src/sentry/snuba/metrics/fields/base.py @@ -54,6 +54,7 @@ failure_count_transaction, foreground_anr_users, histogram_snql_factory, + http_error_count_transaction, max_timestamp, min_timestamp, miserable_users, @@ -1546,6 +1547,25 @@ def generate_where_statements( failure_count, tx_count, alias=alias ), ), + SingularEntityDerivedMetric( + metric_mri=TransactionMRI.HTTP_ERROR_COUNT.value, + metrics=[TransactionMRI.DURATION.value], + unit="transactions", + snql=lambda project_ids, org_id, metric_ids, 
alias=None: http_error_count_transaction( + org_id, metric_ids=metric_ids, alias=alias + ), + ), + SingularEntityDerivedMetric( + metric_mri=TransactionMRI.HTTP_ERROR_RATE.value, + metrics=[ + TransactionMRI.HTTP_ERROR_COUNT.value, + TransactionMRI.ALL.value, + ], + unit="transactions", + snql=lambda http_error_count, tx_count, project_ids, org_id, metric_ids, alias=None: division_float( + http_error_count, tx_count, alias=alias + ), + ), SingularEntityDerivedMetric( metric_mri=TransactionMRI.SATISFIED.value, metrics=[TransactionMRI.DURATION.value, TransactionMRI.MEASUREMENTS_LCP.value], diff --git a/src/sentry/snuba/metrics/fields/snql.py b/src/sentry/snuba/metrics/fields/snql.py index 188a6528c5707e..fc404345c60216 100644 --- a/src/sentry/snuba/metrics/fields/snql.py +++ b/src/sentry/snuba/metrics/fields/snql.py @@ -3,6 +3,7 @@ from snuba_sdk import Column, Function from sentry.api.utils import InvalidParams +from sentry.search.events import constants from sentry.search.events.datasets.function_aliases import resolve_project_threshold_config from sentry.sentry_metrics.use_case_id_registry import UseCaseID from sentry.sentry_metrics.utils import ( @@ -319,6 +320,41 @@ def failure_count_transaction(org_id, metric_ids, alias=None): ) +def http_error_count_transaction(org_id, metric_ids, alias=None): + statuses = [ + resolve_tag_value(UseCaseID.TRANSACTIONS, org_id, status) + for status in constants.HTTP_SERVER_ERROR_STATUS + ] + base_condition = Function( + "in", + [ + Column( + name=resolve_tag_key( + UseCaseID.TRANSACTIONS, + org_id, + TransactionTagsKey.TRANSACTION_HTTP_STATUS_CODE.value, + ) + ), + list(status for status in statuses if status is not None), + ], + ) + + return Function( + "countIf", + [ + Column("value"), + Function( + "and", + [ + base_condition, + Function("in", [Column("metric_id"), list(metric_ids)]), + ], + ), + ], + alias, + ) + + def _project_threshold_multi_if_function( project_ids: Sequence[int], org_id: int, metric_ids: Set[int] ) -> Function: diff --git a/src/sentry/snuba/metrics/naming_layer/mri.py b/src/sentry/snuba/metrics/naming_layer/mri.py index d270778aef010a..97552cb9935599 100644 --- a/src/sentry/snuba/metrics/naming_layer/mri.py +++ b/src/sentry/snuba/metrics/naming_layer/mri.py @@ -124,6 +124,8 @@ class TransactionMRI(Enum): ALL_USER = "e:transactions/user.all@none" USER_MISERY = "e:transactions/user_misery@ratio" TEAM_KEY_TRANSACTION = "e:transactions/team_key_transaction@none" + HTTP_ERROR_COUNT = "e:transactions/http_error_count@none" + HTTP_ERROR_RATE = "e:transactions/http_error_rate@ratio" # Spans (might be moved to their own namespace soon) SPAN_USER = "s:spans/user@none" diff --git a/src/sentry/snuba/metrics/naming_layer/public.py b/src/sentry/snuba/metrics/naming_layer/public.py index 80172fc1cc53e1..d67b84a6d009c8 100644 --- a/src/sentry/snuba/metrics/naming_layer/public.py +++ b/src/sentry/snuba/metrics/naming_layer/public.py @@ -94,6 +94,7 @@ class TransactionMetricKey(Enum): USER_MISERY = "transaction.user_misery" FAILURE_COUNT = "transaction.failure_count" TEAM_KEY_TRANSACTION = "transactions.team_key_transaction" + HTTP_ERROR_RATE = "transaction.http_error_rate" # Span metrics. # NOTE: These might be moved to their own namespace soon. 
@@ -115,6 +116,7 @@ class TransactionTagsKey(Enum): TRANSACTION_STATUS = "transaction.status" TRANSACTION_SATISFACTION = "satisfaction" + TRANSACTION_HTTP_STATUS_CODE = "http.status_code" class TransactionStatusTagValue(Enum): diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py index f5ca50fa9b8af0..010e89a53b43d4 100644 --- a/tests/snuba/api/endpoints/test_organization_events_mep.py +++ b/tests/snuba/api/endpoints/test_organization_events_mep.py @@ -2296,8 +2296,4 @@ def test_having_condition(self): @pytest.mark.xfail(reason="Not supported") def test_time_spent(self): - super().test_custom_measurement_size_filtering() - - @pytest.mark.xfail(reason="Not supported") - def test_http_error_rate(self): - super().test_having_condition() + super().test_time_spent() From 6488f779ada84228dce0d65fae6e6a0840b1a374 Mon Sep 17 00:00:00 2001 From: Jonas Date: Wed, 2 Aug 2023 15:45:16 -0400 Subject: [PATCH 35/44] feat(profiling): add chart padding on Y axis (#53933) Co-authored-by: Tony Xiao --- .../flamegraph/collapsibleTimeline.tsx | 9 +- .../profiling/flamegraph/flamegraph.tsx | 34 +++- .../flamegraph/flamegraphCpuChart.tsx | 39 ++++- .../profiling/flamegraph/flamegraphLayout.tsx | 1 + .../profiling/flamegraph/flamegraphTheme.tsx | 8 + .../app/utils/profiling/flamegraphChart.tsx | 26 ++- .../profiling/renderers/chartRenderer.tsx | 161 ++++++++++++++---- 7 files changed, 226 insertions(+), 52 deletions(-) diff --git a/static/app/components/profiling/flamegraph/collapsibleTimeline.tsx b/static/app/components/profiling/flamegraph/collapsibleTimeline.tsx index d845c469c0143a..b7dac21052b839 100644 --- a/static/app/components/profiling/flamegraph/collapsibleTimeline.tsx +++ b/static/app/components/profiling/flamegraph/collapsibleTimeline.tsx @@ -20,6 +20,7 @@ function CollapsibleTimeline(props: CollapsibleTimelineProps) { return ( @@ -88,7 +89,11 @@ const CollapsibleTimelineLoadingIndicatorContainer = styled('div')` height: 100%; `; -const CollapsibleTimelineHeader = styled('div')<{border: string; labelHeight: number}>` +const CollapsibleTimelineHeader = styled('div')<{ + border: string; + labelHeight: number; + open: boolean; +}>` display: flex; justify-content: space-between; align-items: center; @@ -97,6 +102,7 @@ const CollapsibleTimelineHeader = styled('div')<{border: string; labelHeight: nu height: ${p => p.labelHeight}px; min-height: ${p => p.labelHeight}px; border-top: 1px solid ${p => p.border}; + border-bottom: 1px solid ${p => (p.open ? p.border : 'transparent')}; background-color: ${p => p.theme.backgroundSecondary}; `; @@ -114,7 +120,6 @@ export const CollapsibleTimelineMessage = styled('p')` width: 100%; position: absolute; color: ${p => p.theme.subText}; - padding-bottom: ${space(4)}; font-size: ${p => p.theme.fontSizeSmall}; `; export {CollapsibleTimeline}; diff --git a/static/app/components/profiling/flamegraph/flamegraph.tsx b/static/app/components/profiling/flamegraph/flamegraph.tsx index 862627f5601c78..edd88020b7c8c1 100644 --- a/static/app/components/profiling/flamegraph/flamegraph.tsx +++ b/static/app/components/profiling/flamegraph/flamegraph.tsx @@ -304,9 +304,15 @@ function Flamegraph(): ReactElement { profileGroup.measurements?.cpu_usage_0 ?? 
{ unit: 'percentage', values: [], - } + }, + flamegraphTheme.COLORS.CPU_CHART_COLORS ); - }, [profileGroup.measurements?.cpu_usage_0, flamegraph.configSpace, hasCPUChart]); + }, [ + profileGroup.measurements?.cpu_usage_0, + flamegraph.configSpace, + flamegraphTheme, + hasCPUChart, + ]); const flamegraphCanvas = useMemo(() => { if (!flamegraphCanvasRef) { @@ -487,7 +493,7 @@ function Flamegraph(): ReactElement { const cpuChartView = useMemoWithPrevious | null>( _previousView => { - if (!flamegraphView || !flamegraphCanvas || !CPUChart) { + if (!flamegraphView || !flamegraphCanvas || !CPUChart || !cpuChartCanvas) { return null; } @@ -502,18 +508,28 @@ function Flamegraph(): ReactElement { minWidth: uiFrames.minFrameDuration, barHeight: 0, depthOffset: 0, - maxHeight: 100, + maxHeight: CPUChart.configSpace.height, }, }); - // Initialize configView to whatever the flamegraph configView is - newView.setConfigView(flamegraphView.configView.withHeight(100), { - width: {min: 0}, - }); + // Compute the total size of the padding and stretch the view. This ensures that + // the total range is rendered and perfectly aligned from top to bottom. + newView.setConfigView( + flamegraphView.configView.withHeight(newView.configView.height), + { + width: {min: 0}, + } + ); return newView; }, - [flamegraphView, flamegraphCanvas, CPUChart, uiFrames.minFrameDuration] + [ + flamegraphView, + flamegraphCanvas, + CPUChart, + uiFrames.minFrameDuration, + cpuChartCanvas, + ] ); const spansView = useMemoWithPrevious | null>( diff --git a/static/app/components/profiling/flamegraph/flamegraphCpuChart.tsx b/static/app/components/profiling/flamegraph/flamegraphCpuChart.tsx index 457cff4c63bad0..90388b7b0c7a38 100644 --- a/static/app/components/profiling/flamegraph/flamegraphCpuChart.tsx +++ b/static/app/components/profiling/flamegraph/flamegraphCpuChart.tsx @@ -1,5 +1,6 @@ import {CSSProperties, Fragment, useEffect, useMemo} from 'react'; import styled from '@emotion/styled'; +import {mat3} from 'gl-matrix'; import {t} from 'sentry/locale'; import { @@ -7,8 +8,10 @@ import { useCanvasScheduler, } from 'sentry/utils/profiling/canvasScheduler'; import {CanvasView} from 'sentry/utils/profiling/canvasView'; +import {useFlamegraphTheme} from 'sentry/utils/profiling/flamegraph/useFlamegraphTheme'; import {FlamegraphCanvas} from 'sentry/utils/profiling/flamegraphCanvas'; import type {FlamegraphChart} from 'sentry/utils/profiling/flamegraphChart'; +import {transformMatrixBetweenRect} from 'sentry/utils/profiling/gl/utils'; import {FlamegraphChartRenderer} from 'sentry/utils/profiling/renderers/chartRenderer'; import {Rect} from 'sentry/utils/profiling/speedscope'; import {useProfiles} from 'sentry/views/profiling/profilesProvider'; @@ -38,14 +41,15 @@ export function FlamegraphCpuChart({ }: FlamegraphChartProps) { const profiles = useProfiles(); const scheduler = useCanvasScheduler(canvasPoolManager); + const theme = useFlamegraphTheme(); const cpuChartRenderer = useMemo(() => { if (!cpuChartCanvasRef || !chart) { return null; } - return new FlamegraphChartRenderer(cpuChartCanvasRef, chart); - }, [cpuChartCanvasRef, chart]); + return new FlamegraphChartRenderer(cpuChartCanvasRef, chart, theme); + }, [cpuChartCanvasRef, chart, theme]); useEffect(() => { if (!cpuChartCanvas || !chart || !cpuChartView || !cpuChartRenderer) { @@ -53,11 +57,38 @@ export function FlamegraphCpuChart({ } const drawCpuChart = () => { + const configViewToPhysicalSpaceTransform = transformMatrixBetweenRect( + cpuChartView.configView, + 
cpuChartCanvas.physicalSpace + ); + + const offsetPhysicalSpace = cpuChartCanvas.physicalSpace + // shrink the chart height by the padding to pad the top of chart + .withHeight(cpuChartCanvas.physicalSpace.height - theme.SIZES.CHART_PX_PADDING); + + const physicalSpaceToOffsetPhysicalSpaceTransform = transformMatrixBetweenRect( + cpuChartCanvas.physicalSpace, + offsetPhysicalSpace + ); + + const fromConfigView = mat3.create(); + mat3.multiply( + fromConfigView, + physicalSpaceToOffsetPhysicalSpaceTransform, + configViewToPhysicalSpaceTransform + ); + mat3.multiply( + fromConfigView, + cpuChartCanvas.physicalSpace.invertYTransform(), + fromConfigView + ); + cpuChartRenderer.draw( cpuChartView.configView, cpuChartView.configSpace, cpuChartCanvas.physicalSpace, - cpuChartView.fromConfigView(cpuChartCanvas.physicalSpace) + fromConfigView, + cpuChartView.toConfigView(cpuChartCanvas.logicalSpace) ); }; @@ -67,7 +98,7 @@ export function FlamegraphCpuChart({ return () => { scheduler.unregisterBeforeFrameCallback(drawCpuChart); }; - }, [scheduler, chart, cpuChartCanvas, cpuChartRenderer, cpuChartView]); + }, [scheduler, chart, cpuChartCanvas, cpuChartRenderer, cpuChartView, theme]); return ( diff --git a/static/app/components/profiling/flamegraph/flamegraphLayout.tsx b/static/app/components/profiling/flamegraph/flamegraphLayout.tsx index 8e2a53f4a7ec74..1475ed009fe238 100644 --- a/static/app/components/profiling/flamegraph/flamegraphLayout.tsx +++ b/static/app/components/profiling/flamegraph/flamegraphLayout.tsx @@ -260,6 +260,7 @@ const FlamegraphLayoutContainer = styled('div')` const FlamegraphGrid = styled('div')<{ layout?: FlamegraphPreferences['layout']; }>` + background-color: ${p => p.theme.background}; display: grid; width: 100%; grid-template-rows: ${({layout}) => diff --git a/static/app/utils/profiling/flamegraph/flamegraphTheme.tsx b/static/app/utils/profiling/flamegraph/flamegraphTheme.tsx index 9d954404039ded..95743e1822822a 100644 --- a/static/app/utils/profiling/flamegraph/flamegraphTheme.tsx +++ b/static/app/utils/profiling/flamegraph/flamegraphTheme.tsx @@ -46,6 +46,8 @@ export interface FlamegraphTheme { BAR_LABEL_FONT_COLOR: string; COLOR_BUCKET: (t: number) => ColorChannels; COLOR_MAPS: Record; + CPU_CHART_COLORS: ColorChannels[]; + CPU_CHART_LABEL_COLOR: string; CURSOR_CROSSHAIR: string; DIFFERENTIAL_DECREASE: ColorChannels; DIFFERENTIAL_INCREASE: ColorChannels; @@ -90,6 +92,7 @@ export interface FlamegraphTheme { BAR_FONT_SIZE: number; BAR_HEIGHT: number; BAR_PADDING: number; + CHART_PX_PADDING: number; CPU_CHART_HEIGHT: number; FLAMEGRAPH_DEPTH_OFFSET: number; GRID_LINE_WIDTH: number; @@ -163,6 +166,7 @@ const SIZES: FlamegraphTheme['SIZES'] = { TOOLTIP_FONT_SIZE: 12, TIMELINE_LABEL_HEIGHT: 20, UI_FRAMES_HEIGHT: 60, + CHART_PX_PADDING: 30, }; const FONTS: FlamegraphTheme['FONTS'] = { @@ -185,6 +189,8 @@ export const LightFlamegraphTheme: FlamegraphTheme = { 'by frequency': makeColorMapByFrequency, 'by system vs application frame': makeColorMapBySystemVsApplicationFrame, }, + CPU_CHART_COLORS: [[0.96, 0.69, 0.0, 0.5]], + CPU_CHART_LABEL_COLOR: 'rgba(31,35,58,.75)', CURSOR_CROSSHAIR: '#bbbbbb', DIFFERENTIAL_DECREASE: [0.309, 0.2058, 0.98], DIFFERENTIAL_INCREASE: [0.98, 0.2058, 0.4381], @@ -231,6 +237,8 @@ export const DarkFlamegraphTheme: FlamegraphTheme = { 'by frequency': makeColorMapByFrequency, 'by system vs application frame': makeColorMapBySystemVsApplicationFrame, }, + CPU_CHART_COLORS: [[0.96, 0.69, 0.0, 0.6]], + CPU_CHART_LABEL_COLOR: 'rgba(255, 255, 255, 0.5)', 
CURSOR_CROSSHAIR: '#828285', DIFFERENTIAL_DECREASE: [0.309, 0.2058, 0.98], DIFFERENTIAL_INCREASE: [0.98, 0.2058, 0.4381], diff --git a/static/app/utils/profiling/flamegraphChart.tsx b/static/app/utils/profiling/flamegraphChart.tsx index 47f374b8b0e214..6c0dfbbdc56653 100644 --- a/static/app/utils/profiling/flamegraphChart.tsx +++ b/static/app/utils/profiling/flamegraphChart.tsx @@ -1,9 +1,14 @@ +import {ColorChannels} from 'sentry/utils/profiling/flamegraph/flamegraphTheme'; import {Rect} from 'sentry/utils/profiling/speedscope'; +import {colorComponentsToRGBA} from './colors/utils'; import {makeFormatter} from './units/units'; interface Series { + fillColor: string; + lineColor: string; points: {x: number; y: number}[]; + type: 'line' | 'area'; } export class FlamegraphChart { @@ -18,12 +23,27 @@ export class FlamegraphChart { y: [0, 0], }; - static Empty = new FlamegraphChart(Rect.Empty(), {unit: 'percent', values: []}); + static Empty = new FlamegraphChart(Rect.Empty(), {unit: 'percent', values: []}, [ + [0, 0, 0, 0], + ]); - constructor(configSpace: Rect, measurement: Profiling.Measurement) { + constructor( + configSpace: Rect, + measurement: Profiling.Measurement, + colors: ColorChannels[] + ) { this.series = new Array(); + if (!measurement || !measurement.values.length) { + this.formatter = makeFormatter('percent'); + this.configSpace = configSpace.clone(); + return; + } + this.series[0] = { + type: 'area', + lineColor: colorComponentsToRGBA(colors[0]), + fillColor: colorComponentsToRGBA(colors[0]), points: new Array(measurement.values.length), }; @@ -52,6 +72,6 @@ export class FlamegraphChart { this.domains.y[1] = 100; this.configSpace = configSpace.withHeight(this.domains.y[1] - this.domains.y[0]); - this.formatter = makeFormatter(measurement.unit); + this.formatter = makeFormatter(measurement.unit, 0); } } diff --git a/static/app/utils/profiling/renderers/chartRenderer.tsx b/static/app/utils/profiling/renderers/chartRenderer.tsx index 194f725219d266..90ed75ae37e66c 100644 --- a/static/app/utils/profiling/renderers/chartRenderer.tsx +++ b/static/app/utils/profiling/renderers/chartRenderer.tsx @@ -1,17 +1,58 @@ -import {mat3, vec2} from 'gl-matrix'; +import {mat3, vec2, vec3} from 'gl-matrix'; +import {FlamegraphTheme} from 'sentry/utils/profiling/flamegraph/flamegraphTheme'; import {FlamegraphChart} from 'sentry/utils/profiling/flamegraphChart'; import {getContext, resizeCanvasToDisplaySize} from 'sentry/utils/profiling/gl/utils'; import {Rect} from 'sentry/utils/profiling/speedscope'; +function findYIntervals( + configView: Rect, + logicalSpaceToConfigView: mat3, + getInterval: (mat: mat3, x: number) => number +): number[] { + const target = 20; + const targetInterval = Math.abs( + getInterval(logicalSpaceToConfigView, target) - configView.bottom + ); + + const minInterval = Math.pow(10, Math.floor(Math.log10(targetInterval))); + let interval = minInterval; + + if (targetInterval / interval > 3) { + interval *= 5; + } + + let x = Math.ceil(configView.top / interval) * interval; + const intervals: number[] = []; + + while (x <= configView.bottom) { + intervals.push(x); + x += interval; + } + + return intervals; +} + +function getIntervalTimeAtY(logicalSpaceToConfigView: mat3, y: number): number { + const vector = logicalSpaceToConfigView[4] * y + logicalSpaceToConfigView[7]; + + if (vector > 1) { + return Math.round(vector); + } + + return Math.round(vector * 10) / 10; +} + export class FlamegraphChartRenderer { canvas: HTMLCanvasElement | null; chart: FlamegraphChart; context: 
CanvasRenderingContext2D; + theme: FlamegraphTheme; - constructor(canvas: HTMLCanvasElement, chart: FlamegraphChart) { + constructor(canvas: HTMLCanvasElement, chart: FlamegraphChart, theme: FlamegraphTheme) { this.canvas = canvas; this.chart = chart; + this.theme = theme; this.context = getContext(this.canvas, '2d'); resizeCanvasToDisplaySize(this.canvas); @@ -22,56 +63,108 @@ export class FlamegraphChartRenderer { } draw( - _configView: Rect, + configView: Rect, _configSpace: Rect, _physicalSpace: Rect, - configViewToPhysicalSpace: mat3 + configViewToPhysicalSpace: mat3, + logicalSpaceToConfigView: mat3 ) { if (!this.canvas) { throw new Error('No canvas to draw on'); } + this.context.clearRect(0, 0, this.canvas.width, this.canvas.height); + if (!this.chart.series.length) { return; } this.context.clearRect(0, 0, this.canvas.width, this.canvas.height); + this.context.font = `bold 14px ${this.theme.FONTS.FRAME_FONT}`; - // Helper lines for dev - this.context.font = '16px sans-serif'; - this.context.textBaseline = 'middle'; - - this.context.strokeStyle = `red`; this.context.beginPath(); - const origin = new Rect(0, 0, 1, 1).transformRect(configViewToPhysicalSpace); - this.context.arc(origin.x, origin.y, 10, 0, 2 * Math.PI); - this.context.fill(); + this.context.stroke(); + + const intervals = findYIntervals( + configView, + logicalSpaceToConfigView, + getIntervalTimeAtY + ); - this.context.strokeStyle = `black`; - for (const h of [0, 25, 50, 75, 100]) { - const r = new Rect(0, h, 1, 1).transformRect(configViewToPhysicalSpace); + this.context.textBaseline = 'bottom'; + this.context.lineWidth = 1; + const TICK_WIDTH = 14 * window.devicePixelRatio; + const {left, right} = configView.transformRect(configViewToPhysicalSpace); + const textOffsetLeft = 2 * window.devicePixelRatio; + + // Draw series + for (let i = 0; i < this.chart.series.length; i++) { + this.context.lineWidth = 1; + this.context.fillStyle = this.chart.series[i].fillColor; + this.context.strokeStyle = this.chart.series[i].lineColor; this.context.beginPath(); - this.context.moveTo(0, r.y); - this.context.lineTo(this.canvas.width, r.y); - this.context.stroke(); - this.context.fillText(h.toString(), this.canvas.width / 2, r.y); + this.context.lineCap = 'round'; + const serie = this.chart.series[i]; + + const origin = vec3.fromValues(0, 0, 1); + vec3.transformMat3(origin, origin, configViewToPhysicalSpace); + + for (let j = 0; j < serie.points.length; j++) { + const point = serie.points[j]; + + const r = vec3.fromValues(point.x, point.y, 1); + vec3.transformMat3(r, r, configViewToPhysicalSpace); + + if (j === 0) { + this.context.lineTo(r[0], origin[1]); + } + this.context.lineTo(r[0], r[1]); + if (j === serie.points.length - 1) { + this.context.lineTo(r[0], origin[1]); + } + } + + if (this.chart.series[i].type === 'line') { + this.context.stroke(); + } else { + this.context.fill(); + } } - // @TODO draw series - // for (let i = 0; i < this.chart.series.length; i++) { - // this.context.strokeStyle = `red`; - // this.context.beginPath(); - // const serie = this.chart.series[i]; - - // for (let j = 0; j < serie.points.length; j++) { - // const point = serie.points[j]; - // const r = new Rect(point.x, point.y, 1, 1).transformRect( - // configViewToPhysicalSpace - // ); - // this.context.lineTo(r.x, r.y); - // } - // this.context.stroke(); - // } + // Draw interval ticks + this.context.strokeStyle = this.theme.COLORS.CPU_CHART_LABEL_COLOR; + this.context.fillStyle = this.theme.COLORS.CPU_CHART_LABEL_COLOR; + for (let i = 0; i < 
intervals.length; i++) { + const interval = new Rect(configView.left, intervals[i], 5, 2).transformRect( + configViewToPhysicalSpace + ); + const textOffset = interval.height; + const text = this.chart.formatter(intervals[i]); + + if (i === 0) { + this.context.textAlign = 'left'; + this.context.fillText(text, left + textOffsetLeft, interval.y - textOffset); + this.context.textAlign = 'end'; + this.context.fillText(text, right - textOffsetLeft, interval.y - textOffset); + continue; + } + + this.context.textAlign = 'left'; + this.context.beginPath(); + this.context.moveTo(left, interval.y); + this.context.lineTo(left + TICK_WIDTH, interval.y); + this.context.stroke(); + + this.context.fillText(text, left + textOffsetLeft, interval.y - textOffset); + + this.context.textAlign = 'end'; + this.context.beginPath(); + this.context.moveTo(right, interval.y); + this.context.lineTo(right - TICK_WIDTH, interval.y); + this.context.stroke(); + + this.context.fillText(text, right - textOffsetLeft, interval.y - textOffset); + } } } From 280b1e33bcb3c70e014ce3914aeb39884bb9b238 Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Wed, 2 Aug 2023 12:49:44 -0700 Subject: [PATCH 36/44] test(hybrid-cloud): Fixes connection acquisition for sentry app deletions test in split DB mode (#53975) --- tests/sentry/deletions/test_sentry_app.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/sentry/deletions/test_sentry_app.py b/tests/sentry/deletions/test_sentry_app.py index d1c66268e0c7b4..41a98455501652 100644 --- a/tests/sentry/deletions/test_sentry_app.py +++ b/tests/sentry/deletions/test_sentry_app.py @@ -1,5 +1,5 @@ import pytest -from django.db import connection +from django.db import connections, router from sentry import deletions from sentry.models import ApiApplication, SentryApp, SentryAppInstallation, User @@ -42,7 +42,7 @@ def test_soft_deletes_sentry_app(self): # The QuerySet will automatically NOT include deleted installs, so we # use a raw sql query to ensure it still exists. 
- c = connection.cursor() + c = connections[router.db_for_write(SentryApp)].cursor() c.execute( "SELECT count(1) " "FROM sentry_sentryapp " From 3a596d54e5765b8119199112e08d3891cc83a06c Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Wed, 2 Aug 2023 12:51:08 -0700 Subject: [PATCH 37/44] feat(issues): Prompt to setup releases in resolve dropdown (#53997) --- .../app/components/actions/resolve.spec.tsx | 12 +++ static/app/components/actions/resolve.tsx | 79 ++++++++++++++++++- 2 files changed, 87 insertions(+), 4 deletions(-) diff --git a/static/app/components/actions/resolve.spec.tsx b/static/app/components/actions/resolve.spec.tsx index a8e37afd6bcba6..b49e9caafd94d3 100644 --- a/static/app/components/actions/resolve.spec.tsx +++ b/static/app/components/actions/resolve.spec.tsx @@ -193,4 +193,16 @@ describe('ResolveActions', function () { expect(screen.getByText('The current release')).toBeInTheDocument(); expect(screen.getByText('1.2.3 (semver)')).toBeInTheDocument(); }); + + it('displays prompt to setup releases when there are no releases', async function () { + const organization = TestStubs.Organization({ + features: ['issue-resolve-release-setup'], + }); + render(, { + organization, + }); + + await userEvent.click(screen.getByLabelText('More resolve options')); + expect(screen.getByText('Resolving is better with Releases')).toBeInTheDocument(); + }); }); diff --git a/static/app/components/actions/resolve.tsx b/static/app/components/actions/resolve.tsx index 1bbe419006e913..d4c56321988b57 100644 --- a/static/app/components/actions/resolve.tsx +++ b/static/app/components/actions/resolve.tsx @@ -2,15 +2,16 @@ import {css} from '@emotion/react'; import styled from '@emotion/styled'; import {openModal} from 'sentry/actionCreators/modal'; -import {Button} from 'sentry/components/button'; +import {Button, LinkButton} from 'sentry/components/button'; import ButtonBar from 'sentry/components/buttonBar'; import {openConfirmModal} from 'sentry/components/confirm'; import CustomCommitsResolutionModal from 'sentry/components/customCommitsResolutionModal'; import CustomResolutionModal from 'sentry/components/customResolutionModal'; import {DropdownMenu, MenuItemProps} from 'sentry/components/dropdownMenu'; import {Tooltip} from 'sentry/components/tooltip'; -import {IconChevron} from 'sentry/icons'; +import {IconChevron, IconReleases} from 'sentry/icons'; import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; import { GroupStatusResolution, GroupSubstatus, @@ -22,6 +23,35 @@ import {trackAnalytics} from 'sentry/utils/analytics'; import {formatVersion, isSemverRelease} from 'sentry/utils/formatters'; import useOrganization from 'sentry/utils/useOrganization'; +function SetupReleasesPrompt() { + return ( + + + +

+        <SetupReleasesHeader>
+          {t('Resolving is better with Releases')}
+        </SetupReleasesHeader>

+        {t(
+          "Set up Releases so Sentry will only bother you when something you've fixed breaks in a future release."
+        )}
+      
+
+ + {t('Set up Releases Now')} + +
+ ); +} + export interface ResolveActionsProps { hasRelease: boolean; onUpdate: (data: GroupStatusResolution) => void; @@ -196,9 +226,13 @@ function ResolveActions({ ]; const isDisabled = !projectSlug ? disabled : disableDropdown; + const hasResolveReleaseSetup = organization.features.includes( + 'issue-resolve-release-setup' + ); return ( - ( + ) + } isDisabled={isDisabled} /> ); @@ -307,3 +347,34 @@ const DropdownTrigger = styled(Button)` border-radius: ${p => p.theme.borderRadiusRight}; border-left: none; `; + +/** + * Used to hide the list items when prompting to set up releases + */ +const StyledDropdownMenu = styled(DropdownMenu)<{itemsHidden: boolean}>` + ${p => + p.itemsHidden && + css` + ul { + display: none; + } + `} +`; + +const SetupReleases = styled('div')` + display: flex; + flex-direction: column; + gap: ${space(1.5)}; + align-items: center; + padding: ${space(2)} 0; + text-align: center; + color: ${p => p.theme.gray400}; + width: 288px; + white-space: normal; + font-weight: normal; +`; + +const SetupReleasesHeader = styled('h6')` + font-size: ${p => p.theme.fontSizeLarge}; + margin-bottom: ${space(1)}; +`; From d87569cc4c8dfcb76ddd877c4374851e6f7c6c36 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 2 Aug 2023 16:02:38 -0400 Subject: [PATCH 38/44] ref: remove unused request.timezone attribute stuffing (#54039) last used in 5c93fc10da5a09724e72580475c3ccf624e8fd80 --- src/sentry/middleware/locale.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/sentry/middleware/locale.py b/src/sentry/middleware/locale.py index 35b6f3b6e2992b..f4cfd5967b0c18 100644 --- a/src/sentry/middleware/locale.py +++ b/src/sentry/middleware/locale.py @@ -1,4 +1,3 @@ -import pytz import sentry_sdk from django.conf import settings from django.http.request import HttpRequest @@ -48,9 +47,6 @@ def load_user_conf(self, request: HttpRequest) -> None: # TODO: django 4.x removes this from session request.session[translation.LANGUAGE_SESSION_KEY] = language # type: ignore[attr-defined] - if timezone := get_option_from_list(options, key="timezone"): - request.timezone = pytz.timezone(timezone) # type: ignore[attr-defined] - def process_response( self, request: HttpRequest, response: HttpResponseBase ) -> HttpResponseBase: From 2799b0bf27ab4070ab9ff66d0df49040c683cc16 Mon Sep 17 00:00:00 2001 From: Jonas Date: Wed, 2 Aug 2023 16:18:22 -0400 Subject: [PATCH 39/44] feat(profiling): wire up pan zoom behavior (#54053) Wire up zoom/pan behavior on CPU chart --------- Co-authored-by: Tony Xiao --- .../profiling/flamegraph/flamegraph.tsx | 4 +- .../flamegraph/flamegraphCpuChart.tsx | 164 +++++++++++++++++- .../flamegraph/flamegraphUIFrames.tsx | 8 +- 3 files changed, 167 insertions(+), 9 deletions(-) diff --git a/static/app/components/profiling/flamegraph/flamegraph.tsx b/static/app/components/profiling/flamegraph/flamegraph.tsx index edd88020b7c8c1..e428e5b2c4da30 100644 --- a/static/app/components/profiling/flamegraph/flamegraph.tsx +++ b/static/app/components/profiling/flamegraph/flamegraph.tsx @@ -619,6 +619,7 @@ function Flamegraph(): ReactElement { cpuChartView.setConfigView(rect); } } + canvasPoolManager.draw(); }; @@ -628,7 +629,8 @@ function Flamegraph(): ReactElement { ) => { if ( sourceTransformConfigView === flamegraphView || - sourceTransformConfigView === uiFramesView + sourceTransformConfigView === uiFramesView || + sourceTransformConfigView === cpuChartView ) { flamegraphView.transformConfigView(mat); if (spansView) { diff --git 
a/static/app/components/profiling/flamegraph/flamegraphCpuChart.tsx b/static/app/components/profiling/flamegraph/flamegraphCpuChart.tsx index 90388b7b0c7a38..74f4a07806f9a2 100644 --- a/static/app/components/profiling/flamegraph/flamegraphCpuChart.tsx +++ b/static/app/components/profiling/flamegraph/flamegraphCpuChart.tsx @@ -1,6 +1,6 @@ -import {CSSProperties, Fragment, useEffect, useMemo} from 'react'; +import {CSSProperties, Fragment, useCallback, useEffect, useMemo, useState} from 'react'; import styled from '@emotion/styled'; -import {mat3} from 'gl-matrix'; +import {mat3, vec2} from 'gl-matrix'; import {t} from 'sentry/locale'; import { @@ -11,11 +11,19 @@ import {CanvasView} from 'sentry/utils/profiling/canvasView'; import {useFlamegraphTheme} from 'sentry/utils/profiling/flamegraph/useFlamegraphTheme'; import {FlamegraphCanvas} from 'sentry/utils/profiling/flamegraphCanvas'; import type {FlamegraphChart} from 'sentry/utils/profiling/flamegraphChart'; -import {transformMatrixBetweenRect} from 'sentry/utils/profiling/gl/utils'; +import { + getConfigViewTranslationBetweenVectors, + getPhysicalSpacePositionFromOffset, + transformMatrixBetweenRect, +} from 'sentry/utils/profiling/gl/utils'; import {FlamegraphChartRenderer} from 'sentry/utils/profiling/renderers/chartRenderer'; import {Rect} from 'sentry/utils/profiling/speedscope'; import {useProfiles} from 'sentry/views/profiling/profilesProvider'; +import {useCanvasScroll} from './interactions/useCanvasScroll'; +import {useCanvasZoomOrScroll} from './interactions/useCanvasZoomOrScroll'; +import {useInteractionViewCheckPoint} from './interactions/useInteractionViewCheckPoint'; +import {useWheelCenterZoom} from './interactions/useWheelCenterZoom'; import { CollapsibleTimelineLoadingIndicator, CollapsibleTimelineMessage, @@ -43,6 +51,12 @@ export function FlamegraphCpuChart({ const scheduler = useCanvasScheduler(canvasPoolManager); const theme = useFlamegraphTheme(); + const [configSpaceCursor, setConfigSpaceCursor] = useState(null); + const [startInteractionVector, setStartInteractionVector] = useState(null); + const [lastInteraction, setLastInteraction] = useState< + 'pan' | 'click' | 'zoom' | 'scroll' | 'select' | 'resize' | null + >(null); + const cpuChartRenderer = useMemo(() => { if (!cpuChartCanvasRef || !chart) { return null; @@ -100,9 +114,151 @@ export function FlamegraphCpuChart({ }; }, [scheduler, chart, cpuChartCanvas, cpuChartRenderer, cpuChartView, theme]); + const onMouseDrag = useCallback( + (evt: React.MouseEvent) => { + if (!cpuChartCanvas || !cpuChartView || !startInteractionVector) { + return; + } + + const configDelta = getConfigViewTranslationBetweenVectors( + evt.nativeEvent.offsetX, + evt.nativeEvent.offsetY, + startInteractionVector, + cpuChartView, + cpuChartCanvas + ); + + if (!configDelta) { + return; + } + + canvasPoolManager.dispatch('transform config view', [configDelta, cpuChartView]); + setStartInteractionVector( + getPhysicalSpacePositionFromOffset( + evt.nativeEvent.offsetX, + evt.nativeEvent.offsetY + ) + ); + }, + [cpuChartCanvas, cpuChartView, startInteractionVector, canvasPoolManager] + ); + + const onCanvasMouseMove = useCallback( + (evt: React.MouseEvent) => { + if (!cpuChartCanvas || !cpuChartView) { + return; + } + + const configSpaceMouse = cpuChartView.getConfigViewCursor( + vec2.fromValues(evt.nativeEvent.offsetX, evt.nativeEvent.offsetY), + cpuChartCanvas + ); + + setConfigSpaceCursor(configSpaceMouse); + + if (startInteractionVector) { + onMouseDrag(evt); + setLastInteraction('pan'); + } 
else { + setLastInteraction(null); + } + }, + [cpuChartCanvas, cpuChartView, onMouseDrag, startInteractionVector] + ); + + const onMapCanvasMouseUp = useCallback(() => { + setConfigSpaceCursor(null); + setLastInteraction(null); + }, []); + + useEffect(() => { + window.addEventListener('mouseup', onMapCanvasMouseUp); + + return () => { + window.removeEventListener('mouseup', onMapCanvasMouseUp); + }; + }, [onMapCanvasMouseUp]); + + const onWheelCenterZoom = useWheelCenterZoom( + cpuChartCanvas, + cpuChartView, + canvasPoolManager + ); + const onCanvasScroll = useCanvasScroll(cpuChartCanvas, cpuChartView, canvasPoolManager); + + useCanvasZoomOrScroll({ + setConfigSpaceCursor, + setLastInteraction, + handleWheel: onWheelCenterZoom, + handleScroll: onCanvasScroll, + canvas: cpuChartCanvasRef, + }); + + useInteractionViewCheckPoint({ + view: cpuChartView, + lastInteraction, + }); + + // When a user click anywhere outside the spans, clear cursor and selected node + useEffect(() => { + const onClickOutside = (evt: MouseEvent) => { + if (!cpuChartCanvasRef || cpuChartCanvasRef.contains(evt.target as Node)) { + return; + } + setConfigSpaceCursor(null); + }; + + document.addEventListener('click', onClickOutside); + + return () => { + document.removeEventListener('click', onClickOutside); + }; + }); + + const onCanvasMouseLeave = useCallback(() => { + setConfigSpaceCursor(null); + setStartInteractionVector(null); + setLastInteraction(null); + }, []); + + const onCanvasMouseDown = useCallback((evt: React.MouseEvent) => { + setLastInteraction('click'); + setStartInteractionVector( + getPhysicalSpacePositionFromOffset(evt.nativeEvent.offsetX, evt.nativeEvent.offsetY) + ); + }, []); + + const onCanvasMouseUp = useCallback( + (evt: React.MouseEvent) => { + evt.preventDefault(); + evt.stopPropagation(); + + if (!cpuChartView) { + return; + } + + if (!configSpaceCursor) { + setLastInteraction(null); + setStartInteractionVector(null); + return; + } + + setLastInteraction(null); + setStartInteractionVector(null); + }, + [configSpaceCursor, cpuChartView] + ); + return ( - + {/* transaction loads after profile, so we want to show loading even if it's in initial state */} {profiles.type === 'loading' || profiles.type === 'initial' ? 
( diff --git a/static/app/components/profiling/flamegraph/flamegraphUIFrames.tsx b/static/app/components/profiling/flamegraph/flamegraphUIFrames.tsx index a58d966dff7584..5b0f3320abebe3 100644 --- a/static/app/components/profiling/flamegraph/flamegraphUIFrames.tsx +++ b/static/app/components/profiling/flamegraph/flamegraphUIFrames.tsx @@ -142,7 +142,7 @@ export function FlamegraphUIFrames({ [uiFramesCanvas, uiFramesView, onMouseDrag, startInteractionVector] ); - const onMinimapCanvasMouseUp = useCallback(() => { + const onMapCanvasMouseUp = useCallback(() => { setConfigSpaceCursor(null); setLastInteraction(null); }, []); @@ -168,12 +168,12 @@ export function FlamegraphUIFrames({ }); useEffect(() => { - window.addEventListener('mouseup', onMinimapCanvasMouseUp); + window.addEventListener('mouseup', onMapCanvasMouseUp); return () => { - window.removeEventListener('mouseup', onMinimapCanvasMouseUp); + window.removeEventListener('mouseup', onMapCanvasMouseUp); }; - }, [onMinimapCanvasMouseUp]); + }, [onMapCanvasMouseUp]); const onCanvasMouseLeave = useCallback(() => { setConfigSpaceCursor(null); From 862bc243b194b4ae4347f36c815f85dac76cde05 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Wed, 2 Aug 2023 13:21:05 -0700 Subject: [PATCH 40/44] feat(issues): Add flag `issue-resolve-release-setup` (#53998) --- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index dd33ffe78e2a87..bf31060652c408 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1500,6 +1500,8 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str: "organizations:issue-details-most-helpful-event-ui": False, # Display if a release is using semver when resolving issues "organizations:issue-release-semver": False, + # Display a prompt to setup releases in the resolve options dropdown + "organizations:issue-resolve-release-setup": False, # Adds the ttid & ttfd vitals to the frontend "organizations:mobile-vitals": False, # Display CPU and memory metrics in transactions with profiles diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 13b6ed1dfcfba5..2182da6734bd6d 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -95,6 +95,7 @@ default_manager.add("organizations:issue-details-stacktrace-improvements", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-platform", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-release-semver", OrganizationFeature, FeatureHandlerStrategy.REMOTE) +default_manager.add("organizations:issue-resolve-release-setup", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-search-allow-postgres-only-search", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-search-use-cdc-primary", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:issue-search-use-cdc-secondary", OrganizationFeature, FeatureHandlerStrategy.REMOTE) From 0c7b37ae019be41e1ebe4b7765e6f9eea468c2d9 Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Wed, 2 Aug 2023 13:22:41 -0700 Subject: [PATCH 41/44] feat(workflow): Add custom percent change alert text (#53789) --- src/sentry/integrations/metric_alerts.py | 30 ++++++++++-- .../sentry/integrations/test_metric_alerts.py | 46 +++++++++++++++++++ 2 files changed, 73 insertions(+), 3 deletions(-) diff 
--git a/src/sentry/integrations/metric_alerts.py b/src/sentry/integrations/metric_alerts.py index 7cb2c2a77cf0fb..b60d0fddddec4a 100644 --- a/src/sentry/integrations/metric_alerts.py +++ b/src/sentry/integrations/metric_alerts.py @@ -11,6 +11,7 @@ from sentry.incidents.models import ( INCIDENT_STATUS, AlertRule, + AlertRuleThresholdType, Incident, IncidentStatus, IncidentTrigger, @@ -25,6 +26,16 @@ "percentage(users_crashed, users)": "% users crash free rate", } LOGO_URL = absolute_uri(get_asset_url("sentry", "images/sentry-email-avatar.png")) +# These should be the same as the options in the frontend +# COMPARISON_DELTA_OPTIONS +TEXT_COMPARISON_DELTA = { + 5: ("same time 5 minutes ago"), # 5 minutes + 15: ("same time 15 minutes ago"), # 15 minutes + 60: ("same time one hour ago"), # one hour + 1440: ("same time one day ago"), # one day + 10080: ("same time one week ago"), # one week + 43200: ("same time one month ago"), # 30 days +} def get_metric_count_from_incident(incident: Incident) -> str: @@ -67,14 +78,27 @@ def get_incident_status_text(alert_rule: AlertRule, metric_value: str) -> str: time_window = alert_rule.snuba_query.time_window // 60 interval = "minute" if time_window == 1 else "minutes" - text = _("%(metric_and_agg_text)s in the last %(time_window)d %(interval)s") % { + # % change alerts have a comparison delta + if alert_rule.comparison_delta: + metric_and_agg_text = f"{agg_text.capitalize()} {int(metric_value)}%" + higher_or_lower = ( + "higher" if alert_rule.threshold_type == AlertRuleThresholdType.ABOVE.value else "lower" + ) + comparison_delta_minutes = alert_rule.comparison_delta // 60 + comparison_string = TEXT_COMPARISON_DELTA.get( + comparison_delta_minutes, f"same time {comparison_delta_minutes} minutes ago" + ) + return _( + f"{metric_and_agg_text} {higher_or_lower} in the last {time_window} {interval} " + f"compared to the {comparison_string}" + ) + + return _("%(metric_and_agg_text)s in the last %(time_window)d %(interval)s") % { "metric_and_agg_text": metric_and_agg_text, "time_window": time_window, "interval": interval, } - return text - def incident_attachment_info(incident: Incident, new_status: IncidentStatus, metric_value=None): alert_rule = incident.alert_rule diff --git a/tests/sentry/integrations/test_metric_alerts.py b/tests/sentry/integrations/test_metric_alerts.py index 8e0afbc5dd99cc..ab105cfadecf59 100644 --- a/tests/sentry/integrations/test_metric_alerts.py +++ b/tests/sentry/integrations/test_metric_alerts.py @@ -123,6 +123,52 @@ def test_with_incident_trigger(self): == "http://testserver/_static/{version}/sentry/images/sentry-email-avatar.png" ) + def test_percent_change_alert(self): + # 1 hour comparison_delta + alert_rule = self.create_alert_rule(comparison_delta=60) + date_started = self.now + incident = self.create_incident( + self.organization, + title="Incident #1", + projects=[self.project], + alert_rule=alert_rule, + status=IncidentStatus.CLOSED.value, + date_started=date_started, + ) + trigger = self.create_alert_rule_trigger(alert_rule, CRITICAL_TRIGGER_LABEL, 100) + self.create_alert_rule_trigger_action( + alert_rule_trigger=trigger, triggered_for_incident=incident + ) + metric_value = 123.12 + data = incident_attachment_info(incident, IncidentStatus.CRITICAL, metric_value) + assert ( + data["text"] + == "Events 123% higher in the last 10 minutes compared to the same time one hour ago" + ) + + def test_percent_change_alert_custom_comparison_delta(self): + # 12 hour comparison_delta + alert_rule = 
self.create_alert_rule(comparison_delta=60 * 12) + date_started = self.now + incident = self.create_incident( + self.organization, + title="Incident #1", + projects=[self.project], + alert_rule=alert_rule, + status=IncidentStatus.CLOSED.value, + date_started=date_started, + ) + trigger = self.create_alert_rule_trigger(alert_rule, CRITICAL_TRIGGER_LABEL, 100) + self.create_alert_rule_trigger_action( + alert_rule_trigger=trigger, triggered_for_incident=incident + ) + metric_value = 123.12 + data = incident_attachment_info(incident, IncidentStatus.CRITICAL, metric_value) + assert ( + data["text"] + == "Events 123% higher in the last 10 minutes compared to the same time 720 minutes ago" + ) + MOCK_NOW = timezone.now().replace(hour=13, minute=0, second=0, microsecond=0) From d66e09a36bce435c1851947c80b31732fd752414 Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Wed, 2 Aug 2023 13:27:20 -0700 Subject: [PATCH 42/44] fix(replay): Fix what tab appears when you click a Timestamp button in Replay Details>Network (#53973) Before: 1. Open the Replay Details > Network tab 2. Click the Timestamp (in the right column) for a script/img/etc resource 3. Notice how it changes over to the Console tab Now: It'll stay on the network tab. --- static/app/utils/replays/getFrameDetails.tsx | 42 ++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/static/app/utils/replays/getFrameDetails.tsx b/static/app/utils/replays/getFrameDetails.tsx index f83992e348ff4a..4987ea93506505 100644 --- a/static/app/utils/replays/getFrameDetails.tsx +++ b/static/app/utils/replays/getFrameDetails.tsx @@ -227,6 +227,13 @@ const MAPPER_FOR_FRAME: Record Details> = { title: 'Paint', type: BreadcrumbType.INFO, }), + 'resource.css': frame => ({ + color: 'gray300', + description: undefined, + tabKey: TabKey.NETWORK, + title: frame.description, + type: BreadcrumbType.HTTP, + }), 'resource.fetch': frame => ({ color: 'gray300', description: undefined, @@ -234,6 +241,41 @@ const MAPPER_FOR_FRAME: Record Details> = { title: frame.description, type: BreadcrumbType.HTTP, }), + 'resource.iframe': frame => ({ + color: 'gray300', + description: undefined, + tabKey: TabKey.NETWORK, + title: frame.description, + type: BreadcrumbType.HTTP, + }), + 'resource.img': frame => ({ + color: 'gray300', + description: undefined, + tabKey: TabKey.NETWORK, + title: frame.description, + type: BreadcrumbType.HTTP, + }), + 'resource.link': frame => ({ + color: 'gray300', + description: undefined, + tabKey: TabKey.NETWORK, + title: frame.description, + type: BreadcrumbType.HTTP, + }), + 'resource.other': frame => ({ + color: 'gray300', + description: undefined, + tabKey: TabKey.NETWORK, + title: frame.description, + type: BreadcrumbType.HTTP, + }), + 'resource.script': frame => ({ + color: 'gray300', + description: undefined, + tabKey: TabKey.NETWORK, + title: frame.description, + type: BreadcrumbType.HTTP, + }), 'resource.xhr': frame => ({ color: 'gray300', description: undefined, From a8bcd95b6f7fad15a7180d3cd74bc50547cd1cd3 Mon Sep 17 00:00:00 2001 From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com> Date: Wed, 2 Aug 2023 16:37:22 -0400 Subject: [PATCH 43/44] ref: clean up django 1.x remnant in sentry.wsgi (#54040) --- src/sentry/services/http.py | 2 +- src/sentry/wsgi.py | 29 ++--------------------------- 2 files changed, 3 insertions(+), 28 deletions(-) diff --git a/src/sentry/services/http.py b/src/sentry/services/http.py index ca78b21bf63d81..1fda0cef8ae9a8 100644 --- a/src/sentry/services/http.py +++ 
b/src/sentry/services/http.py @@ -189,7 +189,7 @@ def run(self) -> NoReturn: assert os.environ.get("UWSGI_MODULE") == "sentry.wsgi:application" host, port = os.environ["UWSGI_HTTP_SOCKET"].split(":") - httpd = make_server(host, int(port), application) + httpd = make_server(host, int(port), application) # type: ignore[arg-type] # typeddjango/django-stubs#1053 httpd.serve_forever() raise AssertionError("unreachable") else: diff --git a/src/sentry/wsgi.py b/src/sentry/wsgi.py index 8b360af6d916e1..3dfac9678fbdce 100644 --- a/src/sentry/wsgi.py +++ b/src/sentry/wsgi.py @@ -1,4 +1,3 @@ -import os import os.path import sys @@ -14,33 +13,9 @@ configure() if settings.SESSION_FILE_PATH and not os.path.exists(settings.SESSION_FILE_PATH): - try: - os.makedirs(settings.SESSION_FILE_PATH) - except OSError: - pass + os.makedirs(settings.SESSION_FILE_PATH, exist_ok=True) from django.core.handlers.wsgi import WSGIHandler - -class FileWrapperWSGIHandler(WSGIHandler): - """A WSGIHandler implementation that handles a StreamingHttpResponse - from django to leverage wsgi.file_wrapper for delivering large streaming - responses. - - Note: this was added natively into Django 1.8, so if by some reason, - we upgraded, this wouldn't be relevant anymore.""" - - def __call__(self, environ, start_response): - response = super().__call__(environ, start_response) - if hasattr(response, "streaming") and response.streaming: - try: - response = environ["wsgi.file_wrapper"](response.streaming_content) # type: ignore[attr-defined] - except KeyError: - # In our case, we're shipping with uwsgi, so it's safer to assume - # that wsgi.file_wrapper does exist. It'd be exceptional otherwise. - pass - return response - - # Run WSGI handler for the application -application = FileWrapperWSGIHandler() +application = WSGIHandler() From d3d5bdc2bb31aaeeccd04d27abddb0daf2a6638c Mon Sep 17 00:00:00 2001 From: Mark Story Date: Wed, 2 Aug 2023 16:47:57 -0400 Subject: [PATCH 44/44] fix(hybridcloud) Don't attempt RPC based authentication in RPC requests (#54050) If we're handling an RPC request, we should not attempt RPC based authentication as RPC requests include request signatures, and no other forms of authentication. Skipping this auth request in the middleware makes the dev environment much faster, and hopefully helps resolve the errors we're seeing in the siloed test environment. --- src/sentry/middleware/auth.py | 6 ++++++ tests/sentry/middleware/test_auth.py | 11 +++++++++++ 2 files changed, 17 insertions(+) diff --git a/src/sentry/middleware/auth.py b/src/sentry/middleware/auth.py index fc76e27789a651..b6828dc1b52f8e 100644 --- a/src/sentry/middleware/auth.py +++ b/src/sentry/middleware/auth.py @@ -112,6 +112,12 @@ class HybridCloudAuthenticationMiddleware(MiddlewareMixin): def process_request(self, request: Request): from sentry.web.frontend.accounts import expired + if request.path.startswith("/api/0/internal/rpc/"): + # Avoid doing RPC authentication when we're already + # in an RPC request. 
+ request.user = AnonymousUser() + return + auth_result = auth_service.authenticate(request=authentication_request_from(request)) request.user_from_signed_request = auth_result.user_from_signed_request diff --git a/tests/sentry/middleware/test_auth.py b/tests/sentry/middleware/test_auth.py index ba8d2095a7d993..28442b817d030c 100644 --- a/tests/sentry/middleware/test_auth.py +++ b/tests/sentry/middleware/test_auth.py @@ -131,6 +131,17 @@ def test_process_request_invalid_apikey(self): assert request.user.is_anonymous assert request.auth is None + def test_process_request_rpc_path_ignored(self): + request = self.make_request( + method="GET", path="/api/0/internal/rpc/organization/get_organization_by_id" + ) + request.META["HTTP_AUTHORIZATION"] = b"Rpcsignature not-a-checksum" + + self.middleware.process_request(request) + # No errors, and no user identified. + assert request.user.is_anonymous + assert request.auth is None + @patch("sentry.models.userip.geo_by_addr") def test_process_request_log_userip(self, mock_geo_by_addr): mock_geo_by_addr.return_value = {