diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 51694ef407213e..af44a0b6ad80fd 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -249,8 +249,11 @@ build-utils/ @getsentry/owners-js-build /src/sentry/api/endpoints/chunk.py @getsentry/owners-native /src/sentry/api/endpoints/project_app_store_connect_credentials.py @getsentry/owners-native /src/sentry/lang/native/ @getsentry/owners-native +src/sentry/processing/realtime_metrics/ @getsentry/owners-native /src/sentry/tasks/app_store_connect.py @getsentry/owners-native /src/sentry/tasks/assemble.py @getsentry/owners-native +/src/sentry/tasks/low_priority_symbolication.py @getsentry/owners-native +/tests/sentry/tasks/test_low_priority_symbolication.py @getsentry/owners-native /src/sentry/utils/appleconnect/ @getsentry/owners-native ## End of Native ## diff --git a/.github/actions/setup-sentry/action.yml b/.github/actions/setup-sentry/action.yml index 350edb43c6706d..715f4c13c538d9 100644 --- a/.github/actions/setup-sentry/action.yml +++ b/.github/actions/setup-sentry/action.yml @@ -158,16 +158,12 @@ runs: # TODO: Use devservices kafka. See https://github.com/getsentry/sentry/pull/20986#issuecomment-704510570 if [ "$NEED_KAFKA" = "true" ]; then - # This is *not* the production version. Unclear reason as to why this was chosen - # https://github.com/getsentry/ops/blob/c823e62f930ecc6c97bb08898c71e49edc7232f6/cookbooks/getsentry/attributes/default.rb#L631 docker run \ --name sentry_zookeeper \ -d --network host \ -e ZOOKEEPER_CLIENT_PORT=2181 \ confluentinc/cp-zookeeper:4.1.0 - # This is the production version; do not change w/o changing it there as well - # https://github.com/getsentry/ops/blob/c823e62f930ecc6c97bb08898c71e49edc7232f6/cookbooks/getsentry/attributes/default.rb#L643 docker run \ --name sentry_kafka \ -d --network host \ diff --git a/docs-ui/stories/components/alertBar.stories.js b/docs-ui/stories/components/alertBar.stories.js new file mode 100644 index 00000000000000..d48424a25f2b4b --- /dev/null +++ b/docs-ui/stories/components/alertBar.stories.js @@ -0,0 +1,10 @@ +import PageAlertBar from 'app/components/pageAlertBar'; + +export default { + title: 'Components/Alerts/Alert Bar', + component: PageAlertBar, +}; + +export const Default = ({...args}) => ( <PageAlertBar {...args}>Alert message</PageAlertBar> ); diff --git a/mypy.ini b/mypy.ini index cd616d266f3986..7c60979ae0e483 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,6 +1,7 @@ [mypy] python_version = 3.6 files = src/sentry/api/bases/external_actor.py, + src/sentry/api/bases/organization_events.py, src/sentry/api/endpoints/external_team.py, src/sentry/api/endpoints/external_team_details.py, src/sentry/api/endpoints/external_user.py, @@ -8,6 +9,8 @@ files = src/sentry/api/bases/external_actor.py, src/sentry/api/endpoints/organization_events_trace.py, src/sentry/api/endpoints/project_app_store_connect_credentials.py, src/sentry/api/endpoints/project_codeowners.py, + src/sentry/api/endpoints/organization_events_stats.py, + src/sentry/api/endpoints/team_issue_breakdown.py, src/sentry/api/serializers/base.py, src/sentry/api/serializers/models/external_actor.py, src/sentry/api/serializers/models/integration.py, @@ -52,6 +55,7 @@ files = src/sentry/api/bases/external_actor.py, src/sentry/snuba/query_subscription_consumer.py, src/sentry/spans/**/*.py, src/sentry/tasks/app_store_connect.py, + src/sentry/tasks/low_priority_symbolication.py, src/sentry/tasks/update_user_reports.py, src/sentry/unmerge.py, src/sentry/utils/appleconnect/, diff --git a/src/sentry/api/bases/organization_events.py
b/src/sentry/api/bases/organization_events.py index 8cfe878de02929..c6a5482dfb97ad 100644 --- a/src/sentry/api/bases/organization_events.py +++ b/src/sentry/api/bases/organization_events.py @@ -1,60 +1,56 @@ from contextlib import contextmanager -from typing import Sequence +from datetime import datetime, timedelta +from typing import Any, Callable, Dict, Generator, Optional, Sequence, Union, cast import sentry_sdk -from django.http import HttpRequest +from django.utils import timezone from django.utils.http import urlquote -from rest_framework.exceptions import APIException, ParseError +from rest_framework.exceptions import APIException, ParseError, ValidationError +from rest_framework.request import Request from sentry_relay.consts import SPAN_STATUS_CODE_TO_NAME -from sentry import features +from sentry import features, quotas from sentry.api.base import LINK_HEADER from sentry.api.bases import NoProjects, OrganizationEndpoint from sentry.api.helpers.teams import get_teams -from sentry.api.serializers.snuba import SnubaTSResultSerializer +from sentry.api.serializers.snuba import BaseSnubaSerializer, SnubaTSResultSerializer from sentry.discover.arithmetic import ArithmeticError, is_equation, strip_equation from sentry.exceptions import InvalidSearchQuery -from sentry.models import Organization, Team +from sentry.models import Organization, Project, Team from sentry.models.group import Group from sentry.search.events.constants import TIMEOUT_ERROR_MESSAGE from sentry.search.events.fields import get_function_alias from sentry.search.events.filter import get_filter from sentry.snuba import discover from sentry.utils import snuba +from sentry.utils.cursors import Cursor from sentry.utils.dates import get_interval_from_range, get_rollup_from_request, parse_stats_period from sentry.utils.http import absolute_uri -from sentry.utils.snuba import MAX_FIELDS +from sentry.utils.snuba import MAX_FIELDS, SnubaTSResult -def resolve_axis_column(column: str, index=0) -> str: - return get_function_alias(column) if not is_equation(column) else f"equation[{index}]" +def resolve_axis_column(column: str, index: int = 0) -> str: + return cast( + str, get_function_alias(column) if not is_equation(column) else f"equation[{index}]" + ) -class OrganizationEventsEndpointBase(OrganizationEndpoint): - def has_feature(self, organization, request): +class OrganizationEventsEndpointBase(OrganizationEndpoint): # type: ignore + def has_feature(self, organization: Organization, request: Request) -> bool: return features.has( "organizations:discover-basic", organization, actor=request.user ) or features.has("organizations:performance-view", organization, actor=request.user) - def get_equation_list(self, organization: Organization, request: HttpRequest) -> Sequence[str]: + def get_equation_list(self, organization: Organization, request: Request) -> Sequence[str]: """equations have a prefix so that they can be easily included alongside our existing fields""" return [ strip_equation(field) for field in request.GET.getlist("field")[:] if is_equation(field) ] - def get_field_list(self, organization: Organization, request: HttpRequest) -> Sequence[str]: + def get_field_list(self, organization: Organization, request: Request) -> Sequence[str]: return [field for field in request.GET.getlist("field")[:] if not is_equation(field)] - def get_snuba_filter(self, request, organization, params=None): - if params is None: - params = self.get_snuba_params(request, organization) - query = request.GET.get("query") - try: - return 
get_filter(query, params) - except InvalidSearchQuery as e: - raise ParseError(detail=str(e)) - - def get_team_ids(self, request, organization): + def get_team_ids(self, request: Request, organization: Organization) -> Sequence[int]: if not request.user: return [] @@ -64,7 +60,9 @@ def get_team_ids(self, request, organization): return [team.id for team in teams] - def get_snuba_params(self, request, organization, check_global_views=True): + def get_snuba_params( + self, request: Request, organization: Organization, check_global_views: bool = True + ) -> Dict[str, Any]: with sentry_sdk.start_span(op="discover.endpoint", description="filter_params"): if ( len(self.get_field_list(organization, request)) @@ -75,7 +73,7 @@ def get_snuba_params(self, request, organization, check_global_views=True): detail=f"You can view up to {MAX_FIELDS} fields at a time. Please delete some and try again." ) - params = self.get_filter_params(request, organization) + params: Dict[str, Any] = self.get_filter_params(request, organization) params = self.quantize_date_params(request, params) params["user_id"] = request.user.id if request.user else None params["team_id"] = self.get_team_ids(request, organization) @@ -89,17 +87,27 @@ def get_snuba_params(self, request, organization, check_global_views=True): return params - def get_orderby(self, request): - sort = request.GET.getlist("sort") + def get_orderby(self, request: Request) -> Optional[Sequence[str]]: + sort: Sequence[str] = request.GET.getlist("sort") if sort: return sort # Deprecated. `sort` should be used as it is supported by # more endpoints. - orderby = request.GET.getlist("orderby") + orderby: Sequence[str] = request.GET.getlist("orderby") if orderby: return orderby - - def get_snuba_query_args_legacy(self, request, organization): + return None + + def get_snuba_query_args_legacy( + self, request: Request, organization: Organization + ) -> Dict[ + str, + Union[ + Optional[datetime], + Sequence[Sequence[Union[str, str, Any]]], + Optional[Dict[str, Sequence[int]]], + ], + ]: params = self.get_filter_params(request, organization) query = request.GET.get("query") try: @@ -116,7 +124,7 @@ def get_snuba_query_args_legacy(self, request, organization): return snuba_args - def quantize_date_params(self, request, params): + def quantize_date_params(self, request: Request, params: Dict[str, Any]) -> Dict[str, Any]: # We only need to perform this rounding on relative date periods if "statsPeriod" not in request.GET: return params @@ -133,7 +141,7 @@ def quantize_date_params(self, request, params): return results @contextmanager - def handle_query_errors(self): + def handle_query_errors(self) -> Generator[None, None, None]: try: yield except discover.InvalidSearchQuery as error: @@ -184,7 +192,7 @@ def handle_query_errors(self): class OrganizationEventsV2EndpointBase(OrganizationEventsEndpointBase): - def build_cursor_link(self, request, name, cursor): + def build_cursor_link(self, request: Request, name: str, cursor: Optional[Cursor]) -> str: # The base API function only uses the last query parameter, but this endpoint # needs all the parameters, particularly for the "field" query param. querystring = "&".join( @@ -200,21 +208,33 @@ def build_cursor_link(self, request, name, cursor): else: base_url = base_url + "?" 
- return LINK_HEADER.format( + return cast(str, LINK_HEADER).format( uri=base_url, cursor=str(cursor), name=name, has_results="true" if bool(cursor) else "false", ) - def handle_results_with_meta(self, request, organization, project_ids, results): + def handle_results_with_meta( + self, + request: Request, + organization: Organization, + project_ids: Sequence[int], + results: Dict[str, Any], + ) -> Dict[str, Any]: with sentry_sdk.start_span(op="discover.endpoint", description="base.handle_results"): data = self.handle_data(request, organization, project_ids, results.get("data")) if not data: return {"data": [], "meta": {}} return {"data": data, "meta": results.get("meta", {})} - def handle_data(self, request, organization, project_ids, results): + def handle_data( + self, + request: Request, + organization: Organization, + project_ids: Sequence[int], + results: Optional[Sequence[Any]], + ) -> Optional[Sequence[Any]]: if not results: return results @@ -240,7 +260,9 @@ def handle_data(self, request, organization, project_ids, results): return results - def handle_issues(self, results, project_ids, organization): + def handle_issues( + self, results: Sequence[Any], project_ids: Sequence[int], organization: Organization + ) -> None: issue_ids = {row.get("issue.id") for row in results} issues = Group.issues_mapping(issue_ids, project_ids, organization) for result in results: @@ -249,16 +271,19 @@ def handle_issues(self, results, project_ids, organization): def get_event_stats_data( self, - request, - organization, - get_event_stats, - top_events=0, - query_column="count()", - params=None, - query=None, - allow_partial_buckets=False, - zerofill_results=True, - ): + request: Request, + organization: Organization, + get_event_stats: Callable[ + [Sequence[str], str, Dict[str, str], int, bool, Optional[timedelta]], SnubaTSResult + ], + top_events: int = 0, + query_column: str = "count()", + params: Optional[Dict[str, Any]] = None, + query: Optional[str] = None, + allow_partial_buckets: bool = False, + zerofill_results: bool = True, + comparison_delta: Optional[timedelta] = None, + ) -> Dict[str, Any]: with self.handle_query_errors(): with sentry_sdk.start_span( op="discover.endpoint", description="base.stats_query_creation" @@ -287,11 +312,15 @@ def get_event_stats_data( except InvalidSearchQuery: sentry_sdk.set_tag("user.invalid_interval", request.GET.get("interval")) date_range = params["end"] - params["start"] - rollup = int( - parse_stats_period( - get_interval_from_range(date_range, False) - ).total_seconds() - ) + stats_period = parse_stats_period(get_interval_from_range(date_range, False)) + rollup = int(stats_period.total_seconds()) if stats_period is not None else 3600 + + if comparison_delta is not None: + retention = quotas.get_event_retention(organization=organization) + comparison_start = params["start"] - comparison_delta + if retention and comparison_start < timezone.now() - timedelta(days=retention): + raise ValidationError("Comparison period is outside your retention window") + # Backwards compatibility for incidents which uses the old # column aliases as it straddles both versions of events/discover. 
# We will need these aliases until discover2 flags are enabled for all @@ -308,7 +337,9 @@ def get_event_stats_data( query_columns = [column_map.get(column, column) for column in columns] with sentry_sdk.start_span(op="discover.endpoint", description="base.stats_query"): - result = get_event_stats(query_columns, query, params, rollup, zerofill_results) + result = get_event_stats( + query_columns, query, params, rollup, zerofill_results, comparison_delta + ) serializer = SnubaTSResultSerializer(organization, None, request.user) @@ -359,13 +390,13 @@ def get_event_stats_data( def serialize_multiple_axis( self, - serializer, - event_result, - columns, - query_columns, - allow_partial_buckets, - zerofill_results=True, - ): + serializer: BaseSnubaSerializer, + event_result: SnubaTSResult, + columns: Sequence[str], + query_columns: Sequence[str], + allow_partial_buckets: bool, + zerofill_results: bool = True, + ) -> Dict[str, Any]: # Return with requested yAxis as the key result = {} equations = 0 @@ -387,10 +418,10 @@ def serialize_multiple_axis( class KeyTransactionBase(OrganizationEventsV2EndpointBase): - def has_feature(self, request, organization): + def has_feature(self, organization: Organization, request: Request) -> bool: return features.has("organizations:performance-view", organization, actor=request.user) - def get_project(self, request, organization): + def get_project(self, request: Request, organization: Organization) -> Project: projects = self.get_projects(request, organization) if len(projects) != 1: diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index abcb3b7908f2b4..bf2766fb1be93d 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -1,12 +1,19 @@ +from datetime import datetime, timedelta +from typing import Dict, Optional, Sequence, Set + import sentry_sdk +from rest_framework.exceptions import ValidationError +from rest_framework.request import Request from rest_framework.response import Response from sentry import features from sentry.api.bases import OrganizationEventsV2EndpointBase from sentry.constants import MAX_TOP_EVENTS +from sentry.models import Organization from sentry.snuba import discover +from sentry.utils.snuba import SnubaTSResult -ALLOWED_EVENTS_STATS_REFERRERS = { +ALLOWED_EVENTS_STATS_REFERRERS: Set[str] = { "api.alerts.alert-rule-chart", "api.dashboards.widget.area-chart", "api.dashboards.widget.bar-chart", @@ -27,16 +34,16 @@ } -class OrganizationEventsStatsEndpoint(OrganizationEventsV2EndpointBase): - def has_chart_interpolation(self, organization, request): +class OrganizationEventsStatsEndpoint(OrganizationEventsV2EndpointBase): # type: ignore + def has_chart_interpolation(self, organization: Organization, request: Request) -> bool: return features.has( "organizations:performance-chart-interpolation", organization, actor=request.user ) - def has_top_events(self, organization, request): + def has_top_events(self, organization: Organization, request: Request) -> bool: return features.has("organizations:discover-top-events", organization, actor=request.user) - def get(self, request, organization): + def get(self, request: Request, organization: Organization) -> Response: with sentry_sdk.start_span(op="discover.endpoint", description="filter_params") as span: span.set_data("organization", organization) if not self.has_feature(organization, request): @@ -60,7 +67,14 @@ def get(self, request, organization): 
elif top_events <= 0: return Response({"detail": "If topEvents needs to be at least 1"}, status=400) - # The partial parameter determins whether or not partial buckets are allowed. + comparison_delta = None + if "comparisonDelta" in request.GET: + try: + comparison_delta = timedelta(seconds=int(request.GET["comparisonDelta"])) + except ValueError: + return Response({"detail": "comparisonDelta must be an integer"}, status=400) + + # The partial parameter determines whether or not partial buckets are allowed. # The last bucket of the time series can potentially be a partial bucket when # the start of the bucket does not align with the rollup. allow_partial_buckets = request.GET.get("partial") == "1" @@ -72,7 +86,14 @@ def get(self, request, organization): else "api.organization-event-stats" ) - def get_event_stats(query_columns, query, params, rollup, zerofill_results): + def get_event_stats( + query_columns: Sequence[str], + query: str, + params: Dict[str, str], + rollup: int, + zerofill_results: bool, + comparison_delta: Optional[datetime], + ) -> SnubaTSResult: if top_events > 0: return discover.top_events_timeseries( timeseries_columns=query_columns, @@ -94,21 +115,26 @@ def get_event_stats(query_columns, query, params, rollup, zerofill_results): query=query, params=params, rollup=rollup, - referrer="api.organization-event-stats", + referrer=referrer, zerofill_results=zerofill_results, + comparison_delta=comparison_delta, ) - return Response( - self.get_event_stats_data( - request, - organization, - get_event_stats, - top_events, - allow_partial_buckets=allow_partial_buckets, - zerofill_results=not ( - request.GET.get("withoutZerofill") == "1" - and self.has_chart_interpolation(organization, request) + try: + return Response( + self.get_event_stats_data( + request, + organization, + get_event_stats, + top_events, + allow_partial_buckets=allow_partial_buckets, + zerofill_results=not ( + request.GET.get("withoutZerofill") == "1" + and self.has_chart_interpolation(organization, request) + ), + comparison_delta=comparison_delta, ), - ), - status=200, - ) + status=200, + ) + except ValidationError: + return Response({"detail": "Comparison period is outside retention window"}, status=400) diff --git a/src/sentry/api/endpoints/organization_events_trends.py b/src/sentry/api/endpoints/organization_events_trends.py index f674cce50738ee..17684e5ee2f094 100644 --- a/src/sentry/api/endpoints/organization_events_trends.py +++ b/src/sentry/api/endpoints/organization_events_trends.py @@ -256,7 +256,9 @@ def build_result_handler( self, request, organization, params, trend_function, selected_columns, orderby, query ): def on_results(events_results): - def get_event_stats(query_columns, query, params, rollup, zerofill_results): + def get_event_stats( + query_columns, query, params, rollup, zerofill_results, comparison_delta=None + ): return discover.top_events_timeseries( query_columns, selected_columns, diff --git a/src/sentry/api/endpoints/organization_release_details.py b/src/sentry/api/endpoints/organization_release_details.py index 4a28773d557eff..0f72a88b51e088 100644 --- a/src/sentry/api/endpoints/organization_release_details.py +++ b/src/sentry/api/endpoints/organization_release_details.py @@ -453,14 +453,21 @@ def put(self, request, organization, version): refs = result.get("refs") if not refs: - refs = [ - { - "repository": r["repository"], - "previousCommit": r.get("previousId"), - "commit": r["currentId"], - } - for r in result.get("headCommits", []) - ] + # Handle legacy + if 
result.get("headCommits", []): + refs = [ + { + "repository": r["repository"], + "previousCommit": r.get("previousId"), + "commit": r["currentId"], + } + for r in result.get("headCommits", []) + ] + # Clear commits in release + else: + if result.get("refs") == []: + release.clear_commits() + scope.set_tag("has_refs", bool(refs)) if refs: if not request.user.is_authenticated: diff --git a/src/sentry/api/endpoints/project_release_stats.py b/src/sentry/api/endpoints/project_release_stats.py index f205a4d8d02645..b50dcecf8c59c5 100644 --- a/src/sentry/api/endpoints/project_release_stats.py +++ b/src/sentry/api/endpoints/project_release_stats.py @@ -5,7 +5,7 @@ from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.serializers import serialize from sentry.models import Release, ReleaseProject -from sentry.snuba.sessions import get_crash_free_breakdown, get_project_release_stats +from sentry.snuba.sessions import get_project_release_stats from sentry.utils.dates import get_rollup_from_request @@ -74,7 +74,7 @@ def get(self, request, project, version): ) users_breakdown = [] - for data in get_crash_free_breakdown( + for data in release_health.get_crash_free_breakdown( project_id=params["project_id"][0], release=version, environments=params.get("environment"), diff --git a/src/sentry/api/endpoints/project_rule_details.py b/src/sentry/api/endpoints/project_rule_details.py index 0e1068b405b88b..9d174c0381cb9a 100644 --- a/src/sentry/api/endpoints/project_rule_details.py +++ b/src/sentry/api/endpoints/project_rule_details.py @@ -2,6 +2,7 @@ from rest_framework.response import Response from sentry.api.bases.project import ProjectAlertRulePermission, ProjectEndpoint +from sentry.api.endpoints.project_rules import trigger_alert_rule_action_creators from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.serializers import serialize from sentry.api.serializers.rest_framework.rule import RuleSerializer @@ -109,6 +110,9 @@ def put(self, request, project, rule): return Response(context, status=202) updated_rule = project_rules.Updater.run(rule=rule, request=request, **kwargs) + + trigger_alert_rule_action_creators(kwargs.get("actions"), rule, request) + RuleActivity.objects.create( rule=updated_rule, user=request.user, type=RuleActivityType.UPDATED.value ) diff --git a/src/sentry/api/endpoints/project_rules.py b/src/sentry/api/endpoints/project_rules.py index 989ac905af13cb..4f6b18f4570274 100644 --- a/src/sentry/api/endpoints/project_rules.py +++ b/src/sentry/api/endpoints/project_rules.py @@ -1,11 +1,14 @@ +from typing import Mapping, Sequence + from rest_framework import status +from rest_framework.request import Request from rest_framework.response import Response from sentry.api.bases.project import ProjectAlertRulePermission, ProjectEndpoint from sentry.api.serializers import serialize from sentry.api.serializers.rest_framework import RuleSerializer from sentry.integrations.slack import tasks -from sentry.mediators import project_rules +from sentry.mediators import alert_rule_actions, project_rules from sentry.models import ( AuditLogEntryEvent, Rule, @@ -15,10 +18,30 @@ Team, User, ) +from sentry.models.sentryappinstallation import SentryAppInstallation from sentry.signals import alert_rule_created from sentry.web.decorators import transaction_start +def trigger_alert_rule_action_creators( + actions: Sequence[Mapping[str, str]], + rule: Rule, + request: Request, +) -> None: + for action in actions: + # Only call creator for Sentry Apps with UI Components for alert 
rules. + if not action.get("hasSchemaFormConfig"): + continue + + alert_rule_actions.AlertRuleActionCreator.run( + install=SentryAppInstallation.objects.get(uuid=action.get("sentryAppInstallationUuid")), + fields=action.get("settings"), + uri=action.get("uri"), + rule=rule, + request=request, + ) + + class ProjectRulesEndpoint(ProjectEndpoint): permission_classes = (ProjectAlertRulePermission,) @@ -103,6 +126,9 @@ def post(self, request, project): RuleActivity.objects.create( rule=rule, user=request.user, type=RuleActivityType.CREATED.value ) + + trigger_alert_rule_action_creators(kwargs.get("actions"), rule, request) + self.create_audit_entry( request=request, organization=project.organization, diff --git a/src/sentry/api/endpoints/project_rules_configuration.py b/src/sentry/api/endpoints/project_rules_configuration.py index bde1f0dac76452..d11da72920e525 100644 --- a/src/sentry/api/endpoints/project_rules_configuration.py +++ b/src/sentry/api/endpoints/project_rules_configuration.py @@ -2,12 +2,7 @@ from sentry import features from sentry.api.bases.project import ProjectEndpoint -from sentry.api.serializers import serialize -from sentry.api.serializers.models.sentry_app_component import SentryAppAlertRuleActionSerializer -from sentry.constants import MIGRATED_CONDITIONS, TICKET_ACTIONS -from sentry.coreapi import APIError -from sentry.mediators import sentry_app_components -from sentry.models import SentryAppComponent, SentryAppInstallation +from sentry.constants import MIGRATED_CONDITIONS, SCHEMA_FORM_ACTIONS, TICKET_ACTIONS from sentry.rules import rules @@ -38,6 +33,12 @@ def get(self, request, project): if not can_create_tickets and node.id in TICKET_ACTIONS: continue + if node.id in SCHEMA_FORM_ACTIONS: + custom_actions = node.get_custom_actions(project) + if custom_actions: + action_list.extend(custom_actions) + continue + context = {"id": node.id, "label": node.label, "enabled": node.is_enabled()} if hasattr(node, "prompt"): context["prompt"] = node.prompt @@ -71,27 +72,6 @@ def get(self, request, project): elif rule_type.startswith("action/"): action_list.append(context) - for install in SentryAppInstallation.get_installed_for_org(project.organization_id): - _components = SentryAppComponent.objects.filter( - sentry_app_id=install.sentry_app_id, type="alert-rule-action" - ) - for component in _components: - try: - sentry_app_components.Preparer.run( - component=component, install=install, project=project - ) - action_list.append( - serialize( - component, - request.user, - SentryAppAlertRuleActionSerializer(), - install=install, - ) - ) - - except APIError: - continue - context = {"actions": action_list, "conditions": condition_list, "filters": filter_list} return Response(context) diff --git a/src/sentry/api/endpoints/team_alerts_triggered.py b/src/sentry/api/endpoints/team_alerts_triggered.py index 9058589f65b2ad..f23ef2c7fa2f75 100644 --- a/src/sentry/api/endpoints/team_alerts_triggered.py +++ b/src/sentry/api/endpoints/team_alerts_triggered.py @@ -16,14 +16,17 @@ from sentry.models import Project -class TeamAlertsTriggeredEndpoint(TeamEndpoint, EnvironmentMixin): - def get(self, request, team): +class TeamAlertsTriggeredEndpoint(TeamEndpoint, EnvironmentMixin): # type: ignore + def get(self, request, team) -> Response: """ Return a time-bucketed (by day) count of triggered alerts owned by a given team. 
""" project_list = Project.objects.get_for_team_ids([team.id]) owner_ids = [team.actor_id] + list(team.member_set.values_list("user__actor_id", flat=True)) start, end = get_date_range_from_params(request.GET) + end = end.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1) + start = start.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1) + bucketed_alert_counts = ( IncidentActivity.objects.filter( ( @@ -37,6 +40,7 @@ def get(self, request, team): ], ) ), + incident__organization_id=team.organization_id, incident__alert_rule__owner__in=owner_ids, incident_id__in=IncidentProject.objects.filter(project__in=project_list).values( "incident_id" @@ -49,13 +53,10 @@ def get(self, request, team): .annotate(count=Count("id")) ) - counts = {str(r["bucket"].replace(tzinfo=None)): r["count"] for r in bucketed_alert_counts} - current_day = start.replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) + timedelta(days=1) - end_day = end.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=None) - while current_day <= end_day: - counts.setdefault(str(current_day), 0) + counts = {str(r["bucket"].isoformat()): r["count"] for r in bucketed_alert_counts} + current_day = start + while current_day < end: + counts.setdefault(str(current_day.isoformat()), 0) current_day += timedelta(days=1) return Response(counts) diff --git a/src/sentry/api/endpoints/team_issue_breakdown.py b/src/sentry/api/endpoints/team_issue_breakdown.py new file mode 100644 index 00000000000000..8d1003cae5cf06 --- /dev/null +++ b/src/sentry/api/endpoints/team_issue_breakdown.py @@ -0,0 +1,54 @@ +import copy +from datetime import timedelta + +from django.db.models import Count +from django.db.models.functions import TruncDay +from rest_framework.request import Request +from rest_framework.response import Response + +from sentry.api.base import EnvironmentMixin +from sentry.api.bases.team import TeamEndpoint +from sentry.api.utils import get_date_range_from_params +from sentry.models import GroupHistory, GroupHistoryStatus, Project, Team +from sentry.models.grouphistory import ACTIONED_STATUSES + + +class TeamIssueBreakdownEndpoint(TeamEndpoint, EnvironmentMixin): # type: ignore + def get(self, request: Request, team: Team) -> Response: + """ + Returns a dict of team projects, and a time-series dict of issue stat breakdowns for each. + + Right now the stats we return are the count of reviewed issues and the total count of issues. 
+ """ + project_list = Project.objects.get_for_team_ids(team_ids=[team.id]) + start, end = get_date_range_from_params(request.GET) + end = end.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1) + start = start.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1) + bucketed_issues = ( + GroupHistory.objects.filter( + status__in=[GroupHistoryStatus.UNRESOLVED] + ACTIONED_STATUSES, + project__in=project_list, + date_added__gte=start, + date_added__lte=end, + ) + .annotate(bucket=TruncDay("date_added")) + .order_by("bucket") + .values("project", "bucket", "status") + .annotate(count=Count("id")) + ) + + current_day, date_series_dict = start, {} + while current_day < end: + date_series_dict[current_day.isoformat()] = {"reviewed": 0, "total": 0} + current_day += timedelta(days=1) + + agg_project_counts = { + project.id: copy.deepcopy(date_series_dict) for project in project_list + } + for r in bucketed_issues: + bucket = agg_project_counts[r["project"]][r["bucket"].isoformat()] + bucket["total"] += r["count"] + if r["status"] != GroupHistoryStatus.UNRESOLVED: + bucket["reviewed"] += r["count"] + + return Response(agg_project_counts) diff --git a/src/sentry/api/endpoints/team_time_to_resolution.py b/src/sentry/api/endpoints/team_time_to_resolution.py new file mode 100644 index 00000000000000..c34317d1556d3b --- /dev/null +++ b/src/sentry/api/endpoints/team_time_to_resolution.py @@ -0,0 +1,54 @@ +from collections import defaultdict +from datetime import timedelta + +from django.db.models import Avg, F +from django.db.models.functions import TruncDay +from rest_framework.response import Response + +from sentry.api.base import EnvironmentMixin +from sentry.api.bases.team import TeamEndpoint +from sentry.api.utils import get_date_range_from_params +from sentry.models import GroupHistory, GroupHistoryStatus, Project + + +class TeamTimeToResolutionEndpoint(TeamEndpoint, EnvironmentMixin): + def get(self, request, team): + """ + Return a a time bucketed list of mean group resolution times for a given team. 
+ """ + project_list = Project.objects.get_for_team_ids(team_ids=[team.id]) + start, end = get_date_range_from_params(request.GET) + end = end.date() + timedelta(days=1) + start = start.date() + timedelta(days=1) + history_list = ( + GroupHistory.objects.filter( + status=GroupHistoryStatus.RESOLVED, + project__in=project_list, + date_added__gte=start, + date_added__lte=end, + ) + .annotate(bucket=TruncDay("date_added")) + .values("bucket", "prev_history_date") + .annotate(ttr=F("date_added") - F("prev_history_date")) + .annotate(avg_ttr=Avg("ttr")) + ) + sums = defaultdict(lambda: {"sum": timedelta(), "count": 0}) + for gh in history_list: + key = str(gh["bucket"].date()) + sums[key]["sum"] += gh["ttr"] + sums[key]["count"] += 1 + + avgs = {} + current_day = start + while current_day < end: + key = str(current_day) + if key in sums: + avg = int((sums[key]["sum"] / sums[key]["count"]).total_seconds()) + count = sums[key]["count"] + else: + avg = count = 0 + + avgs[key] = {"avg": avg, "count": count} + current_day += timedelta(days=1) + + return Response(avgs) diff --git a/src/sentry/api/event_search.py b/src/sentry/api/event_search.py index 046d6a73464168..38ead3fe32caba 100644 --- a/src/sentry/api/event_search.py +++ b/src/sentry/api/event_search.py @@ -11,7 +11,6 @@ from parsimonious.nodes import Node from sentry.search.events.constants import ( - KEY_TRANSACTION_ALIAS, OPERATOR_NEGATION_MAP, SEARCH_MAP, SEMVER_ALIAS, @@ -1033,7 +1032,6 @@ def generic_visit(self, node, children): "error.handled", "error.unhandled", "stack.in_app", - KEY_TRANSACTION_ALIAS, TEAM_KEY_TRANSACTION_ALIAS, }, ) diff --git a/src/sentry/api/serializers/models/discoversavedquery.py b/src/sentry/api/serializers/models/discoversavedquery.py index 188d4c4300fff5..1cc6ef130acafe 100644 --- a/src/sentry/api/serializers/models/discoversavedquery.py +++ b/src/sentry/api/serializers/models/discoversavedquery.py @@ -22,6 +22,7 @@ def serialize(self, obj, attrs, user, **kwargs): "limit", "yAxis", "display", + "topEvents", ] data = { "id": str(obj.id), diff --git a/src/sentry/api/serializers/models/release.py b/src/sentry/api/serializers/models/release.py index 35104c39d40076..7e8d8cc3ddab6a 100644 --- a/src/sentry/api/serializers/models/release.py +++ b/src/sentry/api/serializers/models/release.py @@ -3,7 +3,7 @@ from django.core.cache import cache from django.db.models import Sum -from sentry import tagstore +from sentry import release_health, tagstore from sentry.api.serializers import Serializer, register, serialize from sentry.db.models.query import in_iexact from sentry.models import ( @@ -18,7 +18,6 @@ User, UserEmail, ) -from sentry.snuba.sessions import get_release_health_data_overview from sentry.utils import metrics from sentry.utils.compat import zip from sentry.utils.hashlib import md5_text @@ -378,7 +377,7 @@ def get_attrs(self, item_list, user, **kwargs): # XXX: Legacy should be removed later if with_health_data: - health_data = get_release_health_data_overview( + health_data = release_health.get_release_health_data_overview( [(pr["project__id"], pr["release__version"]) for pr in project_releases], health_stats_period=health_stats_period, summary_stats_period=summary_stats_period, diff --git a/src/sentry/api/serializers/models/sentry_app_component.py b/src/sentry/api/serializers/models/sentry_app_component.py index f5a1cdd207c085..dc6eb5f170564a 100644 --- a/src/sentry/api/serializers/models/sentry_app_component.py +++ b/src/sentry/api/serializers/models/sentry_app_component.py @@ -18,14 +18,22 @@ def 
serialize(self, obj, attrs, user): class SentryAppAlertRuleActionSerializer(Serializer): - def serialize(self, obj, attrs, user, install, **kwargs): + def serialize(self, obj, attrs, user, **kwargs): + event_action = kwargs.get("event_action") + if not event_action: + raise AssertionError("Requires event_action keyword argument of type EventAction") + + install = kwargs.get("install") + if not install: + raise AssertionError("Requires install keyword argument of type SentryAppInstallation") + return { - "id": f"sentry.sentryapp.{obj.sentry_app.slug}", - "uuid": str(obj.uuid), + "id": f"{event_action.id}", + "enabled": event_action.is_enabled(), + "actionType": event_action.actionType, + "service": obj.sentry_app.slug, "sentryAppInstallationUuid": f"{install.uuid}", - "actionType": "sentryapp", "prompt": f"{obj.sentry_app.name}", - "enabled": True, "label": f"{obj.schema.get('title', obj.sentry_app.name)} with these ", "formFields": obj.schema.get("settings", {}), } diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 30964e1e63a73f..fd0604f705b29e 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -401,10 +401,12 @@ from .endpoints.team_details import TeamDetailsEndpoint from .endpoints.team_groups_new import TeamGroupsNewEndpoint from .endpoints.team_groups_trending import TeamGroupsTrendingEndpoint +from .endpoints.team_issue_breakdown import TeamIssueBreakdownEndpoint from .endpoints.team_members import TeamMembersEndpoint from .endpoints.team_notification_settings_details import TeamNotificationSettingsDetailsEndpoint from .endpoints.team_projects import TeamProjectsEndpoint from .endpoints.team_stats import TeamStatsEndpoint +from .endpoints.team_time_to_resolution import TeamTimeToResolutionEndpoint from .endpoints.user_authenticator_details import UserAuthenticatorDetailsEndpoint from .endpoints.user_authenticator_enroll import UserAuthenticatorEnrollEndpoint from .endpoints.user_authenticator_index import UserAuthenticatorIndexEndpoint @@ -1453,6 +1455,11 @@ TeamGroupsNewEndpoint.as_view(), name="sentry-api-0-team-groups-new", ), + url( + r"^(?P<organization_slug>[^\/]+)/(?P<team_slug>[^\/]+)/time-to-resolution/$", + TeamTimeToResolutionEndpoint.as_view(), + name="sentry-api-0-team-time-to-resolution", + ), url( r"^(?P<organization_slug>[^\/]+)/(?P<team_slug>[^\/]+)/alerts-triggered/$", TeamAlertsTriggeredEndpoint.as_view(), name="sentry-api-0-team-alerts-triggered", ), @@ -1463,6 +1470,11 @@ TeamGroupsTrendingEndpoint.as_view(), name="sentry-api-0-team-groups-trending", ), + url( + r"^(?P<organization_slug>[^\/]+)/(?P<team_slug>[^\/]+)/issue-breakdown/$", + TeamIssueBreakdownEndpoint.as_view(), + name="sentry-api-0-team-issue-breakdown", + ), url( r"^(?P<organization_slug>[^\/]+)/(?P<team_slug>[^\/]+)/notification-settings/$", TeamNotificationSettingsDetailsEndpoint.as_view(), diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 36451b632e0320..d0319e11068007 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -562,6 +562,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME(): "sentry.tasks.files", "sentry.tasks.groupowner", "sentry.tasks.integrations", + "sentry.tasks.low_priority_symbolication", "sentry.tasks.members", "sentry.tasks.merge", "sentry.tasks.releasemonitor", @@ -573,6 +574,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME(): "sentry.tasks.release_registry", "sentry.tasks.reports", "sentry.tasks.reprocessing", + "sentry.tasks.reprocessing2", "sentry.tasks.scheduler", "sentry.tasks.sentry_apps", "sentry.tasks.servicehooks", @@ -638,6 +640,10 @@ def SOCIAL_AUTH_DEFAULT_USERNAME(): Queue("sleep", routing_key="sleep"), Queue("stats", routing_key="stats"), Queue("subscriptions",
routing_key="subscriptions"), + Queue( + "symbolications.compute_low_priority_projects", + routing_key="symbolications.compute_low_priority_projects", + ), Queue("unmerge", routing_key="unmerge"), Queue("update", routing_key="update"), ] @@ -779,6 +785,11 @@ def create_partitioned_queues(name): "schedule": timedelta(minutes=20), "options": {"expires": 20 * 60}, }, + "check-symbolicator-lpq-project-eligibility": { + "task": "sentry.tasks.low_priority_symbolication.scan_for_suspect_projects", + "schedule": timedelta(seconds=10), + "options": {"expires": 10}, + }, } BGTASKS = { @@ -1738,9 +1749,7 @@ def build_cdc_postgres_init_db_volume(settings): ), "zookeeper": lambda settings, options: ( { - # Upgrading to version 6.x allows zookeeper to run properly on Apple's arm64 - # See details https://github.com/confluentinc/kafka-images/issues/80#issuecomment-855511438 - "image": "confluentinc/cp-zookeeper:6.2.0", + "image": "confluentinc/cp-zookeeper:5.1.2", "environment": {"ZOOKEEPER_CLIENT_PORT": "2181"}, "volumes": {"zookeeper": {"bind": "/var/lib/zookeeper"}}, "only_if": "kafka" in settings.SENTRY_EVENTSTREAM or settings.SENTRY_USE_RELAY, @@ -1748,8 +1757,7 @@ def build_cdc_postgres_init_db_volume(settings): ), "kafka": lambda settings, options: ( { - # We upgrade to version 6.x to match zookeeper's version (I believe they both release together) - "image": "confluentinc/cp-kafka:6.2.0", + "image": "confluentinc/cp-kafka:5.1.2", "ports": {"9092/tcp": 9092}, "environment": { "KAFKA_ZOOKEEPER_CONNECT": "{containers[zookeeper][name]}:2181", @@ -2165,9 +2173,11 @@ def build_cdc_postgres_init_db_volume(settings): KAFKA_OUTCOMES = "outcomes" KAFKA_EVENTS_SUBSCRIPTIONS_RESULTS = "events-subscription-results" KAFKA_TRANSACTIONS_SUBSCRIPTIONS_RESULTS = "transactions-subscription-results" +KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS = "sessions-subscription-results" KAFKA_SUBSCRIPTION_RESULT_TOPICS = { "events": KAFKA_EVENTS_SUBSCRIPTIONS_RESULTS, "transactions": KAFKA_TRANSACTIONS_SUBSCRIPTIONS_RESULTS, + "sessions": KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS, } KAFKA_INGEST_EVENTS = "ingest-events" KAFKA_INGEST_ATTACHMENTS = "ingest-attachments" @@ -2186,6 +2196,10 @@ def build_cdc_postgres_init_db_volume(settings): "cluster": "default", "topic": KAFKA_TRANSACTIONS_SUBSCRIPTIONS_RESULTS, }, + KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS: { + "cluster": "default", + "topic": KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS, + }, # Topic for receiving simple events (error events without attachments) from Relay KAFKA_INGEST_EVENTS: {"cluster": "default", "topic": KAFKA_INGEST_EVENTS}, # Topic for receiving 'complex' events (error events with attachments) from Relay @@ -2391,3 +2405,7 @@ def build_cdc_postgres_init_db_volume(settings): # Sentry post process forwarder use batching consumer SENTRY_POST_PROCESS_FORWARDER_BATCHING = False + +# Whether badly behaving projects will be automatically +# sent to the low priority queue +SENTRY_ENABLE_AUTO_LOW_PRIORITY_QUEUE = False diff --git a/src/sentry/constants.py b/src/sentry/constants.py index 02e16f2c8c6696..5b58916606b5c5 100644 --- a/src/sentry/constants.py +++ b/src/sentry/constants.py @@ -219,6 +219,7 @@ def get_all_languages() -> List[str]: "sentry.mail.actions.NotifyEmailAction", "sentry.rules.actions.notify_event.NotifyEventAction", "sentry.rules.actions.notify_event_service.NotifyEventServiceAction", + "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction", "sentry.rules.conditions.every_event.EveryEventCondition", 
"sentry.rules.conditions.first_seen_event.FirstSeenEventCondition", "sentry.rules.conditions.regression_event.RegressionEventCondition", @@ -254,6 +255,10 @@ def get_all_languages() -> List[str]: ] ) +SCHEMA_FORM_ACTIONS = frozenset( + ["sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction"] +) + # methods as defined by http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html + PATCH HTTP_METHODS = ("GET", "POST", "PUT", "OPTIONS", "HEAD", "DELETE", "TRACE", "CONNECT", "PATCH") @@ -570,3 +575,6 @@ def from_str(cls, string: str) -> Optional[int]: # Defined at https://github.com/getsentry/relay/blob/master/relay-common/src/constants.rs DataCategory = sentry_relay.DataCategory + +CRASH_RATE_ALERT_SESSION_COUNT_ALIAS = "_total_count" +CRASH_RATE_ALERT_AGGREGATE_ALIAS = "_crash_rate_alert_aggregate" diff --git a/src/sentry/deletions/defaults/organization.py b/src/sentry/deletions/defaults/organization.py index 5e2a5464934504..dd26a9970ab550 100644 --- a/src/sentry/deletions/defaults/organization.py +++ b/src/sentry/deletions/defaults/organization.py @@ -14,7 +14,7 @@ def should_proceed(self, instance): } def get_child_relations(self, instance): - from sentry.discover.models import DiscoverSavedQuery, KeyTransaction, TeamKeyTransaction + from sentry.discover.models import DiscoverSavedQuery, TeamKeyTransaction from sentry.incidents.models import AlertRule, Incident from sentry.models import ( CommitAuthor, @@ -46,7 +46,6 @@ def get_child_relations(self, instance): Environment, Dashboard, DiscoverSavedQuery, - KeyTransaction, TeamKeyTransaction, ExternalIssue, PromptsActivity, diff --git a/src/sentry/deletions/defaults/project.py b/src/sentry/deletions/defaults/project.py index 733321be2b3d64..a84e2cc44abf12 100644 --- a/src/sentry/deletions/defaults/project.py +++ b/src/sentry/deletions/defaults/project.py @@ -4,7 +4,7 @@ class ProjectDeletionTask(ModelDeletionTask): def get_child_relations(self, instance): from sentry import models - from sentry.discover.models import DiscoverSavedQueryProject, KeyTransaction + from sentry.discover.models import DiscoverSavedQueryProject from sentry.incidents.models import IncidentProject from sentry.snuba.models import QuerySubscription @@ -44,7 +44,6 @@ def get_child_relations(self, instance): models.UserReport, models.ProjectTransactionThreshold, DiscoverSavedQueryProject, - KeyTransaction, IncidentProject, QuerySubscription, ) diff --git a/src/sentry/discover/endpoints/discover_key_transactions.py b/src/sentry/discover/endpoints/discover_key_transactions.py index 37b5b4f751c857..55fb402e0f1cfb 100644 --- a/src/sentry/discover/endpoints/discover_key_transactions.py +++ b/src/sentry/discover/endpoints/discover_key_transactions.py @@ -28,7 +28,7 @@ class KeyTransactionEndpoint(KeyTransactionBase): permission_classes = (KeyTransactionPermission,) def get(self, request, organization): - if not self.has_feature(request, organization): + if not self.has_feature(organization, request): return Response(status=404) transaction_name = request.GET.get("transaction") @@ -48,7 +48,7 @@ def get(self, request, organization): def post(self, request, organization): """Create a Key Transaction""" - if not self.has_feature(request, organization): + if not self.has_feature(organization, request): return Response(status=404) project = self.get_project(request, organization) @@ -103,7 +103,7 @@ def post(self, request, organization): def delete(self, request, organization): """Remove a Key transaction for a user""" - if not self.has_feature(request, organization): 
+ if not self.has_feature(organization, request): return Response(status=404) project = self.get_project(request, organization) @@ -134,7 +134,7 @@ class KeyTransactionListEndpoint(KeyTransactionBase): permission_classes = (KeyTransactionPermission,) def get(self, request, organization): - if not self.has_feature(request, organization): + if not self.has_feature(organization, request): return Response(status=404) try: diff --git a/src/sentry/discover/endpoints/serializers.py b/src/sentry/discover/endpoints/serializers.py index 991a26c0a8ec99..f72de793b90b57 100644 --- a/src/sentry/discover/endpoints/serializers.py +++ b/src/sentry/discover/endpoints/serializers.py @@ -163,9 +163,10 @@ class DiscoverSavedQuerySerializer(serializers.Serializer): widths = ListField(child=serializers.CharField(), required=False, allow_null=True) yAxis = ListField(child=serializers.CharField(), required=False, allow_null=True) display = serializers.CharField(required=False, allow_null=True) + topEvents = serializers.IntegerField(min_value=1, max_value=10, required=False, allow_null=True) disallowed_fields = { - 1: {"environment", "query", "yAxis", "display"}, + 1: {"environment", "query", "yAxis", "display", "topEvents"}, 2: {"groupby", "rollup", "aggregations", "conditions", "limit"}, } @@ -198,6 +199,7 @@ def validate(self, data): "widths", "yAxis", "display", + "topEvents", ] for key in query_keys: diff --git a/src/sentry/eventstore/base.py b/src/sentry/eventstore/base.py index 318169449e8eda..45a4fc8fa2eae4 100644 --- a/src/sentry/eventstore/base.py +++ b/src/sentry/eventstore/base.py @@ -98,7 +98,6 @@ def params(self): return { "start": self.start, "end": self.end, - # needed for the key transaction column "user_id": self.user_id, "organization_id": self.organization_id, # needed for the team key transaction column diff --git a/src/sentry/eventstream/kafka/postprocessworker.py b/src/sentry/eventstream/kafka/postprocessworker.py index e666ae280cd982..fa0c715231c6b0 100644 --- a/src/sentry/eventstream/kafka/postprocessworker.py +++ b/src/sentry/eventstream/kafka/postprocessworker.py @@ -6,6 +6,7 @@ from sentry import options from sentry.eventstream.kafka.protocol import ( + decode_bool, get_task_kwargs_for_message, get_task_kwargs_for_message_from_headers, ) @@ -21,6 +22,7 @@ _CONCURRENCY_METRIC = "eventstream.concurrency" _MESSAGES_METRIC = "eventstream.messages" _CONCURRENCY_OPTION = "post-process-forwarder:concurrency" +_TRANSACTION_FORWARDER_HEADER = "transaction_forwarder" @contextmanager @@ -114,7 +116,7 @@ def process_message(self, message: Message) -> Optional[Future]: """ return self.__executor.submit(_get_task_kwargs_and_dispatch, message) - def flush_batch(self, batch: Sequence[Future]) -> None: + def flush_batch(self, batch: Optional[Sequence[Future]]) -> None: """ For all work which was submitted to the thread pool executor, we need to ensure that if an exception was raised, then we raise it in the main thread. This is needed so that processing can be stopped in such @@ -140,3 +142,44 @@ def flush_batch(self, batch: Sequence[Future]) -> None: def shutdown(self) -> None: self.__executor.shutdown() + + +class ErrorsPostProcessForwarderWorker(PostProcessForwarderWorker): + """ + ErrorsPostProcessForwarderWorker will process messages only in the following scenarios: + 1. _TRANSACTION_FORWARDER_HEADER is missing from the kafka headers. This is a backward compatibility + use case. There can be messages in the queue which do not have this header.
Those messages should be + handled by the errors post process forwarder + 2. _TRANSACTION_FORWARDER_HEADER is False in the kafka headers. + """ + + def process_message(self, message: Message) -> Optional[Future]: + headers = {header: value for header, value in message.headers()} + + # Backwards-compatibility case for messages missing header. + if _TRANSACTION_FORWARDER_HEADER not in headers: + return super().process_message(message) + + if decode_bool(headers.get(_TRANSACTION_FORWARDER_HEADER)) is False: + return super().process_message(message) + + return None + + +class TransactionsPostProcessForwarderWorker(PostProcessForwarderWorker): + """ + TransactionsPostProcessForwarderWorker will process messages only in the following scenarios: + 1. _TRANSACTION_FORWARDER_HEADER is True in the kafka headers. + """ + + def process_message(self, message: Message) -> Optional[Future]: + headers = {header: value for header, value in message.headers()} + + # Backwards-compatibility for messages missing headers. + if _TRANSACTION_FORWARDER_HEADER not in headers: + return None + + if decode_bool(headers.get(_TRANSACTION_FORWARDER_HEADER)) is True: + return super().process_message(message) + + return None diff --git a/src/sentry/eventstream/kafka/protocol.py b/src/sentry/eventstream/kafka/protocol.py index 8deb4e6fd8d961..12d68c5a50eba9 100644 --- a/src/sentry/eventstream/kafka/protocol.py +++ b/src/sentry/eventstream/kafka/protocol.py @@ -99,33 +99,37 @@ def get_task_kwargs_for_message(value): return handler(*payload[1:]) -def get_task_kwargs_for_message_from_headers(headers: Sequence[Tuple[str, Optional[bytes]]]): - """ - Same as get_task_kwargs_for_message but gets the required information from - the kafka message headers. - """ +def decode_str(value: Optional[bytes]) -> str: + assert isinstance(value, bytes) + return value.decode("utf-8") - def decode_str(value: Optional[bytes]) -> str: - assert isinstance(value, bytes) - return value.decode("utf-8") - def decode_optional_str(value: Optional[bytes]) -> Optional[str]: - if value is None: - return None - return decode_str(value) +def decode_optional_str(value: Optional[bytes]) -> Optional[str]: + if value is None: + return None + return decode_str(value) - def decode_int(value: Optional[bytes]) -> int: - assert isinstance(value, bytes) - return int(value) - def decode_optional_int(value: Optional[bytes]) -> Optional[int]: - if value is None: - return None - return decode_int(value) +def decode_int(value: Optional[bytes]) -> int: + assert isinstance(value, bytes) + return int(value) + - def decode_bool(value: bytes) -> bool: - return bool(int(decode_str(value))) +def decode_optional_int(value: Optional[bytes]) -> Optional[int]: + if value is None: + return None + return decode_int(value) + +def decode_bool(value: bytes) -> bool: + return bool(int(decode_str(value))) + + +def get_task_kwargs_for_message_from_headers(headers: Sequence[Tuple[str, Optional[bytes]]]): + """ + Same as get_task_kwargs_for_message but gets the required information from + the kafka message headers.
+ """ try: header_data = {k: v for k, v in headers} version = decode_int(header_data["version"]) diff --git a/src/sentry/incidents/endpoints/serializers.py b/src/sentry/incidents/endpoints/serializers.py index 2ba972fc3aeca7..85b83e9df37b95 100644 --- a/src/sentry/incidents/endpoints/serializers.py +++ b/src/sentry/incidents/endpoints/serializers.py @@ -73,6 +73,9 @@ # TODO(davidenwang): eventually we should pass some form of these to the event_search parser to raise an error unsupported_queries = {"release:latest"} +# Allowed time windows (in minutes) for crash rate alerts +CRASH_RATE_ALERTS_ALLOWED_TIME_WINDOWS = [30, 60, 120, 240, 720, 1440] + class AlertRuleTriggerActionSerializer(CamelSnakeModelSerializer): """ @@ -399,6 +402,8 @@ def validate_dataset(self, dataset): ) def validate_event_types(self, event_types): + if self.initial_data.get("dataset") == Dataset.Sessions.value: + return [] try: return [SnubaQueryEventType.EventType[event_type.upper()] for event_type in event_types] except KeyError: @@ -454,6 +459,9 @@ def validate(self, data): "Invalid Metric: Please pass a valid function for aggregation" ) + dataset = Dataset(data["dataset"].value) + self._validate_time_window(dataset, data.get("time_window")) + try: raw_query( aggregations=snuba_filter.aggregations, @@ -462,7 +470,7 @@ def validate(self, data): conditions=snuba_filter.conditions, filter_keys=snuba_filter.filter_keys, having=snuba_filter.having, - dataset=Dataset(data["dataset"].value), + dataset=dataset, limit=1, referrer="alertruleserializer.test_query", ) @@ -483,7 +491,7 @@ def validate(self, data): event_types = data.get("event_types") - valid_event_types = dataset_valid_event_types[data["dataset"]] + valid_event_types = dataset_valid_event_types.get(data["dataset"], set()) if event_types and set(event_types) - valid_event_types: raise serializers.ValidationError( "Invalid event types for this dataset. 
Valid event types are %s" @@ -531,6 +539,16 @@ def _translate_thresholds(self, threshold_type, comparison_delta, triggers, data for trigger in triggers: trigger["alert_threshold"] = translator(trigger["alert_threshold"]) + @staticmethod + def _validate_time_window(dataset, time_window): + if dataset == Dataset.Sessions: + # Validate time window + if time_window not in CRASH_RATE_ALERTS_ALLOWED_TIME_WINDOWS: + raise serializers.ValidationError( + "Invalid Time Window: Allowed time windows for crash rate alerts are: " + "30min, 1h, 2h, 4h, 12h and 24h" + ) + def _validate_trigger_thresholds(self, threshold_type, trigger, resolve_threshold): if resolve_threshold is None: return diff --git a/src/sentry/incidents/subscription_processor.py b/src/sentry/incidents/subscription_processor.py index 0a9ed2473852d6..12eb05168069f5 100644 --- a/src/sentry/incidents/subscription_processor.py +++ b/src/sentry/incidents/subscription_processor.py @@ -2,11 +2,13 @@ import operator from copy import deepcopy from datetime import timedelta +from typing import Optional from django.conf import settings from django.db import transaction from sentry import features +from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS, CRASH_RATE_ALERT_SESSION_COUNT_ALIAS from sentry.incidents.logic import ( CRITICAL_TRIGGER_LABEL, WARNING_TRIGGER_LABEL, @@ -42,6 +44,12 @@ ALERT_RULE_STAT_KEYS = ("last_update",) ALERT_RULE_BASE_TRIGGER_STAT_KEY = "%s:trigger:%s:%s" ALERT_RULE_TRIGGER_STAT_KEYS = ("alert_triggered", "resolve_triggered") +# Stores a minimum threshold that represents a session count under which we don't evaluate the crash +# rate alert, and the update is just dropped. If it is set to None, then no minimum threshold +# check is applied +# ToDo(ahmed): This is still experimental. If we decide that it makes sense to keep this +# functionality, then maybe we should move this to constants +CRASH_RATE_ALERT_MINIMUM_THRESHOLD: Optional[int] = None class SubscriptionProcessor: @@ -205,19 +213,72 @@ def get_comparison_aggregation_value(self, subscription_update, aggregation_valu return (aggregation_value / comparison_aggregate) * 100 - def get_aggregation_value(self, subscription_update): - aggregation_value = list(subscription_update["values"]["data"][0].values())[0] - # In some cases Snuba can return a None value for an aggregation. This means - # there were no rows present when we made the query for certain types of aggregations like - # avg. Defaulting this to 0 for now. It might turn out that we'd prefer to skip the update - # in the future. + @staticmethod + def get_crash_rate_alert_aggregation_value(subscription_update): + """ + Handles validation and extraction of Crash Rate Alerts subscription update values. + The subscription update looks like + { + '_crash_rate_alert_aggregate': 0.5, + '_total_count': 34 + } + - `_crash_rate_alert_aggregate` represents sessions_crashed/sessions or + users_crashed/users, and so we need to subtract that number from 1 and then multiply by + 100 to get the crash free percentage + - `_total_count` represents the total sessions or user counts. This is used when + CRASH_RATE_ALERT_MINIMUM_THRESHOLD is set in the sense that if the minimum threshold is + greater than the session count, then the update is dropped.
If the minimum threshold is + not set then the total sessions count is just ignored + """ + aggregation_value = subscription_update["values"]["data"][0][ + CRASH_RATE_ALERT_AGGREGATE_ALIAS + ] if aggregation_value is None: - aggregation_value = 0 + metrics.incr("incidents.alert_rules.ignore_update_no_session_data") + return - if self.alert_rule.comparison_delta: - aggregation_value = self.get_comparison_aggregation_value( - subscription_update, aggregation_value + try: + total_count = subscription_update["values"]["data"][0][ + CRASH_RATE_ALERT_SESSION_COUNT_ALIAS + ] + if CRASH_RATE_ALERT_MINIMUM_THRESHOLD is not None: + min_threshold = int(CRASH_RATE_ALERT_MINIMUM_THRESHOLD) + if total_count < min_threshold: + metrics.incr( + "incidents.alert_rules.ignore_update_count_lower_than_min_threshold" + ) + return + except KeyError: + # If for whatever reason total session count was not sent in the update, + # ignore the minimum threshold comparison and continue along with processing the + # update. However, this should not happen. + logger.exception( + "Received an update for a crash rate alert subscription, but no total " + "sessions count was sent" ) + # The subscription aggregation for crash rate alerts uses the Discover percentage + # function, which would technically return a ratio of sessions_crashed/sessions and + # so we need to calculate the crash free percentage out of that returned value + aggregation_value = (1 - aggregation_value) * 100 + return aggregation_value + + def get_aggregation_value(self, subscription_update): + is_sessions_dataset = Dataset(self.subscription.snuba_query.dataset) == Dataset.Sessions + if is_sessions_dataset: + aggregation_value = self.get_crash_rate_alert_aggregation_value(subscription_update) + else: + aggregation_value = list(subscription_update["values"]["data"][0].values())[0] + # In some cases Snuba can return a None value for an aggregation. This means + # there were no rows present when we made the query for certain types of aggregations + # like avg. Defaulting this to 0 for now. It might turn out that we'd prefer to skip + # the update in the future. 
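To make the crash-free arithmetic above concrete, a small sketch with illustrative values (not drawn from the change itself):

    ratio = 0.2  # _crash_rate_alert_aggregate, i.e. sessions_crashed / sessions
    crash_free_percentage = (1 - ratio) * 100  # == 80.0, the value handed to the triggers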
+ if aggregation_value is None: + aggregation_value = 0 + + if self.alert_rule.comparison_delta: + aggregation_value = self.get_comparison_aggregation_value( + subscription_update, aggregation_value + ) return aggregation_value def process_update(self, subscription_update): diff --git a/src/sentry/integrations/example/integration.py b/src/sentry/integrations/example/integration.py index 269d2d58245806..3ca76e027e436b 100644 --- a/src/sentry/integrations/example/integration.py +++ b/src/sentry/integrations/example/integration.py @@ -1,4 +1,4 @@ -from typing import Any, Optional +from typing import Any, Mapping, Optional from django.http import HttpResponse @@ -9,7 +9,7 @@ IntegrationMetadata, IntegrationProvider, ) -from sentry.integrations.issues import IssueSyncMixin +from sentry.integrations.issues import IssueSyncMixin, ResolveSyncAction from sentry.mediators.plugins import Migrator from sentry.models import ExternalIssue, User from sentry.pipeline import PipelineView @@ -141,14 +141,15 @@ def sync_assignee_outbound( def sync_status_outbound(self, external_issue, is_resolved, project_id): pass - def should_unresolve(self, data): - return data["status"]["category"] != "done" - - def should_resolve(self, data): - return data["status"]["category"] == "done" + def get_resolve_sync_action(self, data: Mapping[str, Any]) -> ResolveSyncAction: + category = data["status"]["category"] + return ResolveSyncAction.from_resolve_unresolve( + should_resolve=category == "done", + should_unresolve=category != "done", + ) def get_issue_display_name(self, external_issue): - return "display name: %s" % external_issue.key + return f"display name: {external_issue.key}" def get_stacktrace_link(self, repo, path, default, version): pass diff --git a/src/sentry/integrations/issues.py b/src/sentry/integrations/issues.py index 42b4c767aea54b..0725ed909bcdbb 100644 --- a/src/sentry/integrations/issues.py +++ b/src/sentry/integrations/issues.py @@ -1,12 +1,13 @@ +import enum import logging from collections import defaultdict -from typing import Any, Optional +from typing import Any, Mapping, Optional from sentry import features -from sentry.models import ExternalIssue, Group, GroupLink, GroupStatus, Organization, User +from sentry.models import ExternalIssue, GroupLink, Organization, User from sentry.models.useroption import UserOption from sentry.shared_integrations.exceptions import ApiError, IntegrationError -from sentry.types.activity import ActivityType +from sentry.tasks.integrations import sync_status_inbound as sync_status_inbound_task from sentry.utils.compat import filter from sentry.utils.http import absolute_uri from sentry.utils.safe import safe_execute @@ -14,6 +15,34 @@ logger = logging.getLogger("sentry.integrations.issues") +class ResolveSyncAction(enum.Enum): + """ + When an issue's state changes, we may have to sync the state based on the + "done" states we get from the API. This enum encapsulates the three options + we have: "resolve", "unresolve", or "do nothing". 
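For illustration, the intended semantics of the classmethod below (a sketch; conflicting flags log a warning and fall back to NOOP):

    >>> ResolveSyncAction.from_resolve_unresolve(should_resolve=True, should_unresolve=False)
    <ResolveSyncAction.RESOLVE: 1>
    >>> ResolveSyncAction.from_resolve_unresolve(should_resolve=True, should_unresolve=True)
    <ResolveSyncAction.NOOP: 0>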
+ """ + + NOOP = 0 + RESOLVE = 1 + UNRESOLVE = 2 + + @classmethod + def from_resolve_unresolve( + cls, should_resolve: bool, should_unresolve: bool + ) -> "ResolveSyncAction": + if should_resolve and should_unresolve: + logger.warning("sync-config-conflict") + return ResolveSyncAction.NOOP + + if should_resolve: + return ResolveSyncAction.RESOLVE + + if should_unresolve: + return ResolveSyncAction.UNRESOLVE + + return ResolveSyncAction.NOOP + + class IssueBasicMixin: def should_sync(self, attribute): return False @@ -326,82 +355,36 @@ def sync_status_outbound(self, external_issue, is_resolved, project_id, **kwargs """ raise NotImplementedError - def should_unresolve(self, data): - """ - Given webhook data, check whether the status - category changed FROM "done" to something else, - meaning the sentry issue should be marked as - unresolved - - >>> def should_unresolve(self, data): - >>> client = self.get_client() - >>> statuses = client.get_statuses() - >>> done_statuses = [s['id'] for s in statuses if s['category'] == 'done'] - >>> return data['from_status'] in done_statuses \ - >>> and data['to_status'] not in done_statuses - - """ - raise NotImplementedError - - def should_resolve(self, data): + def get_resolve_sync_action(self, data: Mapping[str, Any]) -> ResolveSyncAction: """ - Given webhook data, check whether the status - category changed TO "done" from something else, - meaning the sentry issue should be marked as - resolved + Given webhook data, check whether the status category changed FROM + "done" to something else, meaning the Sentry issue should be marked as + unresolved or if the status category changed TO "done" from something + else, meaning the sentry issue should be marked as resolved. - see example above + Because checking the "done" states can rely on an API call, this function + should calculate both "resolve" and "unresolve" to save a round trip. 
""" raise NotImplementedError - def sync_status_inbound(self, issue_key, data): + def should_sync_status_inbound(self) -> bool: if not self.should_sync("inbound_status"): - return + return False organization = Organization.objects.get(id=self.organization_id) has_issue_sync = features.has("organizations:integrations-issue-sync", organization) - if not has_issue_sync: + return has_issue_sync + + def sync_status_inbound(self, issue_key: str, data: Mapping[str, Any]) -> None: + if not self.should_sync_status_inbound(): return - affected_groups = list( - Group.objects.get_groups_by_external_issue(self.model, issue_key) - .filter(project__organization_id=self.organization_id) - .select_related("project") + sync_status_inbound_task.apply_async( + kwargs={ + "integration_id": self.model.id, + "organization_id": self.organization_id, + "issue_key": issue_key, + "data": data, + } ) - - groups_to_resolve = [] - groups_to_unresolve = [] - - should_resolve = self.should_resolve(data) - should_unresolve = self.should_unresolve(data) - - for group in affected_groups: - - # this probably shouldn't be possible unless there - # is a bug in one of those methods - if should_resolve is True and should_unresolve is True: - logger.warning( - "sync-config-conflict", - extra={ - "organization_id": group.project.organization_id, - "integration_id": self.model.id, - "provider": self.model.get_provider(), - }, - ) - continue - - if should_unresolve: - groups_to_unresolve.append(group) - elif should_resolve: - groups_to_resolve.append(group) - - if groups_to_resolve: - Group.objects.update_group_status( - groups_to_resolve, GroupStatus.RESOLVED, ActivityType.SET_RESOLVED - ) - - if groups_to_unresolve: - Group.objects.update_group_status( - groups_to_unresolve, GroupStatus.UNRESOLVED, ActivityType.SET_UNRESOLVED - ) diff --git a/src/sentry/integrations/jira/integration.py b/src/sentry/integrations/jira/integration.py index 5c9206a2d89306..8f216c209ef3c3 100644 --- a/src/sentry/integrations/jira/integration.py +++ b/src/sentry/integrations/jira/integration.py @@ -1,7 +1,7 @@ import logging import re from operator import attrgetter -from typing import Any, Optional +from typing import Any, Mapping, Optional from django.conf import settings from django.urls import reverse @@ -15,7 +15,7 @@ IntegrationMetadata, IntegrationProvider, ) -from sentry.integrations.issues import IssueSyncMixin +from sentry.integrations.issues import IssueSyncMixin, ResolveSyncAction from sentry.models import ( ExternalIssue, IntegrationExternalProject, @@ -941,17 +941,14 @@ def _get_done_statuses(self): statuses = client.get_valid_statuses() return {s["id"] for s in statuses if s["statusCategory"]["key"] == "done"} - def should_unresolve(self, data): + def get_resolve_sync_action(self, data: Mapping[str, Any]) -> ResolveSyncAction: done_statuses = self._get_done_statuses() c_from = data["changelog"]["from"] c_to = data["changelog"]["to"] - return c_from in done_statuses and c_to not in done_statuses - - def should_resolve(self, data): - done_statuses = self._get_done_statuses() - c_from = data["changelog"]["from"] - c_to = data["changelog"]["to"] - return c_to in done_statuses and c_from not in done_statuses + return ResolveSyncAction.from_resolve_unresolve( + should_resolve=c_to in done_statuses and c_from not in done_statuses, + should_unresolve=c_from in done_statuses and c_to not in done_statuses, + ) class JiraIntegrationProvider(IntegrationProvider): diff --git a/src/sentry/integrations/vsts/issues.py 
b/src/sentry/integrations/vsts/issues.py index d183dfad89d480..efe33d003c105c 100644 --- a/src/sentry/integrations/vsts/issues.py +++ b/src/sentry/integrations/vsts/issues.py @@ -1,12 +1,12 @@ from collections import OrderedDict -from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, Optional, Sequence, Tuple +from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, Optional, Sequence, Set, Tuple from django.urls import reverse from django.utils.translation import ugettext as _ from mistune import markdown from rest_framework.response import Response -from sentry.integrations.issues import IssueSyncMixin +from sentry.integrations.issues import IssueSyncMixin, ResolveSyncAction from sentry.models import Activity, IntegrationExternalProject, OrganizationIntegration, User from sentry.shared_integrations.exceptions import ApiError, ApiUnauthorized @@ -310,15 +310,16 @@ def sync_status_outbound( }, ) - def should_unresolve(self, data: Mapping[str, str]) -> bool: - done_states = self.get_done_states(data["project"]) - return not data["new_state"] in done_states or data["old_state"] is None - - def should_resolve(self, data: Mapping[str, str]) -> bool: - done_states = self.get_done_states(data["project"]) - return not data["old_state"] in done_states and data["new_state"] in done_states + def get_resolve_sync_action(self, data: Mapping[str, Any]) -> ResolveSyncAction: + done_states = self._get_done_statuses(data["project"]) + return ResolveSyncAction.from_resolve_unresolve( + should_resolve=( + not data["old_state"] in done_states and data["new_state"] in done_states + ), + should_unresolve=(not data["new_state"] in done_states or data["old_state"] is None), + ) - def get_done_states(self, project: str) -> Sequence[str]: + def _get_done_statuses(self, project: str) -> Set[str]: client = self.get_client() try: all_states = client.get_work_item_states(self.instance, project)["value"] @@ -327,11 +328,8 @@ def get_done_states(self, project: str) -> Sequence[str]: "vsts.get-done-states.failed", extra={"integration_id": self.model.id, "exception": err}, ) - return [] - done_states = [ - state["name"] for state in all_states if state["category"] in self.done_categories - ] - return done_states + return set() + return {state["name"] for state in all_states if state["category"] in self.done_categories} def get_issue_display_name(self, external_issue: "ExternalIssue") -> str: return (external_issue.metadata or {}).get("display_name", "") diff --git a/src/sentry/mediators/alert_rule_actions/__init__.py b/src/sentry/mediators/alert_rule_actions/__init__.py new file mode 100644 index 00000000000000..77138e849b32dc --- /dev/null +++ b/src/sentry/mediators/alert_rule_actions/__init__.py @@ -0,0 +1,3 @@ +from .creator import AlertRuleActionCreator + +__all__ = ("AlertRuleActionCreator",) diff --git a/src/sentry/mediators/alert_rule_actions/creator.py b/src/sentry/mediators/alert_rule_actions/creator.py index 1a0190207f4728..a949608a1e2252 100644 --- a/src/sentry/mediators/alert_rule_actions/creator.py +++ b/src/sentry/mediators/alert_rule_actions/creator.py @@ -17,7 +17,7 @@ def _save_alert_rule_action(self): self.rule.save() def _make_external_request(self): - self.response = external_requests.AlerRuleActionRequester.run( + self.response = external_requests.AlertRuleActionRequester.run( install=self.install, uri=self.uri, fields=self.fields, diff --git a/src/sentry/models/grouphistory.py b/src/sentry/models/grouphistory.py index 1a7a4be7319964..85733c465ca706 100644 --- 
a/src/sentry/models/grouphistory.py +++ b/src/sentry/models/grouphistory.py @@ -15,10 +15,19 @@ class GroupHistoryStatus: UNASSIGNED = 6 REGRESSED = 7 DELETED = 8 - DELETED_AND_DISCADED = 9 + DELETED_AND_DISCARDED = 9 REVIEWED = 10 +ACTIONED_STATUSES = [ + GroupHistoryStatus.RESOLVED, + GroupHistoryStatus.IGNORED, + GroupHistoryStatus.REVIEWED, + GroupHistoryStatus.DELETED, + GroupHistoryStatus.DELETED_AND_DISCARDED, +] + + class GroupHistory(Model): """ This model is used to track certain status changes for groups, @@ -48,7 +57,7 @@ class GroupHistory(Model): (GroupHistoryStatus.ASSIGNED, _("Assigned")), (GroupHistoryStatus.UNASSIGNED, _("Unassigned")), (GroupHistoryStatus.DELETED, _("Deleted")), - (GroupHistoryStatus.DELETED_AND_DISCADED, _("Deleted and Discarded")), + (GroupHistoryStatus.DELETED_AND_DISCARDED, _("Deleted and Discarded")), (GroupHistoryStatus.REVIEWED, _("Reviewed")), ), ) diff --git a/src/sentry/models/release.py b/src/sentry/models/release.py index daf470a5100b8f..bd942d39ee1ee4 100644 --- a/src/sentry/models/release.py +++ b/src/sentry/models/release.py @@ -1079,6 +1079,25 @@ def count_artifacts(self): counts = get_artifact_counts([self.id]) return counts.get(self.id, 0) + def clear_commits(self): + """ + Delete all release-specific commit data associated with this release. We will not delete the Commit model values because other releases may use these commits. + """ + with sentry_sdk.start_span(op="clear_commits"): + from sentry.models import ReleaseCommit, ReleaseHeadCommit + + ReleaseHeadCommit.objects.get( + organization_id=self.organization_id, release=self + ).delete() + ReleaseCommit.objects.filter( + organization_id=self.organization_id, release=self + ).delete() + + self.authors = [] + self.commit_count = 0 + self.last_commit_id = None + self.save() + def get_artifact_counts(release_ids: List[int]) -> Mapping[int, int]: """Get artifact count grouped by IDs""" diff --git a/src/sentry/notifications/notifications/activity/release.py b/src/sentry/notifications/notifications/activity/release.py index 73b9cf8309dc8d..d4b17d016cf15f 100644 --- a/src/sentry/notifications/notifications/activity/release.py +++ b/src/sentry/notifications/notifications/activity/release.py @@ -1,5 +1,7 @@ from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Set, Union +from sentry_relay import parse_release + from sentry.models import Activity, CommitFileChange, Project, Team, User from sentry.notifications.utils import ( get_commits_for_release, @@ -37,6 +39,7 @@ def __init__(self, activity: Activity) -> None: self.repos: Iterable[Mapping[str, Any]] = set() self.projects: Set[Project] = set() self.version = "unknown" + self.version_parsed = self.version return self.projects = set(self.release.projects.all()) @@ -47,7 +50,9 @@ def __init__(self, activity: Activity) -> None: self.repos = get_repos(self.commit_list, users, self.organization) self.environment = get_environment_for_deploy(self.deploy) self.group_counts_by_project = get_group_counts_by_project(self.release, self.projects) + self.version = self.release.version + self.version_parsed = parse_release(self.version)["description"] def should_email(self) -> bool: return bool(self.release and self.deploy) @@ -65,15 +70,16 @@ def get_users_by_teams(self) -> Mapping[int, List[int]]: def get_context(self) -> MutableMapping[str, Any]: return { **self.get_base_context(), - "commit_count": len(self.commit_list), "author_count": len(self.email_list), - "file_count":
CommitFileChange.objects.get_count_for_commits(self.commit_list), - "repos": self.repos, - "release": self.release, + "commit_count": len(self.commit_list), "deploy": self.deploy, "environment": self.environment, + "file_count": CommitFileChange.objects.get_count_for_commits(self.commit_list), + "release": self.release, + "repos": self.repos, "setup_repo_link": absolute_uri(f"/organizations/{self.organization.slug}/repos/"), - "text_description": f"Version {self.version} was deployed to {self.environment}", + "text_description": f"Version {self.version_parsed} was deployed to {self.environment}", + "version_parsed": self.version_parsed, } def get_projects(self, recipient: Union["Team", "User"]) -> Set[Project]: @@ -104,7 +110,7 @@ def get_recipient_context( } def get_subject(self, context: Optional[Mapping[str, Any]] = None) -> str: - return f"Deployed version {self.version} to {self.environment}" + return f"Deployed version {self.version_parsed} to {self.environment}" def get_title(self) -> str: return self.get_subject() @@ -115,7 +121,7 @@ def get_notification_title(self) -> str: projects_text = " for this project" elif len(self.projects) > 1: projects_text = " for these projects" - return f"Release {self.version} was deployed to {self.environment}{projects_text}" + return f"Release {self.version_parsed} was deployed to {self.environment}{projects_text}" def get_filename(self) -> str: return "activity/release" diff --git a/tests/sentry/security/__init__.py b/src/sentry/processing/__init__.py similarity index 100% rename from tests/sentry/security/__init__.py rename to src/sentry/processing/__init__.py diff --git a/src/sentry/processing/realtime_metrics/__init__.py b/src/sentry/processing/realtime_metrics/__init__.py index 59622d2314c3d8..ec7d2b24f5bae7 100644 --- a/src/sentry/processing/realtime_metrics/__init__.py +++ b/src/sentry/processing/realtime_metrics/__init__.py @@ -1,13 +1,29 @@ +from typing import TYPE_CHECKING + from django.conf import settings from sentry.utils.services import LazyServiceWrapper from .base import RealtimeMetricsStore -realtime_metrics_store = LazyServiceWrapper( +realtime_metrics_store: RealtimeMetricsStore = LazyServiceWrapper( RealtimeMetricsStore, settings.SENTRY_REALTIME_METRICS_BACKEND, settings.SENTRY_REALTIME_METRICS_OPTIONS, ) realtime_metrics_store.expose(locals()) + +if TYPE_CHECKING: + # This is all too dynamic for mypy, so manually set the same attributes from + # RealtimeMetricsStore.__all__: + validate = realtime_metrics_store.validate + increment_project_event_counter = realtime_metrics_store.increment_project_event_counter + increment_project_duration_counter = realtime_metrics_store.increment_project_duration_counter + projects = realtime_metrics_store.projects + get_counts_for_project = realtime_metrics_store.get_counts_for_project + get_durations_for_project = realtime_metrics_store.get_durations_for_project + get_lpq_projects = realtime_metrics_store.get_lpq_projects + is_lpq_project = realtime_metrics_store.is_lpq_project + add_project_to_lpq = realtime_metrics_store.add_project_to_lpq + remove_projects_from_lpq = realtime_metrics_store.remove_projects_from_lpq diff --git a/src/sentry/processing/realtime_metrics/base.py b/src/sentry/processing/realtime_metrics/base.py index 5e2b15f48d0f91..0e2fb539f647c7 100644 --- a/src/sentry/processing/realtime_metrics/base.py +++ b/src/sentry/processing/realtime_metrics/base.py @@ -1,10 +1,52 @@ +import dataclasses +from typing import Dict, Iterable, NewType, Set + from sentry.utils.services import 
Service +@dataclasses.dataclass(frozen=True) +class BucketedCount: + """ + Timestamp to count mapping. This represents some `count` amount of something performed + during `timestamp`. `timestamp` is stored in seconds. + """ + + timestamp: int + count: int + + +# Duration to count mapping where the keys are durations and the values are counts. This represents +# some `count` instances of some action where each individual instance took +# [`duration`, `duration`+10) seconds to complete. `duration` is stored in seconds. +BucketedDurations = NewType("BucketedDurations", Dict[int, int]) + + +@dataclasses.dataclass(frozen=True) +class DurationHistogram: + """ + Mapping of timestamp to histogram-like dict of durations. This represents some `count` amount of + some action performed during `timestamp`, where `counts` are grouped by how long that action + took. `timestamp` is stored in seconds. + """ + + timestamp: int + histogram: BucketedDurations + + class RealtimeMetricsStore(Service): # type: ignore """A service for storing metrics about incoming requests within a given time window.""" - __all__ = ("increment_project_event_counter", "increment_project_duration_counter", "validate") + __all__ = ( + "validate", + "increment_project_event_counter", + "increment_project_duration_counter", + "projects", + "get_counts_for_project", + "get_durations_for_project", + "get_lpq_projects", + "add_project_to_lpq", + "remove_projects_from_lpq", + ) def increment_project_event_counter(self, project_id: int, timestamp: int) -> None: """Increment the event counter for the given project_id. @@ -14,7 +56,7 @@ def increment_project_event_counter(self, project_id: int, timestamp: int) -> No time-window bucket with "timestamp" providing the time of the event in seconds since the UNIX epoch (i.e., as returned by time.time()). """ - pass + raise NotImplementedError def increment_project_duration_counter( self, project_id: int, timestamp: int, duration: int @@ -25,4 +67,62 @@ def increment_project_duration_counter( Calling this increments the counter of the current time-window bucket with "timestamp" providing the time of the event in seconds since the UNIX epoch and "duration" the processing time in seconds. """ - pass + raise NotImplementedError + + def projects(self) -> Iterable[int]: + """ + Returns IDs of all projects that should be considered for the low priority queue. + """ + raise NotImplementedError + + def get_counts_for_project(self, project_id: int) -> Iterable[BucketedCount]: + """ + Returns a sorted list of bucketed timestamps paired with the count of symbolicator requests + made during that time for some given project. + """ + raise NotImplementedError + + def get_durations_for_project(self, project_id: int) -> Iterable[DurationHistogram]: + """ + Returns a sorted list of bucketed timestamps paired with a dictionary of symbolicator + durations grouped in 10 second durations made during that time for some given project. + """ + raise NotImplementedError + + def get_lpq_projects(self) -> Set[int]: + """ + Fetches the list of projects that are currently using the low priority queue. + + Returns a list of project IDs. + """ + raise NotImplementedError + + def is_lpq_project(self, project_id: int) -> bool: + """ + Checks whether the given project is currently using the low priority queue. + """ + raise NotImplementedError + + def add_project_to_lpq(self, project_id: int) -> bool: + """ + Assigns a project to the low priority queue.
+ + This registers an intent to redirect all symbolication events triggered by the specified + project to the low priority queue. + + Returns True if the project was a new addition to the list. Returns False if it was already + assigned to the low priority queue. + """ + raise NotImplementedError + + def remove_projects_from_lpq(self, project_ids: Set[int]) -> int: + """ + Removes projects from the low priority queue. + + This registers an intent to restore all specified projects back to the regular queue. + + Returns the number of projects that were actively removed from the queue. Any projects that + were not assigned to the low priority queue to begin with will be omitted from the return + value. + """ + raise NotImplementedError diff --git a/src/sentry/processing/realtime_metrics/redis.py b/src/sentry/processing/realtime_metrics/redis.py index 03edfd52f8f0ef..72e3fc88ff85c6 100644 --- a/src/sentry/processing/realtime_metrics/redis.py +++ b/src/sentry/processing/realtime_metrics/redis.py @@ -1,10 +1,18 @@ import datetime +import logging +from itertools import chain +from typing import Iterable, Set from sentry.exceptions import InvalidConfiguration from sentry.utils import redis from . import base +# redis key for entry storing current list of LPQ members +LPQ_MEMBERS_KEY = "store.symbolicate-event-lpq-selected" + +logger = logging.getLogger(__name__) + class RedisRealtimeMetricsStore(base.RealtimeMetricsStore): """An implementation of RealtimeMetricsStore based on a Redis backend.""" @@ -42,6 +50,12 @@ def validate(self) -> None: if self._histogram_bucket_size <= 0: raise InvalidConfiguration("histogram bucket size must be at least 1") + def _counter_key_prefix(self) -> str: + return f"{self._prefix}:counter:{self._counter_bucket_size}" + + def _histogram_key_prefix(self) -> str: + return f"{self._prefix}:histogram:{self._histogram_bucket_size}" + def increment_project_event_counter(self, project_id: int, timestamp: int) -> None: """Increment the event counter for the given project_id. @@ -54,7 +68,7 @@ def increment_project_event_counter(self, project_id: int, timestamp: int) -> No if self._counter_bucket_size > 1: timestamp -= timestamp % self._counter_bucket_size - key = f"{self._prefix}:counter:{self._counter_bucket_size}:{project_id}:{timestamp}" + key = f"{self._counter_key_prefix()}:{project_id}:{timestamp}" with self.cluster.pipeline() as pipeline: pipeline.incr(key) @@ -73,10 +87,145 @@ def increment_project_duration_counter( if self._histogram_bucket_size > 1: timestamp -= timestamp % self._histogram_bucket_size - key = f"{self._prefix}:histogram:{self._histogram_bucket_size}:{project_id}:{timestamp}" + key = f"{self._histogram_key_prefix()}:{project_id}:{timestamp}" duration -= duration % 10 with self.cluster.pipeline() as pipeline: pipeline.hincrby(key, duration, 1) pipeline.pexpire(key, self._histogram_ttl) pipeline.execute() + + def projects(self) -> Iterable[int]: + """ + Returns IDs of all projects for which metrics have been recorded in the store. + + This may throw an exception if there is some sort of issue scanning the redis store for + projects.
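For orientation, a sketch of the key layout being scanned (the "symbolicate_event_low_priority" prefix and the 10-second counter bucket are illustrative assumptions, not taken from this change):

    >>> timestamp = 1600000007
    >>> timestamp -= timestamp % 10  # bucketed down, as in the increment methods above
    >>> f"symbolicate_event_low_priority:counter:10:42:{timestamp}"
    'symbolicate_event_low_priority:counter:10:42:1600000000'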
+ """ + + already_seen = set() + # Normally if there's a histogram entry for a project then there should be a counter + # entry for it as well, but double check both to be safe + all_keys = chain( + self.cluster.scan_iter( + match=self._counter_key_prefix() + ":*", + ), + self.cluster.scan_iter( + match=self._histogram_key_prefix() + ":*", + ), + ) + + for item in all_keys: + # Because this could be one of two patterns, this splits based on the most basic + # delimiter ":" instead of splitting on known prefixes + _prefix, _metric_type, _bucket_size, project_id_raw, _else = item.split(":", maxsplit=4) + project_id = int(project_id_raw) + if project_id not in already_seen: + already_seen.add(project_id) + yield project_id + + def get_counts_for_project(self, project_id: int) -> Iterable[base.BucketedCount]: + """ + Returns a sorted list of bucketed timestamps paired with the count of symbolicator requests + made during that time for some given project. + + This may throw an exception if there is some sort of issue fetching counts from the redis + store. + """ + key_prefix = f"{self._counter_key_prefix()}:{project_id}:" + + keys = sorted( + self.cluster.scan_iter( + match=key_prefix + "*", + ) + ) + counts = self.cluster.mget(keys) + for key, count_raw in zip(keys, counts): + _, timestamp_raw = key.split(key_prefix) + + timestamp_bucket = int(timestamp_raw) + count = int(count_raw) + yield base.BucketedCount(timestamp=timestamp_bucket, count=count) + + def get_durations_for_project(self, project_id: int) -> Iterable[base.DurationHistogram]: + """ + Returns a sorted list of bucketed timestamps paired with a histogram-like dictionary of + symbolication durations made during some timestamp for some given project. + + For a given `{duration:count}` entry in the dictionary bound to a specific `timestamp`: + + - `duration` represents the amount of time it took for a symbolication request to complete. + Durations are bucketed by 10secs, meaning that a `duration` of `30` covers all requests that + took between 30-39 seconds. + + - `count` is the number of symbolication requests that took some amount of time within the + range of `[duration, duration+10)` to complete. + + This may throw an exception if there is some sort of issue fetching durations from the redis + store. + """ + key_prefix = f"{self._histogram_key_prefix()}:{project_id}:" + keys = sorted( + self.cluster.scan_iter( + match=key_prefix + "*", + ) + ) + + for key in keys: + _, timestamp_raw = key.split(key_prefix) + timestamp_bucket = int(timestamp_raw) + + histogram_raw = self.cluster.hgetall(key) + histogram = base.BucketedDurations( + {int(duration): int(count) for duration, count in histogram_raw.items()} + ) + yield base.DurationHistogram(timestamp=timestamp_bucket, histogram=histogram) + + def get_lpq_projects(self) -> Set[int]: + """ + Fetches the list of projects that are currently using the low priority queue. + + Returns a list of project IDs. + + This may throw an exception if there is some sort of issue fetching the list from the redis + store. + """ + return {int(project_id) for project_id in self.cluster.smembers(LPQ_MEMBERS_KEY)} + + def is_lpq_project(self, project_id: int) -> bool: + """ + Checks whether the given project is currently using the low priority queue. + """ + return bool(self.cluster.sismember(LPQ_MEMBERS_KEY, project_id)) + + def add_project_to_lpq(self, project_id: int) -> bool: + """ + Assigns a project to the low priority queue. 
+ + This registers an intent to redirect all symbolication events triggered by the specified + project to the low priority queue. + + This may throw an exception if there is some sort of issue registering the project with the + queue. + """ + + # This returns 0 if project_id was already in the set, 1 if it was added, and throws an + # exception if there's a problem. If this successfully completes then the project is + # expected to be in the set. + return int(self.cluster.sadd(LPQ_MEMBERS_KEY, project_id)) > 0 + + def remove_projects_from_lpq(self, project_ids: Set[int]) -> int: + """ + Removes projects from the low priority queue. + + This registers an intent to restore all specified projects back to the regular queue. + + This may throw an exception if there is some sort of issue deregistering the projects from + the queue. + """ + if len(project_ids) == 0: + return 0 + + # This returns the number of projects removed, and throws an exception if there's a problem. + # If this successfully completes then the projects are expected to no longer be in the set. + return int(self.cluster.srem(LPQ_MEMBERS_KEY, *project_ids)) diff --git a/src/sentry/receivers/reprocessing.py b/src/sentry/receivers/reprocessing.py new file mode 100644 index 00000000000000..e56f26cb4a6b9b --- /dev/null +++ b/src/sentry/receivers/reprocessing.py @@ -0,0 +1,12 @@ +from sentry.models import ProjectOption +from sentry.signals import buffer_incr_complete + + +@buffer_incr_complete.connect( + sender=ProjectOption, dispatch_uid="bump_reprocessing_revision_receiver", weak=False +) +def bump_reprocessing_revision_receiver(filters, **_): + from sentry.reprocessing import REPROCESSING_OPTION, bump_reprocessing_revision + + if filters.get("key") == REPROCESSING_OPTION: + bump_reprocessing_revision(filters["project"], use_buffer=False) diff --git a/src/sentry/release_health/base.py b/src/sentry/release_health/base.py index 5e892b698fe3f1..e3d99901897d1d 100644 --- a/src/sentry/release_health/base.py +++ b/src/sentry/release_health/base.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import Mapping, Optional, Sequence, Set, Tuple, TypeVar, Union -from typing_extensions import TypedDict +from typing_extensions import Literal, TypedDict from sentry.utils.services import Service @@ -15,6 +15,22 @@ ProjectOrRelease = TypeVar("ProjectOrRelease", ProjectId, ProjectRelease) +# taken from sentry.snuba.sessions.STATS_PERIODS +StatsPeriod = Literal[ + "1h", + "24h", + "1d", + "48h", + "2d", + "7d", + "14d", + "30d", + "90d", +] + +OverviewStat = Literal["users", "sessions"] + + class CurrentAndPreviousCrashFreeRate(TypedDict): currentCrashFreeRate: Optional[float] previousCrashFreeRate: Optional[float] @@ -35,6 +51,9 @@ class _NoTimeBounds(TypedDict): ReleaseSessionsTimeBounds = Union[_TimeBounds, _NoTimeBounds] +# Inner list is supposed to be fixed length +ReleaseHealthStats = Sequence[Sequence[int]] + class ReleaseAdoption(TypedDict): #: Adoption rate (based on usercount) for a project's release from 0..100 @@ -54,6 +73,33 @@ class ReleaseAdoption(TypedDict): ReleasesAdoption = Mapping[Tuple[ProjectId, ReleaseName], ReleaseAdoption] +class ReleaseHealthOverview(TypedDict, total=False): + adoption: Optional[float] + sessions_adoption: Optional[float] + total_users_24h: Optional[int] + total_project_users_24h: Optional[int] + total_sessions_24h: Optional[int] + total_project_sessions_24h: Optional[int] + total_sessions: Optional[int] + total_users: Optional[int] + has_health_data: bool + sessions_crashed: int
+ crash_free_users: Optional[float] + crash_free_sessions: Optional[float] + sessions_errored: int + duration_p50: Optional[float] + duration_p90: Optional[float] + stats: Mapping[StatsPeriod, ReleaseHealthStats] + + +class CrashFreeBreakdown(TypedDict): + date: datetime + total_users: int + crash_free_users: Optional[float] + total_sessions: int + crash_free_sessions: Optional[float] + + class ReleaseHealthBackend(Service): # type: ignore """Abstraction layer for all release health related queries""" @@ -63,6 +109,8 @@ class ReleaseHealthBackend(Service): # type: ignore "check_has_health_data", "get_release_sessions_time_bounds", "check_releases_have_health_data", + "get_release_health_data_overview", + "get_crash_free_breakdown", "get_changed_project_release_model_adoptions", "get_oldest_health_data_for_releases", ) @@ -108,7 +156,7 @@ def get_current_and_previous_crash_free_rates( def get_release_adoption( self, - project_releases: Sequence[Tuple[ProjectId, ReleaseName]], + project_releases: Sequence[ProjectRelease], environments: Optional[Sequence[EnvironmentName]] = None, now: Optional[datetime] = None, org_id: Optional[OrganizationId] = None, @@ -176,8 +224,34 @@ def check_releases_have_health_data( """ Returns a set of all release versions that have health data within a given period of time. """ + raise NotImplementedError() + def get_release_health_data_overview( + self, + project_releases: Sequence[ProjectRelease], + environments: Optional[Sequence[EnvironmentName]] = None, + summary_stats_period: Optional[StatsPeriod] = None, + health_stats_period: Optional[StatsPeriod] = None, + stat: Optional[OverviewStat] = None, + ) -> Mapping[ProjectRelease, ReleaseHealthOverview]: + """Returns a mapping of `(project_id, release_name)` tuples to release + health overview information: adoption, crash-free percentages, session + and user counts and, when a `health_stats_period` is given, + time-series stats.
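For illustration, one entry of the returned mapping might look like this (`backend` and the numbers are hypothetical):

    >>> overview = backend.get_release_health_data_overview([(42, "myapp@1.0.0")])
    >>> overview[(42, "myapp@1.0.0")]["crash_free_sessions"]
    99.5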
+ """ + + raise NotImplementedError() + + def get_crash_free_breakdown( + self, + project_id: ProjectId, + release: ReleaseName, + start: datetime, + environments: Optional[Sequence[EnvironmentName]] = None, + ) -> Sequence[CrashFreeBreakdown]: + """Get stats about crash free sessions and stats for the last 1, 2, 7, 14 and 30 days""" + def get_changed_project_release_model_adoptions( self, project_ids: Sequence[ProjectId], diff --git a/src/sentry/release_health/metrics.py b/src/sentry/release_health/metrics.py index 910fb285e4d130..46fcd094377ab4 100644 --- a/src/sentry/release_health/metrics.py +++ b/src/sentry/release_health/metrics.py @@ -1,34 +1,44 @@ +from collections import defaultdict from datetime import datetime, timedelta from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Set, Tuple, Union import pytz -from snuba_sdk import BooleanCondition, Column, Condition, Entity, Function, Op, Query +from snuba_sdk import Column, Condition, Entity, Function, Op, Query from snuba_sdk.expressions import Granularity from snuba_sdk.query import SelectableExpression from sentry.models.project import Project from sentry.release_health.base import ( + CrashFreeBreakdown, CurrentAndPreviousCrashFreeRates, EnvironmentName, OrganizationId, + OverviewStat, ProjectId, ProjectOrRelease, ProjectRelease, ReleaseAdoption, ReleaseHealthBackend, + ReleaseHealthOverview, ReleaseName, ReleasesAdoption, ReleaseSessionsTimeBounds, + StatsPeriod, ) from sentry.sentry_metrics import indexer from sentry.snuba.dataset import Dataset, EntityKey -from sentry.utils.snuba import raw_snql_query +from sentry.snuba.sessions import _make_stats, get_rollup_starts_and_buckets, parse_snuba_datetime +from sentry.utils.snuba import QueryOutsideRetentionError, raw_snql_query class MetricIndexNotFound(Exception): pass +def get_tag_values_list(org_id: int, values: Sequence[str]) -> Sequence[int]: + return [x for x in [try_get_string_index(org_id, x) for x in values] if x is not None] + + def metric_id(org_id: int, name: str) -> int: index = indexer.resolve(org_id, name) # type: ignore if index is None: @@ -50,7 +60,7 @@ def tag_value(org_id: int, name: str) -> int: return index # type: ignore -def try_get_tag_value(org_id: int, name: str) -> Optional[int]: +def try_get_string_index(org_id: int, name: str) -> Optional[int]: return indexer.resolve(org_id, name) # type: ignore @@ -63,6 +73,20 @@ def reverse_tag_value(org_id: int, index: int) -> str: return str_value # type: ignore +def filter_projects_by_project_release(project_releases: Sequence[ProjectRelease]) -> Condition: + return Condition(Column("project_id"), Op.IN, list(x for x, _ in project_releases)) + + +def filter_releases_by_project_release( + org_id: int, project_releases: Sequence[ProjectRelease] +) -> Condition: + return Condition( + Column(tag_key(org_id, "release")), + Op.IN, + get_tag_values_list(org_id, [x for _, x in project_releases]), + ) + + class MetricsReleaseHealthBackend(ReleaseHealthBackend): """Gets release health results from the metrics dataset""" @@ -180,7 +204,7 @@ def _compute_crash_free_rate(data: Dict[str, float]) -> Optional[float]: def get_release_adoption( self, - project_releases: Sequence[Tuple[ProjectId, ReleaseName]], + project_releases: Sequence[ProjectRelease], environments: Optional[Sequence[EnvironmentName]] = None, now: Optional[datetime] = None, org_id: Optional[OrganizationId] = None, @@ -192,24 +216,21 @@ def get_release_adoption( if now is None: now = datetime.now(pytz.utc) - return 
self._get_release_adoption_impl( - now, org_id, project_releases, project_ids, environments - ) + return self._get_release_adoption_impl(now, org_id, project_releases, environments) @staticmethod def _get_release_adoption_impl( now: datetime, org_id: int, - project_releases: Sequence[Tuple[ProjectId, ReleaseName]], - project_ids: Sequence[ProjectId], + project_releases: Sequence[ProjectRelease], environments: Optional[Sequence[EnvironmentName]] = None, ) -> ReleasesAdoption: start = now - timedelta(days=1) - def _get_common_where(total: bool) -> List[Union[BooleanCondition, Condition]]: - where_common: List[Union[BooleanCondition, Condition]] = [ + def _get_common_where(total: bool) -> List[Condition]: + where_common: List[Condition] = [ Condition(Column("org_id"), Op.EQ, org_id), - Condition(Column("project_id"), Op.IN, project_ids), + filter_projects_by_project_release(project_releases), Condition(Column("timestamp"), Op.GTE, start), Condition(Column("timestamp"), Op.LT, now), Condition( @@ -218,30 +239,16 @@ def _get_common_where(total: bool) -> List[Union[BooleanCondition, Condition]]: ] if environments is not None: - environment_tag_values = [] - - for environment in environments: - value = indexer.resolve(org_id, environment) # type: ignore - if value is not None: - environment_tag_values.append(value) - where_common.append( - Condition(Column(tag_key(org_id, "environment")), Op.IN, environment_tag_values) + Condition( + Column(tag_key(org_id, "environment")), + Op.IN, + get_tag_values_list(org_id, environments), + ) ) if not total: - release_tag_values = [] - - for _, release in project_releases: - value = indexer.resolve(org_id, release) # type: ignore - if value is not None: - # We should not append the value if it hasn't been - # observed before. 
- release_tag_values.append(value) - - where_common.append( - Condition(Column(tag_key(org_id, "release")), Op.IN, release_tag_values) - ) + where_common.append(filter_releases_by_project_release(org_id, project_releases)) return where_common @@ -373,7 +380,7 @@ def get_release_sessions_time_bounds( ] try: - where: List[Union[BooleanCondition, Condition]] = [ + where: List[Condition] = [ Condition(Column("org_id"), Op.EQ, org_id), Condition(Column("project_id"), Op.EQ, project_id), Condition(Column(tag_key(org_id, "release")), Op.EQ, tag_value(org_id, release)), @@ -382,13 +389,7 @@ def get_release_sessions_time_bounds( ] if environments is not None: - env_filter = [ - x - for x in [ - try_get_tag_value(org_id, environment) for environment in environments - ] - if x is not None - ] + env_filter = get_tag_values_list(org_id, environments) if not env_filter: raise MetricIndexNotFound() @@ -524,11 +525,7 @@ def check_has_health_data( if includes_releases: releases = [x[1] for x in projects_list] # type: ignore release_column_name = tag_key(org_id, "release") - releases_ids = [ - release_id - for release_id in [try_get_tag_value(org_id, release) for release in releases] - if release_id is not None - ] + releases_ids = get_tag_values_list(org_id, releases) where_clause.append(Condition(Column(release_column_name), Op.IN, releases_ids)) column_names = ["project_id", release_column_name] @@ -575,16 +572,10 @@ def check_releases_have_health_data( ) -> Set[ReleaseName]: release_column_name = tag_key(organization_id, "release") - releases_ids = [ - release_id - for release_id in [ - try_get_tag_value(organization_id, release) for release in release_versions - ] - if release_id is not None - ] + releases_ids = get_tag_values_list(organization_id, release_versions) query = Query( dataset=Dataset.Metrics.value, - match=Entity("metrics_counters"), + match=Entity(EntityKey.MetricsCounters.value), select=[Column(release_column_name)], where=[ Condition(Column("org_id"), Op.EQ, organization_id), @@ -608,6 +599,493 @@ def extract_row_info(row: Mapping[str, Union[OrganizationId, str]]) -> ReleaseNa return {extract_row_info(row) for row in result["data"]} + @staticmethod + def _get_session_duration_data_for_overview( + where: List[Condition], org_id: int + ) -> Mapping[Tuple[int, str], Any]: + """ + Percentiles of session duration + """ + rv_durations: Dict[Tuple[int, str], Any] = {} + + release_column_name = tag_key(org_id, "release") + aggregates: List[SelectableExpression] = [ + Column(release_column_name), + Column("project_id"), + ] + + for row in raw_snql_query( + Query( + dataset=Dataset.Metrics.value, + match=Entity(EntityKey.MetricsDistributions.value), + select=aggregates + [Column("percentiles")], + where=where + + [ + Condition(Column("metric_id"), Op.EQ, metric_id(org_id, "session.duration")), + Condition( + Column(tag_key(org_id, "session.status")), + Op.EQ, + tag_value(org_id, "exited"), + ), + ], + groupby=aggregates, + ), + referrer="release_health.metrics.get_session_duration_data_for_overview", + )["data"]: + # See https://github.com/getsentry/snuba/blob/8680523617e06979427bfa18c6b4b4e8bf86130f/snuba/datasets/entities/metrics.py#L184 for quantiles + key = (row["project_id"], reverse_tag_value(org_id, row[release_column_name])) + rv_durations[key] = { + "duration_p50": row["percentiles"][0], + "duration_p90": row["percentiles"][2], + } + + return rv_durations + + @staticmethod + def _get_errored_sessions_for_overview( + where: List[Condition], org_id: int + ) -> Mapping[Tuple[int, str], 
int]: + """ + Count of errored sessions, incl fatal (abnormal, crashed) sessions + """ + rv_errored_sessions: Dict[Tuple[int, str], int] = {} + + release_column_name = tag_key(org_id, "release") + aggregates: List[SelectableExpression] = [ + Column(release_column_name), + Column("project_id"), + ] + + for row in raw_snql_query( + Query( + dataset=Dataset.Metrics.value, + match=Entity(EntityKey.MetricsSets.value), + select=aggregates + [Column("value")], + where=where + + [ + Condition(Column("metric_id"), Op.EQ, metric_id(org_id, "session.error")), + ], + groupby=aggregates, + ), + referrer="release_health.metrics.get_errored_sessions_for_overview", + )["data"]: + key = row["project_id"], reverse_tag_value(org_id, row[release_column_name]) + rv_errored_sessions[key] = row["value"] + + return rv_errored_sessions + + @staticmethod + def _get_session_by_status_for_overview( + where: List[Condition], org_id: int + ) -> Mapping[Tuple[int, str, str], int]: + """ + Counts of init, abnormal and crashed sessions, purpose-built for overview + """ + release_column_name = tag_key(org_id, "release") + session_status_column_name = tag_key(org_id, "session.status") + + aggregates: List[SelectableExpression] = [ + Column(release_column_name), + Column("project_id"), + Column(session_status_column_name), + ] + + rv_sessions: Dict[Tuple[int, str, str], int] = {} + + for row in raw_snql_query( + Query( + dataset=Dataset.Metrics.value, + match=Entity(EntityKey.MetricsCounters.value), + select=aggregates + [Column("value")], + where=where + + [ + Condition(Column("metric_id"), Op.EQ, metric_id(org_id, "session")), + Condition( + Column(session_status_column_name), + Op.IN, + get_tag_values_list(org_id, ["abnormal", "crashed", "init"]), + ), + ], + groupby=aggregates, + ), + referrer="release_health.metrics.get_abnormal_and_crashed_sessions_for_overview", + )["data"]: + key = ( + row["project_id"], + reverse_tag_value(org_id, row[release_column_name]), + reverse_tag_value(org_id, row[session_status_column_name]), + ) + rv_sessions[key] = row["value"] + + return rv_sessions + + @staticmethod + def _get_users_and_crashed_users_for_overview( + where: List[Condition], org_id: int + ) -> Mapping[Tuple[int, str, str], int]: + release_column_name = tag_key(org_id, "release") + session_status_column_name = tag_key(org_id, "session.status") + + aggregates: List[SelectableExpression] = [ + Column(release_column_name), + Column("project_id"), + Column(session_status_column_name), + ] + + # Count of users and crashed users + rv_users: Dict[Tuple[int, str, str], int] = {} + + # Avoid mutating input parameters here + select = aggregates + [Column("value")] + where = where + [ + Condition(Column("metric_id"), Op.EQ, metric_id(org_id, "user")), + Condition( + Column(session_status_column_name), + Op.IN, + get_tag_values_list(org_id, ["crashed", "init"]), + ), + ] + + for row in raw_snql_query( + Query( + dataset=Dataset.Metrics.value, + match=Entity(EntityKey.MetricsSets.value), + select=select, + where=where, + groupby=aggregates, + ), + referrer="release_health.metrics.get_users_and_crashed_users_for_overview", + )["data"]: + key = ( + row["project_id"], + reverse_tag_value(org_id, row[release_column_name]), + reverse_tag_value(org_id, row[session_status_column_name]), + ) + rv_users[key] = row["value"] + + return rv_users + + @staticmethod + def _get_health_stats_for_overview( + where: List[Condition], + org_id: int, + health_stats_period: StatsPeriod, + stat: OverviewStat, + now: datetime, + ) -> Mapping[ProjectRelease, 
List[List[int]]]: + release_column_name = tag_key(org_id, "release") + session_status_column_name = tag_key(org_id, "session.status") + session_init_tag_value = tag_value(org_id, "init") + + stats_rollup, stats_start, stats_buckets = get_rollup_starts_and_buckets( + health_stats_period + ) + + aggregates: List[SelectableExpression] = [ + Column(release_column_name), + Column("project_id"), + Column("bucketed_time"), + ] + + rv: Dict[ProjectRelease, List[List[int]]] = defaultdict(lambda: _make_stats(stats_start, stats_rollup, stats_buckets)) # type: ignore + + entity = { + "users": EntityKey.MetricsSets.value, + "sessions": EntityKey.MetricsCounters.value, + }[stat] + + metric_name = metric_id(org_id, {"sessions": "session", "users": "user"}[stat]) + + for row in raw_snql_query( + Query( + dataset=Dataset.Metrics.value, + match=Entity(entity), + select=aggregates + [Column("value")], + where=where + + [ + Condition(Column("metric_id"), Op.EQ, metric_name), + Condition(Column("timestamp"), Op.GTE, stats_start), + Condition(Column("timestamp"), Op.LT, now), + Condition( + Column(session_status_column_name), + Op.EQ, + session_init_tag_value, + ), + ], + granularity=Granularity(stats_rollup), + groupby=aggregates, + ), + referrer="release_health.metrics.get_health_stats_for_overview", + )["data"]: + time_bucket = int( + (parse_snuba_datetime(row["bucketed_time"]) - stats_start).total_seconds() + / stats_rollup + ) + key = row["project_id"], reverse_tag_value(org_id, row[release_column_name]) + timeseries = rv[key] + if time_bucket < len(timeseries): + timeseries[time_bucket][1] = row["value"] + + return rv + + def get_release_health_data_overview( + self, + project_releases: Sequence[ProjectRelease], + environments: Optional[Sequence[EnvironmentName]] = None, + summary_stats_period: Optional[StatsPeriod] = None, + health_stats_period: Optional[StatsPeriod] = None, + stat: Optional[OverviewStat] = None, + ) -> Mapping[ProjectRelease, ReleaseHealthOverview]: + if stat is None: + stat = "sessions" + assert stat in ("sessions", "users") + now = datetime.now(pytz.utc) + _, summary_start, _ = get_rollup_starts_and_buckets(summary_stats_period or "24h") + + org_id = self._get_org_id([x for x, _ in project_releases]) + + where: List[Condition] = [ + Condition(Column("org_id"), Op.EQ, org_id), + filter_projects_by_project_release(project_releases), + Condition(Column("timestamp"), Op.GTE, summary_start), + Condition(Column("timestamp"), Op.LT, now), + ] + + if environments is not None: + where.append( + Condition( + Column(tag_key(org_id, "environment")), + Op.IN, + get_tag_values_list(org_id, environments), + ) + ) + + if health_stats_period: + health_stats_data = self._get_health_stats_for_overview( + where, org_id, health_stats_period, stat, now + ) + else: + health_stats_data = {} + + rv_durations = self._get_session_duration_data_for_overview(where, org_id) + rv_errored_sessions = self._get_errored_sessions_for_overview(where, org_id) + rv_sessions = self._get_session_by_status_for_overview(where, org_id) + rv_users = self._get_users_and_crashed_users_for_overview(where, org_id) + + # XXX: In order to be able to dual-read and compare results from both + # old and new backend, this should really go back through the + # release_health service instead of directly calling `self`. For now + # that makes the entire backend too hard to test though. 
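To make the bucket arithmetic above concrete, a sketch (the hourly rollup of 3600 seconds is an illustrative assumption, not taken from this change):

    stats_rollup = 3600  # assumed hourly rollup
    seconds_past_start = 3 * 3600  # a row whose bucketed_time is three hours after stats_start
    time_bucket = int(seconds_past_start / stats_rollup)  # == 3, its index in the timeseries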
+ release_adoption = self.get_release_adoption(project_releases, environments) + + rv: Dict[ProjectRelease, ReleaseHealthOverview] = {} + + fetch_has_health_data_releases = set() + + default_adoption_info: ReleaseAdoption = { + "adoption": None, + "sessions_adoption": None, + "users_24h": None, + "project_users_24h": None, + "sessions_24h": None, + "project_sessions_24h": None, + } + + for project_id, release in project_releases: + adoption_info: ReleaseAdoption = ( + release_adoption.get((project_id, release)) or default_adoption_info + ) + + total_sessions = rv_sessions.get((project_id, release, "init")) + + total_users = rv_users.get((project_id, release, "init")) + has_health_data = bool(total_sessions) + + # has_health_data is supposed to be irrespective of the currently + # selected rollup window. Therefore we need to run another query + # over 90d just to see if health data is available to compute + # has_health_data correctly. + if not has_health_data and summary_stats_period != "90d": + fetch_has_health_data_releases.add((project_id, release)) + + sessions_crashed = rv_sessions.get((project_id, release, "crashed"), 0) + + users_crashed = rv_users.get((project_id, release, "crashed"), 0) + + rv_row = rv[project_id, release] = { + "adoption": adoption_info.get("adoption"), + "sessions_adoption": adoption_info.get("sessions_adoption"), + "total_users_24h": adoption_info.get("users_24h"), + "total_project_users_24h": adoption_info.get("project_users_24h"), + "total_sessions_24h": adoption_info.get("sessions_24h"), + "total_project_sessions_24h": adoption_info.get("project_sessions_24h"), + "total_sessions": total_sessions, + "total_users": total_users, + "has_health_data": has_health_data, + "sessions_crashed": sessions_crashed, + "crash_free_users": ( + 100 - users_crashed / total_users * 100 if total_users else None + ), + "crash_free_sessions": ( + 100 - sessions_crashed / float(total_sessions) * 100 if total_sessions else None + ), + "sessions_errored": max( + 0, + rv_errored_sessions.get((project_id, release), 0) + - sessions_crashed + - rv_sessions.get((project_id, release, "abnormal"), 0), + ), + "duration_p50": None, + "duration_p90": None, + } + + durations = rv_durations.get((project_id, release)) + if durations: + rv_row.update(durations) + + if health_stats_period: + rv_row["stats"] = {health_stats_period: health_stats_data[project_id, release]} + + if fetch_has_health_data_releases: + has_health_data = self.check_has_health_data(fetch_has_health_data_releases) # type: ignore + + for key in fetch_has_health_data_releases: + rv[key]["has_health_data"] = key in has_health_data # type: ignore + + return rv + + def _get_crash_free_breakdown_fn( + self, + org_id: int, + project_id: ProjectId, + release: ReleaseName, + start: datetime, + environments: Optional[Sequence[EnvironmentName]] = None, + ) -> Callable[[datetime], CrashFreeBreakdown]: + def generate_defaults(end: datetime) -> CrashFreeBreakdown: + """Function to use if querying snuba is not necessary""" + return { + "crash_free_sessions": None, + "crash_free_users": None, + "date": end, + "total_sessions": 0, + "total_users": 0, + } + + # 1) Get required string indexes + try: + release_key = tag_key(org_id, "release") + release_value = tag_value(org_id, release) + environment_key = tag_key(org_id, "environment") + status_key = tag_key(org_id, "session.status") + except MetricIndexNotFound: + # No need to query snuba if any of these is missing + return generate_defaults + + environment_values = None + if environments is 
not None: + environment_values = get_tag_values_list(org_id, environments) + + if environment_values == []: + # No need to query snuba with an empty list + return generate_defaults + + status_init = tag_value(org_id, "init") + status_crashed = tag_value(org_id, "crashed") + + conditions = [ + Condition(Column("org_id"), Op.EQ, org_id), + Condition(Column("project_id"), Op.EQ, project_id), + Condition(Column(release_key), Op.EQ, release_value), + Condition(Column("timestamp"), Op.GTE, start), + Condition(Column(status_key), Op.IN, [status_init, status_crashed]), + ] + if environment_values is not None: + conditions.append(Condition(Column(environment_key), Op.IN, environment_values)) + + def query_stats(end: datetime) -> CrashFreeBreakdown: + def _get_data(entity_key: EntityKey, metric_name: str) -> Tuple[int, int]: + total = 0 + crashed = 0 + metric_id = try_get_string_index(org_id, metric_name) + if metric_id is not None: + where = conditions + [ + Condition(Column("metric_id"), Op.EQ, metric_id), + Condition(Column("timestamp"), Op.LT, end), + ] + data = raw_snql_query( + Query( + dataset=Dataset.Metrics.value, + match=Entity(entity_key.value), + select=[Column("value")], + where=where, + groupby=[Column(status_key)], + ), + referrer="release_health.metrics.crash-free-breakdown.session", + )["data"] + for row in data: + if row[status_key] == status_init: + total = int(row["value"]) + elif row[status_key] == status_crashed: + crashed = int(row["value"]) + + return total, crashed + + sessions_total, sessions_crashed = _get_data(EntityKey.MetricsCounters, "session") + users_total, users_crashed = _get_data(EntityKey.MetricsSets, "user") + + return { + "date": end, + "total_users": users_total, + "crash_free_users": 100 - users_crashed / float(users_total) * 100 + if users_total + else None, + "total_sessions": sessions_total, + "crash_free_sessions": 100 - sessions_crashed / float(sessions_total) * 100 + if sessions_total + else None, + } + + return query_stats + + def get_crash_free_breakdown( + self, + project_id: ProjectId, + release: ReleaseName, + start: datetime, + environments: Optional[Sequence[EnvironmentName]] = None, + ) -> Sequence[CrashFreeBreakdown]: + + org_id = self._get_org_id([project_id]) + + now = datetime.now(pytz.utc) + query_fn = self._get_crash_free_breakdown_fn( + org_id, project_id, release, start, environments + ) + + last: Optional[datetime] = None + rv = [] + for offset in ( + timedelta(days=1), + timedelta(days=2), + timedelta(days=7), + timedelta(days=14), + timedelta(days=30), + ): + try: + end = start + offset + if end > now: + if last is None or (end - last).days > 1: + rv.append(query_fn(now)) + break + rv.append(query_fn(end)) + last = end + except QueryOutsideRetentionError: + # cannot query for these + pass + + return rv + def get_changed_project_release_model_adoptions( self, project_ids: Sequence[ProjectId], @@ -667,7 +1145,7 @@ def get_oldest_health_data_for_releases( releases = [x[1] for x in project_releases] releases_ids = [ release_id - for release_id in [try_get_tag_value(org_id, release) for release in releases] + for release_id in [try_get_string_index(org_id, release) for release in releases] if release_id is not None ] diff --git a/src/sentry/release_health/sessions.py b/src/sentry/release_health/sessions.py index c0ca42904acf04..3c65cdbb98ccaa 100644 --- a/src/sentry/release_health/sessions.py +++ b/src/sentry/release_health/sessions.py @@ -2,23 +2,29 @@ from typing import Mapping, Optional, Sequence, Set, Tuple from 
sentry.release_health.base import ( + CrashFreeBreakdown, CurrentAndPreviousCrashFreeRates, EnvironmentName, OrganizationId, + OverviewStat, ProjectId, ProjectOrRelease, ProjectRelease, ReleaseHealthBackend, + ReleaseHealthOverview, ReleaseName, ReleasesAdoption, ReleaseSessionsTimeBounds, + StatsPeriod, ) from sentry.snuba.sessions import ( _check_has_health_data, _check_releases_have_health_data, _get_changed_project_release_model_adoptions, + _get_crash_free_breakdown, _get_oldest_health_data_for_releases, _get_release_adoption, + _get_release_health_data_overview, _get_release_sessions_time_bounds, get_current_and_previous_crash_free_rates, ) @@ -89,6 +95,33 @@ def check_releases_have_health_data( end, ) + def get_release_health_data_overview( + self, + project_releases: Sequence[ProjectRelease], + environments: Optional[Sequence[EnvironmentName]] = None, + summary_stats_period: Optional[StatsPeriod] = None, + health_stats_period: Optional[StatsPeriod] = None, + stat: Optional[OverviewStat] = None, + ) -> Mapping[ProjectRelease, ReleaseHealthOverview]: + return _get_release_health_data_overview( # type: ignore + project_releases=project_releases, + environments=environments, + summary_stats_period=summary_stats_period, + health_stats_period=health_stats_period, + stat=stat, + ) + + def get_crash_free_breakdown( + self, + project_id: ProjectId, + release: ReleaseName, + start: datetime, + environments: Optional[Sequence[EnvironmentName]] = None, + ) -> Sequence[CrashFreeBreakdown]: + return _get_crash_free_breakdown( # type: ignore + project_id=project_id, release=release, start=start, environments=environments + ) + def get_changed_project_release_model_adoptions( self, project_ids: Sequence[ProjectId], diff --git a/src/sentry/reprocessing.py b/src/sentry/reprocessing.py index c2eb6cd1a03270..fe843a68f08268 100644 --- a/src/sentry/reprocessing.py +++ b/src/sentry/reprocessing.py @@ -39,12 +39,21 @@ def get_reprocessing_revision(project, cached=True): pass -def bump_reprocessing_revision(project): +def bump_reprocessing_revision(project, use_buffer=False): """Bumps the reprocessing revision.""" + from sentry import buffer from sentry.models import ProjectOption rev = uuid.uuid4().hex - ProjectOption.objects.set_value(project, REPROCESSING_OPTION, rev) + if use_buffer: + buffer.incr( + ProjectOption, + columns={}, + filters={"project": project, "key": REPROCESSING_OPTION}, + signal_only=True, + ) + else: + ProjectOption.objects.set_value(project, REPROCESSING_OPTION, rev) return rev diff --git a/src/sentry/rules/actions/notify_event_sentry_app.py b/src/sentry/rules/actions/notify_event_sentry_app.py new file mode 100644 index 00000000000000..fb0189dbf755d8 --- /dev/null +++ b/src/sentry/rules/actions/notify_event_sentry_app.py @@ -0,0 +1,69 @@ +""" +Used for notifying a *specific* sentry app with a custom webhook payload (i.e. 
specified UI components)
+"""
+from typing import Any, Mapping, Optional, Sequence
+
+from sentry.api.serializers import serialize
+from sentry.api.serializers.models.sentry_app_component import SentryAppAlertRuleActionSerializer
+from sentry.coreapi import APIError
+from sentry.eventstore.models import Event
+from sentry.mediators import sentry_app_components
+from sentry.models import Project, SentryApp, SentryAppComponent, SentryAppInstallation
+from sentry.rules.actions.base import EventAction
+from sentry.tasks.sentry_apps import notify_sentry_app
+
+
+class NotifyEventSentryAppAction(EventAction):  # type: ignore
+    actionType = "sentryapp"
+    # Required field for EventAction, value is ignored
+    label = ""
+
+    # TODO(Leander): As there is no form_cls (e.g. NotifyEventSentryAppActionForm) the form data will
+    # not be validated on the backend. This is tricky to do since the schema form is dynamic, and will
+    # be implemented on its own in the future. Frontend validation is still in place in the meantime.
+
+    def get_custom_actions(self, project: Project) -> Sequence[Mapping[str, Any]]:
+        action_list = []
+        for install in SentryAppInstallation.get_installed_for_org(project.organization_id):
+            _components = SentryAppComponent.objects.filter(
+                sentry_app_id=install.sentry_app_id, type="alert-rule-action"
+            )
+            for component in _components:
+                try:
+                    sentry_app_components.Preparer.run(
+                        component=component, install=install, project=project
+                    )
+                    kwargs = {
+                        "install": install,
+                        "event_action": self,
+                    }
+                    action_details = serialize(
+                        component, None, SentryAppAlertRuleActionSerializer(), **kwargs
+                    )
+                    action_list.append(action_details)
+                except APIError:
+                    continue
+        return action_list
+
+    def get_sentry_app(self, event: Event) -> Optional[SentryApp]:
+        extra = {"event_id": event.event_id}
+
+        sentry_app_installation_uuid = self.get_option("sentryAppInstallationUuid")
+        if not sentry_app_installation_uuid:
+            self.logger.info("rules.fail.is_configured", extra=extra)
+            return None
+
+        try:
+            return SentryApp.objects.get(installations__uuid=sentry_app_installation_uuid)
+        except SentryApp.DoesNotExist:
+            self.logger.info("rules.fail.no_app", extra=extra)
+
+        return None
+
+    def after(self, event: Event, state: str) -> Any:
+        sentry_app = self.get_sentry_app(event)
+        yield self.future(
+            notify_sentry_app,
+            sentry_app=sentry_app,
+            schema_defined_settings=self.get_option("settings"),
+        )
diff --git a/src/sentry/rules/actions/notify_event_service.py b/src/sentry/rules/actions/notify_event_service.py
index 8a5d023963a239..7b7dee276202b5 100644
--- a/src/sentry/rules/actions/notify_event_service.py
+++ b/src/sentry/rules/actions/notify_event_service.py
@@ -1,5 +1,5 @@
 """
-Used for notifying a *specific* plugin
+Used for notifying a *specific* plugin/sentry app with a generic webhook payload
 """
 
 import logging
diff --git a/src/sentry/search/events/base.py b/src/sentry/search/events/base.py
index 59ffca21435dd2..c0235624e26456 100644
--- a/src/sentry/search/events/base.py
+++ b/src/sentry/search/events/base.py
@@ -1,4 +1,4 @@
-from typing import List, Mapping, Set
+from typing import List, Mapping, Optional, Set
 
 from django.utils.functional import cached_property
 from snuba_sdk.aliased_expression import AliasedExpression
@@ -12,9 +12,12 @@
 
 
 class QueryBase:
-    def __init__(self, dataset: Dataset, params: ParamsType):
-        self.params = params
+    def __init__(
+        self, dataset: Dataset, params: ParamsType, functions_acl: Optional[List[str]] = None
+    ):
         self.dataset = dataset
+        self.params = params
+
self.functions_acl = set() if functions_acl is None else functions_acl # Function is a subclass of CurriedFunction self.where: List[WhereType] = [] diff --git a/src/sentry/search/events/builder.py b/src/sentry/search/events/builder.py index 3943d7fc86927a..9b84a2ab1f7d24 100644 --- a/src/sentry/search/events/builder.py +++ b/src/sentry/search/events/builder.py @@ -1,7 +1,9 @@ -from typing import List, Optional +from typing import List, Optional, Tuple +from snuba_sdk.column import Column from snuba_sdk.entity import Entity from snuba_sdk.expressions import Limit, Offset +from snuba_sdk.orderby import LimitBy from snuba_sdk.query import Query from sentry.search.events.fields import InvalidSearchQuery @@ -22,16 +24,20 @@ def __init__( orderby: Optional[List[str]] = None, auto_aggregations: bool = False, use_aggregate_conditions: bool = False, - limit: int = 50, + functions_acl: Optional[List[str]] = None, + limit: Optional[int] = 50, offset: Optional[int] = 0, + limitby: Optional[Tuple[str, int]] = None, ): - super().__init__(dataset, params) + super().__init__(dataset, params, functions_acl) # TODO: implement this in `resolve_select` self.auto_aggregations = auto_aggregations - self.limit = Limit(limit) - self.offset = Offset(0 if offset is None else offset) + self.limit = None if limit is None else Limit(limit) + self.offset = None if offset is None else Offset(offset) + + self.limitby = self.resolve_limitby(limitby) self.where, self.having = self.resolve_conditions( query, use_aggregate_conditions=use_aggregate_conditions @@ -47,6 +53,20 @@ def __init__( def select(self) -> Optional[List[SelectType]]: return self.columns + def resolve_limitby(self, limitby: Optional[Tuple[str, int]]) -> Optional[LimitBy]: + if limitby is None: + return None + + column, count = limitby + resolved = self.resolve_column(column) + + if isinstance(resolved, Column): + return LimitBy(resolved, count) + + # TODO: Limit By can only operate on a `Column`. This has the implication + # that non aggregate transforms are not allowed in the order by clause. + raise InvalidSearchQuery(f"{column} used in a limit by but is not a column.") + @property def groupby(self) -> Optional[List[SelectType]]: if self.aggregates: @@ -79,4 +99,5 @@ def get_snql_query(self) -> Query: orderby=self.orderby, limit=self.limit, offset=self.offset, + limitby=self.limitby, ) diff --git a/src/sentry/search/events/constants.py b/src/sentry/search/events/constants.py index 690f407a63efc9..9ca48450423bf2 100644 --- a/src/sentry/search/events/constants.py +++ b/src/sentry/search/events/constants.py @@ -7,7 +7,6 @@ Query timeout. Please try again. If the problem persists try a smaller date range or fewer projects. Also consider a filter on the transaction field if you're filtering performance data. 
""" -KEY_TRANSACTION_ALIAS = "key_transaction" PROJECT_THRESHOLD_CONFIG_INDEX_ALIAS = "project_threshold_config_index" PROJECT_THRESHOLD_OVERRIDE_CONFIG_INDEX_ALIAS = "project_threshold_override_config_index" PROJECT_THRESHOLD_CONFIG_ALIAS = "project_threshold_config" diff --git a/src/sentry/search/events/fields.py b/src/sentry/search/events/fields.py index 8666ddebc387a8..8c5173760abce6 100644 --- a/src/sentry/search/events/fields.py +++ b/src/sentry/search/events/fields.py @@ -11,7 +11,7 @@ from snuba_sdk.function import Function from snuba_sdk.orderby import Direction, OrderBy -from sentry.discover.models import KeyTransaction, TeamKeyTransaction +from sentry.discover.models import TeamKeyTransaction from sentry.exceptions import InvalidSearchQuery from sentry.models import Project, ProjectTeam, ProjectTransactionThreshold from sentry.models.transaction_threshold import ( @@ -30,7 +30,6 @@ FUNCTION_PATTERN, ISSUE_ALIAS, ISSUE_ID_ALIAS, - KEY_TRANSACTION_ALIAS, MEASUREMENTS_FRAMES_FROZEN_RATE, MEASUREMENTS_FRAMES_SLOW_RATE, MEASUREMENTS_STALL_PERCENTAGE, @@ -53,6 +52,7 @@ from sentry.utils.compat import zip from sentry.utils.numbers import format_grouped_length from sentry.utils.snuba import ( + SESSIONS_SNUBA_MAP, Dataset, get_json_type, is_duration_measurement, @@ -103,50 +103,6 @@ def validate(self): ), f"{self.name}: only one of expression, expression_fn is allowed" -def key_transaction_expression(user_id, organization_id, project_ids): - """ - This function may be called multiple times, making for repeated data bases queries. - Lifting the query higher to earlier in the call stack will require a lot more changes - as there are numerous entry points. So we will leave the duplicate query alone for now. - """ - if user_id is None or organization_id is None or project_ids is None: - raise InvalidSearchQuery("Missing necessary meta for key transaction field.") - - key_transactions = ( - KeyTransaction.objects.filter( - owner_id=user_id, - organization_id=organization_id, - project_id__in=project_ids, - ) - .order_by("transaction", "project_id") - .values("project_id", "transaction") - ) - - # if there are no key transactions, the value should always be 0 - if not len(key_transactions): - return ["toInt64", [0]] - - return [ - "has", - [ - [ - "array", - [ - [ - "tuple", - [ - ["toUInt64", [transaction["project_id"]]], - "'{}'".format(transaction["transaction"]), - ], - ] - for transaction in key_transactions - ], - ], - ["tuple", ["project_id", "transaction"]], - ], - ] - - def project_threshold_config_expression(organization_id, project_ids): """ This function returns a column with the threshold and threshold metric @@ -411,18 +367,6 @@ def normalize_count_if_value(args: Mapping[str, str]) -> Union[float, str, int]: USER_DISPLAY_ALIAS, expression=["coalesce", ["user.email", "user.username", "user.ip"]], ), - # the key transaction field is intentially not added to the discover/fields list yet - # because there needs to be some work on the front end to integrate this into discover - PseudoField( - KEY_TRANSACTION_ALIAS, - KEY_TRANSACTION_ALIAS, - expression_fn=lambda params: key_transaction_expression( - params.get("user_id"), - params.get("organization_id"), - params.get("project_id"), - ), - result_type="boolean", - ), PseudoField( PROJECT_THRESHOLD_CONFIG_ALIAS, PROJECT_THRESHOLD_CONFIG_ALIAS, @@ -431,6 +375,8 @@ def normalize_count_if_value(args: Mapping[str, str]) -> Union[float, str, int]: params.get("project_id"), ), ), + # the team key transaction field is intentially not added to 
the discover/fields list yet + # because there needs to be some work on the front end to integrate this into discover PseudoField( TEAM_KEY_TRANSACTION_ALIAS, TEAM_KEY_TRANSACTION_ALIAS, @@ -928,7 +874,7 @@ def is_function(field: str) -> Optional[Match[str]]: return None -def get_function_alias(field): +def get_function_alias(field: str) -> str: match = FUNCTION_PATTERN.search(field) if match is None: return field @@ -940,7 +886,7 @@ def get_function_alias(field): return get_function_alias_with_columns(function, columns) -def get_function_alias_with_columns(function_name, columns): +def get_function_alias_with_columns(function_name, columns) -> str: columns = re.sub(r"[^\w]", "_", "_".join(str(col) for col in columns)) return f"{function_name}_{columns}".rstrip("_") @@ -1338,6 +1284,16 @@ def normalize(self, value: str, _) -> str: raise InvalidFunctionArgument(f"{value} is not a valid string array column") +class SessionColumnArg(ColumnArg): + # XXX(ahmed): hack to get this to work with crash rate alerts over the sessions dataset until + # we deprecate the logic that is tightly coupled with the events dataset. At which point, + # we will just rely on dataset specific logic and refactor this class out + def normalize(self, value: str, _) -> str: + if value in SESSIONS_SNUBA_MAP: + return value + raise InvalidFunctionArgument(f"{value} is not a valid sessions dataset column") + + def with_default(default, argument): argument.has_default = True argument.get_default = lambda *_: default @@ -2146,6 +2102,12 @@ def is_accessible(self, acl=None): ], ], ), + DiscoverFunction( + "identity", + required_args=[SessionColumnArg("column")], + aggregate=["identity", ArgValue("column"), None], + private=True, + ), ] } @@ -2204,8 +2166,10 @@ def validate(self): class QueryFields(QueryBase): """Field logic for a snql query""" - def __init__(self, dataset: Dataset, params: ParamsType): - super().__init__(dataset, params) + def __init__( + self, dataset: Dataset, params: ParamsType, functions_acl: Optional[List[str]] = None + ): + super().__init__(dataset, params, functions_acl) self.field_alias_converter: Mapping[str, Callable[[str], SelectType]] = { # NOTE: `ISSUE_ALIAS` simply maps to the id, meaning that post processing @@ -2658,8 +2622,16 @@ def __init__(self, dataset: Dataset, params: ParamsType): ), default_result_type="number", ), + SnQLFunction( + "array_join", + required_args=[StringArrayColumn("column")], + snql_aggregate=lambda args, alias: Function( + "arrayJoin", [args["column"]], alias + ), + default_result_type="string", + private=True, + ), # TODO: implement these - SnQLFunction("array_join", snql_aggregate=self._resolve_unimplemented_function), SnQLFunction("histogram", snql_aggregate=self._resolve_unimplemented_function), SnQLFunction("percentage", snql_aggregate=self._resolve_unimplemented_function), SnQLFunction("t_test", snql_aggregate=self._resolve_unimplemented_function), @@ -2806,7 +2778,9 @@ def resolve_function(self, function: str, match: Optional[Match[str]] = None) -> raise NotImplementedError("Aggregate aliases not implemented in snql field parsing yet") name, arguments, alias = self.parse_function(match) - snql_function = self.function_converter.get(name) + snql_function = self.function_converter[name] + if not snql_function.is_accessible(self.functions_acl): + raise InvalidSearchQuery(f"{snql_function.name}: no access to private function") arguments = snql_function.format_as_arguments(name, arguments, self.params) for arg in snql_function.args: diff --git 
a/src/sentry/search/events/filter.py b/src/sentry/search/events/filter.py index c9409d82a4b239..92c358200d23f1 100644 --- a/src/sentry/search/events/filter.py +++ b/src/sentry/search/events/filter.py @@ -29,7 +29,6 @@ ERROR_UNHANDLED_ALIAS, ISSUE_ALIAS, ISSUE_ID_ALIAS, - KEY_TRANSACTION_ALIAS, MAX_SEARCH_RELEASES, NO_CONVERSION_FIELDS, OPERATOR_NEGATION_MAP, @@ -301,26 +300,6 @@ def _error_handled_filter_converter( raise InvalidSearchQuery("Invalid value for error.handled condition. Accepted values are 1, 0") -def _key_transaction_filter_converter( - search_filter: SearchFilter, - name: str, - params: Optional[Mapping[str, Union[int, str, datetime]]], -): - value = search_filter.value.value - key_transaction_expr = FIELD_ALIASES[KEY_TRANSACTION_ALIAS].get_expression(params) - - if search_filter.value.raw_value == "": - operator = "!=" if search_filter.operator == "!=" else "=" - return [key_transaction_expr, operator, 0] - if value in ("1", 1): - return [key_transaction_expr, "=", 1] - if value in ("0", 0): - return [key_transaction_expr, "=", 0] - raise InvalidSearchQuery( - "Invalid value for key_transaction condition. Accepted values are 1, 0" - ) - - def _team_key_transaction_filter_converter( search_filter: SearchFilter, name: str, @@ -337,7 +316,7 @@ def _team_key_transaction_filter_converter( if value in ("0", 0): return [key_transaction_expr, "=", 0] raise InvalidSearchQuery( - "Invalid value for key_transaction condition. Accepted values are 1, 0" + "Invalid value for team_key_transaction condition. Accepted values are 1, 0" ) @@ -596,7 +575,6 @@ def parse_semver(version, operator) -> Optional[SemverFilter]: USER_DISPLAY_ALIAS: _user_display_filter_converter, ERROR_UNHANDLED_ALIAS: _error_unhandled_filter_converter, "error.handled": _error_handled_filter_converter, - KEY_TRANSACTION_ALIAS: _key_transaction_filter_converter, TEAM_KEY_TRANSACTION_ALIAS: _team_key_transaction_filter_converter, RELEASE_STAGE_ALIAS: _release_stage_filter_converter, SEMVER_ALIAS: _semver_filter_converter, @@ -1059,8 +1037,10 @@ def format_search_filter(term, params): class QueryFilter(QueryFields): """Filter logic for a snql query""" - def __init__(self, dataset: Dataset, params: ParamsType): - super().__init__(dataset, params) + def __init__( + self, dataset: Dataset, params: ParamsType, functions_acl: Optional[List[str]] = None + ): + super().__init__(dataset, params, functions_acl) self.search_filter_converter: Mapping[ str, Callable[[SearchFilter], Optional[WhereType]] diff --git a/src/sentry/security/emails.py b/src/sentry/security/emails.py index 581f5fad5e0d01..7f87d8467e0f2e 100644 --- a/src/sentry/security/emails.py +++ b/src/sentry/security/emails.py @@ -1,9 +1,22 @@ +from datetime import datetime +from typing import TYPE_CHECKING, Any, Mapping, Optional + from django.utils import timezone from sentry.utils.email import MessageBuilder +if TYPE_CHECKING: + from sentry.models import User + -def generate_security_email(account, type, actor, ip_address, context=None, current_datetime=None): +def generate_security_email( + account: "User", + type: str, + actor: "User", + ip_address: str, + context: Optional[Mapping[str, Any]] = None, + current_datetime: Optional[datetime] = None, +) -> MessageBuilder: if current_datetime is None: current_datetime = timezone.now() diff --git a/src/sentry/security/utils.py b/src/sentry/security/utils.py index 038768484018a3..69dcf044577f53 100644 --- a/src/sentry/security/utils.py +++ b/src/sentry/security/utils.py @@ -1,16 +1,27 @@ import logging +from datetime 
import datetime +from typing import TYPE_CHECKING, Any, Mapping, Optional -from django.conf import settings from django.utils import timezone from .emails import generate_security_email +if TYPE_CHECKING: + from sentry.models import User + + logger = logging.getLogger("sentry.security") def capture_security_activity( - account, type, actor, ip_address, context=None, send_email=True, current_datetime=None -): + account: "User", + type: str, + actor: "User", + ip_address: str, + context: Optional[Mapping[str, Any]] = None, + send_email: bool = True, + current_datetime: Optional[datetime] = None, +) -> None: if current_datetime is None: current_datetime = timezone.now() @@ -31,7 +42,3 @@ def capture_security_activity( current_datetime=current_datetime, ) msg.send_async([account.email]) - - -def is_valid_email_address(value): - return not settings.INVALID_EMAIL_ADDRESS_PATTERN.search(value) diff --git a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py index 504f927d588be1..a415bfa4634287 100644 --- a/src/sentry/snuba/discover.py +++ b/src/sentry/snuba/discover.py @@ -1,6 +1,9 @@ import logging import math from collections import namedtuple +from copy import deepcopy +from datetime import timedelta +from typing import Dict, Optional, Sequence import sentry_sdk @@ -26,7 +29,9 @@ SNUBA_AND, SNUBA_OR, Dataset, + SnubaQueryParams, SnubaTSResult, + bulk_raw_query, get_array_column_alias, get_array_column_field, get_measurement_name, @@ -235,6 +240,7 @@ def query( orderby=orderby, auto_aggregations=auto_aggregations, use_aggregate_conditions=use_aggregate_conditions, + functions_acl=functions_acl, limit=limit, offset=offset, ) @@ -450,7 +456,15 @@ def get_timeseries_snuba_filter(selected_columns, query, params): return snuba_filter, translated_columns -def timeseries_query(selected_columns, query, params, rollup, referrer=None, zerofill_results=True): +def timeseries_query( + selected_columns: Sequence[str], + query: str, + params: Dict[str, str], + rollup: int, + referrer: Optional[str] = None, + zerofill_results: bool = True, + comparison_delta: Optional[timedelta] = None, +): """ High-level API for doing arbitrary user timeseries queries against events. @@ -469,6 +483,9 @@ def timeseries_query(selected_columns, query, params, rollup, referrer=None, zer params (Dict[str, str]) Filtering parameters with start, end, project_id, environment, rollup (int) The bucket width in seconds referrer (str|None) A referrer string to help locate the origin of this query. + comparison_delta: A timedelta used to convert this into a comparison query. We make a second + query time-shifted back by comparison_delta, and compare the results to get the % change for each + time bucket. 
Requires that we only pass in one column.
     """
     with sentry_sdk.start_span(
         op="discover.discover", description="timeseries.filter_transform"
@@ -477,7 +494,7 @@ def timeseries_query(selected_columns, query, params, rollup, referrer=None, zer
         snuba_filter, _ = get_timeseries_snuba_filter(selected_columns, query, params)
 
     with sentry_sdk.start_span(op="discover.discover", description="timeseries.snuba_query"):
-        result = raw_query(
+        base_query_params = SnubaQueryParams(
             # Hack cause equations on aggregates have to go in selected columns instead of aggregations
             selected_columns=[
                 column
@@ -499,18 +516,44 @@ def timeseries_query(selected_columns, query, params, rollup, referrer=None, zer
             limit=10000,
             referrer=referrer,
         )
+        query_params_list = [base_query_params]
+        if comparison_delta:
+            if len(base_query_params.aggregations) != 1:
+                raise InvalidSearchQuery("Only one column can be selected for comparison queries")
+            comp_query_params = deepcopy(base_query_params)
+            comp_query_params.start -= comparison_delta
+            comp_query_params.end -= comparison_delta
+            query_params_list.append(comp_query_params)
+        query_results = bulk_raw_query(query_params_list)
 
     with sentry_sdk.start_span(
         op="discover.discover", description="timeseries.transform_results"
     ) as span:
-        span.set_data("result_count", len(result.get("data", [])))
-        result = (
-            zerofill(result["data"], snuba_filter.start, snuba_filter.end, rollup, "time")
-            if zerofill_results
-            else result["data"]
-        )
+        results = []
+        for query_params, query_result in zip(query_params_list, query_results):
+            span.set_data("result_count", len(query_result.get("data", [])))
+            results.append(
+                zerofill(
+                    query_result["data"], query_params.start, query_params.end, rollup, "time"
+                )
+                if zerofill_results
+                else query_result["data"]
+            )
+
+        if len(results) == 2:
+            col_name = base_query_params.aggregations[0][2]
+            # If we have two sets of results then we're doing a comparison query. Divide the primary
+            # results by the comparison results.
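
To make the percent-change arithmetic in the loop that follows concrete, here is a minimal standalone sketch with made-up bucket values; `count` stands in for whatever aggregate alias (`col_name`) the query selected:

```python
# Hypothetical time buckets: current period vs. the same buckets shifted back by comparison_delta.
current = [{"time": 0, "count": 150}, {"time": 60, "count": 80}]
previous = [{"time": 0, "count": 100}, {"time": 60, "count": 0}]

for result, cmp_result in zip(current, previous):
    result_val, cmp_result_val = result.get("count", 0), cmp_result.get("count", 0)
    # Mirror the guard below: an empty comparison bucket yields 0 rather than a division error.
    result["count"] = ((result_val / cmp_result_val) - 1) * 100 if cmp_result_val else 0

print(current)  # [{'time': 0, 'count': 50.0}, {'time': 60, 'count': 0}]
```
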
+ for result, cmp_result in zip(results[0], results[1]): + result_val, cmp_result_val = result.get(col_name, 0), cmp_result.get(col_name, 0) + comparison_value = 0 + if cmp_result_val: + comparison_value = ((result_val / cmp_result_val) - 1) * 100 + result[col_name] = comparison_value + + results = results[0] - return SnubaTSResult({"data": result}, snuba_filter.start, snuba_filter.end, rollup) + return SnubaTSResult({"data": results}, snuba_filter.start, snuba_filter.end, rollup) def create_result_key(result_row, fields, issues) -> str: @@ -613,18 +656,24 @@ def top_events_timeseries( { event.get(alias) for event in top_events["data"] - if field in event and not isinstance(event.get(field), list) + if (field in event or alias in event) and not isinstance(event.get(field), list) } ) if values: if field in FIELD_ALIASES: # Fallback to the alias if for whatever reason we can't find it resolved_field = alias - # Search selected columns for the resolved version of the alias - for column in snuba_filter.selected_columns: - if isinstance(column, list) and column[-1] == field: - resolved_field = column - break + # Issue needs special handling since its aliased uniquely + if field == "issue": + resolved_field = "group_id" + else: + # Search selected columns for the resolved version of the alias + for column in snuba_filter.selected_columns: + if isinstance(column, list) and ( + column[-1] == field or column[-1] == alias + ): + resolved_field = column + break else: resolved_field = resolve_discover_column(field) diff --git a/src/sentry/snuba/models.py b/src/sentry/snuba/models.py index 6d7dd76e134d41..ac7cef591bfade 100644 --- a/src/sentry/snuba/models.py +++ b/src/sentry/snuba/models.py @@ -23,6 +23,7 @@ class QueryAggregations(Enum): class QueryDatasets(Enum): EVENTS = "events" TRANSACTIONS = "transactions" + SESSIONS = "sessions" class SnubaQuery(Model): diff --git a/src/sentry/snuba/query_subscription_consumer.py b/src/sentry/snuba/query_subscription_consumer.py index ae9dd73cb5b1a6..5f2ff1c426be33 100644 --- a/src/sentry/snuba/query_subscription_consumer.py +++ b/src/sentry/snuba/query_subscription_consumer.py @@ -54,6 +54,7 @@ class QuerySubscriptionConsumer: topic_to_dataset: Dict[str, QueryDatasets] = { settings.KAFKA_EVENTS_SUBSCRIPTIONS_RESULTS: QueryDatasets.EVENTS, settings.KAFKA_TRANSACTIONS_SUBSCRIPTIONS_RESULTS: QueryDatasets.TRANSACTIONS, + settings.KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS: QueryDatasets.SESSIONS, } def __init__( diff --git a/src/sentry/snuba/sessions.py b/src/sentry/snuba/sessions.py index a326af38a31e52..c85df844edb1c3 100644 --- a/src/sentry/snuba/sessions.py +++ b/src/sentry/snuba/sessions.py @@ -355,7 +355,7 @@ def extract_duration_quantiles(raw_stats): } -def get_release_health_data_overview( +def _get_release_health_data_overview( project_releases, environments=None, summary_stats_period=None, @@ -480,7 +480,7 @@ def get_release_health_data_overview( return rv -def get_crash_free_breakdown(project_id, release, start, environments=None): +def _get_crash_free_breakdown(project_id, release, start, environments=None): filter_keys = {"project_id": [project_id]} conditions = [["release", "=", release]] if environments is not None: diff --git a/src/sentry/snuba/tasks.py b/src/sentry/snuba/tasks.py index cec626ff90ec26..fb59e6f38ce11a 100644 --- a/src/sentry/snuba/tasks.py +++ b/src/sentry/snuba/tasks.py @@ -1,10 +1,12 @@ import logging +import re from datetime import timedelta import sentry_sdk from django.utils import timezone from snuba_sdk.legacy import json_to_snql 
+from sentry.constants import CRASH_RATE_ALERT_SESSION_COUNT_ALIAS from sentry.search.events.fields import resolve_field_list from sentry.search.events.filter import get_filter from sentry.snuba.models import QueryDatasets, QuerySubscription @@ -47,6 +49,9 @@ def apply_dataset_query_conditions(dataset, query, event_types, discover=False): if not discover and dataset == QueryDatasets.TRANSACTIONS: return query + if dataset == QueryDatasets.SESSIONS: + return query + if event_types: event_type_conditions = " OR ".join( f"event.type:{event_type.name.lower()}" for event_type in event_types @@ -169,14 +174,31 @@ def delete_subscription_from_snuba(query_subscription_id, **kwargs): def build_snuba_filter(dataset, query, aggregate, environment, event_types, params=None): - resolve_func = ( - resolve_column(Dataset.Events) - if dataset == QueryDatasets.EVENTS - else resolve_column(Dataset.Transactions) - ) + resolve_func = { + QueryDatasets.EVENTS: resolve_column(Dataset.Events), + QueryDatasets.SESSIONS: resolve_column(Dataset.Sessions), + QueryDatasets.TRANSACTIONS: resolve_column(Dataset.Transactions), + }[dataset] + + functions_acl = None + + aggregations = [aggregate] + if dataset == QueryDatasets.SESSIONS: + # This aggregation is added to return the total number of sessions in crash + # rate alerts that is used to identify if we are below a general minimum alert threshold + count_col = re.search(r"(sessions|users)", aggregate) + count_col_matched = count_col.group() + + aggregations += [f"identity({count_col_matched}) AS {CRASH_RATE_ALERT_SESSION_COUNT_ALIAS}"] + functions_acl = ["identity"] + query = apply_dataset_query_conditions(dataset, query, event_types) snuba_filter = get_filter(query, params=params) - snuba_filter.update_with(resolve_field_list([aggregate], snuba_filter, auto_fields=False)) + snuba_filter.update_with( + resolve_field_list( + aggregations, snuba_filter, auto_fields=False, functions_acl=functions_acl + ) + ) snuba_filter = resolve_snuba_aliases(snuba_filter, resolve_func)[0] if snuba_filter.group_ids: snuba_filter.conditions.append(["group_id", "IN", list(map(int, snuba_filter.group_ids))]) @@ -204,6 +226,14 @@ def _create_in_snuba(subscription): "time_window": snuba_query.time_window, "resolution": snuba_query.resolution, } + + if Dataset(snuba_query.dataset) == Dataset.Sessions: + body.update( + { + "organization": subscription.project.organization_id, + } + ) + try: metrics.incr("snuba.snql.subscription.create", tags={"dataset": snuba_query.dataset}) snql_query = json_to_snql(body, snuba_query.dataset) diff --git a/src/sentry/tasks/assemble.py b/src/sentry/tasks/assemble.py index 3cce5657e8d686..9907c1bfebdac3 100644 --- a/src/sentry/tasks/assemble.py +++ b/src/sentry/tasks/assemble.py @@ -137,7 +137,7 @@ def assemble_dif(project_id, name, checksum, chunks, debug_id=None, **kwargs): # and might resolve processing issues. If the file was not # created, someone else has created it and will bump the # revision instead. 
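
For the sessions dataset, the `build_snuba_filter` change above pulls the base count column out of the alert's aggregate string and appends an `identity(...)` aggregation under a dedicated alias, so the subscription also returns the raw session/user count. A standalone sketch of that extraction; the aggregate string is illustrative and the alias value is a hypothetical stand-in for `CRASH_RATE_ALERT_SESSION_COUNT_ALIAS`:

```python
import re

# Illustrative crash rate alert aggregate string.
aggregate = "percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate"

count_col = re.search(r"(sessions|users)", aggregate)
assert count_col is not None  # crash rate aggregates always reference sessions or users
extra_aggregation = f"identity({count_col.group()}) AS _total_count"  # alias is hypothetical
print(extra_aggregation)  # identity(sessions) AS _total_count
```
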
- bump_reprocessing_revision(project) + bump_reprocessing_revision(project, use_buffer=True) except Exception: set_assemble_status( AssembleTask.DIF, diff --git a/src/sentry/tasks/check_auth.py b/src/sentry/tasks/check_auth.py index 31346de14a54ae..d1665e57a35dd1 100644 --- a/src/sentry/tasks/check_auth.py +++ b/src/sentry/tasks/check_auth.py @@ -22,13 +22,20 @@ def check_auth(**kwargs): # TODO(dcramer): we should remove identities if they've been inactivate # for a reasonable interval now = timezone.now() + chunk_size = 100 cutoff = now - timedelta(seconds=AUTH_CHECK_INTERVAL) - identity_list = list(AuthIdentity.objects.filter(last_synced__lte=cutoff)) - AuthIdentity.objects.filter(id__in=[i.id for i in identity_list]).update(last_synced=now) - for identity in identity_list: - check_auth_identity.apply_async( - kwargs={"auth_identity_id": identity.id}, expires=AUTH_CHECK_INTERVAL - ) + identity_ids_list = list( + AuthIdentity.objects.using_replica() + .filter(last_synced__lte=cutoff) + .values_list("id", flat=True) + ) + for n in range(0, len(identity_ids_list), chunk_size): + identity_ids_chunk = identity_ids_list[n : n + chunk_size] + AuthIdentity.objects.filter(id__in=identity_ids_chunk).update(last_synced=now) + for identity_id in identity_ids_chunk: + check_auth_identity.apply_async( + kwargs={"auth_identity_id": identity_id}, expires=AUTH_CHECK_INTERVAL + ) @instrumented_task(name="sentry.tasks.check_auth_identity", queue="auth") diff --git a/src/sentry/tasks/integrations.py b/src/sentry/tasks/integrations.py index 2d9381b3b265f4..b3cdf429151c94 100644 --- a/src/sentry/tasks/integrations.py +++ b/src/sentry/tasks/integrations.py @@ -1,6 +1,7 @@ import logging from datetime import timedelta from time import time +from typing import Any, Mapping from django.core.exceptions import ObjectDoesNotExist @@ -21,6 +22,7 @@ from sentry.models.apitoken import generate_token from sentry.shared_integrations.exceptions import ApiError, ApiUnauthorized, IntegrationError from sentry.tasks.base import instrumented_task, retry, track_group_async_operation +from sentry.types.activity import ActivityType logger = logging.getLogger("sentry.tasks.integrations") @@ -403,3 +405,44 @@ def vsts_subscription_check(integration_id, organization_id, **kwargs): integration.metadata["subscription"]["check"] = time() integration.save() + + +@instrumented_task( + name="sentry.tasks.integrations.sync_status_inbound", + queue="integrations", + default_retry_delay=60 * 5, + max_retries=5, +) +@retry(exclude=(Integration.DoesNotExist,)) +@track_group_async_operation +def sync_status_inbound( + integration_id: int, organization_id: int, issue_key: str, data: Mapping[str, Any] +) -> None: + from sentry.integrations.issues import ResolveSyncAction + + integration = Integration.objects.get(id=integration_id) + affected_groups = list( + Group.objects.get_groups_by_external_issue(integration, issue_key) + .filter(project__organization_id=organization_id) + .select_related("project") + ) + + if not affected_groups: + return + + installation = integration.get_installation(organization_id=organization_id) + + try: + # This makes an API call. 
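
The `check_auth` rewrite above bounds each database round-trip by walking the identity ids in fixed-size chunks, bulk-updating `last_synced` per chunk before fanning out one task per id. A minimal sketch of the chunking pattern, with made-up ids:

```python
# 250 hypothetical identity ids, processed 100 at a time as in the task above.
identity_ids = list(range(250))
chunk_size = 100

for n in range(0, len(identity_ids), chunk_size):
    chunk = identity_ids[n : n + chunk_size]
    # Here the real task bulk-updates last_synced, then enqueues check_auth_identity per id.
    print(f"update {len(chunk)} rows, enqueue {len(chunk)} tasks")
# -> chunks of 100, 100 and 50
```
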
+ action = installation.get_resolve_sync_action(data) + except Exception: + return + + if action == ResolveSyncAction.RESOLVE: + Group.objects.update_group_status( + affected_groups, GroupStatus.RESOLVED, ActivityType.SET_RESOLVED + ) + elif action == ResolveSyncAction.UNRESOLVE: + Group.objects.update_group_status( + affected_groups, GroupStatus.UNRESOLVED, ActivityType.SET_UNRESOLVED + ) diff --git a/src/sentry/tasks/low_priority_symbolication.py b/src/sentry/tasks/low_priority_symbolication.py new file mode 100644 index 00000000000000..78b65d13efd635 --- /dev/null +++ b/src/sentry/tasks/low_priority_symbolication.py @@ -0,0 +1,87 @@ +""" +Tasks that automate the job of moving projects in and out of symbolicator's low priority queue based +on symbolication metrics stored in Redis. + +This has three major tasks, executed in the following general order: +1. Scan for new suspect projects in Redis that need to be checked for LPQ eligibility. Triggers 2 and 3. +2. Determine a project's eligibility for the LPQ based on their recorded metrics. +3. Remove some specified project from the LPQ. +""" + +import logging +from typing import Iterable + +from sentry.processing import realtime_metrics +from sentry.processing.realtime_metrics.base import BucketedCount, DurationHistogram +from sentry.tasks.base import instrumented_task + +logger = logging.getLogger(__name__) + + +@instrumented_task( # type: ignore + name="sentry.tasks.low_priority_symbolication.scan_for_suspect_projects", + queue="symbolications.compute_low_priority_projects", + ignore_result=True, + soft_time_limit=10, +) +def scan_for_suspect_projects() -> None: + """Scans and updates the list of projects assigned to the low priority queue.""" + _scan_for_suspect_projects() + + +def _scan_for_suspect_projects() -> None: + suspect_projects = set() + + for project_id in realtime_metrics.projects(): + suspect_projects.add(project_id) + update_lpq_eligibility.delay(project_id=project_id) + + # Prune projects we definitely know shouldn't be in the queue any more. + # `update_lpq_eligibility` should handle removing suspect projects from the list if it turns + # out they need to be evicted. + current_lpq_projects = realtime_metrics.get_lpq_projects() or set() + deleted_projects = current_lpq_projects.difference(suspect_projects) + if len(deleted_projects) == 0: + return + + realtime_metrics.remove_projects_from_lpq(deleted_projects) + + for project_id in deleted_projects: + # TODO: add metrics! + logger.warning("Moved project out of symbolicator's low priority queue: %s", project_id) + + +@instrumented_task( # type: ignore + name="sentry.tasks.low_priority_symbolication.update_lpq_eligibility", + queue="symbolications.compute_low_priority_projects", + ignore_result=True, + soft_time_limit=10, +) +def update_lpq_eligibility(project_id: int) -> None: + """ + Given a project ID, determines whether the project belongs in the low priority queue and + removes or assigns it accordingly to the low priority queue. 
+ """ + _update_lpq_eligibility(project_id) + + +def _update_lpq_eligibility(project_id: int) -> None: + counts = realtime_metrics.get_counts_for_project(project_id) + durations = realtime_metrics.get_durations_for_project(project_id) + + is_eligible = calculation_magic(counts, durations) + + if is_eligible: + was_added = realtime_metrics.add_project_to_lpq(project_id) + if was_added: + logger.warning("Moved project to symbolicator's low priority queue: %s", project_id) + elif not is_eligible: + was_removed = realtime_metrics.remove_projects_from_lpq({project_id}) + if was_removed: + logger.warning("Moved project out of symbolicator's low priority queue: %s", project_id) + + +def calculation_magic( + invocations: Iterable[BucketedCount], durations: Iterable[DurationHistogram] +) -> bool: + return False diff --git a/src/sentry/tasks/sentry_apps.py b/src/sentry/tasks/sentry_apps.py index 6fe07649902805..c4845cab718e87 100644 --- a/src/sentry/tasks/sentry_apps.py +++ b/src/sentry/tasks/sentry_apps.py @@ -81,12 +81,16 @@ def _webhook_event_data(event, group_id, project_id): @instrumented_task(name="sentry.tasks.sentry_apps.send_alert_event", **TASK_OPTIONS) @retry(**RETRY_OPTIONS) -def send_alert_event(event, rule, sentry_app_id): +def send_alert_event( + event, rule, sentry_app_id, additional_payload_key=None, additional_payload=None +): """ When an incident alert is triggered, send incident data to the SentryApp's webhook. :param event: The `Event` for which to build a payload. :param rule: The AlertRule that was triggered. :param sentry_app_id: The SentryApp to notify. + :param additional_payload_key: The key used to attach additional data to the webhook payload + :param additional_payload: The extra data attached to the payload body at the key specified by `additional_payload_key`. :return: """ group = event.group @@ -120,6 +124,10 @@ def send_alert_event(event, rule, sentry_app_id): data = {"event": event_context, "triggered_rule": rule} + # Attach extra payload to the webhook + if additional_payload_key and additional_payload: + data[additional_payload_key] = additional_payload + request_data = AppPlatformEvent( resource="event_alert", action="triggered", install=install, data=data ) @@ -288,8 +296,23 @@ def notify_sentry_app(event, futures): if not f.kwargs.get("sentry_app"): continue + extra_kwargs = { + "additional_payload_key": None, + "additional_payload": None, + } + # If the future comes from a rule with a UI component form in the schema, append the issue alert payload + settings = f.kwargs.get("schema_defined_settings") + if settings: + extra_kwargs["additional_payload_key"] = "issue_alert" + extra_kwargs["additional_payload"] = { + "id": f.rule.id, + "title": f.rule.label, + "sentry_app_id": f.kwargs["sentry_app"].id, + "settings": settings, + } + send_alert_event.delay( - event=event, rule=f.rule.label, sentry_app_id=f.kwargs["sentry_app"].id + event=event, rule=f.rule.label, sentry_app_id=f.kwargs["sentry_app"].id, **extra_kwargs ) diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py index 22667d18d6181b..5c5a24af0fe702 100644 --- a/src/sentry/tasks/store.py +++ b/src/sentry/tasks/store.py @@ -84,9 +84,16 @@ def submit_process( ) -def should_demote_symbolication(project_id): +def should_demote_symbolication(project_id: int) -> bool: """ Determines whether a project's symbolication events should be pushed to the low priority queue. + + The decision is made based on three factors, in order: + 1. 
is the store.symbolicate-event-lpq-never killswitch set for the project? -> normal queue + 2. is the store.symbolicate-event-lpq-always killswitch set for the project? -> low priority queue + 3. has the project been selected for the lpq according to realtime_metrics? -> low priority queue + + Note that 3 is gated behind the config setting SENTRY_ENABLE_AUTO_LOW_PRIORITY_QUEUE. """ always_lowpri = killswitch_matches_context( "store.symbolicate-event-lpq-always", @@ -100,22 +107,27 @@ def should_demote_symbolication(project_id): "project_id": project_id, }, ) - return not never_lowpri and always_lowpri + if never_lowpri: + return False + elif always_lowpri: + return True + else: + return settings.SENTRY_ENABLE_AUTO_LOW_PRIORITY_QUEUE and realtime_metrics.is_lpq_project( + project_id + ) -def submit_symbolicate(project, from_reprocessing, cache_key, event_id, start_time, data): - task = symbolicate_event_from_reprocessing if from_reprocessing else symbolicate_event - task.delay(cache_key=cache_key, start_time=start_time, event_id=event_id) +def submit_symbolicate(is_low_priority, from_reprocessing, cache_key, event_id, start_time, data): + if is_low_priority: + task = ( + symbolicate_event_from_reprocessing_low_priority + if from_reprocessing + else symbolicate_event_low_priority + ) + else: + task = symbolicate_event_from_reprocessing if from_reprocessing else symbolicate_event -def submit_symbolicate_low_priority( - project, from_reprocessing, cache_key, event_id, start_time, data -): - task = ( - symbolicate_event_from_reprocessing_low_priority - if from_reprocessing - else symbolicate_event_low_priority - ) task.delay(cache_key=cache_key, start_time=start_time, event_id=event_id) @@ -166,9 +178,8 @@ def _do_preprocess_event(cache_key, data, start_time, event_id, process_task, pr reprocessing2.backup_unprocessed_event(project=project, data=original_data) is_low_priority = should_demote_symbolication(project_id) - task = submit_symbolicate_low_priority if is_low_priority else submit_symbolicate - task( - project, + submit_symbolicate( + is_low_priority, from_reprocessing, cache_key, event_id, diff --git a/src/sentry/templates/sentry/emails/activity/release.html b/src/sentry/templates/sentry/emails/activity/release.html index 719bb9fb867f16..6830cb334f24bf 100644 --- a/src/sentry/templates/sentry/emails/activity/release.html +++ b/src/sentry/templates/sentry/emails/activity/release.html @@ -8,7 +8,7 @@ {% block activity %}
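
The `should_demote_symbolication` rewrite above gives the two killswitches strict precedence over the automatic low-priority-queue path. A small stand-in (not the real implementation) that encodes the same decision table:

```python
# Stand-in for the precedence documented in the docstring above.
def demote(never_lowpri: bool, always_lowpri: bool, auto_enabled: bool, is_lpq: bool) -> bool:
    if never_lowpri:
        return False
    if always_lowpri:
        return True
    return auto_enabled and is_lpq

assert demote(True, True, True, True) is False    # "never" beats everything
assert demote(False, True, False, False) is True  # "always" beats the automatic path
assert demote(False, False, True, True) is True   # automatic path needs the setting AND the metrics
assert demote(False, False, False, True) is False
```
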

- Version {{ release.version }} was deployed to {{ environment }} + Version {{ version_parsed }} was deployed to {{ environment }}

{{ deploy.date_finished }}   {{ commit_count }} commit{{ commit_count|pluralize }}, {{ author_count }} author{{ author_count|pluralize }}, and {{ file_count }} file{{ file_count|pluralize }} changed across {{ project_count }} project{{ project_count|pluralize }}

diff --git a/src/sentry/templates/sentry/emails/activity/release.txt b/src/sentry/templates/sentry/emails/activity/release.txt index bcc4658c18f3c5..1733e761ac495c 100644 --- a/src/sentry/templates/sentry/emails/activity/release.txt +++ b/src/sentry/templates/sentry/emails/activity/release.txt @@ -1,4 +1,4 @@ -Version {{ release.version }} was deployed to {{ environment }} on {{ deploy.date_finished }} +Version {{ version_parsed }} was deployed to {{ environment }} on {{ deploy.date_finished }} {% for project, release_link, resolved_issue_count in projects %} {{ release_link }} diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 554d496937c61e..7dfc770a3c45c4 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -968,7 +968,13 @@ def store_session(self, session): self._push_metric(session, "set", "user", {"session.status": status}, user) if status != "ok": # terminal - self._push_metric(session, "distribution", "session.duration", {}, session["duration"]) + self._push_metric( + session, + "distribution", + "session.duration", + {"session.status": status}, + session["duration"], + ) def bulk_store_sessions(self, sessions): for session in sessions: diff --git a/src/sentry/utils/__init__.py b/src/sentry/utils/__init__.py index 898851b433213b..095531bb0102d8 100644 --- a/src/sentry/utils/__init__.py +++ b/src/sentry/utils/__init__.py @@ -1,3 +1,10 @@ +""" +This is the Utilities Module. It is the home to small, self-contained classes +and functions that do useful things. This description is intentionally general +because there are basically no limits to what functionality can be considered +a util. However, within this directory we should avoid importing Sentry models +or modules with side effects. +""" # Make sure to not import anything here. We want modules below # sentry.utils to be able to import without having to pull in django # or other sources that might not exist. diff --git a/src/sentry/utils/email.py b/src/sentry/utils/email.py deleted file mode 100644 index 35c2452cbe347c..00000000000000 --- a/src/sentry/utils/email.py +++ /dev/null @@ -1,496 +0,0 @@ -import logging -import os -import subprocess -import tempfile -import time -from email.utils import parseaddr -from functools import partial -from operator import attrgetter -from random import randrange -from typing import Iterable, Mapping, Optional - -import lxml -import toronado -from django.conf import settings -from django.core import mail -from django.core.mail import EmailMultiAlternatives -from django.core.mail.backends.base import BaseEmailBackend -from django.core.signing import BadSignature, Signer -from django.utils.crypto import constant_time_compare -from django.utils.encoding import force_bytes, force_str, force_text - -from sentry import options -from sentry.logging import LoggingFormat -from sentry.models import Activity, Group, GroupEmailThread, Project, User, UserEmail, UserOption -from sentry.utils import metrics -from sentry.utils.compat import map -from sentry.utils.safe import safe_execute -from sentry.utils.strings import is_valid_dot_atom -from sentry.web.helpers import render_to_string - -# The maximum amount of recipients to display in human format. -MAX_RECIPIENTS = 5 - -# The fake TLD used to construct email addresses when one is required, -# for example by automatically generated SSO accounts. 
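
The monolithic `sentry/utils/email.py` whose deletion begins above is reassembled as a package later in this diff, with `__init__.py` re-exporting the public names so existing import sites keep working. A usage sketch against the re-exported API (assumes a configured Sentry environment):

```python
# Old import paths stay valid because the new package re-exports these names.
from sentry.utils.email import MessageBuilder, create_fake_email

fake = create_fake_email("c74e5b75-e037-4e75-ad27-1a0d21a6b203", "cloudfoundry")
# -> "c74e5b75-e037-4e75-ad27-1a0d21a6b203@cloudfoundry.sentry-fake"

msg = MessageBuilder(subject="Deploy finished", body="All good.")
# msg.send_async(to=[fake]) would queue delivery through sentry.tasks.email
```
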
-FAKE_EMAIL_TLD = ".sentry-fake" - -logger = logging.getLogger("sentry.mail") - - -def inline_css(value: str) -> str: - tree = lxml.html.document_fromstring(value) - toronado.inline(tree) - # CSS media query support is inconsistent when the DOCTYPE declaration is - # missing, so we force it to HTML5 here. - return lxml.html.tostring(tree, doctype="", encoding=None).decode("utf-8") - - -class _CaseInsensitiveSigner(Signer): - """ - Generate a signature that is comprised of only lowercase letters. - - WARNING: Do not use this for anything that needs to be cryptographically - secure! This is losing entropy and has a much higher chance of collision - due to dropping to lowercase letters. For our purposes, this lack of entropy - is ok and doesn't pose a risk. - - NOTE: This is needed strictly for signatures used in email addresses. Some - clients, coughAirmailcough, treat email addresses as being case-insensitive, - and sends the value as all lowercase. - """ - - def signature(self, value): - sig = super().signature(value) - return sig.lower() - - def unsign(self, signed_value): - # This unsign is identical to subclass except for the lowercasing - # See: https://github.com/django/django/blob/1.6.11/django/core/signing.py#L165-L172 - signed_value = force_str(signed_value) - if self.sep not in signed_value: - raise BadSignature('No "%s" found in value' % self.sep) - value, sig = signed_value.rsplit(self.sep, 1) - if constant_time_compare(sig.lower(), self.signature(value)): - return force_text(value) - raise BadSignature('Signature "%s" does not match' % sig) - - -signer = _CaseInsensitiveSigner() - - -def email_to_group_id(address): - """ - Email address should be in the form of: - {group_id}+{signature}@example.com - """ - address = address.split("@", 1)[0] - signed_data = address.replace("+", ":") - return int(force_bytes(signer.unsign(signed_data))) - - -def group_id_to_email(group_id): - signed_data = signer.sign(str(group_id)) - return "@".join( - ( - signed_data.replace(":", "+"), - options.get("mail.reply-hostname") or get_from_email_domain(), - ) - ) - - -def domain_from_email(email): - email = parseaddr(email)[1] - try: - return email.split("@", 1)[1] - except IndexError: - # The email address is likely malformed or something - return email - - -# Slightly modified version of Django's -# `django.core.mail.message:make_msgid` because we need -# to override the domain. If we ever upgrade to -# django 1.8, we can/should replace this. -def make_msgid(domain): - """Returns a string suitable for RFC 2822 compliant Message-ID, e.g: - <20020201195627.33539.96671@nightshade.la.mastaler.com> - Optional idstring if given is a string used to strengthen the - uniqueness of the message id. Optional domain if given provides the - portion of the message id after the '@'. It defaults to the locally - defined hostname. 
- """ - timeval = time.time() - utcdate = time.strftime("%Y%m%d%H%M%S", time.gmtime(timeval)) - pid = os.getpid() - randint = randrange(100000) - msgid = f"<{utcdate}.{pid}.{randint}@{domain}>" - return msgid - - -# cache the domain_from_email calculation -# This is just a tuple of (email, email-domain) -_from_email_domain_cache = (None, None) - - -def get_from_email_domain(): - global _from_email_domain_cache - from_ = options.get("mail.from") - if not _from_email_domain_cache[0] == from_: - _from_email_domain_cache = (from_, domain_from_email(from_)) - return _from_email_domain_cache[1] - - -def create_fake_email(unique_id, namespace): - """ - Generate a fake email of the form: {unique_id}@{namespace}{FAKE_EMAIL_TLD} - - For example: c74e5b75-e037-4e75-ad27-1a0d21a6b203@cloudfoundry.sentry-fake - """ - return f"{unique_id}@{namespace}{FAKE_EMAIL_TLD}" - - -def is_fake_email(email): - """ - Returns True if the provided email matches the fake email pattern. - """ - return email.endswith(FAKE_EMAIL_TLD) - - -def get_email_addresses(user_ids: Iterable[int], project: Project = None) -> Mapping[int, str]: - """ - Find the best email addresses for a collection of users. If a project is - provided, prefer their project-specific notification preferences. - """ - pending = set(user_ids) - results = {} - - if project: - queryset = UserOption.objects.filter(project=project, user__in=pending, key="mail:email") - for option in (o for o in queryset if o.value and not is_fake_email(o.value)): - if UserEmail.objects.filter(user=option.user, email=option.value).exists(): - results[option.user_id] = option.value - pending.discard(option.user_id) - else: - pending.discard(option.user_id) - option.delete() - - if pending: - queryset = User.objects.filter(pk__in=pending, is_active=True) - for (user_id, email) in queryset.values_list("id", "email"): - if email and not is_fake_email(email): - results[user_id] = email - pending.discard(user_id) - - if pending: - logger.warning( - "Could not resolve email addresses for user IDs in %r, discarding...", pending - ) - - return results - - -class ListResolver: - """ - Manages the generation of RFC 2919 compliant list-id strings from varying - objects types. - """ - - class UnregisteredTypeError(Exception): - """ - Error raised when attempting to build a list-id from an unregistered object type. - """ - - def __init__(self, namespace, type_handlers): - assert is_valid_dot_atom(namespace) - - # The list-id-namespace that will be used when generating the list-id - # string. This should be a domain name under the control of the - # generator (see RFC 2919.) - self.__namespace = namespace - - # A mapping of classes to functions that accept an instance of that - # class, returning a tuple of values that will be used to generate the - # list label. Returned values must be valid RFC 2822 dot-atom-text - # values. - self.__type_handlers = type_handlers - - def __call__(self, instance): - """ - Build a list-id string from an instance. - - Raises ``UnregisteredTypeError`` if there is no registered handler for - the instance type. Raises ``AssertionError`` if a valid list-id string - cannot be generated from the values returned by the type handler. 
- """ - try: - handler = self.__type_handlers[type(instance)] - except KeyError: - raise self.UnregisteredTypeError( - f"Cannot generate mailing list identifier for {instance!r}" - ) - - label = ".".join(map(str, handler(instance))) - assert is_valid_dot_atom(label) - - return f"<{label}.{self.__namespace}>" - - -default_list_type_handlers = { - Activity: attrgetter("project.slug", "project.organization.slug"), - Project: attrgetter("slug", "organization.slug"), - Group: attrgetter("project.slug", "organization.slug"), -} - -make_listid_from_instance = ListResolver( - options.get("mail.list-namespace"), default_list_type_handlers -) - - -class MessageBuilder: - def __init__( - self, - subject, - context=None, - template=None, - html_template=None, - body="", - html_body=None, - headers=None, - reference=None, - reply_reference=None, - from_email=None, - type=None, - ): - assert not (body and template) - assert not (html_body and html_template) - assert context or not (template or html_template) - - if headers is None: - headers = {} - - self.subject = subject - self.context = context or {} - self.template = template - self.html_template = html_template - self._txt_body = body - self._html_body = html_body - self.headers = headers - self.reference = reference # The object that generated this message - self.reply_reference = reply_reference # The object this message is replying about - self.from_email = from_email or options.get("mail.from") - self._send_to = set() - self.type = type if type else "generic" - - if reference is not None and "List-Id" not in headers: - try: - headers["List-Id"] = make_listid_from_instance(reference) - except ListResolver.UnregisteredTypeError as error: - logger.debug(str(error)) - except AssertionError as error: - logger.warning(str(error)) - - def __render_html_body(self) -> str: - html_body = None - if self.html_template: - html_body = render_to_string(self.html_template, self.context) - else: - html_body = self._html_body - - if html_body is not None: - return inline_css(html_body) - - def __render_text_body(self) -> str: - if self.template: - return render_to_string(self.template, self.context) - return self._txt_body - - def add_users(self, user_ids: Iterable[int], project: Optional[Project] = None) -> None: - self._send_to.update(list(get_email_addresses(user_ids, project).values())) - - def build(self, to, reply_to=None, cc=None, bcc=None): - if self.headers is None: - headers = {} - else: - headers = self.headers.copy() - - if options.get("mail.enable-replies") and "X-Sentry-Reply-To" in headers: - reply_to = headers["X-Sentry-Reply-To"] - else: - reply_to = set(reply_to or ()) - reply_to.discard(to) - reply_to = ", ".join(reply_to) - - if reply_to: - headers.setdefault("Reply-To", reply_to) - - # Every message sent needs a unique message id - message_id = make_msgid(get_from_email_domain()) - headers.setdefault("Message-Id", message_id) - - subject = force_text(self.subject) - - if self.reply_reference is not None: - reference = self.reply_reference - subject = "Re: %s" % subject - else: - reference = self.reference - - if isinstance(reference, Group): - thread, created = GroupEmailThread.objects.get_or_create( - email=to, - group=reference, - defaults={"project": reference.project, "msgid": message_id}, - ) - if not created: - headers.setdefault("In-Reply-To", thread.msgid) - headers.setdefault("References", thread.msgid) - - msg = EmailMultiAlternatives( - subject=subject.splitlines()[0], - body=self.__render_text_body(), - 
from_email=self.from_email, - to=(to,), - cc=cc or (), - bcc=bcc or (), - headers=headers, - ) - - html_body = self.__render_html_body() - if html_body: - msg.attach_alternative(html_body, "text/html") - - return msg - - def get_built_messages(self, to=None, cc=None, bcc=None): - send_to = set(to or ()) - send_to.update(self._send_to) - results = [ - self.build(to=email, reply_to=send_to, cc=cc, bcc=bcc) for email in send_to if email - ] - if not results: - logger.debug("Did not build any messages, no users to send to.") - return results - - def format_to(self, to): - if not to: - return "" - if len(to) > MAX_RECIPIENTS: - to = to[:MAX_RECIPIENTS] + [f"and {len(to[MAX_RECIPIENTS:])} more."] - return ", ".join(to) - - def send(self, to=None, cc=None, bcc=None, fail_silently=False): - return send_messages( - self.get_built_messages(to, cc=cc, bcc=bcc), fail_silently=fail_silently - ) - - def send_async(self, to=None, cc=None, bcc=None): - from sentry.tasks.email import send_email - - fmt = options.get("system.logging-format") - messages = self.get_built_messages(to, cc=cc, bcc=bcc) - extra = {"message_type": self.type} - loggable = [v for k, v in self.context.items() if hasattr(v, "id")] - for context in loggable: - extra["%s_id" % type(context).__name__.lower()] = context.id - - log_mail_queued = partial(logger.info, "mail.queued", extra=extra) - for message in messages: - safe_execute(send_email.delay, message=message, _with_transaction=False) - extra["message_id"] = message.extra_headers["Message-Id"] - metrics.incr("email.queued", instance=self.type, skip_internal=False) - if fmt == LoggingFormat.HUMAN: - extra["message_to"] = (self.format_to(message.to),) - log_mail_queued() - elif fmt == LoggingFormat.MACHINE: - for recipient in message.to: - extra["message_to"] = recipient - log_mail_queued() - - -def send_messages(messages, fail_silently=False): - connection = get_connection(fail_silently=fail_silently) - sent = connection.send_messages(messages) - metrics.incr("email.sent", len(messages), skip_internal=False) - for message in messages: - extra = { - "message_id": message.extra_headers["Message-Id"], - "size": len(message.message().as_bytes()), - } - logger.info("mail.sent", extra=extra) - return sent - - -def get_mail_backend(): - backend = options.get("mail.backend") - try: - return settings.SENTRY_EMAIL_BACKEND_ALIASES[backend] - except KeyError: - return backend - - -def get_connection(fail_silently=False): - """ - Gets an SMTP connection using our OptionsStore - """ - return mail.get_connection( - backend=get_mail_backend(), - host=options.get("mail.host"), - port=options.get("mail.port"), - username=options.get("mail.username"), - password=options.get("mail.password"), - use_tls=options.get("mail.use-tls"), - use_ssl=options.get("mail.use-ssl"), - timeout=options.get("mail.timeout"), - fail_silently=fail_silently, - ) - - -def send_mail(subject, message, from_email, recipient_list, fail_silently=False, **kwargs): - """ - Wrapper that forces sending mail through our connection. - Uses EmailMessage class which has more options than the simple send_mail - """ - email = mail.EmailMessage( - subject, - message, - from_email, - recipient_list, - connection=get_connection(fail_silently=fail_silently), - **kwargs, - ) - return email.send(fail_silently=fail_silently) - - -def is_smtp_enabled(backend=None): - """ - Check if the current backend is SMTP based. 
-    """
-    if backend is None:
-        backend = get_mail_backend()
-    return backend not in settings.SENTRY_SMTP_DISABLED_BACKENDS
-
-
-class PreviewBackend(BaseEmailBackend):
-    """
-    Email backend that can be used in local development to open messages in the
-    local mail client as they are sent.
-
-    Probably only works on OS X.
-    """
-
-    def send_messages(self, email_messages):
-        for message in email_messages:
-            content = bytes(message.message())
-            preview = tempfile.NamedTemporaryFile(
-                delete=False, prefix="sentry-email-preview-", suffix=".eml"
-            )
-            try:
-                preview.write(content)
-                preview.flush()
-            finally:
-                preview.close()
-
-            subprocess.check_call(("open", preview.name))
-
-        return len(email_messages)
diff --git a/src/sentry/utils/email/__init__.py b/src/sentry/utils/email/__init__.py
new file mode 100644
index 00000000000000..03036417051ef9
--- /dev/null
+++ b/src/sentry/utils/email/__init__.py
@@ -0,0 +1,30 @@
+"""
+This is the Email Module. It contains a hodgepodge of helpers for things like
+parsing email address strings, building and sending messages, and looking up
+user emails in the database.
+"""
+
+__all__ = (
+    "_CaseInsensitiveSigner",
+    "create_fake_email",
+    "email_to_group_id",
+    "get_connection",
+    "get_email_addresses",
+    "group_id_to_email",
+    "inline_css",
+    "is_smtp_enabled",
+    "ListResolver",
+    "MessageBuilder",
+    "PreviewBackend",
+    "send_mail",
+    "send_messages",
+)
+
+from .address import email_to_group_id, group_id_to_email
+from .backend import PreviewBackend, is_smtp_enabled
+from .faker import create_fake_email
+from .list_resolver import ListResolver
+from .manager import get_email_addresses
+from .message_builder import MessageBuilder, inline_css
+from .send import get_connection, send_mail, send_messages
+from .signer import _CaseInsensitiveSigner
diff --git a/src/sentry/utils/email/address.py b/src/sentry/utils/email/address.py
new file mode 100644
index 00000000000000..07ecf0f4ad71f9
--- /dev/null
+++ b/src/sentry/utils/email/address.py
@@ -0,0 +1,56 @@
+from email.utils import parseaddr
+
+from django.conf import settings
+from django.utils.encoding import force_bytes
+
+from sentry import options
+
+from .signer import _CaseInsensitiveSigner
+
+# cache the domain_from_email calculation
+# This is just a tuple of (email, email-domain)
+_from_email_domain_cache = (None, None)
+
+
+signer = _CaseInsensitiveSigner()
+
+
+def get_from_email_domain():
+    global _from_email_domain_cache
+    from_ = options.get("mail.from")
+    if _from_email_domain_cache[0] != from_:
+        _from_email_domain_cache = (from_, domain_from_email(from_))
+    return _from_email_domain_cache[1]
+
+
+def email_to_group_id(address):
+    """
+    Email address should be in the form of:
+    {group_id}+{signature}@example.com
+    """
+    address = address.split("@", 1)[0]
+    signed_data = address.replace("+", ":")
+    return int(force_bytes(signer.unsign(signed_data)))
+
+
+def group_id_to_email(group_id):
+    signed_data = signer.sign(str(group_id))
+    return "@".join(
+        (
+            signed_data.replace(":", "+"),
+            options.get("mail.reply-hostname") or get_from_email_domain(),
+        )
+    )
+
+
+def domain_from_email(email):
+    email = parseaddr(email)[1]
+    try:
+        return email.split("@", 1)[1]
+    except IndexError:
+        # The address is malformed and has no domain; fall back to the raw value.
+        return email
+
+
+def is_valid_email_address(value: str) -> bool:
+    return not settings.INVALID_EMAIL_ADDRESS_PATTERN.search(value)
diff --git a/src/sentry/utils/email/backend.py b/src/sentry/utils/email/backend.py
new file mode 100644
index
00000000000000..cddb863e49dd5a
--- /dev/null
+++ b/src/sentry/utils/email/backend.py
@@ -0,0 +1,47 @@
+import subprocess
+import tempfile
+
+from django.conf import settings
+from django.core.mail.backends.base import BaseEmailBackend
+
+from sentry import options
+
+
+def is_smtp_enabled(backend=None):
+    """Check if the current backend is SMTP based."""
+    if backend is None:
+        backend = get_mail_backend()
+    return backend not in settings.SENTRY_SMTP_DISABLED_BACKENDS
+
+
+def get_mail_backend():
+    backend = options.get("mail.backend")
+    try:
+        return settings.SENTRY_EMAIL_BACKEND_ALIASES[backend]
+    except KeyError:
+        return backend
+
+
+class PreviewBackend(BaseEmailBackend):
+    """
+    Email backend that can be used in local development to open messages in the
+    local mail client as they are sent.
+
+    Probably only works on OS X.
+    """
+
+    def send_messages(self, email_messages):
+        for message in email_messages:
+            content = bytes(message.message())
+            preview = tempfile.NamedTemporaryFile(
+                delete=False, prefix="sentry-email-preview-", suffix=".eml"
+            )
+            try:
+                preview.write(content)
+                preview.flush()
+            finally:
+                preview.close()
+
+            subprocess.check_call(("open", preview.name))
+
+        return len(email_messages)
diff --git a/src/sentry/utils/email/faker.py b/src/sentry/utils/email/faker.py
new file mode 100644
index 00000000000000..cc8e577f466990
--- /dev/null
+++ b/src/sentry/utils/email/faker.py
@@ -0,0 +1,17 @@
+# The fake TLD used to construct email addresses when one is required,
+# for example by automatically generated SSO accounts.
+FAKE_EMAIL_TLD = ".sentry-fake"
+
+
+def create_fake_email(unique_id, namespace):
+    """
+    Generate a fake email of the form: {unique_id}@{namespace}{FAKE_EMAIL_TLD}
+
+    For example: c74e5b75-e037-4e75-ad27-1a0d21a6b203@cloudfoundry.sentry-fake
+    """
+    return f"{unique_id}@{namespace}{FAKE_EMAIL_TLD}"
+
+
+def is_fake_email(email):
+    """Returns True if the provided email matches the fake email pattern."""
+    return email.endswith(FAKE_EMAIL_TLD)
diff --git a/src/sentry/utils/email/list_resolver.py b/src/sentry/utils/email/list_resolver.py
new file mode 100644
index 00000000000000..b82d887f7b1569
--- /dev/null
+++ b/src/sentry/utils/email/list_resolver.py
@@ -0,0 +1,47 @@
+from sentry.utils.strings import is_valid_dot_atom
+
+
+class ListResolver:
+    """
+    Manages the generation of RFC 2919 compliant list-id strings from varying
+    object types.
+    """
+
+    class UnregisteredTypeError(Exception):
+        """
+        Error raised when attempting to build a list-id from an unregistered object type.
+        """
+
+    def __init__(self, namespace, type_handlers):
+        assert is_valid_dot_atom(namespace)
+
+        # The list-id-namespace that will be used when generating the list-id
+        # string. This should be a domain name under the control of the
+        # generator (see RFC 2919.)
+        self.__namespace = namespace
+
+        # A mapping of classes to functions that accept an instance of that
+        # class, returning a tuple of values that will be used to generate the
+        # list label. Returned values must be valid RFC 2822 dot-atom-text
+        # values.
+        self.__type_handlers = type_handlers
+
+    def __call__(self, instance):
+        """
+        Build a list-id string from an instance.
+
+        Raises ``UnregisteredTypeError`` if there is no registered handler for
+        the instance type. Raises ``AssertionError`` if a valid list-id string
+        cannot be generated from the values returned by the type handler.
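+
+        For example, with a hypothetical handler registered for ``int`` (shown
+        only to illustrate the contract; the real handlers are registered for
+        ``Activity``, ``Project``, and ``Group``):
+
+            >>> resolver = ListResolver("example.com", {int: lambda n: ("issue", n)})
+            >>> resolver(17)
+            '<issue.17.example.com>'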
+        """
+        try:
+            handler = self.__type_handlers[type(instance)]
+        except KeyError:
+            raise self.UnregisteredTypeError(
+                f"Cannot generate mailing list identifier for {instance!r}"
+            )
+
+        label = ".".join(map(str, handler(instance)))
+        assert is_valid_dot_atom(label)
+
+        return f"<{label}.{self.__namespace}>"
diff --git a/src/sentry/utils/email/manager.py b/src/sentry/utils/email/manager.py
new file mode 100644
index 00000000000000..89a9659c03a361
--- /dev/null
+++ b/src/sentry/utils/email/manager.py
@@ -0,0 +1,41 @@
+import logging
+from typing import Iterable, Mapping, Optional
+
+from sentry.models import Project, User, UserEmail, UserOption
+
+from .faker import is_fake_email
+
+logger = logging.getLogger("sentry.mail")
+
+
+def get_email_addresses(user_ids: Iterable[int], project: Optional[Project] = None) -> Mapping[int, str]:
+    """
+    Find the best email addresses for a collection of users. If a project is
+    provided, prefer their project-specific notification preferences.
+    """
+    pending = set(user_ids)
+    results = {}
+
+    if project:
+        queryset = UserOption.objects.filter(project=project, user__in=pending, key="mail:email")
+        for option in (o for o in queryset if o.value and not is_fake_email(o.value)):
+            if UserEmail.objects.filter(user=option.user, email=option.value).exists():
+                results[option.user_id] = option.value
+                pending.discard(option.user_id)
+            else:
+                pending.discard(option.user_id)
+                option.delete()
+
+    if pending:
+        queryset = User.objects.filter(pk__in=pending, is_active=True)
+        for (user_id, email) in queryset.values_list("id", "email"):
+            if email and not is_fake_email(email):
+                results[user_id] = email
+                pending.discard(user_id)
+
+    if pending:
+        logger.warning(
+            "Could not resolve email addresses for user IDs in %r, discarding...", pending
+        )
+
+    return results
diff --git a/src/sentry/utils/email/message_builder.py b/src/sentry/utils/email/message_builder.py
new file mode 100644
index 00000000000000..016c7a927362b3
--- /dev/null
+++ b/src/sentry/utils/email/message_builder.py
@@ -0,0 +1,227 @@
+import logging
+import os
+import time
+from functools import partial
+from operator import attrgetter
+from random import randrange
+from typing import Iterable, Optional
+
+import lxml
+import toronado
+from django.core.mail import EmailMultiAlternatives
+from django.utils.encoding import force_text
+
+from sentry import options
+from sentry.logging import LoggingFormat
+from sentry.models import Activity, Group, GroupEmailThread, Project
+from sentry.utils import metrics
+from sentry.utils.safe import safe_execute
+from sentry.web.helpers import render_to_string
+
+from .address import get_from_email_domain
+from .list_resolver import ListResolver
+from .manager import get_email_addresses
+from .send import send_messages
+
+logger = logging.getLogger("sentry.mail")
+
+default_list_type_handlers = {
+    Activity: attrgetter("project.slug", "project.organization.slug"),
+    Project: attrgetter("slug", "organization.slug"),
+    Group: attrgetter("project.slug", "organization.slug"),
+}
+
+make_listid_from_instance = ListResolver(
+    options.get("mail.list-namespace"), default_list_type_handlers
+)
+
+# The maximum amount of recipients to display in human format.
+MAX_RECIPIENTS = 5
+
+
+# Slightly modified version of Django's `django.core.mail.message:make_msgid`
+# because we need to override the domain. If we ever upgrade to django 1.8, we
+# can/should replace this.
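+# (For reference, the standard library's ``email.utils.make_msgid`` has also
+# accepted a ``domain`` keyword since Python 3.2, so it is another candidate
+# replacement.)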
+def make_msgid(domain):
+    """Returns a string suitable for an RFC 2822 compliant Message-ID, e.g:
+    <20020201195627.33539.96671@nightshade.la.mastaler.com>
+
+    The ``domain`` argument provides the portion of the message id after the
+    '@'. (Django's version defaults it to the locally defined hostname; here
+    it is always passed explicitly.)
+    """
+    timeval = time.time()
+    utcdate = time.strftime("%Y%m%d%H%M%S", time.gmtime(timeval))
+    pid = os.getpid()
+    randint = randrange(100000)
+    msgid = f"<{utcdate}.{pid}.{randint}@{domain}>"
+    return msgid
+
+
+def inline_css(value: str) -> str:
+    tree = lxml.html.document_fromstring(value)
+    toronado.inline(tree)
+    # CSS media query support is inconsistent when the DOCTYPE declaration is
+    # missing, so we force it to HTML5 here.
+    return lxml.html.tostring(tree, doctype="<!DOCTYPE html>", encoding=None).decode("utf-8")
+
+
+class MessageBuilder:
+    def __init__(
+        self,
+        subject,
+        context=None,
+        template=None,
+        html_template=None,
+        body="",
+        html_body=None,
+        headers=None,
+        reference=None,
+        reply_reference=None,
+        from_email=None,
+        type=None,
+    ):
+        assert not (body and template)
+        assert not (html_body and html_template)
+        assert context or not (template or html_template)
+
+        if headers is None:
+            headers = {}
+
+        self.subject = subject
+        self.context = context or {}
+        self.template = template
+        self.html_template = html_template
+        self._txt_body = body
+        self._html_body = html_body
+        self.headers = headers
+        self.reference = reference  # The object that generated this message
+        self.reply_reference = reply_reference  # The object this message is replying about
+        self.from_email = from_email or options.get("mail.from")
+        self._send_to = set()
+        self.type = type if type else "generic"
+
+        if reference is not None and "List-Id" not in headers:
+            try:
+                headers["List-Id"] = make_listid_from_instance(reference)
+            except ListResolver.UnregisteredTypeError as error:
+                logger.debug(str(error))
+            except AssertionError as error:
+                logger.warning(str(error))
+
+    def __render_html_body(self) -> Optional[str]:
+        html_body = None
+        if self.html_template:
+            html_body = render_to_string(self.html_template, self.context)
+        else:
+            html_body = self._html_body
+
+        if html_body is not None:
+            return inline_css(html_body)
+        return None
+
+    def __render_text_body(self) -> str:
+        if self.template:
+            return render_to_string(self.template, self.context)
+        return self._txt_body
+
+    def add_users(self, user_ids: Iterable[int], project: Optional[Project] = None) -> None:
+        self._send_to.update(list(get_email_addresses(user_ids, project).values()))
+
+    def build(self, to, reply_to=None, cc=None, bcc=None):
+        if self.headers is None:
+            headers = {}
+        else:
+            headers = self.headers.copy()
+
+        if options.get("mail.enable-replies") and "X-Sentry-Reply-To" in headers:
+            reply_to = headers["X-Sentry-Reply-To"]
+        else:
+            reply_to = set(reply_to or ())
+            reply_to.discard(to)
+            reply_to = ", ".join(reply_to)
+
+        if reply_to:
+            headers.setdefault("Reply-To", reply_to)
+
+        # Every message sent needs a unique message id
+        message_id = make_msgid(get_from_email_domain())
+        headers.setdefault("Message-Id", message_id)
+
+        subject = force_text(self.subject)
+
+        if self.reply_reference is not None:
+            reference = self.reply_reference
+            subject = "Re: %s" % subject
+        else:
+            reference = self.reference
+
+        if isinstance(reference, Group):
+            thread, created = GroupEmailThread.objects.get_or_create(
+                email=to,
+                group=reference,
+                defaults={"project": reference.project, "msgid": message_id},
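+                # The first message built for an (email, group) pair stores its
+                # Message-Id; later messages fetch it here and replay it via the
+                # In-Reply-To/References headers below so mail clients thread
+                # them with the original notification.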
+ ) + if not created: + headers.setdefault("In-Reply-To", thread.msgid) + headers.setdefault("References", thread.msgid) + + msg = EmailMultiAlternatives( + subject=subject.splitlines()[0], + body=self.__render_text_body(), + from_email=self.from_email, + to=(to,), + cc=cc or (), + bcc=bcc or (), + headers=headers, + ) + + html_body = self.__render_html_body() + if html_body: + msg.attach_alternative(html_body, "text/html") + + return msg + + def get_built_messages(self, to=None, cc=None, bcc=None): + send_to = set(to or ()) + send_to.update(self._send_to) + results = [ + self.build(to=email, reply_to=send_to, cc=cc, bcc=bcc) for email in send_to if email + ] + if not results: + logger.debug("Did not build any messages, no users to send to.") + return results + + def format_to(self, to): + if not to: + return "" + if len(to) > MAX_RECIPIENTS: + to = to[:MAX_RECIPIENTS] + [f"and {len(to[MAX_RECIPIENTS:])} more."] + return ", ".join(to) + + def send(self, to=None, cc=None, bcc=None, fail_silently=False): + return send_messages( + self.get_built_messages(to, cc=cc, bcc=bcc), fail_silently=fail_silently + ) + + def send_async(self, to=None, cc=None, bcc=None): + from sentry.tasks.email import send_email + + fmt = options.get("system.logging-format") + messages = self.get_built_messages(to, cc=cc, bcc=bcc) + extra = {"message_type": self.type} + loggable = [v for k, v in self.context.items() if hasattr(v, "id")] + for context in loggable: + extra["%s_id" % type(context).__name__.lower()] = context.id + + log_mail_queued = partial(logger.info, "mail.queued", extra=extra) + for message in messages: + safe_execute(send_email.delay, message=message, _with_transaction=False) + extra["message_id"] = message.extra_headers["Message-Id"] + metrics.incr("email.queued", instance=self.type, skip_internal=False) + if fmt == LoggingFormat.HUMAN: + extra["message_to"] = (self.format_to(message.to),) + log_mail_queued() + elif fmt == LoggingFormat.MACHINE: + for recipient in message.to: + extra["message_to"] = recipient + log_mail_queued() diff --git a/src/sentry/utils/email/send.py b/src/sentry/utils/email/send.py new file mode 100644 index 00000000000000..91b074a9656705 --- /dev/null +++ b/src/sentry/utils/email/send.py @@ -0,0 +1,54 @@ +import logging + +from django.core import mail + +from sentry import options +from sentry.utils import metrics + +from .backend import get_mail_backend + +logger = logging.getLogger("sentry.mail") + + +def send_messages(messages, fail_silently=False): + connection = get_connection(fail_silently=fail_silently) + sent = connection.send_messages(messages) + metrics.incr("email.sent", len(messages), skip_internal=False) + for message in messages: + extra = { + "message_id": message.extra_headers["Message-Id"], + "size": len(message.message().as_bytes()), + } + logger.info("mail.sent", extra=extra) + return sent + + +def get_connection(fail_silently=False): + """Gets an SMTP connection using our OptionsStore.""" + return mail.get_connection( + backend=get_mail_backend(), + host=options.get("mail.host"), + port=options.get("mail.port"), + username=options.get("mail.username"), + password=options.get("mail.password"), + use_tls=options.get("mail.use-tls"), + use_ssl=options.get("mail.use-ssl"), + timeout=options.get("mail.timeout"), + fail_silently=fail_silently, + ) + + +def send_mail(subject, message, from_email, recipient_list, fail_silently=False, **kwargs): + """ + Wrapper that forces sending mail through our connection. 
+    Uses the EmailMessage class, which has more options than the simple send_mail.
+    """
+    email = mail.EmailMessage(
+        subject,
+        message,
+        from_email,
+        recipient_list,
+        connection=get_connection(fail_silently=fail_silently),
+        **kwargs,
+    )
+    return email.send(fail_silently=fail_silently)
diff --git a/src/sentry/utils/email/signer.py b/src/sentry/utils/email/signer.py
new file mode 100644
index 00000000000000..f285b3a4855853
--- /dev/null
+++ b/src/sentry/utils/email/signer.py
@@ -0,0 +1,33 @@
+from django.core.signing import BadSignature, Signer
+from django.utils.crypto import constant_time_compare
+from django.utils.encoding import force_str, force_text
+
+
+class _CaseInsensitiveSigner(Signer):
+    """
+    Generate a signature composed of only lowercase letters.
+
+    WARNING: Do not use this for anything that needs to be cryptographically
+    secure! Lowercasing loses entropy and gives a much higher chance of
+    collision. For our purposes, this lack of entropy is ok and doesn't pose
+    a risk.
+
+    NOTE: This is needed strictly for signatures used in email addresses. Some
+    clients (Airmail, for example) treat email addresses as case-insensitive
+    and send the value as all lowercase.
+    """
+
+    def signature(self, value):
+        sig = super().signature(value)
+        return sig.lower()
+
+    def unsign(self, signed_value):
+        # This `unsign` is identical to the superclass implementation except
+        # for the lower-casing of the signature before comparison.
+        # See: https://github.com/django/django/blob/1.6.11/django/core/signing.py#L165-L172
+        signed_value = force_str(signed_value)
+        if self.sep not in signed_value:
+            raise BadSignature('No "%s" found in value' % self.sep)
+        value, sig = signed_value.rsplit(self.sep, 1)
+        if constant_time_compare(sig.lower(), self.signature(value)):
+            return force_text(value)
+        raise BadSignature('Signature "%s" does not match' % sig)
diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py
index 97e3e0f6d65960..89932c26b4b79d 100644
--- a/src/sentry/utils/snuba.py
+++ b/src/sentry/utils/snuba.py
@@ -85,6 +85,10 @@
     if col.value.transaction_name is not None
 }
 
+SESSIONS_FIELD_LIST = ["release", "sessions", "sessions_crashed", "users", "users_crashed"]
+
+SESSIONS_SNUBA_MAP = {column: column for column in SESSIONS_FIELD_LIST}
+
 # This maps the public column aliases to the discover dataset column names.
 # Longer term we would like to not expose the transactions dataset directly
 # to end users and instead have all ad-hoc queries go through the discover
@@ -100,6 +104,7 @@
     Dataset.Events: SENTRY_SNUBA_MAP,
     Dataset.Transactions: TRANSACTIONS_SNUBA_MAP,
     Dataset.Discover: DISCOVER_COLUMN_MAP,
+    Dataset.Sessions: SESSIONS_SNUBA_MAP,
 }
 
 # Store the internal field names to save work later on.
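(Illustrative sketch, not part of the patch: with the Sessions entry added, every
dataset's public column aliases resolve through the same DATASETS mapping, and for
Sessions the mapping is the identity. The helper below is hypothetical and only
demonstrates how such a lookup behaves; the import paths are assumptions.)

    from sentry.snuba.dataset import Dataset
    from sentry.utils.snuba import DATASETS

    def resolve_snuba_column(dataset: Dataset, alias: str) -> str:
        # Unknown aliases surface as KeyError for the caller to handle.
        return DATASETS[dataset][alias]

    # Identity mapping for the new Sessions dataset:
    assert resolve_snuba_column(Dataset.Sessions, "users_crashed") == "users_crashed"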
@@ -109,6 +114,7 @@ Dataset.Events: list(SENTRY_SNUBA_MAP.values()), Dataset.Transactions: list(TRANSACTIONS_SNUBA_MAP.values()), Dataset.Discover: list(DISCOVER_COLUMN_MAP.values()), + Dataset.Sessions: SESSIONS_FIELD_LIST, } SNUBA_OR = "or" diff --git a/src/sentry/web/forms/fields.py b/src/sentry/web/forms/fields.py index 3ca255918c4ad5..a6b921929a2127 100644 --- a/src/sentry/web/forms/fields.py +++ b/src/sentry/web/forms/fields.py @@ -6,7 +6,7 @@ from django.utils.translation import ugettext_lazy as _ from sentry.models import User -from sentry.security import is_valid_email_address +from sentry.utils.email.address import is_valid_email_address class CustomTypedChoiceField(TypedChoiceField): diff --git a/src/sentry/web/frontend/debug/debug_new_release_email.py b/src/sentry/web/frontend/debug/debug_new_release_email.py index 630b0dfdc0d5fd..5fb36d9a5d62d2 100644 --- a/src/sentry/web/frontend/debug/debug_new_release_email.py +++ b/src/sentry/web/frontend/debug/debug_new_release_email.py @@ -2,6 +2,7 @@ import pytz from django.views.generic import View +from sentry_relay import parse_release from sentry.models import Commit, CommitAuthor, Deploy, Organization, Project, Release, User from sentry.notifications.types import GroupSubscriptionReason @@ -19,9 +20,11 @@ def get(self, request): Project(id=2, organization=org, slug="another-project", name="Another Project"), Project(id=3, organization=org, slug="yet-another-project", name="Yet Another Project"), ] + version = "6c998f755f304593a4713abd123eaf8833a2de5e" + version_parsed = parse_release(version)["description"] release = Release( organization_id=org.id, - version="6c998f755f304593a4713abd123eaf8833a2de5e", + version=version, date_added=datetime.datetime(2016, 10, 12, 15, 39, tzinfo=pytz.utc), ) @@ -95,16 +98,17 @@ def get(self, request): html_template="sentry/emails/activity/release.html", text_template="sentry/emails/activity/release.txt", context={ - "release": release, - "projects": zip(projects, release_links, [6, 1, 0]), - "repos": repos, - "reason": GroupSubscriptionReason.descriptions[GroupSubscriptionReason.committed], - "project_count": len(projects), - "commit_count": 4, "author_count": 1, - "file_count": 5, - "environment": "production", + "commit_count": 4, "deploy": deploy, + "environment": "production", + "file_count": 5, + "project_count": len(projects), + "projects": zip(projects, release_links, [6, 1, 0]), + "reason": GroupSubscriptionReason.descriptions[GroupSubscriptionReason.committed], + "release": release, + "repos": repos, "setup_repo_link": absolute_uri(f"/organizations/{org.slug}/repos/"), + "version_parsed": version_parsed, }, ).render(request) diff --git a/static/app/components/charts/sessionsRequest.tsx b/static/app/components/charts/sessionsRequest.tsx new file mode 100644 index 00000000000000..82275694ef1317 --- /dev/null +++ b/static/app/components/charts/sessionsRequest.tsx @@ -0,0 +1,147 @@ +import * as React from 'react'; +import isEqual from 'lodash/isEqual'; +import omitBy from 'lodash/omitBy'; + +import {addErrorMessage} from 'app/actionCreators/indicator'; +import {Client} from 'app/api'; +import {t} from 'app/locale'; +import {DateString, Organization, SessionApiResponse, SessionField} from 'app/types'; +import {getSessionsInterval} from 'app/utils/sessions'; + +const propNamesToIgnore = ['api', 'children', 'organization']; +const omitIgnoredProps = (props: Props) => + omitBy(props, (_value, key) => propNamesToIgnore.includes(key)); + +export type SessionsRequestRenderProps = { + loading: boolean; 
+  reloading: boolean;
+  errored: boolean;
+  response: SessionApiResponse | null;
+};
+
+type Props = {
+  api: Client;
+  organization: Organization;
+  children: (renderProps: SessionsRequestRenderProps) => React.ReactNode;
+  field: SessionField[];
+  project?: number[];
+  environment?: string[];
+  statsPeriod?: string;
+  start?: DateString;
+  end?: DateString;
+  query?: string;
+  groupBy?: string[];
+  interval?: string;
+  disabled?: boolean;
+};
+
+type State = {
+  reloading: boolean;
+  errored: boolean;
+  response: SessionApiResponse | null;
+};
+
+class SessionsRequest extends React.Component<Props, State> {
+  state: State = {
+    reloading: false,
+    errored: false,
+    response: null,
+  };
+
+  componentDidMount() {
+    this.fetchData();
+  }
+
+  componentDidUpdate(prevProps: Props) {
+    if (isEqual(omitIgnoredProps(prevProps), omitIgnoredProps(this.props))) {
+      return;
+    }
+
+    this.fetchData();
+  }
+
+  get path() {
+    const {organization} = this.props;
+
+    return `/organizations/${organization.slug}/sessions/`;
+  }
+
+  get baseQueryParams() {
+    const {
+      project,
+      environment,
+      field,
+      statsPeriod,
+      start,
+      end,
+      query,
+      groupBy,
+      interval,
+      organization,
+    } = this.props;
+
+    return {
+      project,
+      environment,
+      field,
+      statsPeriod,
+      query,
+      groupBy,
+      start,
+      end,
+      interval: interval
+        ? interval
+        : getSessionsInterval(
+            {start, end, period: statsPeriod},
+            {highFidelity: organization.features.includes('minute-resolution-sessions')}
+          ),
+    };
+  }
+
+  fetchData = async () => {
+    const {api, disabled} = this.props;
+
+    if (disabled) {
+      return;
+    }
+
+    api.clear();
+    this.setState(state => ({
+      reloading: state.response !== null,
+      errored: false,
+    }));
+
+    try {
+      const response: SessionApiResponse = await api.requestPromise(this.path, {
+        query: this.baseQueryParams,
+      });
+
+      this.setState({
+        reloading: false,
+        response,
+      });
+    } catch (error) {
+      addErrorMessage(error.responseJSON?.detail ??
t('Error loading health data')); + this.setState({ + reloading: false, + errored: true, + }); + } + }; + + render() { + const {reloading, errored, response} = this.state; + const {children} = this.props; + + const loading = response === null; + + return children({ + loading, + reloading, + errored, + response, + }); + } +} + +export default SessionsRequest; diff --git a/static/app/components/modals/addDashboardWidgetModal.tsx b/static/app/components/modals/addDashboardWidgetModal.tsx index ca6715795855de..65bbb91b9b5abf 100644 --- a/static/app/components/modals/addDashboardWidgetModal.tsx +++ b/static/app/components/modals/addDashboardWidgetModal.tsx @@ -1,5 +1,6 @@ import * as React from 'react'; import {browserHistory} from 'react-router'; +import {components, OptionProps} from 'react-select'; import {css} from '@emotion/react'; import styled from '@emotion/styled'; import cloneDeep from 'lodash/cloneDeep'; @@ -15,7 +16,7 @@ import ButtonBar from 'app/components/buttonBar'; import WidgetQueriesForm from 'app/components/dashboards/widgetQueriesForm'; import SelectControl from 'app/components/forms/selectControl'; import {PanelAlert} from 'app/components/panels'; -import {t} from 'app/locale'; +import {t, tct} from 'app/locale'; import space from 'app/styles/space'; import { DateString, @@ -35,6 +36,7 @@ import { DashboardDetails, DashboardListItem, DisplayType, + MAX_WIDGETS, Widget, WidgetQuery, } from 'app/views/dashboardsV2/types'; @@ -47,6 +49,8 @@ import {generateFieldOptions} from 'app/views/eventsV2/utils'; import Input from 'app/views/settings/components/forms/controls/input'; import Field from 'app/views/settings/components/forms/field'; +import Tooltip from '../tooltip'; + export type DashboardWidgetModalOptions = { organization: Organization; dashboard?: DashboardDetails; @@ -54,7 +58,8 @@ export type DashboardWidgetModalOptions = { onAddWidget?: (data: Widget) => void; widget?: Widget; onUpdateWidget?: (nextWidget: Widget) => void; - defaultQuery?: string; + defaultWidgetQuery?: WidgetQuery; + defaultTableColumns?: readonly string[]; defaultTitle?: string; fromDiscover?: boolean; start?: DateString; @@ -81,8 +86,9 @@ type State = { queries: Widget['queries']; loading: boolean; errors?: Record; - dashboards: SelectValue[]; + dashboards: DashboardListItem[]; selectedDashboard?: SelectValue; + userHasModified: boolean; }; const newQuery = { @@ -95,17 +101,18 @@ class AddDashboardWidgetModal extends React.Component { constructor(props: Props) { super(props); - const {widget, defaultQuery, defaultTitle, fromDiscover} = props; + const {widget, defaultWidgetQuery, defaultTitle, fromDiscover} = props; if (!widget) { this.state = { title: defaultTitle ?? '', displayType: DisplayType.LINE, interval: '5m', - queries: [{...newQuery, ...(defaultQuery ? {conditions: defaultQuery} : {})}], + queries: [defaultWidgetQuery ? 
{...defaultWidgetQuery} : {...newQuery}], errors: undefined, loading: !!fromDiscover, dashboards: [], + userHasModified: false, }; return; } @@ -118,6 +125,7 @@ class AddDashboardWidgetModal extends React.Component { errors: undefined, loading: false, dashboards: [], + userHasModified: false, }; } @@ -179,8 +187,8 @@ class AddDashboardWidgetModal extends React.Component { if ( !selectedDashboard || !( - dashboards.find(({label, value}) => { - return label === selectedDashboard?.label && value === selectedDashboard?.value; + dashboards.find(({title, id}) => { + return title === selectedDashboard?.label && id === selectedDashboard?.value; }) || selectedDashboard.value === 'new' ) ) { @@ -226,13 +234,29 @@ class AddDashboardWidgetModal extends React.Component { }; handleFieldChange = (field: string) => (value: string) => { + const {defaultWidgetQuery, defaultTableColumns} = this.props; this.setState(prevState => { const newState = cloneDeep(prevState); set(newState, field, value); if (field === 'displayType') { const displayType = value as Widget['displayType']; - set(newState, 'queries', normalizeQueries(displayType, prevState.queries)); + const normalized = normalizeQueries(displayType, prevState.queries); + + // If switching to Table visualization, use saved query fields for Y-Axis if user has not made query changes + if (defaultWidgetQuery && defaultTableColumns && !prevState.userHasModified) { + if (displayType === DisplayType.TABLE) { + normalized.forEach(query => { + query.fields = [...defaultTableColumns]; + }); + } else { + normalized.forEach(query => { + query.fields = [...defaultWidgetQuery.fields]; + }); + } + } + + set(newState, 'queries', normalized); } return {...newState, errors: undefined}; @@ -243,6 +267,7 @@ class AddDashboardWidgetModal extends React.Component { this.setState(prevState => { const newState = cloneDeep(prevState); set(newState, `queries.${index}`, widgetQuery); + set(newState, 'userHasModified', true); return {...newState, errors: undefined}; }); @@ -288,10 +313,7 @@ class AddDashboardWidgetModal extends React.Component { ); try { - const response = await promise; - const dashboards = response.map(({id, title}) => { - return {label: title, value: id}; - }); + const dashboards = await promise; this.setState({ dashboards, }); @@ -311,6 +333,13 @@ class AddDashboardWidgetModal extends React.Component { } renderDashboardSelector() { const {errors, loading, dashboards} = this.state; + const dashboardOptions = dashboards.map(d => { + return { + label: d.title, + value: d.id, + isDisabled: d.widgetDisplay.length >= MAX_WIDGETS, + }; + }); return (
@@ -329,9 +358,30 @@ class AddDashboardWidgetModal extends React.Component {
             onChange={(option: SelectValue<string>) => this.handleDashboardChange(option)}
             disabled={loading}
+            components={{
+              Option: ({label, data, ...optionProps}: OptionProps) => (
+                <Tooltip
+                  disabled={!data.isDisabled}
+                  title={tct('Max widgets ([maxWidgets]) per dashboard reached.', {
+                    maxWidgets: MAX_WIDGETS,
+                  })}
+                  containerDisplayMode="block"
+                  position="right"
+                >
+                  <components.Option label={label} data={data} {...optionProps} />
+                </Tooltip>
+              ),
+            }}
           />
diff --git a/static/app/components/pageAlertBar.tsx b/static/app/components/pageAlertBar.tsx
new file mode 100644
index 00000000000000..e7ac855d0f7be5
--- /dev/null
+++ b/static/app/components/pageAlertBar.tsx
@@ -0,0 +1,13 @@
+import styled from '@emotion/styled';
+
+const PageAlertBar = styled('div')`
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  color: ${p => p.theme.headerBackground};
+  background-color: ${p => p.theme.bannerBackground};
+  padding: 6px 30px;
+  font-size: 14px;
+`;
+
+export default PageAlertBar;
diff --git a/static/app/components/searchSyntax/parser.tsx b/static/app/components/searchSyntax/parser.tsx
index 36068cee4d017c..779d1efd074384 100644
--- a/static/app/components/searchSyntax/parser.tsx
+++ b/static/app/components/searchSyntax/parser.tsx
@@ -800,7 +800,6 @@ const defaultConfig: SearchConfig = {
     'error.handled',
     'error.unhandled',
     'stack.in_app',
-    'key_transaction',
     'team_key_transaction',
   ]),
   allowBoolean: true,
diff --git a/static/app/types/index.tsx b/static/app/types/index.tsx
index eb064df27f3f94..c22fcc72595be2 100644
--- a/static/app/types/index.tsx
+++ b/static/app/types/index.tsx
@@ -1655,6 +1655,7 @@ export type NewQuery = {
   // Graph
   yAxis?: string[];
   display?: string;
+  topEvents?: string;
   teams?: Readonly<('myteams' | number)[]>;
 };
 
diff --git a/static/app/utils/analytics/growthAnalyticsEvents.tsx b/static/app/utils/analytics/growthAnalyticsEvents.tsx
index 543f3b144a6e8e..d7408c45512f92 100644
--- a/static/app/utils/analytics/growthAnalyticsEvents.tsx
+++ b/static/app/utils/analytics/growthAnalyticsEvents.tsx
@@ -66,6 +66,10 @@ export type GrowthEventParameters = {
   'growth.sample_transaction_docs_link_clicked': {
     project_id: string;
   };
+  'growth.sample_error_onboarding_link_clicked': {
+    project_id: string;
+    platform?: string;
+  };
   'growth.issue_open_in_discover_btn_clicked': {};
 };
 
@@ -101,6 +105,8 @@ export const growthEventMap: Record = {
   'growth.demo_modal_clicked_continue': 'Growth: Demo Modal Clicked Continue',
   'growth.sample_transaction_docs_link_clicked':
     'Growth: Sample Transacton Docs Link Clicked',
+  'growth.sample_error_onboarding_link_clicked':
+    'Growth: Sample Error Onboarding Link Clicked',
   'growth.issue_open_in_discover_btn_clicked':
     'Growth: Open in Discover Button in Issue Details clicked',
 };
diff --git a/static/app/utils/discover/eventView.tsx b/static/app/utils/discover/eventView.tsx
index 6934c4e9f080d6..eeb71834b8e03e 100644
--- a/static/app/utils/discover/eventView.tsx
+++ b/static/app/utils/discover/eventView.tsx
@@ -29,6 +29,13 @@ import {
   Sort,
   WebVital,
 } from 'app/utils/discover/fields';
+import {
+  CHART_AXIS_OPTIONS,
+  DISPLAY_MODE_FALLBACK_OPTIONS,
+  DISPLAY_MODE_OPTIONS,
+  DisplayModes,
+  TOP_N,
+} from 'app/utils/discover/types';
 import {decodeList, decodeScalar} from 'app/utils/queryString';
 import {
   FieldValueKind,
@@ -43,12 +50,6 @@ import {statsPeriodToDays} from '../dates';
 import {MutableSearch} from '../tokenizeSearch';
 
 import {getSortField} from './fieldRenderers';
-import {
-  CHART_AXIS_OPTIONS,
-  DISPLAY_MODE_FALLBACK_OPTIONS,
-  DISPLAY_MODE_OPTIONS,
-  DisplayModes,
-} from './types';
 
 // Metadata mapping for discover results.
export type MetaType = Record<string, any>;
 
@@ -270,6 +271,7 @@ class EventView {
   environment: Readonly<string[]>;
   yAxis: string | undefined;
   display: string | undefined;
+  topEvents: string | undefined;
   interval: string | undefined;
   expired?: boolean;
   createdBy: User | undefined;
@@ -289,6 +291,7 @@ class EventView {
   environment: Readonly<string[]>;
   yAxis: string | undefined;
   display: string | undefined;
+  topEvents: string | undefined;
   interval?: string;
   expired?: boolean;
   createdBy: User | undefined;
@@ -334,6 +337,7 @@ class EventView {
     this.environment = environment;
     this.yAxis = props.yAxis;
     this.display = props.display;
+    this.topEvents = props.topEvents;
     this.interval = props.interval;
     this.createdBy = props.createdBy;
     this.expired = props.expired;
@@ -359,6 +363,7 @@ class EventView {
       environment: collectQueryStringByKey(location.query, 'environment'),
       yAxis: decodeScalar(location.query.yAxis),
       display: decodeScalar(location.query.display),
+      topEvents: decodeScalar(location.query.topEvents),
       interval: decodeScalar(location.query.interval),
       createdBy: undefined,
       additionalConditions: new MutableSearch([]),
@@ -432,6 +437,7 @@ class EventView {
       // Workaround to only use the first yAxis since eventView yAxis doesn't accept string[]
       yAxis: Array.isArray(saved.yAxis) ? saved.yAxis[0] : saved.yAxis,
       display: saved.display,
+      topEvents: saved.topEvents ? saved.topEvents.toString() : undefined,
       createdBy: saved.createdBy,
       expired: saved.expired,
       additionalConditions: new MutableSearch([]),
@@ -468,6 +474,11 @@ class EventView {
         // Workaround to only use the first yAxis since eventView yAxis doesn't accept string[]
         (Array.isArray(saved.yAxis) ? saved.yAxis[0] : saved.yAxis),
       display: decodeScalar(location.query.display) || saved.display,
+      topEvents: (
+        decodeScalar(location.query.topEvents) ||
+        saved.topEvents ||
+        TOP_N
+      ).toString(),
       interval: decodeScalar(location.query.interval),
       createdBy: saved.createdBy,
       expired: saved.expired,
@@ -498,6 +509,7 @@ class EventView {
       'project',
       'environment',
       'display',
+      'topEvents',
     ];
 
     for (const key of keys) {
@@ -556,6 +568,7 @@ class EventView {
       environment: this.environment,
       yAxis: this.yAxis ?
[this.yAxis] : undefined, display: this.display, + topEvents: this.topEvents, }; if (!newQuery.query) { @@ -616,6 +629,7 @@ class EventView { query: undefined, yAxis: undefined, display: undefined, + topEvents: undefined, interval: undefined, }; @@ -638,6 +652,7 @@ class EventView { query: this.query, yAxis: this.yAxis || this.getYAxis(), display: this.display, + topEvents: this.topEvents, interval: this.interval, }; @@ -727,6 +742,7 @@ class EventView { environment: this.environment, yAxis: this.yAxis, display: this.display, + topEvents: this.topEvents, interval: this.interval, expired: this.expired, createdBy: this.createdBy, diff --git a/static/app/utils/discover/fieldRenderers.tsx b/static/app/utils/discover/fieldRenderers.tsx index 32317f8b526f6b..7ae0a43381ff0f 100644 --- a/static/app/utils/discover/fieldRenderers.tsx +++ b/static/app/utils/discover/fieldRenderers.tsx @@ -39,7 +39,6 @@ import { } from 'app/views/performance/transactionSummary/filter'; import ArrayValue from './arrayValue'; -import KeyTransactionField from './keyTransactionField'; import { BarContainer, Container, @@ -207,7 +206,6 @@ type SpecialFields = { 'error.handled': SpecialField; issue: SpecialField; release: SpecialField; - key_transaction: SpecialField; team_key_transaction: SpecialField; 'trend_percentage()': SpecialField; 'timestamp.to_hour': SpecialField; @@ -384,19 +382,6 @@ const SPECIAL_FIELDS: SpecialFields = { return {[1, null].includes(value) ? 'true' : 'false'}; }, }, - key_transaction: { - sortField: null, - renderFunc: (data, {organization}) => ( - - - - ), - }, team_key_transaction: { sortField: null, renderFunc: (data, {organization}) => ( diff --git a/static/app/utils/discover/fields.tsx b/static/app/utils/discover/fields.tsx index 29da299868fcfd..e73c4812fd0ef0 100644 --- a/static/app/utils/discover/fields.tsx +++ b/static/app/utils/discover/fields.tsx @@ -125,6 +125,7 @@ export const AGGREGATIONS = { { kind: 'column', columnTypes: ['string', 'integer', 'number', 'duration', 'date', 'boolean'], + defaultValue: 'user', required: true, }, ], @@ -217,6 +218,7 @@ export const AGGREGATIONS = { 'date', 'percentage', ]), + defaultValue: 'transaction.duration', required: true, }, ], @@ -235,6 +237,7 @@ export const AGGREGATIONS = { 'date', 'percentage', ]), + defaultValue: 'transaction.duration', required: true, }, ], @@ -248,6 +251,7 @@ export const AGGREGATIONS = { kind: 'column', columnTypes: validateForNumericAggregate(['duration', 'number', 'percentage']), required: true, + defaultValue: 'transaction.duration', }, ], outputType: null, @@ -260,6 +264,7 @@ export const AGGREGATIONS = { kind: 'column', columnTypes: ['string', 'integer', 'number', 'duration', 'date', 'boolean'], required: true, + defaultValue: 'transaction.duration', }, ], outputType: null, @@ -717,7 +722,6 @@ export const TRACING_FIELDS = [ 'user_misery', 'eps', 'epm', - 'key_transaction', 'team_key_transaction', ...Object.keys(MEASUREMENTS), ...SPAN_OP_BREAKDOWN_FIELDS, diff --git a/static/app/utils/discover/keyTransactionField.tsx b/static/app/utils/discover/keyTransactionField.tsx deleted file mode 100644 index 654448c187f6d7..00000000000000 --- a/static/app/utils/discover/keyTransactionField.tsx +++ /dev/null @@ -1,101 +0,0 @@ -import {Component} from 'react'; -import styled from '@emotion/styled'; - -import {toggleKeyTransaction} from 'app/actionCreators/performance'; -import {Client} from 'app/api'; -import {IconStar} from 'app/icons'; -import {Organization, Project} from 'app/types'; -import withApi from 'app/utils/withApi'; 
-import withProjects from 'app/utils/withProjects'; - -type Props = { - api: Client; - projects: Project[]; - /** - * This prop is only used to seed the initial rendering state of this component. - * After seeding the state, this value should not be used anymore. - */ - isKeyTransaction: boolean; - organization: Organization | undefined; - projectSlug: string | undefined; - transactionName: string | undefined; -}; - -type State = { - isKeyTransaction: boolean; -}; - -class KeyTransactionField extends Component { - constructor(props: Props) { - super(props); - - this.state = { - isKeyTransaction: !!props.isKeyTransaction, - }; - } - - getProjectId(): string | null { - const {projects, projectSlug} = this.props; - const project = projects.find(proj => proj.slug === projectSlug); - if (!project) { - return null; - } - return project.id; - } - - toggleKeyTransactionHandler = () => { - const {api, organization, transactionName} = this.props; - const {isKeyTransaction} = this.state; - const projectId = this.getProjectId(); - - // All the props are guaranteed to be not undefined at this point - // as they have all been validated in the render method. - toggleKeyTransaction( - api, - isKeyTransaction, - organization!.slug, - [projectId!], - transactionName! - ).then(() => { - this.setState({ - isKeyTransaction: !isKeyTransaction, - }); - }); - }; - - render() { - const {organization, projectSlug, transactionName} = this.props; - const {isKeyTransaction} = this.state; - - const star = ( - - ); - - // All these fields need to be defined in order to toggle a key transaction - // Since they're not defined, we just render a plain star icon with no action - // associated with it - if ( - organization === undefined || - projectSlug === undefined || - transactionName === undefined || - this.getProjectId() === null - ) { - return star; - } - - return {star}; - } -} - -const KeyColumn = styled('div')``; - -const StyledKey = styled(IconStar)` - cursor: pointer; - vertical-align: middle; -`; - -export default withApi(withProjects(KeyTransactionField)); diff --git a/static/app/utils/discover/types.tsx b/static/app/utils/discover/types.tsx index 600650e3cb6b74..d0335a25c22f4a 100644 --- a/static/app/utils/discover/types.tsx +++ b/static/app/utils/discover/types.tsx @@ -11,6 +11,8 @@ export enum DisplayModes { DAILYTOP5 = 'dailytop5', } +export const TOP_EVENT_MODES: string[] = [DisplayModes.TOP5, DisplayModes.DAILYTOP5]; + export const DISPLAY_MODE_OPTIONS: SelectValue[] = [ {value: DisplayModes.DEFAULT, label: t('Total Period')}, {value: DisplayModes.PREVIOUS, label: t('Previous Period')}, diff --git a/static/app/utils/performance/vitals/vitalsDetailsTableQuery.tsx b/static/app/utils/performance/vitals/vitalsDetailsTableQuery.tsx index d35dfcb7422081..4552915b1cfbb6 100644 --- a/static/app/utils/performance/vitals/vitalsDetailsTableQuery.tsx +++ b/static/app/utils/performance/vitals/vitalsDetailsTableQuery.tsx @@ -13,7 +13,7 @@ type BaseDataRow = { transaction: string; count: number; count_unique_user: number; - key_transaction: number; + team_key_transaction: number; [key: string]: React.ReactText; }; diff --git a/static/app/views/admin/adminSettings.tsx b/static/app/views/admin/adminSettings.tsx index f0607bd28e033c..3665c1ab2228eb 100644 --- a/static/app/views/admin/adminSettings.tsx +++ b/static/app/views/admin/adminSettings.tsx @@ -1,8 +1,9 @@ import isUndefined from 'lodash/isUndefined'; -import {ApiForm} from 'app/components/forms'; +import {Panel, PanelHeader} from 'app/components/panels'; import {t} 
from 'app/locale'; import AsyncView from 'app/views/asyncView'; +import {Form} from 'app/views/settings/components/forms'; import {getOption, getOptionField} from './options'; @@ -61,29 +62,34 @@ export default class AdminSettings extends AsyncView<{}, State> {
     return (
       <div>
         <h3>{t('Settings')}</h3>

-        <ApiForm
-          apiMethod="PUT"
-          apiEndpoint={this.getEndpoint()}
-        >
-          <h4>General</h4>
-          {fields['system.url-prefix']}
-          {fields['system.admin-email']}
-          {fields['system.support-email']}
-          {fields['system.security-email']}
-          {fields['system.rate-limit']}
+        <Form
+          apiMethod="PUT"
+          apiEndpoint={this.getEndpoint()}
+          saveOnBlur
+        >
+          <Panel>
+            <PanelHeader>General</PanelHeader>
+            {fields['system.url-prefix']}
+            {fields['system.admin-email']}
+            {fields['system.support-email']}
+            {fields['system.security-email']}
+            {fields['system.rate-limit']}
+          </Panel>

-          <h4>Security & Abuse</h4>
-          {fields['auth.allow-registration']}
-          {fields['auth.ip-rate-limit']}
-          {fields['auth.user-rate-limit']}
-          {fields['api.rate-limit.org-create']}
+          <Panel>
+            <PanelHeader>Security & Abuse</PanelHeader>
+            {fields['auth.allow-registration']}
+            {fields['auth.ip-rate-limit']}
+            {fields['auth.user-rate-limit']}
+            {fields['api.rate-limit.org-create']}
+          </Panel>

-          <h4>Beacon</h4>
-          {fields['beacon.anonymous']}
-        </ApiForm>
+          <Panel>
+            <PanelHeader>Beacon</PanelHeader>
+            {fields['beacon.anonymous']}
+          </Panel>
+        </Form>
       </div>
); } diff --git a/static/app/views/admin/options.tsx b/static/app/views/admin/options.tsx index 9394eaa6fa9bba..26dd566884af27 100644 --- a/static/app/views/admin/options.tsx +++ b/static/app/views/admin/options.tsx @@ -1,14 +1,14 @@ import * as React from 'react'; import keyBy from 'lodash/keyBy'; +import {t, tct} from 'app/locale'; +import ConfigStore from 'app/stores/configStore'; import { BooleanField, EmailField, RadioBooleanField, TextField, -} from 'app/components/forms'; -import {t, tct} from 'app/locale'; -import ConfigStore from 'app/stores/configStore'; +} from 'app/views/settings/components/forms'; type Section = { key: string; @@ -26,6 +26,7 @@ type Field = { required?: boolean; allowEmpty?: boolean; disabledReason?: string; + disabled?: boolean; defaultValue?: () => string | false; component?: React.ComponentType; }; @@ -194,11 +195,11 @@ const disabledReasons = { smtpDisabled: 'SMTP mail has been disabled, so this option is unavailable', }; -export function getOption(option: string) { +export function getOption(option: string): Field { return definitionsMap[option]; } -export function getOptionDefault(option: string) { +export function getOptionDefault(option: string): string | false | undefined { const meta = getOption(option); return meta.defaultValue ? meta.defaultValue() : undefined; } diff --git a/static/app/views/alerts/incidentRules/triggers/chart/index.tsx b/static/app/views/alerts/incidentRules/triggers/chart/index.tsx index 799b776ebf3b51..e82ad6c6283372 100644 --- a/static/app/views/alerts/incidentRules/triggers/chart/index.tsx +++ b/static/app/views/alerts/incidentRules/triggers/chart/index.tsx @@ -19,22 +19,15 @@ import LoadingMask from 'app/components/loadingMask'; import Placeholder from 'app/components/placeholder'; import {t} from 'app/locale'; import space from 'app/styles/space'; -import {Organization, Project, SessionApiResponse, SessionField} from 'app/types'; +import {Organization, Project, SessionApiResponse} from 'app/types'; import {Series, SeriesDataUnit} from 'app/types/echarts'; import {getCount, getCrashFreeRateSeries} from 'app/utils/sessions'; import withApi from 'app/utils/withApi'; -import {isSessionAggregate} from 'app/views/alerts/utils'; +import {isSessionAggregate, SESSION_AGGREGATE_TO_FIELD} from 'app/views/alerts/utils'; import {AlertWizardAlertNames} from 'app/views/alerts/wizard/options'; import {getAlertTypeFromAggregateDataset} from 'app/views/alerts/wizard/utils'; -import { - Dataset, - IncidentRule, - SessionsAggregate, - TimePeriod, - TimeWindow, - Trigger, -} from '../../types'; +import {Dataset, IncidentRule, TimePeriod, TimeWindow, Trigger} from '../../types'; import ThresholdsChart from './thresholdsChart'; @@ -115,11 +108,6 @@ const AGGREGATE_FUNCTIONS = { Math.min(...seriesChunk.map(series => series.value)), }; -const SESSION_AGGREGATE_TO_FIELD = { - [SessionsAggregate.CRASH_FREE_SESSIONS]: SessionField.SESSIONS, - [SessionsAggregate.CRASH_FREE_USERS]: SessionField.USERS, -}; - const TIME_WINDOW_TO_SESSION_INTERVAL = { [TimeWindow.THIRTY_MINUTES]: '30m', [TimeWindow.ONE_HOUR]: '1h', diff --git a/static/app/views/alerts/incidentRules/triggers/chart/thresholdsChart.tsx b/static/app/views/alerts/incidentRules/triggers/chart/thresholdsChart.tsx index 96b2ab888ee1ef..fef5587988e597 100644 --- a/static/app/views/alerts/incidentRules/triggers/chart/thresholdsChart.tsx +++ b/static/app/views/alerts/incidentRules/triggers/chart/thresholdsChart.tsx @@ -2,17 +2,18 @@ import {PureComponent} from 'react'; import color from 'color'; 
import debounce from 'lodash/debounce'; import flatten from 'lodash/flatten'; -import round from 'lodash/round'; import Graphic from 'app/components/charts/components/graphic'; import LineChart, {LineChartSeries} from 'app/components/charts/lineChart'; import space from 'app/styles/space'; import {GlobalSelection} from 'app/types'; import {ReactEchartsRef, Series} from 'app/types/echarts'; -import {defined} from 'app/utils'; -import {axisLabelFormatter, tooltipFormatter} from 'app/utils/discover/charts'; import theme from 'app/utils/theme'; -import {isSessionAggregate} from 'app/views/alerts/utils'; +import { + alertAxisFormatter, + alertTooltipValueFormatter, + isSessionAggregate, +} from 'app/views/alerts/utils'; import {AlertRuleThresholdType, IncidentRule, Trigger} from '../../types'; @@ -273,24 +274,6 @@ export default class ThresholdsChart extends PureComponent { ); }; - tooltipValueFormatter = (value: number, seriesName?: string) => { - const {aggregate} = this.props; - if (isSessionAggregate(aggregate)) { - return defined(value) ? `${value}%` : '\u2015'; - } - - return tooltipFormatter(value, seriesName); - }; - - axisFormatter = (value: number) => { - const {data, aggregate} = this.props; - if (isSessionAggregate(aggregate)) { - return defined(value) ? `${round(value, 2)}%` : '\u2015'; - } - - return axisLabelFormatter(value, data.length ? data[0].seriesName : ''); - }; - clampMaxValue(value: number) { // When we apply top buffer to the crash free percentage (99.7% * 1.03), it // can cross 100%, so we clamp it @@ -302,7 +285,7 @@ export default class ThresholdsChart extends PureComponent { } render() { - const {data, triggers, period} = this.props; + const {data, triggers, period, aggregate} = this.props; const dataWithoutRecentBucket: LineChartSeries[] = data?.map( ({data: eventData, ...restOfData}) => ({ ...restOfData, @@ -326,13 +309,15 @@ export default class ThresholdsChart extends PureComponent { const chartOptions = { tooltip: { - valueFormatter: this.tooltipValueFormatter, + valueFormatter: (value: number, seriesName?: string) => + alertTooltipValueFormatter(value, seriesName ?? '', aggregate), }, yAxis: { min: this.state.yAxisMin ?? undefined, max: this.state.yAxisMax ?? undefined, axisLabel: { - formatter: this.axisFormatter, + formatter: (value: number) => + alertAxisFormatter(value, data[0].seriesName, aggregate), }, }, }; diff --git a/static/app/views/alerts/rules/details/body.tsx b/static/app/views/alerts/rules/details/body.tsx index f67f5bcc58795b..0bdfb28045043c 100644 --- a/static/app/views/alerts/rules/details/body.tsx +++ b/static/app/views/alerts/rules/details/body.tsx @@ -364,7 +364,9 @@ export default class DetailsBody extends React.Component { projects={projects} interval={this.getInterval()} filter={this.getFilter()} - query={queryWithTypeFilter} + query={ + rule.dataset === Dataset.SESSIONS ? 
query : queryWithTypeFilter + } orgId={orgId} handleZoom={handleZoom} /> diff --git a/static/app/views/alerts/rules/details/metricChart.tsx b/static/app/views/alerts/rules/details/metricChart.tsx index c65deb208f3065..0d77a17c231f5f 100644 --- a/static/app/views/alerts/rules/details/metricChart.tsx +++ b/static/app/views/alerts/rules/details/metricChart.tsx @@ -14,6 +14,7 @@ import MarkArea from 'app/components/charts/components/markArea'; import MarkLine from 'app/components/charts/components/markLine'; import EventsRequest from 'app/components/charts/eventsRequest'; import LineChart, {LineChartSeries} from 'app/components/charts/lineChart'; +import SessionsRequest from 'app/components/charts/sessionsRequest'; import {SectionHeading} from 'app/components/charts/styles'; import { parseStatsPeriod, @@ -26,16 +27,22 @@ import {t} from 'app/locale'; import ConfigStore from 'app/stores/configStore'; import space from 'app/styles/space'; import {AvatarProject, DateString, Organization, Project} from 'app/types'; -import {ReactEchartsRef} from 'app/types/echarts'; +import {ReactEchartsRef, Series} from 'app/types/echarts'; import {getUtcDateString} from 'app/utils/dates'; +import {getCrashFreeRateSeries} from 'app/utils/sessions'; import theme from 'app/utils/theme'; import {alertDetailsLink} from 'app/views/alerts/details'; import {makeDefaultCta} from 'app/views/alerts/incidentRules/incidentRulePresets'; -import {IncidentRule} from 'app/views/alerts/incidentRules/types'; +import {Dataset, IncidentRule} from 'app/views/alerts/incidentRules/types'; import {AlertWizardAlertNames} from 'app/views/alerts/wizard/options'; import {getAlertTypeFromAggregateDataset} from 'app/views/alerts/wizard/utils'; import {Incident, IncidentActivityType, IncidentStatus} from '../../types'; +import { + alertAxisFormatter, + alertTooltipValueFormatter, + SESSION_AGGREGATE_TO_FIELD, +} from '../../utils'; import {TimePeriodType} from './constants'; @@ -207,8 +214,8 @@ class MetricChart extends React.PureComponent { } const seriesData = data[0].data; - const seriesStart = seriesData[0].name as number; - const seriesEnd = seriesData[seriesData.length - 1].name as number; + const seriesStart = moment(seriesData[0].name).valueOf(); + const seriesEnd = moment(seriesData[seriesData.length - 1].name).valueOf(); const ruleChanged = moment(dateModified).valueOf(); if (ruleChanged < seriesStart) { @@ -302,94 +309,270 @@ class MetricChart extends React.PureComponent { ); } - renderChart( - data: LineChartSeries[], - series: LineChartSeries[], - areaSeries: any[], - maxThresholdValue: number, - maxSeriesValue: number - ) { + renderChart(loading: boolean, timeseriesData?: Series[]) { const { router, + selectedIncident, interval, handleZoom, + filter, + incidents, + rule, + organization, timePeriod: {start, end}, } = this.props; const {dateModified, timeWindow} = this.props.rule || {}; - return ( - handleZoom(zoomArgs.start, zoomArgs.end)} - > - {zoomRenderProps => ( - maxSeriesValue ? 
{max: maxThresholdValue} : undefined + if (loading || !timeseriesData) { + return this.renderEmpty(); + } + + const criticalTrigger = rule.triggers.find(({label}) => label === 'critical'); + const warningTrigger = rule.triggers.find(({label}) => label === 'warning'); + + const series: LineChartSeries[] = [...timeseriesData]; + const areaSeries: any[] = []; + // Ensure series data appears above incident lines + series[0].z = 100; + const dataArr = timeseriesData[0].data; + const maxSeriesValue = dataArr.reduce( + (currMax, coord) => Math.max(currMax, coord.value), + 0 + ); + const firstPoint = moment(dataArr[0].name).valueOf(); + const lastPoint = moment(dataArr[dataArr.length - 1].name).valueOf(); + const totalDuration = lastPoint - firstPoint; + let criticalDuration = 0; + let warningDuration = 0; + + series.push(createStatusAreaSeries(theme.green300, firstPoint, lastPoint)); + if (incidents) { + // select incidents that fall within the graph range + const periodStart = moment.utc(firstPoint); + + incidents + .filter( + incident => + !incident.dateClosed || moment(incident.dateClosed).isAfter(periodStart) + ) + .forEach(incident => { + const statusChanges = incident.activities + ?.filter( + ({type, value}) => + type === IncidentActivityType.STATUS_CHANGE && + value && + [`${IncidentStatus.WARNING}`, `${IncidentStatus.CRITICAL}`].includes( + value + ) + ) + .sort( + (a, b) => moment(a.dateCreated).valueOf() - moment(b.dateCreated).valueOf() + ); + + const incidentEnd = incident.dateClosed ?? moment().valueOf(); + + const timeWindowMs = rule.timeWindow * 60 * 1000; + const incidentColor = + warningTrigger && + statusChanges && + !statusChanges.find(({value}) => value === `${IncidentStatus.CRITICAL}`) + ? theme.yellow300 + : theme.red300; + + const incidentStartDate = moment(incident.dateStarted).valueOf(); + const incidentCloseDate = incident.dateClosed + ? moment(incident.dateClosed).valueOf() + : lastPoint; + const incidentStartValue = dataArr.find( + point => point.name >= incidentStartDate + ); + series.push( + createIncidentSeries( + router, + organization, + incidentColor, + incidentStartDate, + incident, + incidentStartValue, + series[0].seriesName + ) + ); + const areaStart = Math.max(moment(incident.dateStarted).valueOf(), firstPoint); + const areaEnd = Math.min( + statusChanges?.length && statusChanges[0].dateCreated + ? moment(statusChanges[0].dateCreated).valueOf() - timeWindowMs + : moment(incidentEnd).valueOf(), + lastPoint + ); + const areaColor = warningTrigger ? theme.yellow300 : theme.red300; + if (areaEnd > areaStart) { + series.push(createStatusAreaSeries(areaColor, areaStart, areaEnd)); + + if (areaColor === theme.yellow300) { + warningDuration += Math.abs(areaEnd - areaStart); + } else { + criticalDuration += Math.abs(areaEnd - areaStart); } - series={[...series, ...areaSeries]} - graphic={Graphic({ - elements: this.getRuleChangeThresholdElements(data), - })} - tooltip={{ - formatter: seriesParams => { - // seriesParams can be object instead of array - const pointSeries = Array.isArray(seriesParams) - ? seriesParams - : [seriesParams]; - const {marker, data: pointData, seriesName} = pointSeries[0]; - const [pointX, pointY] = pointData as [number, number]; - const isModified = - dateModified && pointX <= new Date(dateModified).getTime(); - - const startTime = formatTooltipDate(moment(pointX), 'MMM D LT'); - const {period, periodLength} = parseStatsPeriod(interval) ?? 
{ - periodLength: 'm', - period: `${timeWindow}`, - }; - const endTime = formatTooltipDate( - moment(pointX).add( - parseInt(period, 10), - periodLength as StatsPeriodType - ), - 'MMM D LT' - ); - const title = isModified - ? `${t('Alert Rule Modified')}` - : `${marker} ${seriesName}`; - const value = isModified - ? `${seriesName} ${pointY.toLocaleString()}` - : pointY.toLocaleString(); - - return [ - `
<div class="tooltip-series"><div>`, - `<span class="tooltip-label">${title}</span>${value}`, - `</div></div>`, - `<div class="tooltip-date">${startTime} &mdash; ${endTime}</div>`, - `<div class="tooltip-arrow"></div>
`, - ].join(''); - }, - }} - onFinished={() => { - // We want to do this whenever the chart finishes re-rendering so that we can update the dimensions of - // any graphics related to the triggers (e.g. the threshold areas + boundaries) - this.updateDimensions(); - }} - /> - )} -
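The formatter above builds the ECharts tooltip as a raw HTML string (the same markup is re-added below with session-aware value formatting). A minimal standalone sketch of that markup, assuming the title, value, and time strings are already formatted for display:

// Sketch only: mirrors the tooltip markup assembled by the formatter;
// all four arguments are assumed to be preformatted display strings.
function formatAlertTooltip(
  title: string,
  value: string,
  startTime: string,
  endTime: string
): string {
  return [
    `<div class="tooltip-series"><div>`,
    `<span class="tooltip-label">${title}</span>${value}`,
    `</div></div>`,
    `<div class="tooltip-date">${startTime} &mdash; ${endTime}</div>`,
    `<div class="tooltip-arrow"></div>`,
  ].join('');
}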
+ } + + statusChanges?.forEach((activity, idx) => { + const statusAreaStart = Math.max( + moment(activity.dateCreated).valueOf() - timeWindowMs, + firstPoint + ); + const statusAreaEnd = Math.min( + idx === statusChanges.length - 1 + ? moment(incidentEnd).valueOf() + : moment(statusChanges[idx + 1].dateCreated).valueOf() - timeWindowMs, + lastPoint + ); + const statusAreaColor = + activity.value === `${IncidentStatus.CRITICAL}` + ? theme.red300 + : theme.yellow300; + if (statusAreaEnd > statusAreaStart) { + series.push( + createStatusAreaSeries(statusAreaColor, statusAreaStart, statusAreaEnd) + ); + if (statusAreaColor === theme.yellow300) { + warningDuration += Math.abs(statusAreaEnd - statusAreaStart); + } else { + criticalDuration += Math.abs(statusAreaEnd - statusAreaStart); + } + } + }); + + if (selectedIncident && incident.id === selectedIncident.id) { + const selectedIncidentColor = + incidentColor === theme.yellow300 ? theme.yellow100 : theme.red100; + + areaSeries.push({ + type: 'line', + markArea: MarkArea({ + silent: true, + itemStyle: { + color: color(selectedIncidentColor).alpha(0.42).rgb().string(), + }, + data: [[{xAxis: incidentStartDate}, {xAxis: incidentCloseDate}]] as any, + }), + data: [], + }); + } + }); + } + + let maxThresholdValue = 0; + if (warningTrigger?.alertThreshold) { + const {alertThreshold} = warningTrigger; + const warningThresholdLine = createThresholdSeries(theme.yellow300, alertThreshold); + series.push(warningThresholdLine); + maxThresholdValue = Math.max(maxThresholdValue, alertThreshold); + } + + if (criticalTrigger?.alertThreshold) { + const {alertThreshold} = criticalTrigger; + const criticalThresholdLine = createThresholdSeries(theme.red300, alertThreshold); + series.push(criticalThresholdLine); + maxThresholdValue = Math.max(maxThresholdValue, alertThreshold); + } + + return ( + + + + + {AlertWizardAlertNames[getAlertTypeFromAggregateDataset(rule)]} + + {filter} + + handleZoom(zoomArgs.start, zoomArgs.end)} + > + {zoomRenderProps => ( + + alertAxisFormatter( + value, + timeseriesData[0].seriesName, + rule.aggregate + ), + }, + max: maxThresholdValue > maxSeriesValue ? maxThresholdValue : undefined, + }} + series={[...series, ...areaSeries]} + graphic={Graphic({ + elements: this.getRuleChangeThresholdElements(timeseriesData), + })} + tooltip={{ + formatter: seriesParams => { + // seriesParams can be object instead of array + const pointSeries = Array.isArray(seriesParams) + ? seriesParams + : [seriesParams]; + const {marker, data: pointData, seriesName} = pointSeries[0]; + const [pointX, pointY] = pointData as [number, number]; + const pointYFormatted = alertTooltipValueFormatter( + pointY, + seriesName ?? '', + rule.aggregate + ); + const isModified = + dateModified && pointX <= new Date(dateModified).getTime(); + + const startTime = formatTooltipDate(moment(pointX), 'MMM D LT'); + const {period, periodLength} = parseStatsPeriod(interval) ?? { + periodLength: 'm', + period: `${timeWindow}`, + }; + const endTime = formatTooltipDate( + moment(pointX).add( + parseInt(period, 10), + periodLength as StatsPeriodType + ), + 'MMM D LT' + ); + const title = isModified + ? `${t('Alert Rule Modified')}` + : `${marker} ${seriesName}`; + const value = isModified + ? `${seriesName} ${pointYFormatted}` + : pointYFormatted; + + return [ + `
<div class="tooltip-series"><div>`, + `<span class="tooltip-label">${title}</span>${value}`, + `</div></div>`, + `<div class="tooltip-date">${startTime} &mdash; ${endTime}</div>`, + `<div class="tooltip-arrow"></div>
`, + ].join(''); + }, + }} + onFinished={() => { + // We want to do this whenever the chart finishes re-rendering so that we can update the dimensions of + // any graphics related to the triggers (e.g. the threshold areas + boundaries) + this.updateDimensions(); + }} + /> + )} +
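The incident handling added above does double duty: it pushes warning/critical status areas onto the chart series and accumulates how long the alert spent in each state, which renderChartActions then reports. A sketch of that bookkeeping under simplified assumptions (plain millisecond timestamps instead of moment values):

// Each status change opens a window that runs until the next change (or the
// incident close), shifted back by the rule's time window and clamped to the
// visible range; windows are summed per severity.
type StatusChange = {dateCreated: number; value: 'warning' | 'critical'};

function sumStatusDurations(
  changes: StatusChange[],
  incidentEnd: number,
  timeWindowMs: number,
  firstPoint: number,
  lastPoint: number
): {warning: number; critical: number} {
  const totals = {warning: 0, critical: 0};
  changes.forEach((change, idx) => {
    const start = Math.max(change.dateCreated - timeWindowMs, firstPoint);
    const end = Math.min(
      idx === changes.length - 1
        ? incidentEnd
        : changes[idx + 1].dateCreated - timeWindowMs,
      lastPoint
    );
    if (end > start) {
      totals[change.value] += end - start;
    }
  });
  return totals;
}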
+
+ {this.renderChartActions(totalDuration, criticalDuration, warningDuration)} +
); } @@ -404,244 +587,70 @@ class MetricChart extends React.PureComponent { } render() { - const { - api, - router, - rule, - organization, - timePeriod, - selectedIncident, - projects, - interval, - filter, - query, - incidents, - } = this.props; - - const criticalTrigger = rule.triggers.find(({label}) => label === 'critical'); - const warningTrigger = rule.triggers.find(({label}) => label === 'warning'); + const {api, rule, organization, timePeriod, projects, interval, query} = this.props; + const {aggregate, timeWindow, environment, dataset} = rule; // If the chart duration isn't as long as the rollup duration the events-stats // endpoint will return an invalid timeseriesData data set const viableStartDate = getUtcDateString( moment.min( moment.utc(timePeriod.start), - moment.utc(timePeriod.end).subtract(rule.timeWindow, 'minutes') + moment.utc(timePeriod.end).subtract(timeWindow, 'minutes') ) ); const viableEndDate = getUtcDateString( - moment.utc(timePeriod.end).add(rule.timeWindow, 'minutes') + moment.utc(timePeriod.end).add(timeWindow, 'minutes') ); - return ( + return dataset === Dataset.SESSIONS ? ( + p.id).map(p => Number(p.id))} + environment={environment ? [environment] : undefined} + start={viableStartDate} + end={viableEndDate} + query={query} + interval={interval} + field={SESSION_AGGREGATE_TO_FIELD[aggregate]} + groupBy={['session.status']} + > + {({loading, response}) => + this.renderChart(loading, [ + { + seriesName: + AlertWizardAlertNames[ + getAlertTypeFromAggregateDataset({aggregate, dataset: Dataset.SESSIONS}) + ], + data: getCrashFreeRateSeries( + response?.groups, + response?.intervals, + SESSION_AGGREGATE_TO_FIELD[aggregate] + ), + }, + ]) + } + + ) : ( p && p.slug) .map(project => Number(project.id))} interval={interval} start={viableStartDate} end={viableEndDate} - yAxis={rule.aggregate} + yAxis={aggregate} includePrevious={false} - currentSeriesName={rule.aggregate} + currentSeriesName={aggregate} partial={false} referrer="api.alerts.alert-rule-chart" > - {({loading, timeseriesData}) => { - if (loading || !timeseriesData) { - return this.renderEmpty(); - } - - const series: LineChartSeries[] = [...timeseriesData]; - const areaSeries: any[] = []; - // Ensure series data appears above incident lines - series[0].z = 100; - const dataArr = timeseriesData[0].data; - const maxSeriesValue = dataArr.reduce( - (currMax, coord) => Math.max(currMax, coord.value), - 0 - ); - const firstPoint = Number(dataArr[0].name); - const lastPoint = dataArr[dataArr.length - 1].name as number; - const totalDuration = lastPoint - firstPoint; - let criticalDuration = 0; - let warningDuration = 0; - - series.push(createStatusAreaSeries(theme.green300, firstPoint, lastPoint)); - if (incidents) { - // select incidents that fall within the graph range - const periodStart = moment.utc(firstPoint); - - incidents - .filter( - incident => - !incident.dateClosed || moment(incident.dateClosed).isAfter(periodStart) - ) - .forEach(incident => { - const statusChanges = incident.activities - ?.filter( - ({type, value}) => - type === IncidentActivityType.STATUS_CHANGE && - value && - [ - `${IncidentStatus.WARNING}`, - `${IncidentStatus.CRITICAL}`, - ].includes(value) - ) - .sort( - (a, b) => - moment(a.dateCreated).valueOf() - moment(b.dateCreated).valueOf() - ); - - const incidentEnd = incident.dateClosed ?? 
moment().valueOf(); - - const timeWindowMs = rule.timeWindow * 60 * 1000; - const incidentColor = - warningTrigger && - statusChanges && - !statusChanges.find(({value}) => value === `${IncidentStatus.CRITICAL}`) - ? theme.yellow300 - : theme.red300; - - const incidentStartDate = moment(incident.dateStarted).valueOf(); - const incidentCloseDate = incident.dateClosed - ? moment(incident.dateClosed).valueOf() - : lastPoint; - const incidentStartValue = dataArr.find( - point => point.name >= incidentStartDate - ); - series.push( - createIncidentSeries( - router, - organization, - incidentColor, - incidentStartDate, - incident, - incidentStartValue, - series[0].seriesName - ) - ); - const areaStart = Math.max( - moment(incident.dateStarted).valueOf(), - firstPoint - ); - const areaEnd = Math.min( - statusChanges?.length && statusChanges[0].dateCreated - ? moment(statusChanges[0].dateCreated).valueOf() - timeWindowMs - : moment(incidentEnd).valueOf(), - lastPoint - ); - const areaColor = warningTrigger ? theme.yellow300 : theme.red300; - if (areaEnd > areaStart) { - series.push(createStatusAreaSeries(areaColor, areaStart, areaEnd)); - - if (areaColor === theme.yellow300) { - warningDuration += Math.abs(areaEnd - areaStart); - } else { - criticalDuration += Math.abs(areaEnd - areaStart); - } - } - - statusChanges?.forEach((activity, idx) => { - const statusAreaStart = Math.max( - moment(activity.dateCreated).valueOf() - timeWindowMs, - firstPoint - ); - const statusAreaEnd = Math.min( - idx === statusChanges.length - 1 - ? moment(incidentEnd).valueOf() - : moment(statusChanges[idx + 1].dateCreated).valueOf() - - timeWindowMs, - lastPoint - ); - const statusAreaColor = - activity.value === `${IncidentStatus.CRITICAL}` - ? theme.red300 - : theme.yellow300; - if (statusAreaEnd > statusAreaStart) { - series.push( - createStatusAreaSeries( - statusAreaColor, - statusAreaStart, - statusAreaEnd - ) - ); - if (statusAreaColor === theme.yellow300) { - warningDuration += Math.abs(statusAreaEnd - statusAreaStart); - } else { - criticalDuration += Math.abs(statusAreaEnd - statusAreaStart); - } - } - }); - - if (selectedIncident && incident.id === selectedIncident.id) { - const selectedIncidentColor = - incidentColor === theme.yellow300 ? 
theme.yellow100 : theme.red100; - - areaSeries.push({ - type: 'line', - markArea: MarkArea({ - silent: true, - itemStyle: { - color: color(selectedIncidentColor).alpha(0.42).rgb().string(), - }, - data: [ - [{xAxis: incidentStartDate}, {xAxis: incidentCloseDate}], - ] as any, - }), - data: [], - }); - } - }); - } - - let maxThresholdValue = 0; - if (warningTrigger?.alertThreshold) { - const {alertThreshold} = warningTrigger; - const warningThresholdLine = createThresholdSeries( - theme.yellow300, - alertThreshold - ); - series.push(warningThresholdLine); - maxThresholdValue = Math.max(maxThresholdValue, alertThreshold); - } - - if (criticalTrigger?.alertThreshold) { - const {alertThreshold} = criticalTrigger; - const criticalThresholdLine = createThresholdSeries( - theme.red300, - alertThreshold - ); - series.push(criticalThresholdLine); - maxThresholdValue = Math.max(maxThresholdValue, alertThreshold); - } - - return ( - - - - - {AlertWizardAlertNames[getAlertTypeFromAggregateDataset(rule)]} - - {filter} - - {this.renderChart( - timeseriesData, - series, - areaSeries, - maxThresholdValue, - maxSeriesValue - )} - - {this.renderChartActions(totalDuration, criticalDuration, warningDuration)} - - ); - }} + {({loading, timeseriesData}) => this.renderChart(loading, timeseriesData)} ); } diff --git a/static/app/views/alerts/rules/row.tsx b/static/app/views/alerts/rules/row.tsx index 2c1a40faaddc9f..e5de41102f2bf7 100644 --- a/static/app/views/alerts/rules/row.tsx +++ b/static/app/views/alerts/rules/row.tsx @@ -22,7 +22,7 @@ import overflowEllipsis from 'app/styles/overflowEllipsis'; import space from 'app/styles/space'; import {Actor, Organization, Project} from 'app/types'; import getDynamicText from 'app/utils/getDynamicText'; -import {Color} from 'app/utils/theme'; +import type {Color} from 'app/utils/theme'; import {AlertRuleThresholdType} from 'app/views/alerts/incidentRules/types'; import AlertBadge from '../alertBadge'; @@ -95,16 +95,16 @@ class RuleListRow extends React.Component { } const activeIncident = this.activeIncident(); - const criticalTrigger = rule?.triggers.find(({label}) => label === 'critical'); - const warningTrigger = rule?.triggers.find(({label}) => label === 'warning'); - const resolvedTrigger = rule?.resolveThreshold; + const criticalTrigger = rule.triggers.find(({label}) => label === 'critical'); + const warningTrigger = rule.triggers.find(({label}) => label === 'warning'); + const resolvedTrigger = rule.resolveThreshold; const trigger = activeIncident && rule.latestIncident?.status === IncidentStatus.CRITICAL ? criticalTrigger : warningTrigger ?? criticalTrigger; let iconColor: Color = 'green300'; - let iconDirection; + let iconDirection: 'up' | 'down' | undefined; let thresholdTypeText = activeIncident && rule.thresholdType === AlertRuleThresholdType.ABOVE ? t('Above') @@ -119,15 +119,10 @@ class RuleListRow extends React.Component { : 'green300'; iconDirection = rule.thresholdType === AlertRuleThresholdType.ABOVE ? 'up' : 'down'; } else { - if (!rule?.latestIncident) { - // If there's no latest incident, use the Resolved threshold type, which is opposite of Critical - iconColor = - rule.thresholdType === AlertRuleThresholdType.ABOVE ? 'green300' : 'red300'; - iconDirection = - rule.thresholdType === AlertRuleThresholdType.ABOVE ? 'down' : 'up'; - thresholdTypeText = - rule.thresholdType === AlertRuleThresholdType.ABOVE ? 
t('Below') : t('Above'); - } + // Use the Resolved threshold type, which is opposite of Critical + iconDirection = rule.thresholdType === AlertRuleThresholdType.ABOVE ? 'down' : 'up'; + thresholdTypeText = + rule.thresholdType === AlertRuleThresholdType.ABOVE ? t('Below') : t('Above'); } return ( diff --git a/static/app/views/alerts/utils/index.tsx b/static/app/views/alerts/utils/index.tsx index 1054486efdc8b2..b95bc488c57062 100644 --- a/static/app/views/alerts/utils/index.tsx +++ b/static/app/views/alerts/utils/index.tsx @@ -1,8 +1,12 @@ +import round from 'lodash/round'; + import {Client} from 'app/api'; import {t} from 'app/locale'; -import {NewQuery, Project} from 'app/types'; +import {NewQuery, Project, SessionField} from 'app/types'; import {IssueAlertRule} from 'app/types/alerts'; +import {defined} from 'app/utils'; import {getUtcDateString} from 'app/utils/dates'; +import {axisLabelFormatter, tooltipFormatter} from 'app/utils/discover/charts'; import EventView from 'app/utils/discover/eventView'; import {getAggregateAlias} from 'app/utils/discover/fields'; import {PRESET_AGGREGATES} from 'app/views/alerts/incidentRules/presets'; @@ -274,3 +278,28 @@ export function getQueryDatasource( export function isSessionAggregate(aggregate: string) { return Object.values(SessionsAggregate).includes(aggregate as SessionsAggregate); } + +export const SESSION_AGGREGATE_TO_FIELD = { + [SessionsAggregate.CRASH_FREE_SESSIONS]: SessionField.SESSIONS, + [SessionsAggregate.CRASH_FREE_USERS]: SessionField.USERS, +}; + +export function alertAxisFormatter(value: number, seriesName: string, aggregate: string) { + if (isSessionAggregate(aggregate)) { + return defined(value) ? `${round(value, 2)}%` : '\u2015'; + } + + return axisLabelFormatter(value, seriesName); +} + +export function alertTooltipValueFormatter( + value: number, + seriesName: string, + aggregate: string +) { + if (isSessionAggregate(aggregate)) { + return defined(value) ? 
`${value}%` : '\u2015'; + } + + return tooltipFormatter(value, seriesName); +} diff --git a/static/app/views/dashboardsV2/dashboard.tsx b/static/app/views/dashboardsV2/dashboard.tsx index a385a95a03c563..8390c9cd4cad61 100644 --- a/static/app/views/dashboardsV2/dashboard.tsx +++ b/static/app/views/dashboardsV2/dashboard.tsx @@ -18,7 +18,7 @@ import withGlobalSelection from 'app/utils/withGlobalSelection'; import {DataSet} from './widget/utils'; import AddWidget, {ADD_WIDGET_BUTTON_DRAG_ID} from './addWidget'; import SortableWidget from './sortableWidget'; -import {DashboardDetails, Widget} from './types'; +import {DashboardDetails, MAX_WIDGETS, Widget} from './types'; type Props = { api: Client; @@ -225,7 +225,7 @@ class Dashboard extends Component { {widgets.map((widget, index) => this.renderWidget(widget, index))} - {isEditing && ( + {isEditing && widgets.length < MAX_WIDGETS && ( []; onDisplayChange: (value: string) => void; + onTopEventsChange: (value: string) => void; + topEvents: string; }; export default function ChartFooter({ @@ -32,6 +35,8 @@ export default function ChartFooter({ displayMode, displayOptions, onDisplayChange, + onTopEventsChange, + topEvents, }: Props) { const elements: React.ReactNode[] = []; @@ -45,6 +50,10 @@ export default function ChartFooter({ {total.toLocaleString()} ) ); + const topEventOptions: SelectValue[] = []; + for (let i = 1; i <= 10; i++) { + topEventOptions.push({value: i.toString(), label: i.toString()}); + } return ( @@ -57,6 +66,23 @@ export default function ChartFooter({ onChange={onDisplayChange} menuWidth="170px" /> + + {({hasFeature}) => { + if (hasFeature && TOP_EVENT_MODES.includes(displayMode)) { + return ( + + ); + } else { + return null; + } + }} + { @@ -45,7 +46,7 @@ class MiniGraph extends React.Component { getRefreshProps(props: Props) { // get props that are relevant to the API payload for the graph - const {organization, location, eventView} = props; + const {organization, location, eventView, yAxis} = props; const apiPayload = eventView.getEventsAPIPayload(location); @@ -74,7 +75,7 @@ class MiniGraph extends React.Component { interval, project: eventView.project, environment: eventView.environment, - yAxis: eventView.getYAxis(), + yAxis: yAxis ?? eventView.getYAxis(), field, topEvents, orderby, @@ -86,8 +87,6 @@ class MiniGraph extends React.Component { getChartType({ showDaily, - yAxis, - timeseriesData, }: { showDaily: boolean; yAxis: string; @@ -96,16 +95,6 @@ class MiniGraph extends React.Component { if (showDaily) { return 'bar'; } - if (timeseriesData.length > 1) { - switch (aggregateMultiPlotType(yAxis)) { - case 'line': - return 'line'; - case 'area': - return 'area'; - default: - throw new Error(`Unknown multi plot type for ${yAxis}`); - } - } return 'area'; } @@ -184,7 +173,7 @@ class MiniGraph extends React.Component { const allSeries = timeseriesData ?? results ?? []; const chartType = this.getChartType({ showDaily, - yAxis, + yAxis: Array.isArray(yAxis) ? yAxis[0] : yAxis, timeseriesData: allSeries, }); const data = allSeries.map(series => ({ @@ -216,7 +205,12 @@ class MiniGraph extends React.Component { color: theme.chartLabel, fontFamily: theme.text.family, fontSize: 12, - formatter: (value: number) => axisLabelFormatter(value, yAxis, true), + formatter: (value: number) => + axisLabelFormatter( + value, + Array.isArray(yAxis) ? 
yAxis[0] : yAxis, + true + ), inside: true, showMinLabel: false, showMaxLabel: false, @@ -240,7 +234,9 @@ class MiniGraph extends React.Component { bottom: 0, containLabel: false, }, - stacked: typeof topEvents === 'number' && topEvents > 0, + stacked: + (typeof topEvents === 'number' && topEvents > 0) || + (Array.isArray(yAxis) && yAxis.length > 1), }; const Component = this.getChartComponent(chartType); diff --git a/static/app/views/eventsV2/queryList.tsx b/static/app/views/eventsV2/queryList.tsx index dbff4a6cb661c2..909673e56af97b 100644 --- a/static/app/views/eventsV2/queryList.tsx +++ b/static/app/views/eventsV2/queryList.tsx @@ -24,6 +24,7 @@ import EventView from 'app/utils/discover/eventView'; import parseLinkHeader from 'app/utils/parseLinkHeader'; import {decodeList} from 'app/utils/queryString'; import withApi from 'app/utils/withApi'; +import {WidgetQuery} from 'app/views/dashboardsV2/types'; import {handleCreateQuery, handleDeleteQuery} from './savedQuery/utils'; import MiniGraph from './miniGraph'; @@ -93,14 +94,25 @@ class QueryList extends React.Component { const {organization} = this.props; event.preventDefault(); event.stopPropagation(); + + const defaultWidgetQuery: WidgetQuery = { + name: '', + fields: savedQuery?.yAxis ?? ['count()'], + conditions: eventView.query, + orderby: '', + }; + openAddDashboardWidgetModal({ organization, - defaultQuery: eventView.query, start: eventView.start, end: eventView.end, statsPeriod: eventView.statsPeriod, fromDiscover: true, - defaultTitle: savedQuery?.name ?? eventView.name, + defaultWidgetQuery, + defaultTableColumns: eventView.fields.map(({field}) => field), + defaultTitle: + savedQuery?.name ?? + (eventView.name !== 'All Events' ? eventView.name : undefined), }); }; @@ -244,12 +256,20 @@ class QueryList extends React.Component { }); }} renderGraph={() => ( - + features={['connect-discover-and-dashboards']} + > + {({hasFeature}) => ( + + )} + )} renderContextMenu={() => ( diff --git a/static/app/views/eventsV2/results.tsx b/static/app/views/eventsV2/results.tsx index 552f1a5f72f65b..83cfee05cb7821 100644 --- a/static/app/views/eventsV2/results.tsx +++ b/static/app/views/eventsV2/results.tsx @@ -363,6 +363,25 @@ class Results extends React.Component { } }; + handleTopEventsChange = (value: string) => { + const {router, location} = this.props; + + const newQuery = { + ...location.query, + topEvents: value, + }; + + router.push({ + pathname: location.pathname, + query: newQuery, + }); + + // Treat display changing like the user already confirmed the query + if (!this.state.needConfirmation) { + this.handleConfirmed(); + } + }; + getDocumentTitle(): string { const {organization} = this.props; const {eventView} = this.state; @@ -490,6 +509,7 @@ class Results extends React.Component { location={location} onAxisChange={this.handleYAxisChange} onDisplayChange={this.handleDisplayChange} + onTopEventsChange={this.handleTopEventsChange} total={totalValues} confirmedQuery={confirmedQuery} yAxis={yAxisArray} diff --git a/static/app/views/eventsV2/resultsChart.tsx b/static/app/views/eventsV2/resultsChart.tsx index d86821024d3738..53de9a3cce6d6b 100644 --- a/static/app/views/eventsV2/resultsChart.tsx +++ b/static/app/views/eventsV2/resultsChart.tsx @@ -53,6 +53,7 @@ class ResultsChart extends Component { const hasConnectDiscoverAndDashboards = organization.features.includes( 'connect-discover-and-dashboards' ); + const hasTopEvents = organization.features.includes('discover-top-events'); const globalSelection = eventView.getGlobalSelection(); 
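MiniGraph above now accepts yAxis as either a single axis or a list, and several call sites reduce it to the first entry before formatting. A hypothetical helper (not part of this patch) that makes the repeated inline normalization explicit:

// Hypothetical convenience only: collapse a single-or-list yAxis to the
// first axis, which is what the axis label formatter and the chart-type
// heuristic operate on.
function firstYAxis(yAxis: string | string[]): string {
  return Array.isArray(yAxis) ? yAxis[0] : yAxis;
}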
const start = globalSelection.datetime.start @@ -72,6 +73,8 @@ class ResultsChart extends Component { const isDaily = display === DisplayModes.DAILYTOP5 || display === DisplayModes.DAILY; const isPrevious = display === DisplayModes.PREVIOUS; const referrer = `api.discover.${display}-chart`; + const topEvents = + hasTopEvents && eventView.topEvents ? parseInt(eventView.topEvents, 10) : TOP_N; return ( @@ -94,7 +97,7 @@ class ResultsChart extends Component { field={isTopEvents ? apiPayload.field : undefined} interval={eventView.interval} showDaily={isDaily} - topEvents={isTopEvents ? TOP_N : undefined} + topEvents={isTopEvents ? topEvents : undefined} orderby={isTopEvents ? decodeScalar(apiPayload.sort) : undefined} utc={utc === 'true'} confirmedQuery={confirmedQuery} @@ -125,6 +128,7 @@ type ContainerProps = { total: number | null; onAxisChange: (value: string[]) => void; onDisplayChange: (value: string) => void; + onTopEventsChange: (value: string) => void; }; class ResultsChartContainer extends Component { @@ -151,6 +155,7 @@ class ResultsChartContainer extends Component { total, onAxisChange, onDisplayChange, + onTopEventsChange, organization, confirmedQuery, yAxis, @@ -160,6 +165,7 @@ class ResultsChartContainer extends Component { const hasConnectDiscoverAndDashboards = organization.features.includes( 'connect-discover-and-dashboards' ); + const hasTopEvents = organization.features.includes('discover-top-events'); const displayOptions = eventView .getDisplayOptions() .filter(opt => { @@ -190,6 +196,18 @@ class ResultsChartContainer extends Component { ), }; } + if (hasTopEvents && DisplayModes.TOP5 === opt.value) { + return { + value: opt.value, + label: 'Top Period', + }; + } + if (hasTopEvents && DisplayModes.DAILYTOP5 === opt.value) { + return { + value: opt.value, + label: 'Top Daily', + }; + } return opt; }); @@ -217,6 +235,8 @@ class ResultsChartContainer extends Component { displayOptions={displayOptions} displayMode={eventView.getDisplayMode()} onDisplayChange={onDisplayChange} + onTopEventsChange={onTopEventsChange} + topEvents={eventView.topEvents ?? TOP_N.toString()} /> ); diff --git a/static/app/views/eventsV2/savedQuery/index.tsx b/static/app/views/eventsV2/savedQuery/index.tsx index fa40d53ccb5b2c..5aac4bffac2260 100644 --- a/static/app/views/eventsV2/savedQuery/index.tsx +++ b/static/app/views/eventsV2/savedQuery/index.tsx @@ -25,6 +25,7 @@ import EventView from 'app/utils/discover/eventView'; import {getDiscoverLandingUrl} from 'app/utils/discover/urls'; import withApi from 'app/utils/withApi'; import withProjects from 'app/utils/withProjects'; +import {WidgetQuery} from 'app/views/dashboardsV2/types'; import InputControl from 'app/views/settings/components/forms/controls/input'; import DiscoverQueryMenu from './discoverQueryMenu'; @@ -224,13 +225,22 @@ class SavedQueryButtonGroup extends React.PureComponent { }; handleAddDashboardWidget = () => { - const {organization, eventView, savedQuery} = this.props; + const {organization, eventView, savedQuery, yAxis} = this.props; + const defaultWidgetQuery: WidgetQuery = { + name: '', + fields: yAxis ?? ['count()'], + conditions: eventView.query, + orderby: '', + }; + openAddDashboardWidgetModal({ organization, - defaultQuery: eventView.query, - defaultTitle: - savedQuery?.name ?? eventView.name !== 'All Events' ? eventView.name : undefined, fromDiscover: true, + defaultWidgetQuery, + defaultTableColumns: eventView.fields.map(({field}) => field), + defaultTitle: + savedQuery?.name ?? + (eventView.name !== 'All Events' ? 
eventView.name : undefined), }); }; diff --git a/static/app/views/eventsV2/table/tableView.tsx b/static/app/views/eventsV2/table/tableView.tsx index 19cb010fce02b9..6479d52d07fbad 100644 --- a/static/app/views/eventsV2/table/tableView.tsx +++ b/static/app/views/eventsV2/table/tableView.tsx @@ -244,7 +244,8 @@ class TableView extends React.Component { const isTopEvents = display === DisplayModes.TOP5 || display === DisplayModes.DAILYTOP5; - const count = Math.min(tableData?.data?.length ?? TOP_N, TOP_N); + const topEvents = eventView.topEvents ? parseInt(eventView.topEvents, 10) : TOP_N; + const count = Math.min(tableData?.data?.length ?? topEvents, topEvents); let cell = fieldRenderer(dataRow, {organization, location}); @@ -310,7 +311,7 @@ class TableView extends React.Component { return ( - {isFirstPage && isTopEvents && rowIndex < TOP_N && columnIndex === 0 ? ( + {isFirstPage && isTopEvents && rowIndex < topEvents && columnIndex === 0 ? ( // Add one if we need to include Other in the series ) : null} @@ -358,7 +359,7 @@ class TableView extends React.Component { handleCellAction = (dataRow: TableDataRow, column: TableColumn) => { return (action: Actions, value: React.ReactText) => { - const {eventView, organization, projects} = this.props; + const {eventView, organization, projects, location} = this.props; const query = new MutableSearch(eventView.query); @@ -430,7 +431,10 @@ class TableView extends React.Component { } nextView.query = query.formatString(); - browserHistory.push(nextView.getResultsViewUrlTarget(organization.slug)); + const target = nextView.getResultsViewUrlTarget(organization.slug); + // Get yAxis from location + target.query.yAxis = decodeList(location.query.yAxis); + browserHistory.push(target); }; }; diff --git a/static/app/views/integrationPipeline/pipelineView.tsx b/static/app/views/integrationPipeline/pipelineView.tsx index 1bcbe8d263476b..f4f42775fbc63c 100644 --- a/static/app/views/integrationPipeline/pipelineView.tsx +++ b/static/app/views/integrationPipeline/pipelineView.tsx @@ -1,4 +1,4 @@ -import * as React from 'react'; +import {useEffect} from 'react'; import Indicators from 'app/components/indicators'; import ThemeAndStyleProvider from 'app/components/themeAndStyleProvider'; @@ -8,10 +8,6 @@ import AwsLambdaFailureDetails from './awsLambdaFailureDetails'; import AwsLambdaFunctionSelect from './awsLambdaFunctionSelect'; import AwsLambdaProjectSelect from './awsLambdaProjectSelect'; -/** - * This component is a wrapper for specific pipeline views for integrations - */ - const pipelineMapper: Record = { awsLambdaProjectSelect: [AwsLambdaProjectSelect, 'AWS Lambda Select Project'], awsLambdaFunctionSelect: [AwsLambdaFunctionSelect, 'AWS Lambda Select Lambdas'], @@ -24,27 +20,27 @@ type Props = { [key: string]: any; }; -export default class PipelineView extends React.Component { - componentDidMount() { - // update the title based on our mappings - const title = this.mapping[1]; - document.title = title; - } - get mapping() { - const {pipelineName} = this.props; - const mapping = pipelineMapper[pipelineName]; - if (!mapping) { - throw new Error(`Invalid pipeline name ${pipelineName}`); - } - return mapping; - } - render() { - const Component = this.mapping[0]; - return ( - - - - - ); +/** + * This component is a wrapper for specific pipeline views for integrations + */ +function PipelineView({pipelineName, ...props}: Props) { + const mapping = pipelineMapper[pipelineName]; + + if (!mapping) { + throw new Error(`Invalid pipeline name ${pipelineName}`); } + + 
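// pipelineMapper entries are [Component, documentTitle] tuples.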
const [Component, title] = mapping; + + // Set the page title + useEffect(() => void (document.title = title), [title]); + + return ( + + + + + ); } + +export default PipelineView; diff --git a/static/app/views/issueList/container.tsx b/static/app/views/issueList/container.tsx index c3fb2d4e32cb7c..c05e562cb218a5 100644 --- a/static/app/views/issueList/container.tsx +++ b/static/app/views/issueList/container.tsx @@ -1,31 +1,55 @@ -import {Component} from 'react'; +import React, {Component} from 'react'; import DocumentTitle from 'react-document-title'; import NoProjectMessage from 'app/components/noProjectMessage'; import GlobalSelectionHeader from 'app/components/organizations/globalSelectionHeader'; -import {Organization} from 'app/types'; +import GroupStore from 'app/stores/groupStore'; +import {Organization, Project} from 'app/types'; +import {callIfFunction} from 'app/utils/callIfFunction'; import withOrganization from 'app/utils/withOrganization'; +import SampleEventAlert from 'app/views/organizationGroupDetails/sampleEventAlert'; type Props = { organization: Organization; + projects: Project[]; }; -class IssueListContainer extends Component { - getTitle() { - return `Issues - ${this.props.organization.slug} - Sentry`; - } +type State = { + showSampleEventBanner: boolean; +}; +class IssueListContainer extends Component { + state: State = { + showSampleEventBanner: false, + }; + listener = GroupStore.listen(() => this.onGroupChange(), undefined); render() { const {organization, children} = this.props; - return ( - - {children} - + + {this.state.showSampleEventBanner && } + + {children} + + ); } + + onGroupChange() { + this.setState({ + showSampleEventBanner: GroupStore.getAllItemIds().length === 1, + }); + } + + componentWillUnmount() { + callIfFunction(this.listener); + } + + getTitle() { + return `Issues - ${this.props.organization.slug} - Sentry`; + } } export default withOrganization(IssueListContainer); export {IssueListContainer}; diff --git a/static/app/views/issueList/testSessionPercent.tsx b/static/app/views/issueList/testSessionPercent.tsx index eb3d94d6109e0b..9e3b8645f6c05d 100644 --- a/static/app/views/issueList/testSessionPercent.tsx +++ b/static/app/views/issueList/testSessionPercent.tsx @@ -7,7 +7,6 @@ import * as qs from 'query-string'; import {Client} from 'app/api'; import Feature from 'app/components/acl/feature'; import FeatureDisabled from 'app/components/acl/featureDisabled'; -import Input from 'app/components/forms/input'; import * as Layout from 'app/components/layouts/thirds'; import Link from 'app/components/links/link'; import {t, tn} from 'app/locale'; @@ -23,6 +22,7 @@ import EventView from 'app/utils/discover/eventView'; import withApi from 'app/utils/withApi'; import withGlobalSelection from 'app/utils/withGlobalSelection'; import withOrganization from 'app/utils/withOrganization'; +import Input from 'app/views/settings/components/forms/controls/input'; type Props = { selection: GlobalSelection; diff --git a/static/app/views/onboarding/createSampleEventButton.tsx b/static/app/views/onboarding/createSampleEventButton.tsx index 78f55812f324c0..a0dc168652593e 100644 --- a/static/app/views/onboarding/createSampleEventButton.tsx +++ b/static/app/views/onboarding/createSampleEventButton.tsx @@ -156,7 +156,7 @@ class CreateSampleEventButton extends React.Component { } browserHistory.push( - `/organizations/${organization.slug}/issues/${eventData.groupID}/` + `/organizations/${organization.slug}/issues/${eventData.groupID}/?project=${project.id}` ); }; diff 
--git a/static/app/views/organizationGroupDetails/groupMerged/mergedItem.tsx b/static/app/views/organizationGroupDetails/groupMerged/mergedItem.tsx index 01a43f9deba15a..471defe7f02234 100644 --- a/static/app/views/organizationGroupDetails/groupMerged/mergedItem.tsx +++ b/static/app/views/organizationGroupDetails/groupMerged/mergedItem.tsx @@ -162,7 +162,7 @@ const Controls = styled('div')<{expanded: boolean}>` display: flex; justify-content: space-between; border-top: 1px solid ${p => p.theme.innerBorder}; - background-color: ${p => p.theme.gray100}; + background-color: ${p => p.theme.backgroundSecondary}; padding: ${space(0.5)} ${space(1)}; ${p => p.expanded && `border-bottom: 1px solid ${p.theme.innerBorder}`}; @@ -193,6 +193,7 @@ const Collapse = styled('span')` const MergedEventList = styled('div')` overflow: hidden; border: none; + background-color: ${p => p.theme.background}; `; const EventDetails = styled('div')` diff --git a/static/app/views/organizationGroupDetails/groupTags.tsx b/static/app/views/organizationGroupDetails/groupTags.tsx index ca0c7715681b1c..f8dd3e85c243ad 100644 --- a/static/app/views/organizationGroupDetails/groupTags.tsx +++ b/static/app/views/organizationGroupDetails/groupTags.tsx @@ -1,14 +1,16 @@ import * as React from 'react'; +import {RouteComponentProps} from 'react-router'; import styled from '@emotion/styled'; import isEqual from 'lodash/isEqual'; -import {Client} from 'app/api'; import Alert from 'app/components/alert'; +import AsyncComponent from 'app/components/asyncComponent'; +import Button from 'app/components/button'; import Count from 'app/components/count'; import DeviceName from 'app/components/deviceName'; import GlobalSelectionLink from 'app/components/globalSelectionLink'; -import LoadingError from 'app/components/loadingError'; -import LoadingIndicator from 'app/components/loadingIndicator'; +import ExternalLink from 'app/components/links/externalLink'; +import {extractSelectionParameters} from 'app/components/organizations/globalSelectionHeader/utils'; import {Panel, PanelBody, PanelHeader} from 'app/components/panels'; import Version from 'app/components/version'; import {t, tct} from 'app/locale'; @@ -16,144 +18,115 @@ import overflowEllipsis from 'app/styles/overflowEllipsis'; import space from 'app/styles/space'; import {Group, TagWithTopValues} from 'app/types'; import {percent} from 'app/utils'; -import withApi from 'app/utils/withApi'; -type Props = { +type Props = AsyncComponent['props'] & { baseUrl: string; group: Group; - api: Client; environments: string[]; -}; +} & RouteComponentProps<{}, {}>; -type State = { +type State = AsyncComponent['state'] & { tagList: null | TagWithTopValues[]; - loading: boolean; - error: boolean; }; -class GroupTags extends React.Component { - state: State = { - tagList: null, - loading: true, - error: false, - }; +class GroupTags extends AsyncComponent { + getDefaultState(): State { + return { + ...super.getDefaultState(), + tagList: null, + }; + } - componentDidMount() { - this.fetchData(); + getEndpoints(): ReturnType { + const {group, environments} = this.props; + return [ + [ + 'tagList', + `/issues/${group.id}/tags/`, + { + query: {environment: environments}, + }, + ], + ]; } componentDidUpdate(prevProps: Props) { if (!isEqual(prevProps.environments, this.props.environments)) { - this.fetchData(); + this.remountComponent(); } } - fetchData = () => { - const {api, group, environments} = this.props; - this.setState({ - loading: true, - error: false, - }); - - 
api.request(`/issues/${group.id}/tags/`, { - query: {environment: environments}, - success: data => { - this.setState({ - tagList: data, - error: false, - loading: false, - }); - }, - error: () => { - this.setState({ - error: true, - loading: false, - }); - }, - }); - }; - - getTagsDocsUrl() { - return 'https://docs.sentry.io/platform-redirect/?next=/enriching-events/tags'; - } - - render() { - const {baseUrl} = this.props; - - let children: React.ReactNode[] = []; + renderTags() { + const {baseUrl, location} = this.props; + const {tagList} = this.state; - if (this.state.loading) { - return ; - } else if (this.state.error) { - return ; - } + const alphabeticalTags = (tagList ?? []).sort((a, b) => a.key.localeCompare(b.key)); - if (this.state.tagList) { - children = this.state.tagList.map((tag, tagIdx) => { - const valueChildren = tag.topValues.map((tagValue, tagValueIdx) => { - let label: React.ReactNode = null; - const pct = percent(tagValue.count, tag.totalValues); - const query = tagValue.query || `${tag.key}:"${tagValue.value}"`; - - switch (tag.key) { - case 'release': - label = ; - break; - default: - label = ; - } - - return ( -
- - - {label} - - - - -
- ); - }); - - return (
    {tag.key}
    - - - {t('More Details')} - - -
    + + {tag.key} + + -
      - {valueChildren} -
    + + {tag.topValues.map((tagValue, tagValueIdx) => ( +
+ + + + {tag.key === 'release' ? ( + + ) : ( + + )} + + + + + +
+ ))} +
    - ); - }); - } + ))} +
    + ); + } + renderBody() { return (
    - {children} + {this.renderTags()} {tct( 'Tags are automatically indexed for searching and breakdown charts. Learn how to [link: add custom tags to issues]', { - link: , + link: ( + + ), } )} @@ -162,27 +135,39 @@ class GroupTags extends React.Component { } } -const DetailsLinkWrapper = styled('div')` - display: flex; -`; - const Container = styled('div')` display: flex; flex-wrap: wrap; `; +const StyledPanelHeader = styled(PanelHeader)` + text-transform: none; +`; + +const TagHeading = styled('h5')` + font-size: ${p => p.theme.fontSizeLarge}; + margin-bottom: 0; +`; + +const UnstyledUnorderedList = styled('ul')` + list-style: none; + padding-left: 0; + margin-bottom: 0; +`; + const TagItem = styled('div')` padding: 0 ${space(1)}; width: 50%; `; -const TagBarBackground = styled('div')` +const TagBarBackground = styled('div')<{widthPercent: string}>` position: absolute; top: 0; bottom: 0; left: 0; background: ${p => p.theme.tagBar}; border-radius: ${p => p.theme.borderRadius}; + width: ${p => p.widthPercent}; `; const TagBarGlobalSelectionLink = styled(GlobalSelectionLink)` @@ -217,4 +202,4 @@ const TagBarCount = styled('div')` font-variant-numeric: tabular-nums; `; -export default withApi(GroupTags); +export default GroupTags; diff --git a/static/app/views/organizationGroupDetails/index.tsx b/static/app/views/organizationGroupDetails/index.tsx index 5b2ea3af806a51..92e632e49abdc9 100644 --- a/static/app/views/organizationGroupDetails/index.tsx +++ b/static/app/views/organizationGroupDetails/index.tsx @@ -8,6 +8,7 @@ import withOrganization from 'app/utils/withOrganization'; import withProjects from 'app/utils/withProjects'; import GroupDetails from './groupDetails'; +import SampleEventAlert from './sampleEventAlert'; type Props = { selection: GlobalSelection; @@ -27,13 +28,16 @@ class OrganizationGroupDetails extends React.Component { render() { const {selection, ...props} = this.props; - return ( - + + + + + ); } } diff --git a/static/app/views/organizationGroupDetails/sampleEventAlert.tsx b/static/app/views/organizationGroupDetails/sampleEventAlert.tsx new file mode 100644 index 00000000000000..bd1f9671565c8a --- /dev/null +++ b/static/app/views/organizationGroupDetails/sampleEventAlert.tsx @@ -0,0 +1,65 @@ +import styled from '@emotion/styled'; + +import Button from 'app/components/button'; +import PageAlertBar from 'app/components/pageAlertBar'; +import {IconLightning} from 'app/icons'; +import {t} from 'app/locale'; +import space from 'app/styles/space'; +import {GlobalSelection, Organization, Project} from 'app/types'; +import trackAdvancedAnalyticsEvent from 'app/utils/analytics/trackAdvancedAnalyticsEvent'; +import withGlobalSelection from 'app/utils/withGlobalSelection'; +import withOrganization from 'app/utils/withOrganization'; +import withProjects from 'app/utils/withProjects'; + +function SampleEventAlert({ + selection, + organization, + projects, +}: { + selection: GlobalSelection; + organization: Organization; + projects: Project[]; +}) { + if (projects.length === 0) { + return null; + } + if (selection.projects.length !== 1) { + return null; + } + const selectedProject = projects.find(p => p.id === selection.projects[0].toString()); + if (!selectedProject || selectedProject.firstEvent) { + return null; + } + return ( + + + + {t( + 'You are viewing a sample error. Configure Sentry to start viewing real errors.' 
+ )} + + + + ); +} + +export default withProjects(withOrganization(withGlobalSelection(SampleEventAlert))); + +const TextWrapper = styled('span')` + margin: 0 ${space(1)}; +`; diff --git a/static/app/views/organizationIntegrations/sentryAppExternalForm.tsx b/static/app/views/organizationIntegrations/sentryAppExternalForm.tsx index 8ded31d66203e8..139c51af42c149 100644 --- a/static/app/views/organizationIntegrations/sentryAppExternalForm.tsx +++ b/static/app/views/organizationIntegrations/sentryAppExternalForm.tsx @@ -114,15 +114,6 @@ export class SentryAppExternalForm extends Component { this.debouncedOptionLoad(field, input, resolve); }); - getSubmitEndpoint() { - const {sentryAppInstallationUuid, element} = this.props; - if (element === 'alert-rule-action') { - // TODO(leander): Send request to the correct endpoint - return '/404/'; - } - return `/sentry-app-installations/${sentryAppInstallationUuid}/external-issue-actions/`; - } - getElementText = () => { const {element} = this.props; switch (element) { diff --git a/static/app/views/performance/table.tsx b/static/app/views/performance/table.tsx index f802da9b1a5aa7..09f8317f4ed3ce 100644 --- a/static/app/views/performance/table.tsx +++ b/static/app/views/performance/table.tsx @@ -194,11 +194,6 @@ class _Table extends React.Component { ); } - if (field.startsWith('key_transaction')) { - // don't display per cell actions for key_transaction - return rendered; - } - if (field.startsWith('team_key_transaction')) { // don't display per cell actions for team_key_transaction return rendered; @@ -311,28 +306,11 @@ class _Table extends React.Component { renderPrependCellWithData = (tableData: TableData | null) => { const {eventView} = this.props; - const keyTransactionColumn = eventView - .getColumns() - .find((col: TableColumn) => col.name === 'key_transaction'); const teamKeyTransactionColumn = eventView .getColumns() .find((col: TableColumn) => col.name === 'team_key_transaction'); return (isHeader: boolean, dataRow?: any) => { - if (keyTransactionColumn) { - if (isHeader) { - const star = ( - - ); - return [this.renderHeadCell(tableData?.meta, keyTransactionColumn, star)]; - } else { - return [this.renderBodyCell(tableData, keyTransactionColumn, dataRow)]; - } - } else if (teamKeyTransactionColumn) { + if (teamKeyTransactionColumn) { if (isHeader) { const star = ( @@ -387,11 +365,10 @@ class _Table extends React.Component { this.state; const columnOrder = eventView .getColumns() - // remove key_transactions from the column order as we'll be rendering it + // remove team_key_transactions from the column order as we'll be rendering it // via a prepended column .filter( (col: TableColumn) => - col.name !== 'key_transaction' && col.name !== 'team_key_transaction' && !col.name.startsWith('count_miserable') && col.name !== 'project_threshold_config' diff --git a/static/app/views/performance/transactionDetails/finishSetupAlert.tsx b/static/app/views/performance/transactionDetails/finishSetupAlert.tsx index 092e33d78313bf..011c06762586be 100644 --- a/static/app/views/performance/transactionDetails/finishSetupAlert.tsx +++ b/static/app/views/performance/transactionDetails/finishSetupAlert.tsx @@ -1,6 +1,7 @@ import styled from '@emotion/styled'; import Button from 'app/components/button'; +import PageAlertBar from 'app/components/pageAlertBar'; import {IconLightning} from 'app/icons'; import {t} from 'app/locale'; import space from 'app/styles/space'; @@ -15,7 +16,7 @@ export default function FinishSetupAlert({ project: Project; }) { return ( - + {t( 
@@ -37,20 +38,10 @@ export default function FinishSetupAlert({ > {t('Get Started')} - + ); } -const AlertBar = styled('div')` - display: flex; - align-items: center; - justify-content: center; - color: ${p => p.theme.headerBackground}; - background-color: ${p => p.theme.bannerBackground}; - padding: 6px 30px; - font-size: 14px; -`; - const TextWrapper = styled('span')` margin: 0 ${space(1)}; `; diff --git a/static/app/views/performance/vitalDetail/table.tsx b/static/app/views/performance/vitalDetail/table.tsx index d775cfe59e680f..ba2537202b5e4c 100644 --- a/static/app/views/performance/vitalDetail/table.tsx +++ b/static/app/views/performance/vitalDetail/table.tsx @@ -203,10 +203,6 @@ class Table extends React.Component { ); } - if (field.startsWith('key_transaction')) { - return rendered; - } - if (field.startsWith('team_key_transaction')) { return rendered; } @@ -273,30 +269,11 @@ class Table extends React.Component { renderPrependCellWithData = (tableData: TableData | null, vitalName: WebVital) => { const {eventView} = this.props; - const keyTransactionColumn = eventView - .getColumns() - .find((col: TableColumn) => col.name === 'key_transaction'); const teamKeyTransactionColumn = eventView .getColumns() .find((col: TableColumn) => col.name === 'team_key_transaction'); return (isHeader: boolean, dataRow?: any) => { - if (keyTransactionColumn) { - if (isHeader) { - const star = ( - - ); - return [this.renderHeadCell(tableData?.meta, keyTransactionColumn, star)]; - } else { - return [ - this.renderBodyCell(tableData, keyTransactionColumn, dataRow, vitalName), - ]; - } - } else if (teamKeyTransactionColumn) { + if (teamKeyTransactionColumn) { if (isHeader) { const star = ( { .getColumns() // remove key_transactions from the column order as we'll be rendering it // via a prepended column - .filter( - (col: TableColumn) => - col.name !== 'key_transaction' && col.name !== 'team_key_transaction' - ) + .filter((col: TableColumn) => col.name !== 'team_key_transaction') .slice(0, -1) .map((col: TableColumn, i: number) => { if (typeof widths[i] === 'number') { diff --git a/static/app/views/settings/account/accountNotificationFineTuning.tsx b/static/app/views/settings/account/accountNotificationFineTuning.tsx index 980c58d6653de3..70ec84c40609c6 100644 --- a/static/app/views/settings/account/accountNotificationFineTuning.tsx +++ b/static/app/views/settings/account/accountNotificationFineTuning.tsx @@ -65,7 +65,7 @@ const AccountNotificationsByProject = ({projects, field}: ANBPProps) => { @@ -101,7 +101,7 @@ const AccountNotificationsByOrganization = ({organizations, field}: ANBOProps) = @@ -183,7 +183,7 @@ class AccountNotificationFineTuning extends AsyncView { if (fineTuneType === 'email') { // Fetch verified email addresses - field.choices = this.emailChoices.map(({email}) => [email, email]); + field.options = this.emailChoices.map(({email}) => ({value: email, label: email})); } if (!notifications || !fineTuneData) { diff --git a/static/app/views/settings/account/notifications/fields.tsx b/static/app/views/settings/account/notifications/fields.tsx index 77a2232dd7f086..1d118a59425f4b 100644 --- a/static/app/views/settings/account/notifications/fields.tsx +++ b/static/app/views/settings/account/notifications/fields.tsx @@ -1,10 +1,11 @@ import {t} from 'app/locale'; +import {SelectValue} from 'app/types'; export type FineTuneField = { title: string; description: string; type: 'select'; - choices?: string[][]; + options?: SelectValue[]; defaultValue?: string; defaultFieldName?: string; }; @@ -16,10 
+17,10 @@ export const ACCOUNT_NOTIFICATION_FIELDS: Record = { 'Notifications from Alert Rules that your team has setup. You’ll always receive notifications from Alerts configured to be sent directly to you.' ), type: 'select', - choices: [ - ['-1', t('Default')], - ['1', t('On')], - ['0', t('Off')], + options: [ + {value: '-1', label: t('Default')}, + {value: '1', label: t('On')}, + {value: '0', label: t('Off')}, ], defaultValue: '-1', defaultFieldName: 'subscribeByDefault', @@ -30,11 +31,11 @@ export const ACCOUNT_NOTIFICATION_FIELDS: Record = { 'Control workflow notifications, e.g. changes in issue assignment, resolution status, and comments.' ), type: 'select', - choices: [ - ['-1', t('Default')], - ['0', t('Always')], - ['1', t('Only on issues I subscribe to')], - ['2', t('Never')], + options: [ + {value: '-1', label: t('Default')}, + {value: '0', label: t('Always')}, + {value: '1', label: t('Only on issues I subscribe to')}, + {value: '2', label: t('Never')}, ], defaultValue: '-1', defaultFieldName: 'workflowNotifications', @@ -45,11 +46,11 @@ export const ACCOUNT_NOTIFICATION_FIELDS: Record = { 'Control deploy notifications that include release, environment, and commit overviews.' ), type: 'select', - choices: [ - ['-1', t('Default')], - ['2', t('Always')], - ['3', t('Only on deploys with my commits')], - ['4', t('Never')], + options: [ + {value: '-1', label: t('Default')}, + {value: '2', label: t('Always')}, + {value: '3', label: t('Only on deploys with my commits')}, + {value: '4', label: t('Never')}, ], defaultValue: '-1', defaultFieldName: 'deployNotifications', @@ -62,9 +63,9 @@ export const ACCOUNT_NOTIFICATION_FIELDS: Record = { type: 'select', // API only saves organizations that have this disabled, so we should default to "On" defaultValue: '1', - choices: [ - ['1', t('On')], - ['0', t('Off')], + options: [ + {value: '1', label: t('On')}, + {value: '0', label: t('Off')}, ], defaultFieldName: 'weeklyReports', }, diff --git a/static/app/views/teamInsights/filter.tsx b/static/app/views/teamInsights/filter.tsx deleted file mode 100644 index 69a844e78495ab..00000000000000 --- a/static/app/views/teamInsights/filter.tsx +++ /dev/null @@ -1,162 +0,0 @@ -import {Component, Fragment} from 'react'; -import styled from '@emotion/styled'; - -import DropdownButton from 'app/components/dropdownButton'; -import DropdownControl, {Content} from 'app/components/dropdownControl'; -import {t} from 'app/locale'; -import overflowEllipsis from 'app/styles/overflowEllipsis'; -import space from 'app/styles/space'; - -type DropdownButtonProps = React.ComponentProps; - -type DropdownSection = { - id: string; - label: string; - items: Array<{label: string; value: string; checked: boolean; filtered: boolean}>; -}; - -type SectionProps = DropdownSection & { - toggleFilter: (value: string) => void; -}; - -function FilterSection({label, items, toggleFilter}: SectionProps) { - return ( - -
    - {label} -
    - {items - .filter(item => !item.filtered) - .map(item => ( - { - toggleFilter(item.value); - }} - > - {item.label} - - ))} -
    - ); -} - -type Props = { - header: React.ReactElement; - onFilterChange: (selectedValue: string) => void; - dropdownSection: DropdownSection; -}; - -class Filter extends Component { - toggleFilter = (value: string) => { - const {onFilterChange} = this.props; - onFilterChange(value); - }; - - render() { - const {dropdownSection, header} = this.props; - const selected = this.props.dropdownSection.items.find(item => item.checked); - - const dropDownButtonProps: Pick & { - hasDarkBorderBottomColor: boolean; - } = { - priority: 'default', - hasDarkBorderBottomColor: false, - }; - - return ( - ( - - {t('Team: ')} - {selected?.label} - - )} - > - {({isOpen, getMenuProps}) => ( - - - {header} - - - - )} - - ); - } -} - -const MenuContent = styled(Content)` - max-height: 290px; - overflow-y: auto; -`; - -const Header = styled('div')` - display: grid; - grid-template-columns: auto min-content; - grid-column-gap: ${space(1)}; - align-items: center; - - margin: 0; - background-color: ${p => p.theme.backgroundSecondary}; - color: ${p => p.theme.gray300}; - font-weight: normal; - font-size: ${p => p.theme.fontSizeMedium}; - padding: ${space(1)} ${space(2)}; - border-bottom: 1px solid ${p => p.theme.border}; -`; - -const StyledDropdownButton = styled(DropdownButton)<{hasDarkBorderBottomColor?: boolean}>` - white-space: nowrap; - max-width: 200px; - height: 42px; - - z-index: ${p => p.theme.zIndex.dropdown}; -`; - -const List = styled('ul')` - list-style: none; - margin: 0; - padding: 0; -`; - -const ListItem = styled('li')<{isChecked?: boolean}>` - display: grid; - grid-template-columns: 1fr max-content; - grid-column-gap: ${space(1)}; - align-items: center; - padding: ${space(1)} ${space(2)}; - border-bottom: 1px solid ${p => p.theme.border}; - cursor: pointer; - :hover { - background-color: ${p => p.theme.backgroundSecondary}; - } - - &:hover span { - color: ${p => p.theme.blue300}; - text-decoration: underline; - } -`; - -const TeamName = styled('div')` - font-size: ${p => p.theme.fontSizeMedium}; - ${overflowEllipsis}; -`; - -export default Filter; diff --git a/static/app/views/teamInsights/index.tsx b/static/app/views/teamInsights/index.tsx index c31fcf788c6e68..e5a66e101ead93 100644 --- a/static/app/views/teamInsights/index.tsx +++ b/static/app/views/teamInsights/index.tsx @@ -1,6 +1,7 @@ import {cloneElement, isValidElement} from 'react'; import Feature from 'app/components/acl/feature'; +import NoProjectMessage from 'app/components/noProjectMessage'; import SentryDocumentTitle from 'app/components/sentryDocumentTitle'; import {t} from 'app/locale'; import {Organization} from 'app/types'; @@ -14,13 +15,15 @@ type Props = { function TeamInsightsContainer({children, organization}: Props) { return ( - - {children && isValidElement(children) - ? cloneElement(children, { - organization, - }) - : children} - + + + {children && isValidElement(children) + ? 
cloneElement(children, { + organization, + }) + : children} + + ); } diff --git a/static/app/views/teamInsights/overview.tsx b/static/app/views/teamInsights/overview.tsx index a12f7da68f296b..d5f3d0d95fd3db 100644 --- a/static/app/views/teamInsights/overview.tsx +++ b/static/app/views/teamInsights/overview.tsx @@ -7,6 +7,7 @@ import moment from 'moment'; import {Client} from 'app/api'; import {DateTimeObject} from 'app/components/charts/utils'; +import TeamSelector from 'app/components/forms/teamSelector'; import * as Layout from 'app/components/layouts/thirds'; import LoadingIndicator from 'app/components/loadingIndicator'; import {getParams} from 'app/components/organizations/globalSelectionHeader/getParams'; @@ -23,7 +24,6 @@ import withTeamsForUser from 'app/utils/withTeamsForUser'; import DescriptionCard from './descriptionCard'; import HeaderTabs from './headerTabs'; import TeamAlertsTriggered from './teamAlertsTriggered'; -import TeamDropdown from './teamDropdown'; import TeamMisery from './teamMisery'; import TeamStability from './teamStability'; @@ -174,10 +174,12 @@ function TeamInsightsOverview({ {!loadingTeams && ( - handleChangeTeam(choice.actor.id)} + teamFilter={filterTeam => filterTeam.isMember} /> { renderBody() { const {alertsTriggered} = this.state; + const data = Object.entries(alertsTriggered ?? {}) + .map(([bucket, count]) => ({ + value: count, + name: new Date(bucket).getTime(), + })) + .sort((a, b) => a.name - b.name); + + // Convert from days to 7 day groups + const seriesData = chunk(data, 7).map(week => { + return { + name: week[0].name, + value: week.reduce((total, currentData) => total + currentData.value, 0), + }; + }); return ( @@ -79,24 +93,17 @@ class TeamIssues extends AsyncComponent { moment(new Date(value)).format('MMM D'), - }, }} series={[ { seriesName: t('Alerts Triggered'), - data: Object.entries(alertsTriggered).map(([bucket, count]) => ({ - value: count, - name: bucket, - })), + data: seriesData, }, ].reverse()} /> diff --git a/static/app/views/teamInsights/teamDropdown.tsx b/static/app/views/teamInsights/teamDropdown.tsx deleted file mode 100644 index d1dd0ebd0c4e20..00000000000000 --- a/static/app/views/teamInsights/teamDropdown.tsx +++ /dev/null @@ -1,77 +0,0 @@ -import {useState} from 'react'; -import styled from '@emotion/styled'; - -import Input from 'app/components/forms/input'; -import {t} from 'app/locale'; -import {Team} from 'app/types'; - -import Filter from './filter'; - -const ALERT_LIST_QUERY_DEFAULT_TEAMS = ['myteams', 'unassigned']; - -type Props = { - teams: Team[]; - selectedTeam: string; - handleChangeTeam: (teamId: string) => void; -}; - -export function getTeamParams(team?: string | string[]): string[] { - if (team === undefined) { - return ALERT_LIST_QUERY_DEFAULT_TEAMS; - } - - if (team === '') { - return []; - } - - if (Array.isArray(team)) { - return team; - } - - return [team]; -} - -function TeamDropdown({teams, selectedTeam, handleChangeTeam}: Props) { - const [teamFilterSearch, setTeamFilterSearch] = useState(); - - const teamItems = teams.map(({id, name}) => ({ - label: name, - value: id, - filtered: teamFilterSearch - ? 
!name.toLowerCase().includes(teamFilterSearch.toLowerCase()) - : false, - checked: selectedTeam === id, - })); - - return ( - { - event.stopPropagation(); - }} - onChange={(event: React.ChangeEvent) => { - setTeamFilterSearch(event.target.value); - }} - value={teamFilterSearch || ''} - /> - } - onFilterChange={handleChangeTeam} - dropdownSection={{ - id: 'teams', - label: t('Teams'), - items: teamItems, - }} - /> - ); -} - -export default TeamDropdown; - -const StyledInput = styled(Input)` - border: none; - border-bottom: 1px solid ${p => p.theme.gray200}; - border-radius: 0; -`; diff --git a/static/app/views/teamInsights/teamMisery.tsx b/static/app/views/teamInsights/teamMisery.tsx index 886759165ea106..03ab69cbc969d7 100644 --- a/static/app/views/teamInsights/teamMisery.tsx +++ b/static/app/views/teamInsights/teamMisery.tsx @@ -1,4 +1,5 @@ import {Fragment} from 'react'; +import {css} from '@emotion/react'; import styled from '@emotion/styled'; import {Location} from 'history'; @@ -67,6 +68,7 @@ function TeamMisery({ return ( + ); + } + const commonEventView = { id: undefined, query: 'transaction.duration:<15m team_key_transaction:true', @@ -195,7 +211,7 @@ function TeamMiseryWrapper({ export default TeamMiseryWrapper; -const StyledPanelTable = styled(PanelTable)` +const StyledPanelTable = styled(PanelTable)<{isEmpty: boolean}>` grid-template-columns: 1fr 0.5fr 112px 112px 0.25fr; font-size: ${p => p.theme.fontSizeMedium}; white-space: nowrap; @@ -206,6 +222,14 @@ const StyledPanelTable = styled(PanelTable)` & > div { padding: ${space(1)} ${space(2)}; } + + ${p => + p.isEmpty && + css` + & > div:last-child { + padding: 48px ${space(2)}; + } + `} `; const ProjectBadgeContainer = styled('div')` diff --git a/static/app/views/teamInsights/teamStability.tsx b/static/app/views/teamInsights/teamStability.tsx index b23f3edd42be05..703c5314a58dbe 100644 --- a/static/app/views/teamInsights/teamStability.tsx +++ b/static/app/views/teamInsights/teamStability.tsx @@ -173,6 +173,7 @@ class TeamStability extends AsyncComponent { return ( {tct('Last [period]', {period})}, diff --git a/static/less/base.less b/static/less/base.less index b80c913481923c..34fb9d1d05b544 100644 --- a/static/less/base.less +++ b/static/less/base.less @@ -27,6 +27,7 @@ body { color: @gray-darker; background: @white-dark; -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; overflow-x: hidden; min-height: 100vh; } diff --git a/tests/acceptance/test_emails.py b/tests/acceptance/test_emails.py index f7e1b061e5280b..8106e7c969cd75 100644 --- a/tests/acceptance/test_emails.py +++ b/tests/acceptance/test_emails.py @@ -38,30 +38,30 @@ def read_txt_email_fixture(name: str) -> str: filename = name.replace(" ", "_") + ".txt" path = join(dirname(__file__), os.pardir, "fixtures", "emails", filename) - fixture = None with open(path) as f: - fixture = f.read() - return fixture + return f.read() + + +def build_url(path: str, format: str = "html") -> str: + return f"{path}?{urlencode({'format': format, 'seed': b'123'})}" class EmailTestCase(AcceptanceTestCase): def setUp(self): super().setUp() + # This email address is required to match FIXTURES. 
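+        # The fixtures in tests/fixtures/emails assume this address; the text assertions below fail if it changes.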
self.user = self.create_user("foo@example.com") self.login_as(self.user) - def build_url(self, path: str, format: str = "html") -> str: - return "{}?{}".format(path, urlencode({"format": format, "seed": b"123"})) - def test_emails(self): for url, name in EMAILS: # HTML output is captured as a snapshot - self.browser.get(self.build_url(url, "html")) + self.browser.get(build_url(url, "html")) self.browser.wait_until("#preview") self.browser.snapshot(f"{name} email html") # Text output is asserted against static fixture files - self.browser.get(self.build_url(url, "txt")) + self.browser.get(build_url(url, "txt")) self.browser.wait_until("#preview") elem = self.browser.find_element_by_css_selector("#preview pre") text_src = elem.get_attribute("innerHTML") diff --git a/tests/fixtures/emails/release.txt b/tests/fixtures/emails/release.txt index 9bc58b8423be9e..71aa9cdbd43101 100644 --- a/tests/fixtures/emails/release.txt +++ b/tests/fixtures/emails/release.txt @@ -1,4 +1,4 @@ -Version 6c998f755f304593a4713abd123eaf8833a2de5e was deployed to production on Oct. 12, 2016, 3:39 p.m. +Version 6c998f755f30 was deployed to production on Oct. 12, 2016, 3:39 p.m. http://testserver/organizations/organization/releases/6c998f755f304593a4713abd123eaf8833a2de5e/?project=1 diff --git a/tests/js/spec/components/createSampleEventButton.spec.jsx b/tests/js/spec/components/createSampleEventButton.spec.jsx index 56be14c5915f3a..2b96d859b49aa6 100644 --- a/tests/js/spec/components/createSampleEventButton.spec.jsx +++ b/tests/js/spec/components/createSampleEventButton.spec.jsx @@ -69,7 +69,7 @@ describe('CreateSampleEventButton', function () { ).toBe(false); expect(browserHistory.push).toHaveBeenCalledWith( - `/organizations/${org.slug}/issues/${groupID}/` + `/organizations/${org.slug}/issues/${groupID}/?project=${project.id}` ); }); @@ -115,7 +115,7 @@ describe('CreateSampleEventButton', function () { await Promise.resolve(); expect(browserHistory.push).toHaveBeenCalledWith( - `/organizations/${org.slug}/issues/${groupID}/` + `/organizations/${org.slug}/issues/${groupID}/?project=${project.id}` ); expect(trackAnalyticsEvent).toHaveBeenCalledWith( diff --git a/tests/js/spec/components/modals/addDashboardWidgetModal.spec.jsx b/tests/js/spec/components/modals/addDashboardWidgetModal.spec.jsx index 8145bfbe068302..95e0fda9831cd5 100644 --- a/tests/js/spec/components/modals/addDashboardWidgetModal.spec.jsx +++ b/tests/js/spec/components/modals/addDashboardWidgetModal.spec.jsx @@ -2,15 +2,23 @@ import {browserHistory} from 'react-router'; import {mountWithTheme} from 'sentry-test/enzyme'; import {initializeOrg} from 'sentry-test/initializeOrg'; -import {getOptionByLabel, selectByLabel} from 'sentry-test/select-new'; +import {getOptionByLabel, openMenu, selectByLabel} from 'sentry-test/select-new'; import AddDashboardWidgetModal from 'app/components/modals/addDashboardWidgetModal'; import {t} from 'app/locale'; import TagStore from 'app/stores/tagStore'; +import * as types from 'app/views/dashboardsV2/types'; const stubEl = props =>
    <div>{props.children}</div>
    ; -function mountModal({initialData, onAddWidget, onUpdateWidget, widget, fromDiscover}) { +function mountModal({ + initialData, + onAddWidget, + onUpdateWidget, + widget, + fromDiscover, + defaultWidgetQuery, +}) { return mountWithTheme( void 0} fromDiscover={fromDiscover} + defaultWidgetQuery={defaultWidgetQuery} />, initialData.routerContext ); @@ -97,7 +106,13 @@ describe('Modals -> AddDashboardWidgetModal', function () { }); MockApiClient.addMockResponse({ url: '/organizations/org-slug/dashboards/', - body: [{id: '1', title: t('Test Dashboard')}], + body: [ + TestStubs.Dashboard([], { + id: '1', + title: 'Test Dashboard', + widgetDisplay: ['area'], + }), + ], }); }); @@ -109,6 +124,7 @@ describe('Modals -> AddDashboardWidgetModal', function () { const wrapper = mountModal({initialData, fromDiscover: true}); // @ts-expect-error await tick(); + await wrapper.update(); selectDashboard(wrapper, {label: t('+ Create New Dashboard'), value: 'new'}); await clickSubmit(wrapper); expect(browserHistory.push).toHaveBeenCalledWith( @@ -123,6 +139,7 @@ describe('Modals -> AddDashboardWidgetModal', function () { const wrapper = mountModal({initialData, fromDiscover: true}); // @ts-expect-error await tick(); + await wrapper.update(); selectDashboard(wrapper, {label: t('Test Dashboard'), value: '1'}); await clickSubmit(wrapper); expect(browserHistory.push).toHaveBeenCalledWith( @@ -133,6 +150,22 @@ describe('Modals -> AddDashboardWidgetModal', function () { wrapper.unmount(); }); + it('disables dashboards with max widgets', async function () { + types.MAX_WIDGETS = 1; + const wrapper = mountModal({initialData, fromDiscover: true}); + // @ts-expect-error + await tick(); + await wrapper.update(); + openMenu(wrapper, {name: 'dashboard', control: true}); + + const input = wrapper.find('SelectControl[name="dashboard"]'); + expect(input.find('Option Option')).toHaveLength(2); + expect(input.find('Option Option').at(0).props().isDisabled).toBe(false); + expect(input.find('Option Option').at(1).props().isDisabled).toBe(true); + + wrapper.unmount(); + }); + it('can update the title', async function () { let widget = undefined; const wrapper = mountModal({ @@ -840,4 +873,28 @@ describe('Modals -> AddDashboardWidgetModal', function () { wrapper.unmount(); }); + + it('should use defaultWidgetQuery Y-Axis and Conditions if given a defaultWidgetQuery', async function () { + const wrapper = mountModal({ + initialData, + onAddWidget: () => undefined, + onUpdateWidget: () => undefined, + widget: undefined, + fromDiscover: true, + defaultWidgetQuery: { + name: '', + fields: ['count()', 'failure_count()', 'count_unique(user)'], + conditions: 'tag:value', + orderby: '', + }, + }); + + expect(wrapper.find('SearchBar').props().query).toEqual('tag:value'); + const queryFields = wrapper.find('QueryField'); + expect(queryFields.length).toEqual(3); + expect(queryFields.at(0).props().fieldValue.function[0]).toEqual('count'); + expect(queryFields.at(1).props().fieldValue.function[0]).toEqual('failure_count'); + expect(queryFields.at(2).props().fieldValue.function[0]).toEqual('count_unique'); + wrapper.unmount(); + }); }); diff --git a/tests/js/spec/utils/discover/fieldRenderer.spec.jsx b/tests/js/spec/utils/discover/fieldRenderer.spec.jsx index b573c1e96877ce..32ced23c1102ef 100644 --- a/tests/js/spec/utils/discover/fieldRenderer.spec.jsx +++ b/tests/js/spec/utils/discover/fieldRenderer.spec.jsx @@ -23,7 +23,6 @@ describe('getFieldRenderer', function () { query: {}, }; data = { - key_transaction: 1, team_key_transaction: 1, 
title: 'ValueError: something bad', transaction: 'api.do_things', @@ -197,56 +196,6 @@ describe('getFieldRenderer', function () { expect(value.text()).toEqual(project.slug); }); - it('can render key transaction as a star', async function () { - const renderer = getFieldRenderer('key_transaction', {key_transaction: 'boolean'}); - delete data.project; - - const wrapper = mountWithTheme( - renderer(data, {location, organization}), - context.routerContext - ); - - const value = wrapper.find('StyledKey'); - expect(value).toHaveLength(1); - expect(value.props().isSolid).toBeTruthy(); - - // Since there is not project column, it's not clickable - expect(wrapper.find('KeyColumn')).toHaveLength(0); - }); - - it('can render key transaction as a clickable star', async function () { - const renderer = getFieldRenderer('key_transaction', {key_transaction: 'boolean'}); - - const wrapper = mountWithTheme( - renderer(data, {location, organization}), - context.routerContext - ); - await tick(); - wrapper.update(); - - let value; - - value = wrapper.find('StyledKey'); - expect(value).toHaveLength(1); - expect(value.props().isSolid).toBeTruthy(); - - wrapper.find('KeyColumn').simulate('click'); - await tick(); - wrapper.update(); - - value = wrapper.find('StyledKey'); - expect(value).toHaveLength(1); - expect(value.props().isSolid).toBeFalsy(); - - wrapper.find('KeyColumn').simulate('click'); - await tick(); - wrapper.update(); - - value = wrapper.find('StyledKey'); - expect(value).toHaveLength(1); - expect(value.props().isSolid).toBeTruthy(); - }); - it('can render team key transaction as a star with the dropdown', async function () { const renderer = getFieldRenderer('team_key_transaction', { team_key_transaction: 'boolean', diff --git a/tests/js/spec/views/alerts/issueRuleEditor/ruleNode.spec.jsx b/tests/js/spec/views/alerts/issueRuleEditor/ruleNode.spec.jsx index a937d885e95719..bab5eeb38054a2 100644 --- a/tests/js/spec/views/alerts/issueRuleEditor/ruleNode.spec.jsx +++ b/tests/js/spec/views/alerts/issueRuleEditor/ruleNode.spec.jsx @@ -1,6 +1,7 @@ import {mountWithTheme} from 'sentry-test/enzyme'; import {getSelector, openMenu, selectByValue} from 'sentry-test/select-new'; +import ModalActions from 'app/actions/modalActions'; import RuleNode from 'app/views/alerts/issueRuleEditor/ruleNode'; describe('RuleNode', function () { @@ -58,8 +59,31 @@ describe('RuleNode', function () { // TODO: Add this node and test if it implements correctly (e.g. Jira Tickets) // const ticketNode = {actionType: 'ticket'}; - // TODO(Leander): Add this node and test if it implements correctly (e.g. 
Integrations w/ Alert Rule UI) - // const sentryAppNode = {actionType: 'sentryapp'} + const sentryAppNode = { + id: 'sentry.rules.schema_form_mock', + label: 'Configure SentryApp with these', + enabled: true, + actionType: 'sentryapp', + sentryAppInstallationUuid: '1027', + formFields: { + exampleStringField: { + type: 'string', + placeholder: 'placeholder', + }, + exampleNumberField: { + type: 'number', + placeholder: 100, + }, + exampleStringChoiceField: { + type: 'choice', + choices: [ + ['value1', 'label1'], + ['value2', 'label2'], + ['value3', 'label3'], + ], + }, + }, + }; const createWrapper = node => { project = TestStubs.Project(); @@ -182,6 +206,13 @@ describe('RuleNode', function () { }); it('renders sentry apps with schema forms correctly', async function () { - // TODO(Leander) + wrapper = createWrapper(sentryAppNode); + const openModal = jest.spyOn(ModalActions, 'openModal'); + + expect(wrapper.text()).toEqual(sentryAppNode.label + 'Settings'); + expect(wrapper.find('button[aria-label="Settings"]').exists()).toEqual(true); + wrapper.find('button[aria-label="Settings"]').simulate('click'); + + expect(openModal).toHaveBeenCalled(); }); }); diff --git a/tests/js/spec/views/alerts/issueRuleEditor/sentryAppRuleModal.spec.jsx b/tests/js/spec/views/alerts/issueRuleEditor/sentryAppRuleModal.spec.jsx index 01c13a4d337491..1b30ecd2b0f36e 100644 --- a/tests/js/spec/views/alerts/issueRuleEditor/sentryAppRuleModal.spec.jsx +++ b/tests/js/spec/views/alerts/issueRuleEditor/sentryAppRuleModal.spec.jsx @@ -110,14 +110,6 @@ describe('SentryAppRuleModal', function () { changeInputValue(descriptionInput, 'v'); changeInputValue(channelInput, 'v'); selectByValue(wrapper, 'valor', {name: 'channel', control: true}); - - MockApiClient.addMockResponse({ - // TODO(leander): Replace with real endpoint for alert rule actions - url: '/404/', - method: 'POST', - body: {it: 'worked'}, - }); - submitSuccess(wrapper); }); }); diff --git a/tests/js/spec/views/alerts/utils.spec.jsx b/tests/js/spec/views/alerts/utils.spec.jsx index 9b110d764efc3a..a33e86879800ea 100644 --- a/tests/js/spec/views/alerts/utils.spec.jsx +++ b/tests/js/spec/views/alerts/utils.spec.jsx @@ -5,7 +5,12 @@ import { Datasource, SessionsAggregate, } from 'app/views/alerts/incidentRules/types'; -import {getQueryDatasource, isSessionAggregate} from 'app/views/alerts/utils'; +import { + alertAxisFormatter, + alertTooltipValueFormatter, + getQueryDatasource, + isSessionAggregate, +} from 'app/views/alerts/utils'; import {getIncidentDiscoverUrl} from 'app/views/alerts/utils/getIncidentDiscoverUrl'; describe('Alert utils', function () { @@ -163,4 +168,32 @@ describe('Alert utils', function () { expect(isSessionAggregate('p95(transaction.duration)')).toBeFalsy(); }); }); + + describe('alertAxisFormatter', () => { + it('formats', () => { + expect( + alertAxisFormatter( + 98.312, + 'Crash Free Rate', + SessionsAggregate.CRASH_FREE_SESSIONS + ) + ).toBe('98.31%'); + expect(alertAxisFormatter(0.1234, 'failure_rate()', 'failure_rate()')).toBe('12%'); + }); + }); + + describe('alertTooltipValueFormatter', () => { + it('formats', () => { + expect( + alertTooltipValueFormatter( + 98.312, + 'Crash Free Rate', + SessionsAggregate.CRASH_FREE_SESSIONS + ) + ).toBe('98.312%'); + expect(alertTooltipValueFormatter(0.1234, 'failure_rate()', 'failure_rate()')).toBe( + '12.34%' + ); + }); + }); }); diff --git a/tests/js/spec/views/dashboardsV2/detail.spec.jsx b/tests/js/spec/views/dashboardsV2/detail.spec.jsx index 8e2fe423571316..aaad130425902e 100644 ---
a/tests/js/spec/views/dashboardsV2/detail.spec.jsx +++ b/tests/js/spec/views/dashboardsV2/detail.spec.jsx @@ -8,6 +8,7 @@ import {mountGlobalModal} from 'sentry-test/modal'; import ProjectsStore from 'app/stores/projectsStore'; import {DashboardState} from 'app/views/dashboardsV2/types'; +import * as types from 'app/views/dashboardsV2/types'; import ViewEditDashboard from 'app/views/dashboardsV2/view'; describe('Dashboards > Detail', function () { @@ -225,8 +226,16 @@ describe('Dashboards > Detail', function () { MockApiClient.addMockResponse({ url: '/organizations/org-slug/dashboards/', body: [ - TestStubs.Dashboard([], {id: 'default-overview', title: 'Default'}), - TestStubs.Dashboard([], {id: '1', title: 'Custom Errors'}), + TestStubs.Dashboard([], { + id: 'default-overview', + title: 'Default', + widgetDisplay: ['area'], + }), + TestStubs.Dashboard([], { + id: '1', + title: 'Custom Errors', + widgetDisplay: ['area'], + }), ], }); MockApiClient.addMockResponse({ @@ -337,6 +346,50 @@ describe('Dashboards > Detail', function () { expect(modal.find('AddDashboardWidgetModal').props().widget).toEqual(widgets[0]); }); + it('shows add widget option', async function () { + wrapper = mountWithTheme( + , + initialData.routerContext + ); + await tick(); + wrapper.update(); + + // Enter edit mode. + wrapper.find('Controls Button[data-test-id="dashboard-edit"]').simulate('click'); + wrapper.update(); + expect(wrapper.find('AddWidget').exists()).toBe(true); + + wrapper.unmount(); + }); + + it('hides add widget option', async function () { + types.MAX_WIDGETS = 1; + + wrapper = mountWithTheme( + , + initialData.routerContext + ); + await tick(); + wrapper.update(); + + // Enter edit mode. + wrapper.find('Controls Button[data-test-id="dashboard-edit"]').simulate('click'); + wrapper.update(); + expect(wrapper.find('AddWidget').exists()).toBe(false); + + wrapper.unmount(); + }); + it('hides and shows breadcrumbs based on feature', async function () { const newOrg = initializeOrg({ organization: TestStubs.Organization({ diff --git a/tests/js/spec/views/eventsV2/chartFooter.spec.tsx b/tests/js/spec/views/eventsV2/chartFooter.spec.tsx index 40b06c94db2130..0ca44a8b154bb1 100644 --- a/tests/js/spec/views/eventsV2/chartFooter.spec.tsx +++ b/tests/js/spec/views/eventsV2/chartFooter.spec.tsx @@ -43,6 +43,8 @@ describe('EventsV2 > ChartFooter', function () { displayMode={DisplayModes.DEFAULT} displayOptions={[{label: DisplayModes.DEFAULT, value: DisplayModes.DEFAULT}]} onDisplayChange={() => undefined} + onTopEventsChange={() => undefined} + topEvents="5" />, initialData.routerContext ); @@ -82,6 +84,8 @@ describe('EventsV2 > ChartFooter', function () { displayMode={DisplayModes.DEFAULT} displayOptions={[{label: DisplayModes.DEFAULT, value: DisplayModes.DEFAULT}]} onDisplayChange={() => undefined} + onTopEventsChange={() => undefined} + topEvents="5" />, initialData.routerContext ); @@ -94,4 +98,44 @@ describe('EventsV2 > ChartFooter', function () { expect(optionCheckboxSelector.props().title).toEqual(t('Y-Axis')); expect(optionCheckboxSelector.props().selected).toEqual(yAxisValue); }); + + it('renders display limits with default limit when top 5 mode is selected', async function () { + // @ts-expect-error + const organization = TestStubs.Organization({ + features: [...features, 'discover-top-events'], + }); + + // Start off with an invalid view (empty is invalid) + const initialData = initializeOrg({ + organization, + router: { + location: {query: {query: 'tag:value'}}, + }, + project: 1, + projects: [], + }); +
+ const wrapper = mountWithTheme( + undefined} + displayMode={DisplayModes.TOP5} + displayOptions={[{label: DisplayModes.DEFAULT, value: DisplayModes.DEFAULT}]} + onDisplayChange={() => undefined} + onTopEventsChange={() => undefined} + topEvents="5" + />, + initialData.routerContext + ); + + // @ts-expect-error + await tick(); + wrapper.update(); + + const optionSelector = wrapper.find('OptionSelector[title="Limit"]'); + expect(optionSelector.props().selected).toEqual('5'); + }); }); diff --git a/tests/js/spec/views/eventsV2/miniGraph.spec.tsx b/tests/js/spec/views/eventsV2/miniGraph.spec.tsx new file mode 100644 index 00000000000000..14b1ca07162c4c --- /dev/null +++ b/tests/js/spec/views/eventsV2/miniGraph.spec.tsx @@ -0,0 +1,50 @@ +import {mountWithTheme} from 'sentry-test/enzyme'; +import {initializeOrg} from 'sentry-test/initializeOrg'; + +import EventView from 'app/utils/discover/eventView'; +import MiniGraph from 'app/views/eventsV2/miniGraph'; + +describe('EventsV2 > MiniGraph', function () { + const features = ['discover-basic', 'connect-discover-and-dashboards']; + const location = { + query: {query: 'tag:value'}, + pathname: '/', + }; + + let organization, eventView, initialData; + + beforeEach(() => { + // @ts-expect-error + organization = TestStubs.Organization({ + features, + // @ts-expect-error + projects: [TestStubs.Project()], + }); + initialData = initializeOrg({ + organization, + router: { + location, + }, + project: 1, + projects: [], + }); + // @ts-expect-error + eventView = EventView.fromSavedQueryOrLocation(undefined, location); + }); + + it('makes an EventsRequest with all selected multi y axis', async function () { + const yAxis = ['count()', 'failure_count()']; + const wrapper = mountWithTheme( + , + initialData.routerContext + ); + const eventsRequestProps = wrapper.find('EventsRequest').props(); + expect(eventsRequestProps.yAxis).toEqual(yAxis); + }); +}); diff --git a/tests/js/spec/views/eventsV2/queryList.spec.jsx b/tests/js/spec/views/eventsV2/queryList.spec.jsx index 9ab80bf647201d..317e3e07eb91cd 100644 --- a/tests/js/spec/views/eventsV2/queryList.spec.jsx +++ b/tests/js/spec/views/eventsV2/queryList.spec.jsx @@ -239,4 +239,28 @@ describe('EventsV2 > QueryList', function () { expect(menuItems.at(0).find('span').children().html()).toEqual('Delete Query'); expect(menuItems.at(1).find('span').children().html()).toEqual('Duplicate Query'); }); + + it('passes yAxis from the savedQuery to MiniGraph', function () { + const featuredOrganization = TestStubs.Organization({ + features: ['connect-discover-and-dashboards', 'dashboards-edit'], + }); + const yAxis = ['count()', 'failure_count()']; + const savedQueryWithMultiYAxis = { + ...savedQueries.slice(1)[0], + yAxis, + }; + const wrapper = mountWithTheme( + , + TestStubs.routerContext() + ); + + const miniGraph = wrapper.find('MiniGraph'); + expect(miniGraph.props().yAxis).toEqual(['count()', 'failure_count()']); + }); }); diff --git a/tests/js/spec/views/eventsV2/table/columnEditModal.spec.js b/tests/js/spec/views/eventsV2/table/columnEditModal.spec.js index c896549ee40526..46e484a04f338a 100644 --- a/tests/js/spec/views/eventsV2/table/columnEditModal.spec.js +++ b/tests/js/spec/views/eventsV2/table/columnEditModal.spec.js @@ -500,7 +500,7 @@ describe('EventsV2 -> ColumnEditModal', function () { // Apply the changes so we can see the new columns. 
newWrapper.find('Button[priority="primary"]').simulate('click'); expect(onApply).toHaveBeenCalledWith([ - {kind: 'function', function: ['count_unique', '', undefined, undefined]}, + {kind: 'function', function: ['count_unique', 'user', undefined, undefined]}, {kind: 'function', function: ['count', '', undefined, undefined]}, {kind: 'equation', field: 'count() - count()'}, ]); @@ -534,8 +534,11 @@ describe('EventsV2 -> ColumnEditModal', function () { newWrapper.find('Button[priority="primary"]').simulate('click'); // With the way the parser works only tokens up to the error will be updated expect(onApply).toHaveBeenCalledWith([ - {kind: 'function', function: ['count_unique', '', undefined, undefined]}, - {kind: 'equation', field: 'count_unique() - count_unique() arst count() '}, + {kind: 'function', function: ['count_unique', 'user', undefined, undefined]}, + { + kind: 'equation', + field: 'count_unique(user) - count_unique(user) arst count() ', + }, ]); }); }); diff --git a/tests/js/spec/views/eventsV2/table/tableView.spec.jsx b/tests/js/spec/views/eventsV2/table/tableView.spec.jsx index 0e6531de2b566c..8019e50f8e9099 100644 --- a/tests/js/spec/views/eventsV2/table/tableView.spec.jsx +++ b/tests/js/spec/views/eventsV2/table/tableView.spec.jsx @@ -138,6 +138,21 @@ describe('TableView > CellActions', function () { }); }); + it('handles add cell action with multiple y axis', function () { + location.query.yAxis = ['count()', 'failure_count()']; + const wrapper = makeWrapper(initialData, rows, eventView); + const menu = openContextMenu(wrapper, 0); + menu.find('button[data-test-id="add-to-filter"]').simulate('click'); + + expect(browserHistory.push).toHaveBeenCalledWith({ + pathname: location.pathname, + query: expect.objectContaining({ + query: 'title:"some title"', + yAxis: ['count()', 'failure_count()'], + }), + }); + }); + it('handles exclude cell action on string value', function () { const wrapper = makeWrapper(initialData, rows, eventView); const menu = openContextMenu(wrapper, 0); diff --git a/tests/js/spec/views/integrationPipeline/pipelineView.spec.jsx b/tests/js/spec/views/integrationPipeline/pipelineView.spec.jsx index dc6230ee714920..7fd6bb4faaa906 100644 --- a/tests/js/spec/views/integrationPipeline/pipelineView.spec.jsx +++ b/tests/js/spec/views/integrationPipeline/pipelineView.spec.jsx @@ -1,20 +1,39 @@ -import {shallow} from 'sentry-test/enzyme'; +import {mountWithTheme} from 'sentry-test/reactTestingLibrary'; -import AwsLambdaProjectSelect from 'app/views/integrationPipeline/awsLambdaProjectSelect'; import PipelineView from 'app/views/integrationPipeline/pipelineView'; +function MockAwsLambdaProjectSelect() { + return
    <div>mock_AwsLambdaProjectSelect</div>
    ; +} + +jest.mock( + 'app/views/integrationPipeline/awsLambdaProjectSelect', + () => MockAwsLambdaProjectSelect +); + describe('PipelineView', () => { it('renders awsLambdaProjectSelect', () => { - const wrapper = shallow( + const {findByText} = mountWithTheme( <PipelineView pipelineName="awsLambdaProjectSelect" someField="someVal" />, - TestStubs.routerContext() + {context: TestStubs.routerContext()} ); - expect(wrapper.find(AwsLambdaProjectSelect).prop('someField')).toBe('someVal'); + + findByText('mock_AwsLambdaProjectSelect'); + expect(document.title).toBe('AWS Lambda Select Project'); }); + it('errors on invalid pipelineName', () => { - expect(() => - shallow(<PipelineView pipelineName="other" />, TestStubs.routerContext()) - ).toThrow('Invalid pipeline name other'); + jest.spyOn(console, 'error'); + + // eslint-disable-next-line no-console + console.error.mockImplementation(() => {}); + + expect(() => mountWithTheme(<PipelineView pipelineName="other" />)).toThrow( + 'Invalid pipeline name other' + ); + + // eslint-disable-next-line no-console + console.error.mockRestore(); }); }); diff --git a/tests/js/spec/views/organizationGroupDetails/groupDetails.spec.jsx b/tests/js/spec/views/organizationGroupDetails/groupDetails.spec.jsx index 6ba1318d58afee..fa9b7b56675bdf 100644 --- a/tests/js/spec/views/organizationGroupDetails/groupDetails.spec.jsx +++ b/tests/js/spec/views/organizationGroupDetails/groupDetails.spec.jsx @@ -10,6 +10,9 @@ import GroupDetails from 'app/views/organizationGroupDetails'; jest.unmock('app/utils/recreateRoute'); +const SAMPLE_EVENT_ALERT_TEXT = + 'You are viewing a sample error. Configure Sentry to start viewing real errors.'; + describe('groupDetails', () => { const group = TestStubs.Group(); const event = TestStubs.Event(); @@ -216,4 +219,21 @@ describe('groupDetails', () => { expect(await findByText('New Issue')).toBeTruthy(); }); + + it('renders alert for sample event', async function () { + const aProject = TestStubs.Project({firstEvent: false}); + ProjectsStore.reset(); + ProjectsStore.loadInitialData([aProject]); + const {findByText} = createWrapper(); + + expect(await findByText(SAMPLE_EVENT_ALERT_TEXT)).toBeTruthy(); + }); + it('does not render alert for non sample events', async function () { + const aProject = TestStubs.Project({firstEvent: false}); + ProjectsStore.reset(); + ProjectsStore.loadInitialData([aProject]); + const {queryByText} = createWrapper(); + + expect(await queryByText(SAMPLE_EVENT_ALERT_TEXT)).toBeNull(); + }); }); diff --git a/tests/js/spec/views/organizationGroupDetails/groupTags.spec.jsx b/tests/js/spec/views/organizationGroupDetails/groupTags.spec.jsx index bcf4bbab842eb2..394993fe0d6bff 100644 --- a/tests/js/spec/views/organizationGroupDetails/groupTags.spec.jsx +++ b/tests/js/spec/views/organizationGroupDetails/groupTags.spec.jsx @@ -1,5 +1,5 @@ -import {mountWithTheme} from 'sentry-test/enzyme'; import {initializeOrg} from 'sentry-test/initializeOrg'; +import {fireEvent, mountWithTheme} from 'sentry-test/reactTestingLibrary'; import GroupTags from 'app/views/organizationGroupDetails/groupTags'; @@ -18,12 +18,11 @@ describe('GroupTags', function () { const wrapper = mountWithTheme( , - routerContext + {context: routerContext} ); expect(tagsMock).toHaveBeenCalledWith( @@ -33,7 +32,11 @@ describe('GroupTags', function () { }) ); - wrapper.find('li[data-test-id="user"] Link').first().simulate('click', {button: 0}); + const headers = wrapper.getAllByRole('heading').map(header => header.innerHTML); + // Check headers have been sorted alphabetically + expect(headers).toEqual(['browser', 'device', 'environment', 'url', 'user']); + + fireEvent.click(wrapper.getByText('david'));
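+    // Clicking the "david" tag value should navigate to the issue's events view, asserted on router.push below.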
expect(router.push).toHaveBeenCalledWith({ pathname: '/organizations/org-slug/issues/1/events/', diff --git a/tests/js/spec/views/performance/content.spec.jsx b/tests/js/spec/views/performance/content.spec.jsx index a0320cbe5d35bc..8894cfd48a7660 100644 --- a/tests/js/spec/views/performance/content.spec.jsx +++ b/tests/js/spec/views/performance/content.spec.jsx @@ -143,7 +143,7 @@ describe('Performance > Content', function () { } else if (!options.query.hasOwnProperty('field')) { return false; } - return !options.query.field.includes('key_transaction'); + return !options.query.field.includes('team_key_transaction'); }, } ); @@ -166,7 +166,7 @@ describe('Performance > Content', function () { }, data: [ { - key_transaction: 1, + team_key_transaction: 1, transaction: '/apple/cart', 'project.id': 1, user: 'uhoh@example.com', @@ -180,7 +180,7 @@ describe('Performance > Content', function () { user_misery_300: 0.114, }, { - key_transaction: 0, + team_key_transaction: 0, transaction: '/apple/checkout', 'project.id': 1, user: 'uhoh@example.com', @@ -203,7 +203,7 @@ describe('Performance > Content', function () { } else if (!options.query.hasOwnProperty('field')) { return false; } - return options.query.field.includes('key_transaction'); + return options.query.field.includes('team_key_transaction'); }, } ); diff --git a/tests/js/spec/views/performance/table.spec.jsx b/tests/js/spec/views/performance/table.spec.jsx index 9be51f05d022e2..28e4bc6dc98892 100644 --- a/tests/js/spec/views/performance/table.spec.jsx +++ b/tests/js/spec/views/performance/table.spec.jsx @@ -109,7 +109,7 @@ describe('Performance > Table', function () { }, data: [ { - key_transaction: 1, + team_key_transaction: 1, transaction: '/apple/cart', project: project1.slug, user: 'uhoh@example.com', @@ -124,7 +124,7 @@ describe('Performance > Table', function () { project_threshold_config: ['duration', 300], }, { - key_transaction: 0, + team_key_transaction: 0, transaction: '/apple/checkout', project: project2.slug, user: 'uhoh@example.com', diff --git a/tests/js/spec/views/performance/vitalDetail/index.spec.jsx b/tests/js/spec/views/performance/vitalDetail/index.spec.jsx index fb3f75c538d544..7cf75dedb55366 100644 --- a/tests/js/spec/views/performance/vitalDetail/index.spec.jsx +++ b/tests/js/spec/views/performance/vitalDetail/index.spec.jsx @@ -131,7 +131,7 @@ describe('Performance > VitalDetail', function () { compare_numeric_aggregate_p75_measurements_cls_greater_0_25: 'number', count: 'integer', count_unique_user: 'integer', - key_transaction: 'boolean', + team_key_transaction: 'boolean', p50_measurements_cls: 'number', p75_measurements_cls: 'number', p95_measurements_cls: 'number', @@ -144,7 +144,7 @@ describe('Performance > VitalDetail', function () { compare_numeric_aggregate_p75_measurements_cls_greater_0_25: 0, count: 10000, count_unique_user: 2740, - key_transaction: 1, + team_key_transaction: 1, p50_measurements_cls: 0.143, p75_measurements_cls: 0.215, p95_measurements_cls: 0.302, diff --git a/tests/js/spec/views/teamInsights/index.spec.tsx b/tests/js/spec/views/teamInsights/index.spec.tsx index bf0480c0be700c..6152e115d634b7 100644 --- a/tests/js/spec/views/teamInsights/index.spec.tsx +++ b/tests/js/spec/views/teamInsights/index.spec.tsx @@ -1,8 +1,13 @@ import {mountWithTheme} from 'sentry-test/reactTestingLibrary'; +import ProjectsStore from 'app/stores/projectsStore'; import TeamInsightsContainer from 'app/views/teamInsights'; describe('TeamInsightsContainer', () => { + afterEach(() => { + ProjectsStore.reset(); + }); 
+ it('blocks access if org is missing flag', () => { // @ts-expect-error const organization = TestStubs.Organization(); @@ -18,6 +23,10 @@ describe('TeamInsightsContainer', () => { expect(wrapper.queryByText('test')).toBeNull(); }); it('allows access for orgs with flag', () => { + ProjectsStore.loadInitialData([ + // @ts-expect-error + TestStubs.Project(), + ]); // @ts-expect-error const organization = TestStubs.Organization({features: ['team-insights']}); // @ts-expect-error @@ -31,4 +40,19 @@ describe('TeamInsightsContainer', () => { expect(wrapper.getByText('test')).toBeTruthy(); }); + it('shows message for users with no teams', () => { + ProjectsStore.loadInitialData([]); + // @ts-expect-error + const organization = TestStubs.Organization({features: ['team-insights']}); + // @ts-expect-error + const context = TestStubs.routerContext([{organization}]); + const wrapper = mountWithTheme( + , + {context} + ); + + expect( + wrapper.getByText('You need at least one project to use this view') + ).toBeTruthy(); + }); }); diff --git a/tests/js/spec/views/teamInsights/overview.spec.jsx b/tests/js/spec/views/teamInsights/overview.spec.jsx index 6db28caa22a511..175b0e3b0ccc4a 100644 --- a/tests/js/spec/views/teamInsights/overview.spec.jsx +++ b/tests/js/spec/views/teamInsights/overview.spec.jsx @@ -1,5 +1,6 @@ -import {fireEvent, mountWithTheme, waitFor} from 'sentry-test/reactTestingLibrary'; +import {act, fireEvent, mountWithTheme, waitFor} from 'sentry-test/reactTestingLibrary'; +import TeamStore from 'app/stores/teamStore'; import localStorage from 'app/utils/localStorage'; import {TeamInsightsOverview} from 'app/views/teamInsights/overview'; @@ -8,8 +9,18 @@ jest.mock('app/utils/localStorage'); describe('TeamInsightsOverview', () => { const project1 = TestStubs.Project({id: '2', name: 'js', slug: 'js'}); const project2 = TestStubs.Project({id: '3', name: 'py', slug: 'py'}); - const team1 = TestStubs.Team({id: '2', name: 'frontend', projects: [project1]}); - const team2 = TestStubs.Team({id: '3', name: 'backend', projects: [project2]}); + const team1 = TestStubs.Team({ + id: '2', + slug: 'frontend', + name: 'frontend', + projects: [project1], + }); + const team2 = TestStubs.Team({ + id: '3', + slug: 'backend', + name: 'backend', + projects: [project2], + }); const mockRouter = {push: jest.fn()}; beforeEach(() => { @@ -58,6 +69,7 @@ describe('TeamInsightsOverview', () => { url: `/teams/org-slug/${team1.slug}/alerts-triggered/`, body: TestStubs.TeamAlertsTriggered(), }); + act(() => void TeamStore.loadInitialData([team1, team2])); }); afterEach(() => { @@ -92,7 +104,7 @@ describe('TeamInsightsOverview', () => { expect(wrapper.queryByTestId('loading-indicator')).not.toBeInTheDocument(); }); - expect(wrapper.getByText('Team: frontend')).toBeInTheDocument(); + expect(wrapper.getByText('#frontend')).toBeInTheDocument(); expect(wrapper.getByText('Key transaction')).toBeInTheDocument(); }); @@ -102,9 +114,9 @@ describe('TeamInsightsOverview', () => { expect(wrapper.queryByTestId('loading-indicator')).not.toBeInTheDocument(); }); - fireEvent.click(wrapper.getByText('Team: frontend')); - expect(wrapper.getByText('backend')).toBeInTheDocument(); - fireEvent.click(wrapper.getByText('backend')); + fireEvent.mouseDown(wrapper.getByText('#frontend')); + expect(wrapper.getByText('#backend')).toBeInTheDocument(); + fireEvent.click(wrapper.getByText('#backend')); expect(mockRouter.push).toHaveBeenCalledWith({query: {team: team2.id}}); expect(localStorage.setItem).toHaveBeenCalledWith( 
'teamInsightsSelectedTeamId:org-slug', diff --git a/tests/js/spec/views/teamInsights/teamMisery.spec.jsx b/tests/js/spec/views/teamInsights/teamMisery.spec.jsx index f6af9226965986..bb411de6fc2dce 100644 --- a/tests/js/spec/views/teamInsights/teamMisery.spec.jsx +++ b/tests/js/spec/views/teamInsights/teamMisery.spec.jsx @@ -18,7 +18,6 @@ describe('TeamMisery', () => { }, data: [ { - key_transaction: 1, transaction: '/apple/cart', project: project.slug, tpm: 30, @@ -28,7 +27,6 @@ describe('TeamMisery', () => { project_threshold_config: ['duration', 300], }, { - key_transaction: 0, transaction: '/apple/checkout', project: project.slug, tpm: 30, @@ -59,4 +57,23 @@ describe('TeamMisery', () => { expect(wrapper.getAllByText(project.slug)).toHaveLength(2); expect(wrapper.getAllByText('0% change')).toHaveLength(2); }); + + it('should render empty state', async () => { + const routerContext = TestStubs.routerContext(); + const wrapper = mountWithTheme( + , + {context: routerContext} + ); + + await waitFor(() => { + expect(wrapper.queryByTestId('loading-indicator')).not.toBeInTheDocument(); + }); + + expect(wrapper.getByText('There are no items to display')).toBeTruthy(); + }); }); diff --git a/tests/js/spec/views/teamInsights/teamStability.spec.jsx b/tests/js/spec/views/teamInsights/teamStability.spec.jsx index 493438b067b667..8e426aeabbb575 100644 --- a/tests/js/spec/views/teamInsights/teamStability.spec.jsx +++ b/tests/js/spec/views/teamInsights/teamStability.spec.jsx @@ -43,4 +43,12 @@ describe('TeamStability', () => { expect(wrapper.getAllByText('\u2014')).toHaveLength(3); }); + + it('should render no projects', async () => { + const wrapper = mountWithTheme( + + ); + + expect(wrapper.getByText('There are no items to display')).toBeTruthy(); + }); }); diff --git a/tests/sentry/api/endpoints/test_project_release_stats.py b/tests/sentry/api/endpoints/test_project_release_stats.py new file mode 100644 index 00000000000000..c373f0f8d0a99f --- /dev/null +++ b/tests/sentry/api/endpoints/test_project_release_stats.py @@ -0,0 +1,32 @@ +from datetime import datetime + +from django.urls import reverse + +from sentry.models import Release +from sentry.testutils import APITestCase + + +class ProjectReleaseStatsTest(APITestCase): + def test_simple(self): + """Minimal test to ensure code coverage of the endpoint""" + self.login_as(user=self.user) + + project = self.create_project(name="foo") + release = Release.objects.create( + organization_id=project.organization_id, + version="1", + date_added=datetime(2013, 8, 13, 3, 8, 24, 880386), + ) + release.add_project(project) + + url = reverse( + "sentry-api-0-project-release-stats", + kwargs={ + "organization_slug": project.organization.slug, + "project_slug": project.slug, + "version": "1", + }, + ) + response = self.client.get(url, format="json") + + assert response.status_code == 200, response.content diff --git a/tests/sentry/api/endpoints/test_project_rule_details.py b/tests/sentry/api/endpoints/test_project_rule_details.py index 718c97f344a338..57968beeff9fbc 100644 --- a/tests/sentry/api/endpoints/test_project_rule_details.py +++ b/tests/sentry/api/endpoints/test_project_rule_details.py @@ -1,3 +1,5 @@ +from unittest.mock import patch + import responses from django.urls import reverse @@ -781,6 +783,77 @@ def test_update_filters(self): assert RuleActivity.objects.filter(rule=rule, type=RuleActivityType.UPDATED.value).exists() + @patch("sentry.mediators.alert_rule_actions.AlertRuleActionCreator.run") + def test_update_alert_rule_action(self, 
mock_alert_rule_action_creator): + """ + Ensures that Sentry Apps with schema forms (UI components) + receive a payload when an alert rule is updated with them. + """ + self.login_as(user=self.user) + + project = self.create_project() + + rule = Rule.objects.create(project=project, label="my super cool rule") + + self.create_sentry_app(name="Pied Piper", organization=project.organization) + install = self.create_sentry_app_installation( + slug="pied-piper", organization=project.organization + ) + + actions = [ + { + "id": "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction", + "settings": {"assignee": "Team Rocket", "priority": 27}, + "uri": "/sentry/alerts/", + "sentryAppInstallationUuid": install.uuid, + "hasSchemaFormConfig": True, + }, + ] + + url = reverse( + "sentry-api-0-project-rule-details", + kwargs={ + "organization_slug": project.organization.slug, + "project_slug": project.slug, + "rule_id": rule.id, + }, + ) + + response = self.client.put( + url, + data={ + "name": "my super cool rule", + "actionMatch": "any", + "filterMatch": "any", + "actions": actions, + "conditions": [], + "filters": [], + }, + format="json", + ) + + assert response.status_code == 200, response.content + assert response.data["id"] == str(rule.id) + + rule = Rule.objects.get(id=rule.id) + assert rule.data["actions"] == actions + + kwargs = { + "install": install, + "fields": actions[0].get("settings"), + "uri": actions[0].get("uri"), + "rule": rule, + } + + call_kwargs = mock_alert_rule_action_creator.call_args[1] + + assert call_kwargs["install"].id == kwargs["install"].id + assert call_kwargs["fields"] == kwargs["fields"] + assert call_kwargs["uri"] == kwargs["uri"] + assert call_kwargs["rule"].id == kwargs["rule"].id + + assert RuleActivity.objects.filter(rule=rule, type=RuleActivityType.UPDATED.value).exists() + class DeleteProjectRuleTest(APITestCase): def test_simple(self): diff --git a/tests/sentry/api/endpoints/test_project_rules.py b/tests/sentry/api/endpoints/test_project_rules.py index 1bdf963655f1a5..dd76982cb6fec6 100644 --- a/tests/sentry/api/endpoints/test_project_rules.py +++ b/tests/sentry/api/endpoints/test_project_rules.py @@ -411,3 +411,68 @@ def test_comparison_condition_validation(self): str(response.data["conditions"][0]) == "Select a valid choice. bad data is not one of the available choices." ) + + @patch("sentry.mediators.alert_rule_actions.AlertRuleActionCreator.run") + def test_runs_alert_rule_action_creator(self, mock_alert_rule_action_creator): + """ + Ensures that Sentry Apps with schema forms (UI components) + receive a payload when an alert rule is created with them. 
+ """ + self.login_as(user=self.user) + + project = self.create_project() + + self.create_sentry_app(name="Pied Piper", organization=project.organization) + install = self.create_sentry_app_installation( + slug="pied-piper", organization=project.organization + ) + + actions = [ + { + "id": "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction", + "settings": {"assignee": "Team Rocket", "priority": 27}, + "uri": "/sentry/alerts/", + "sentryAppInstallationUuid": install.uuid, + "hasSchemaFormConfig": True, + }, + ] + + url = reverse( + "sentry-api-0-project-rules", + kwargs={"organization_slug": project.organization.slug, "project_slug": project.slug}, + ) + + response = self.client.post( + url, + data={ + "name": "my super cool rule", + "owner": f"user:{self.user.id}", + "conditions": [], + "filters": [], + "actions": actions, + "filterMatch": "any", + "actionMatch": "any", + "frequency": 30, + }, + format="json", + ) + + assert response.status_code == 200, response.content + assert response.data["id"] + + rule = Rule.objects.get(id=response.data["id"]) + assert rule.data["actions"] == actions + + kwargs = { + "install": install, + "fields": actions[0].get("settings"), + "uri": actions[0].get("uri"), + "rule": rule, + } + + call_kwargs = mock_alert_rule_action_creator.call_args[1] + + assert call_kwargs["install"].id == kwargs["install"].id + assert call_kwargs["fields"] == kwargs["fields"] + assert call_kwargs["uri"] == kwargs["uri"] + assert call_kwargs["rule"].id == kwargs["rule"].id diff --git a/tests/sentry/api/endpoints/test_project_rules_configuration.py b/tests/sentry/api/endpoints/test_project_rules_configuration.py index 856d8f02af029e..a41341589916ad 100644 --- a/tests/sentry/api/endpoints/test_project_rules_configuration.py +++ b/tests/sentry/api/endpoints/test_project_rules_configuration.py @@ -5,6 +5,7 @@ EMAIL_ACTION = "sentry.mail.actions.NotifyEmailAction" APP_ACTION = "sentry.rules.actions.notify_event_service.NotifyEventServiceAction" JIRA_ACTION = "sentry.integrations.jira.notify_action.JiraCreateTicketAction" +SENTRY_APP_ALERT_ACTION = "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction" class ProjectRuleConfigurationTest(APITestCase): @@ -162,8 +163,6 @@ def test_sentry_app_alertable_webhook(self): assert len(response.data["filters"]) == 7 def test_sentry_app_alert_rules(self): - from sentry.models import SentryAppComponent - team = self.create_team() project1 = self.create_project(teams=[team], name="foo") self.create_project(teams=[team], name="baz") @@ -176,15 +175,12 @@ def test_sentry_app_alert_rules(self): install = self.create_sentry_app_installation( slug=sentry_app.slug, organization=self.organization, user=self.user ) - component = SentryAppComponent.objects.get( - sentry_app_id=sentry_app.id, type="alert-rule-action" - ) response = self.get_valid_response(self.organization.slug, project1.slug) assert len(response.data["actions"]) == 8 assert { - "id": f"sentry.sentryapp.{sentry_app.slug}", - "uuid": str(component.uuid), + "id": SENTRY_APP_ALERT_ACTION, + "service": sentry_app.slug, "actionType": "sentryapp", "prompt": sentry_app.name, "enabled": True, diff --git a/tests/sentry/api/endpoints/test_team_alerts_triggered.py b/tests/sentry/api/endpoints/test_team_alerts_triggered.py index 18048d7d46c660..522d372ca41092 100644 --- a/tests/sentry/api/endpoints/test_team_alerts_triggered.py +++ b/tests/sentry/api/endpoints/test_team_alerts_triggered.py @@ -1,3 +1,4 @@ +from django.utils import timezone from freezegun import 
freeze_time from sentry.incidents.models import ( @@ -51,9 +52,9 @@ def test_simple(self): assert ( response.data[ str( - before_now(days=i).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) + before_now(days=i) + .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) + .isoformat() ) ] == 1 @@ -63,9 +64,9 @@ def test_simple(self): assert ( response.data[ str( - before_now(days=i).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) + before_now(days=i) + .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) + .isoformat() ) ] == 0 @@ -78,9 +79,9 @@ def test_simple(self): assert ( response.data[ str( - before_now(days=0).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) + before_now(days=0) + .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) + .isoformat() ) ] == 0 @@ -89,9 +90,9 @@ def test_simple(self): assert ( response.data[ str( - before_now(days=i).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) + before_now(days=i) + .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) + .isoformat() ) ] == 1 @@ -159,9 +160,9 @@ def test_not_as_simple(self): assert ( response.data[ str( - before_now(days=2).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) + before_now(days=2) + .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) + .isoformat() ) ] == 1 @@ -172,9 +173,9 @@ def test_not_as_simple(self): assert ( response.data[ str( - before_now(days=i).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) + before_now(days=i) + .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) + .isoformat() ) ] == 0 diff --git a/tests/sentry/api/endpoints/test_team_issue_breakdown.py b/tests/sentry/api/endpoints/test_team_issue_breakdown.py new file mode 100644 index 00000000000000..aabcc16c043d33 --- /dev/null +++ b/tests/sentry/api/endpoints/test_team_issue_breakdown.py @@ -0,0 +1,158 @@ +from datetime import timedelta + +from django.utils import timezone +from django.utils.timezone import now +from freezegun import freeze_time + +from sentry.models import GroupHistory, GroupHistoryStatus +from sentry.testutils import APITestCase +from sentry.testutils.helpers.datetime import before_now + + +@freeze_time() +class TeamIssueBreakdownTest(APITestCase): + endpoint = "sentry-api-0-team-issue-breakdown" + + def test_simple(self): + project1 = self.create_project(teams=[self.team], slug="foo") + project2 = self.create_project(teams=[self.team], slug="bar") + group1 = self.create_group(checksum="a" * 32, project=project1, times_seen=10) + group2 = self.create_group(checksum="b" * 32, project=project2, times_seen=5) + + GroupHistory.objects.create( + organization=self.organization, + group=group1, + project=project1, + actor=self.user.actor, + date_added=before_now(days=5), + status=GroupHistoryStatus.UNRESOLVED, + ) + GroupHistory.objects.create( + organization=self.organization, + group=group1, + project=project1, + actor=self.user.actor, + status=GroupHistoryStatus.RESOLVED, + date_added=before_now(days=2), + ) + GroupHistory.objects.create( + organization=self.organization, + group=group1, + project=project1, + actor=self.user.actor, + status=GroupHistoryStatus.REGRESSED, + date_added=before_now(days=2), + ) + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + date_added=before_now(days=10), + 
status=GroupHistoryStatus.UNRESOLVED, + ) + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + date_added=before_now(days=1), + status=GroupHistoryStatus.UNRESOLVED, + ) + + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + status=GroupHistoryStatus.RESOLVED, + ) + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + status=GroupHistoryStatus.RESOLVED, + ) + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + status=GroupHistoryStatus.IGNORED, + ) + today = str( + now() + .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) + .isoformat() + ) + yesterday = str( + (now() - timedelta(days=1)) + .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) + .isoformat() + ) + two_days_ago = str( + (now() - timedelta(days=2)) + .replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) + .isoformat() + ) + self.login_as(user=self.user) + response = self.get_success_response( + self.team.organization.slug, self.team.slug, statsPeriod="7d" + ) + assert len(response.data) == 2 + assert response.data[project1.id][today]["reviewed"] == 0 + assert response.data[project1.id][today]["total"] == 0 + assert response.data[project1.id][yesterday]["reviewed"] == 0 + assert response.data[project1.id][yesterday]["total"] == 0 + assert response.data[project1.id][two_days_ago]["reviewed"] == 1 + assert response.data[project1.id][two_days_ago]["reviewed"] == 1 + + assert response.data[project2.id][today]["reviewed"] == 3 + assert response.data[project2.id][today]["total"] == 3 + assert response.data[project2.id][yesterday]["reviewed"] == 0 + assert response.data[project2.id][yesterday]["total"] == 1 + assert response.data[project2.id][two_days_ago]["reviewed"] == 0 + assert response.data[project2.id][two_days_ago]["total"] == 0 + + GroupHistory.objects.create( + organization=self.organization, + group=group1, + project=project1, + actor=self.user.actor, + date_added=before_now(days=1), + status=GroupHistoryStatus.UNRESOLVED, + ) + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + status=GroupHistoryStatus.RESOLVED, + ) + + # making sure it doesnt bork anything + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + status=GroupHistoryStatus.ASSIGNED, + ) + + response = self.get_success_response(self.team.organization.slug, self.team.slug) + assert len(response.data) == 2 + + assert response.data[project1.id][today]["reviewed"] == 0 + assert response.data[project1.id][today]["total"] == 0 + assert response.data[project1.id][yesterday]["reviewed"] == 0 + assert response.data[project1.id][yesterday]["total"] == 1 + assert response.data[project1.id][two_days_ago]["reviewed"] == 1 + assert response.data[project1.id][two_days_ago]["reviewed"] == 1 + + assert response.data[project2.id][today]["reviewed"] == 4 + assert response.data[project2.id][today]["total"] == 4 + assert response.data[project2.id][yesterday]["reviewed"] == 0 + assert response.data[project2.id][yesterday]["total"] == 1 + assert response.data[project2.id][two_days_ago]["reviewed"] == 0 + assert response.data[project2.id][two_days_ago]["total"] == 0 diff --git 
a/tests/sentry/api/endpoints/test_team_time_to_resolution.py b/tests/sentry/api/endpoints/test_team_time_to_resolution.py new file mode 100644 index 00000000000000..8cdb23af60112f --- /dev/null +++ b/tests/sentry/api/endpoints/test_team_time_to_resolution.py @@ -0,0 +1,111 @@ +from datetime import timedelta + +from django.utils.timezone import now +from freezegun import freeze_time + +from sentry.models import GroupHistory, GroupHistoryStatus +from sentry.testutils import APITestCase +from sentry.testutils.helpers.datetime import before_now + + +@freeze_time() +class TeamTimeToResolutionTest(APITestCase): + endpoint = "sentry-api-0-team-time-to-resolution" + + def test_simple(self): + project1 = self.create_project(teams=[self.team], slug="foo") + project2 = self.create_project(teams=[self.team], slug="bar") + group1 = self.create_group(checksum="a" * 32, project=project1, times_seen=10) + group2 = self.create_group(checksum="b" * 32, project=project2, times_seen=5) + + gh1 = GroupHistory.objects.create( + organization=self.organization, + group=group1, + project=project1, + actor=self.user.actor, + date_added=before_now(days=5), + status=GroupHistoryStatus.UNRESOLVED, + prev_history=None, + prev_history_date=None, + ) + + GroupHistory.objects.create( + organization=self.organization, + group=group1, + project=project1, + actor=self.user.actor, + status=GroupHistoryStatus.RESOLVED, + prev_history=gh1, + prev_history_date=gh1.date_added, + date_added=before_now(days=2), + ) + + gh2 = GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + date_added=before_now(days=10), + status=GroupHistoryStatus.UNRESOLVED, + prev_history=None, + prev_history_date=None, + ) + + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + status=GroupHistoryStatus.RESOLVED, + prev_history=gh2, + prev_history_date=gh2.date_added, + ) + today = str(now().date()) + yesterday = str((now() - timedelta(days=1)).date()) + two_days_ago = str((now() - timedelta(days=2)).date()) + self.login_as(user=self.user) + response = self.get_success_response( + self.team.organization.slug, self.team.slug, statsPeriod="14d" + ) + assert len(response.data) == 14 + assert response.data[today]["avg"] == timedelta(days=10).total_seconds() + assert response.data[two_days_ago]["avg"] == timedelta(days=3).total_seconds() + assert response.data[yesterday]["avg"] == 0 + + # Lower "todays" average by adding another resolution, but this time 5 days instead of 10 (avg is 7.5 now) + gh2 = GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + date_added=before_now(days=5), + status=GroupHistoryStatus.UNRESOLVED, + prev_history=None, + prev_history_date=None, + ) + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + status=GroupHistoryStatus.RESOLVED, + prev_history=gh2, + prev_history_date=gh2.date_added, + ) + + # making sure it doesnt bork anything + GroupHistory.objects.create( + organization=self.organization, + group=group2, + project=project2, + actor=self.user.actor, + status=GroupHistoryStatus.DELETED, + prev_history=gh2, + prev_history_date=gh2.date_added, + ) + + response = self.get_success_response(self.team.organization.slug, self.team.slug) + assert len(response.data) == 90 + assert response.data[today]["avg"] == timedelta(days=7, 
hours=12).total_seconds() + assert response.data[two_days_ago]["avg"] == timedelta(days=3).total_seconds() + assert response.data[yesterday]["avg"] == 0 diff --git a/tests/sentry/api/endpoints/test_user_authenticator_details.py b/tests/sentry/api/endpoints/test_user_authenticator_details.py index 166208578621b7..2c3874a4d08195 100644 --- a/tests/sentry/api/endpoints/test_user_authenticator_details.py +++ b/tests/sentry/api/endpoints/test_user_authenticator_details.py @@ -1,51 +1,106 @@ import datetime from django.conf import settings +from django.core import mail from django.db.models import F -from django.urls import reverse from django.utils import timezone from sentry.auth.authenticators import RecoveryCodeInterface, SmsInterface, TotpInterface -from sentry.models import Authenticator, Organization +from sentry.models import Authenticator, Organization, User from sentry.testutils import APITestCase from sentry.utils.compat import mock -class UserAuthenticatorDetailsTest(APITestCase): +def get_auth(user: "User") -> Authenticator: + return Authenticator.objects.create( + type=3, # u2f + user=user, + config={ + "devices": [ + { + "binding": { + "publicKey": "aowekroawker", + "keyHandle": "devicekeyhandle", + "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json", + }, + "name": "Amused Beetle", + "ts": 1512505334, + }, + { + "binding": { + "publicKey": "publickey", + "keyHandle": "aowerkoweraowerkkro", + "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json", + }, + "name": "Sentry", + "ts": 1512505334, + }, + ] + }, + ) + + +def assert_security_email_sent(email_type: str) -> None: + """TODO(mgaeta): Move this function to a test helper directory.""" + body_fragment = { + "mfa-added": "An authenticator has been added to your Sentry account", + "mfa-removed": "An authenticator has been removed from your Sentry account", + "recovery-codes-regenerated": "Recovery codes have been regenerated for your Sentry account", + }.get(email_type) + assert len(mail.outbox) == 1 + assert body_fragment in mail.outbox[0].body + + +class UserAuthenticatorDetailsTestBase(APITestCase): def setUp(self): - self.user = self.create_user(email="test@example.com", is_superuser=False) self.login_as(user=self.user) - def _assert_security_email_sent(self, email_type, email_log): - assert email_log.info.call_count == 1 - assert "mail.queued" in email_log.info.call_args[0] - assert email_log.info.call_args[1]["extra"]["message_type"] == email_type - - def _require_2fa_for_organization(self): + def _require_2fa_for_organization(self) -> None: organization = self.create_organization(name="test monkey", owner=self.user) organization.update(flags=F("flags").bitor(Organization.flags.require_2fa)) - def test_wrong_auth_id(self): - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": self.user.id, "auth_id": "totp"}, - ) - resp = self.client.get(url) - assert resp.status_code == 404 +class UserAuthenticatorDeviceDetailsTest(UserAuthenticatorDetailsTestBase): + endpoint = "sentry-api-0-user-authenticator-device-details" + method = "delete" + + def test_u2f_remove_device(self): + auth = get_auth(self.user) + + with self.tasks(): + self.get_success_response(self.user.id, auth.id, "devicekeyhandle") + + authenticator = Authenticator.objects.get(id=auth.id) + assert len(authenticator.interface.get_registered_devices()) == 1 + + assert_security_email_sent("mfa-removed") + + # Can't remove last device. + # TODO(mgaeta): We should not allow the API to return a 500. 
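+        # A second delete would leave zero devices, so the API refuses it (currently surfaced as a 500).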
+ with self.tasks(): + self.get_error_response(self.user.id, auth.id, "aowerkoweraowerkkro", status_code=500) + + # Only one send. + assert_security_email_sent("mfa-removed") + + def test_require_2fa__delete_device__ok(self): + self._require_2fa_for_organization() + self.test_u2f_remove_device() + + +class UserAuthenticatorDetailsTest(UserAuthenticatorDetailsTestBase): + endpoint = "sentry-api-0-user-authenticator-details" + + def test_wrong_auth_id(self): + self.get_error_response(self.user.id, "totp", status_code=404) def test_get_authenticator_details(self): interface = TotpInterface() interface.enroll(self.user) auth = interface.authenticator - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": self.user.id, "auth_id": auth.id}, - ) + resp = self.get_success_response(self.user.id, auth.id) - resp = self.client.get(url) - assert resp.status_code == 200 assert resp.data["isEnrolled"] assert resp.data["id"] == "totp" assert resp.data["authId"] == str(auth.id) @@ -55,50 +110,23 @@ def test_get_authenticator_details(self): assert "form" not in resp.data assert "qrcode" not in resp.data - @mock.patch("sentry.utils.email.logger") - def test_get_recovery_codes(self, email_log): + def test_get_recovery_codes(self): interface = RecoveryCodeInterface() interface.enroll(self.user) - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": self.user.id, "auth_id": interface.authenticator.id}, - ) + with self.tasks(): + resp = self.get_success_response(self.user.id, interface.authenticator.id) - resp = self.client.get(url) - assert resp.status_code == 200 assert resp.data["id"] == "recovery" assert resp.data["authId"] == str(interface.authenticator.id) assert len(resp.data["codes"]) - assert email_log.info.call_count == 0 + assert len(mail.outbox) == 0 def test_u2f_get_devices(self): - auth = Authenticator.objects.create( - type=3, # u2f - user=self.user, - config={ - "devices": [ - { - "binding": { - "publicKey": "aowekroawker", - "keyHandle": "aowkeroakewrokaweokrwoer", - "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json", - }, - "name": "Amused Beetle", - "ts": 1512505334, - } - ] - }, - ) - - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": self.user.id, "auth_id": auth.id}, - ) - - resp = self.client.get(url) - assert resp.status_code == 200 + auth = get_auth(self.user) + + resp = self.get_success_response(self.user.id, auth.id) assert resp.data["id"] == "u2f" assert resp.data["authId"] == str(auth.id) assert len(resp.data["devices"]) @@ -109,109 +137,17 @@ def test_u2f_get_devices(self): assert "response" not in resp.data def test_get_device_name(self): - auth = Authenticator.objects.create( - type=3, # u2f - user=self.user, - config={ - "devices": [ - { - "binding": { - "publicKey": "aowekroawker", - "keyHandle": "devicekeyhandle", - "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json", - }, - "name": "Amused Beetle", - "ts": 1512505334, - }, - { - "binding": { - "publicKey": "publickey", - "keyHandle": "aowerkoweraowerkkro", - "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json", - }, - "name": "Sentry", - "ts": 1512505334, - }, - ] - }, - ) + auth = get_auth(self.user) assert auth.interface.get_device_name("devicekeyhandle") == "Amused Beetle" assert auth.interface.get_device_name("aowerkoweraowerkkro") == "Sentry" - @mock.patch("sentry.utils.email.logger") - def test_u2f_remove_device(self, email_log): - auth = Authenticator.objects.create( - type=3, # u2f - 
user=self.user, - config={ - "devices": [ - { - "binding": { - "publicKey": "aowekroawker", - "keyHandle": "devicekeyhandle", - "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json", - }, - "name": "Amused Beetle", - "ts": 1512505334, - }, - { - "binding": { - "publicKey": "publickey", - "keyHandle": "aowerkoweraowerkkro", - "appId": "https://dev.getsentry.net:8000/auth/2fa/u2fappid.json", - }, - "name": "Sentry", - "ts": 1512505334, - }, - ] - }, - ) - - url = reverse( - "sentry-api-0-user-authenticator-device-details", - kwargs={ - "user_id": self.user.id, - "auth_id": auth.id, - "interface_device_id": "devicekeyhandle", - }, - ) - - resp = self.client.delete(url) - assert resp.status_code == 204 - - authenticator = Authenticator.objects.get(id=auth.id) - assert len(authenticator.interface.get_registered_devices()) == 1 - - self._assert_security_email_sent("mfa-removed", email_log) - - # Can't remove last device - url = reverse( - "sentry-api-0-user-authenticator-device-details", - kwargs={ - "user_id": self.user.id, - "auth_id": auth.id, - "interface_device_id": "aowerkoweraowerkkro", - }, - ) - resp = self.client.delete(url) - assert resp.status_code == 500 - - # only one send - self._assert_security_email_sent("mfa-removed", email_log) - def test_sms_get_phone(self): interface = SmsInterface() interface.phone_number = "5551231234" interface.enroll(self.user) - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": self.user.id, "auth_id": interface.authenticator.id}, - ) - - resp = self.client.get(url) - assert resp.status_code == 200 + resp = self.get_success_response(self.user.id, interface.authenticator.id) assert resp.data["id"] == "sms" assert resp.data["authId"] == str(interface.authenticator.id) assert resp.data["phone"] == "5551231234" @@ -220,38 +156,30 @@ def test_sms_get_phone(self): assert "totp_secret" not in resp.data assert "form" not in resp.data - @mock.patch("sentry.utils.email.logger") - def test_recovery_codes_regenerate(self, email_log): + def test_recovery_codes_regenerate(self): interface = RecoveryCodeInterface() interface.enroll(self.user) - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": self.user.id, "auth_id": interface.authenticator.id}, - ) - - resp = self.client.get(url) - assert resp.status_code == 200 + resp = self.get_success_response(self.user.id, interface.authenticator.id) old_codes = resp.data["codes"] old_created_at = resp.data["createdAt"] - resp = self.client.get(url) + resp = self.get_success_response(self.user.id, interface.authenticator.id) assert old_codes == resp.data["codes"] assert old_created_at == resp.data["createdAt"] # regenerate codes tomorrow = timezone.now() + datetime.timedelta(days=1) with mock.patch.object(timezone, "now", return_value=tomorrow): - resp = self.client.put(url) - - resp = self.client.get(url) + with self.tasks(): + self.get_success_response(self.user.id, interface.authenticator.id, method="put") + resp = self.get_success_response(self.user.id, interface.authenticator.id) assert old_codes != resp.data["codes"] assert old_created_at != resp.data["createdAt"] - self._assert_security_email_sent("recovery-codes-regenerated", email_log) + assert_security_email_sent("recovery-codes-regenerated") - @mock.patch("sentry.utils.email.logger") - def test_delete(self, email_log): + def test_delete(self): new_options = settings.SENTRY_OPTIONS.copy() new_options["sms.twilio-account"] = "twilio-account" user = self.create_user(email="a@example.com", 
is_superuser=True) @@ -265,38 +193,28 @@ def test_delete(self, email_log): self.assertEqual(len(available_auths), 1) self.login_as(user=user, superuser=True) - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": user.id, "auth_id": auth.id}, - ) - resp = self.client.delete(url, format="json") - assert resp.status_code == 204, (resp.status_code, resp.content) + with self.tasks(): + self.get_success_response(user.id, auth.id, method="delete") assert not Authenticator.objects.filter(id=auth.id).exists() - self._assert_security_email_sent("mfa-removed", email_log) + assert_security_email_sent("mfa-removed") - @mock.patch("sentry.utils.email.logger") - def test_cannot_delete_without_superuser(self, email_log): + def test_cannot_delete_without_superuser(self): user = self.create_user(email="a@example.com", is_superuser=False) auth = Authenticator.objects.create(type=3, user=user) # u2f actor = self.create_user(email="b@example.com", is_superuser=False) self.login_as(user=actor) - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": user.id, "auth_id": auth.id}, - ) - resp = self.client.delete(url, format="json") - assert resp.status_code == 403, (resp.status_code, resp.content) + with self.tasks(): + self.get_error_response(user.id, auth.id, method="delete", status_code=403) assert Authenticator.objects.filter(id=auth.id).exists() - assert email_log.info.call_count == 0 + assert len(mail.outbox) == 0 - @mock.patch("sentry.utils.email.logger") - def test_require_2fa__cannot_delete_last_auth(self, email_log): + def test_require_2fa__cannot_delete_last_auth(self): self._require_2fa_for_organization() # enroll in one auth method @@ -304,21 +222,15 @@ def test_require_2fa__cannot_delete_last_auth(self, email_log): interface.enroll(self.user) auth = interface.authenticator - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": self.user.id, "auth_id": auth.id}, - ) - - resp = self.client.delete(url, format="json") - assert resp.status_code == 403, (resp.status_code, resp.content) - assert b"requires 2FA" in resp.content + with self.tasks(): + resp = self.get_error_response(self.user.id, auth.id, method="delete", status_code=403) + assert b"requires 2FA" in resp.content assert Authenticator.objects.filter(id=auth.id).exists() - assert email_log.info.call_count == 0 + assert len(mail.outbox) == 0 - @mock.patch("sentry.utils.email.logger") - def test_require_2fa__delete_with_multiple_auth__ok(self, email_log): + def test_require_2fa__delete_with_multiple_auth__ok(self): self._require_2fa_for_organization() new_options = settings.SENTRY_OPTIONS.copy() @@ -334,18 +246,8 @@ def test_require_2fa__delete_with_multiple_auth__ok(self, email_log): interface.enroll(self.user) auth = interface.authenticator - url = reverse( - "sentry-api-0-user-authenticator-details", - kwargs={"user_id": self.user.id, "auth_id": auth.id}, - ) - resp = self.client.delete(url, format="json") - assert resp.status_code == 204, (resp.status_code, resp.content) + with self.tasks(): + self.get_success_response(self.user.id, auth.id, method="delete") assert not Authenticator.objects.filter(id=auth.id).exists() - - self._assert_security_email_sent("mfa-removed", email_log) - - @mock.patch("sentry.utils.email.logger") - def test_require_2fa__delete_device__ok(self, email_log): - self._require_2fa_for_organization() - self.test_u2f_remove_device() + assert_security_email_sent("mfa-removed") diff --git
a/tests/sentry/api/endpoints/test_user_authenticator_enroll.py b/tests/sentry/api/endpoints/test_user_authenticator_enroll.py index 06e19970acaa79..954bdd5d6775bc 100644 --- a/tests/sentry/api/endpoints/test_user_authenticator_enroll.py +++ b/tests/sentry/api/endpoints/test_user_authenticator_enroll.py @@ -2,6 +2,7 @@ from urllib.parse import parse_qsl from django.conf import settings +from django.core import mail from django.db.models import F from django.urls import reverse @@ -15,6 +16,7 @@ ) from sentry.testutils import APITestCase from sentry.utils.compat import mock +from tests.sentry.api.endpoints.test_user_authenticator_details import assert_security_email_sent # TODO(joshuarli): move all fixtures to a standard path relative to gitroot, @@ -26,46 +28,36 @@ def get_fixture_path(name): class UserAuthenticatorEnrollTest(APITestCase): + endpoint = "sentry-api-0-user-authenticator-enroll" + def setUp(self): - self.user = self.create_user(email="a@example.com", is_superuser=False) - self.organization = self.create_organization(owner=self.user) self.login_as(user=self.user) - def _assert_security_email_sent(self, email_type, email_log): - assert email_log.info.call_count == 1 - assert "mail.queued" in email_log.info.call_args[0] - assert email_log.info.call_args[1]["extra"]["message_type"] == email_type - - @mock.patch("sentry.utils.email.logger") @mock.patch("sentry.auth.authenticators.TotpInterface.validate_otp", return_value=True) - def test_totp_can_enroll(self, validate_otp, email_log): + def test_totp_can_enroll(self, validate_otp): # XXX: Pretend an unbound function exists. validate_otp.__func__ = None - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "totp"}, - ) - with mock.patch( "sentry.auth.authenticators.base.generate_secret_key", return_value="Z" * 32 ): - resp = self.client.get(url) + resp = self.get_success_response("me", "totp") - assert resp.status_code == 200 assert resp.data["secret"] == "Z" * 32 assert ( resp.data["qrcode"] - == "otpauth://totp/a%40example.com?issuer=Sentry&secret=ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ" + == "otpauth://totp/admin%40localhost?issuer=Sentry&secret=ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ" ) assert resp.data["form"] assert resp.data["secret"] # try to enroll - resp = self.client.post(url, data={"secret": "secret12", "otp": "1234"}) + with self.tasks(): + self.get_success_response( + "me", "totp", method="post", **{"secret": "secret12", "otp": "1234"} + ) assert validate_otp.call_count == 1 assert validate_otp.call_args == mock.call("1234") - assert resp.status_code == 204 interface = Authenticator.objects.get_interface(user=self.user, interface_id="totp") assert interface @@ -76,40 +68,42 @@ def test_totp_can_enroll(self, validate_otp, email_log): recovery = Authenticator.objects.get_interface(user=self.user, interface_id="recovery") assert recovery.is_enrolled() - self._assert_security_email_sent("mfa-added", email_log) + assert_security_email_sent("mfa-added") # can rotate in place - resp = self.client.get(url) - assert resp.status_code == 200 - resp = self.client.post(url, data={"secret": "secret56", "otp": "5678"}) + self.get_success_response("me", "totp") + self.get_success_response( + "me", "totp", method="post", **{"secret": "secret56", "otp": "5678"} + ) assert validate_otp.call_args == mock.call("5678") - assert resp.status_code == 204 + interface = Authenticator.objects.get_interface(user=self.user, interface_id="totp") assert interface.secret == "secret56" assert interface.config == 
{"secret": "secret56"} - @mock.patch("sentry.utils.email.logger") @mock.patch("sentry.auth.authenticators.TotpInterface.validate_otp", return_value=False) - def test_invalid_otp(self, validate_otp, email_log): + def test_invalid_otp(self, validate_otp): # XXX: Pretend an unbound function exists. validate_otp.__func__ = None - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "totp"}, - ) - # try to enroll - resp = self.client.post(url, data={"secret": "secret12", "otp": "1234"}) + with self.tasks(): + self.get_error_response( + "me", + "totp", + method="post", + status_code=400, + **{"secret": "secret12", "otp": "1234"}, + ) + assert validate_otp.call_count == 1 assert validate_otp.call_args == mock.call("1234") - assert resp.status_code == 400 - assert email_log.call_count == 0 - @mock.patch("sentry.utils.email.logger") + assert len(mail.outbox) == 0 + @mock.patch("sentry.auth.authenticators.SmsInterface.validate_otp", return_value=True) @mock.patch("sentry.auth.authenticators.SmsInterface.send_text", return_value=True) - def test_sms_can_enroll(self, send_text, validate_otp, email_log): + def test_sms_can_enroll(self, send_text, validate_otp): # XXX: Pretend an unbound function exists. validate_otp.__func__ = None @@ -117,47 +111,50 @@ def test_sms_can_enroll(self, send_text, validate_otp, email_log): new_options["sms.twilio-account"] = "twilio-account" with self.settings(SENTRY_OPTIONS=new_options): - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "sms"}, - ) - - resp = self.client.get(url) - assert resp.status_code == 200 + resp = self.get_success_response("me", "sms") assert resp.data["form"] assert resp.data["secret"] - resp = self.client.post(url, data={"secret": "secret12", "phone": "1231234"}) + self.get_success_response( + "me", "sms", method="post", **{"secret": "secret12", "phone": "1231234"} + ) assert send_text.call_count == 1 assert validate_otp.call_count == 0 - assert resp.status_code == 204 - resp = self.client.post( - url, data={"secret": "secret12", "phone": "1231234", "otp": "123123"} - ) + with self.tasks(): + self.get_success_response( + "me", + "sms", + method="post", + **{"secret": "secret12", "phone": "1231234", "otp": "123123"}, + ) assert validate_otp.call_count == 1 assert validate_otp.call_args == mock.call("123123") interface = Authenticator.objects.get_interface(user=self.user, interface_id="sms") assert interface.phone_number == "1231234" - self._assert_security_email_sent("mfa-added", email_log) + assert_security_email_sent("mfa-added") def test_sms_invalid_otp(self): new_options = settings.SENTRY_OPTIONS.copy() new_options["sms.twilio-account"] = "twilio-account" with self.settings(SENTRY_OPTIONS=new_options): - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "sms"}, + self.get_error_response( + "me", + "sms", + method="post", + status_code=400, + **{"secret": "secret12", "phone": "1231234", "otp": None}, ) - resp = self.client.post( - url, data={"secret": "secret12", "phone": "1231234", "otp": None} + self.get_error_response( + "me", + "sms", + method="post", + status_code=400, + **{"secret": "secret12", "phone": "1231234", "otp": ""}, ) - assert resp.status_code == 400 - resp = self.client.post(url, data={"secret": "secret12", "phone": "1231234", "otp": ""}) - assert resp.status_code == 400 def test_sms_no_verified_email(self): user = self.create_user() @@ -168,14 +165,13 @@ def 
test_sms_no_verified_email(self): new_options["sms.twilio-account"] = "twilio-account" with self.settings(SENTRY_OPTIONS=new_options): - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "sms"}, - ) - resp = self.client.post( - url, data={"secret": "secret12", "phone": "1231234", "otp": None} + resp = self.get_error_response( + "me", + "sms", + method="post", + status_code=401, + **{"secret": "secret12", "phone": "1231234", "otp": None}, ) - assert resp.status_code == 401 assert resp.data == { "detail": { "code": "email-verification-required", @@ -192,58 +188,53 @@ def test_rate_limited(self, try_enroll, is_limited): new_options = settings.SENTRY_OPTIONS.copy() new_options["system.url-prefix"] = "https://testserver" with self.settings(SENTRY_OPTIONS=new_options): - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "u2f"}, - ) - resp = self.client.get(url) - assert resp.status_code == 200 - - resp = self.client.post( - url, - data={ + self.get_success_response("me", "u2f") + self.get_error_response( + "me", + "u2f", + method="post", + status_code=429, + **{ "deviceName": "device name", "challenge": "challenge", "response": "response", }, ) - assert resp.status_code == 429 + assert try_enroll.call_count == 0 - @mock.patch("sentry.utils.email.logger") @mock.patch("sentry.auth.authenticators.U2fInterface.try_enroll", return_value=True) - def test_u2f_can_enroll(self, try_enroll, email_log): + def test_u2f_can_enroll(self, try_enroll): new_options = settings.SENTRY_OPTIONS.copy() new_options["system.url-prefix"] = "https://testserver" with self.settings(SENTRY_OPTIONS=new_options): - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "u2f"}, - ) - - resp = self.client.get(url) - assert resp.status_code == 200 + resp = self.get_success_response("me", "u2f") assert resp.data["form"] assert "secret" not in resp.data assert "qrcode" not in resp.data assert resp.data["challenge"] - resp = self.client.post( - url, - data={ - "deviceName": "device name", - "challenge": "challenge", - "response": "response", - }, - ) + with self.tasks(): + self.get_success_response( + "me", + "u2f", + method="post", + **{ + "deviceName": "device name", + "challenge": "challenge", + "response": "response", + }, + ) + assert try_enroll.call_count == 1 assert try_enroll.call_args == mock.call("challenge", "response", "device name") - assert resp.status_code == 204 - self._assert_security_email_sent("mfa-added", email_log) + assert_security_email_sent("mfa-added") class AcceptOrganizationInviteTest(APITestCase): + endpoint = "sentry-api-0-user-authenticator-enroll" + def setUp(self): self.organization = self.create_organization(owner=self.create_user("foo@example.com")) self.user = self.create_user("bar@example.com", is_superuser=False) @@ -279,7 +270,7 @@ def get_om_and_init_invite(self): return om - def assert_invite_accepted(self, response, member_id): + def assert_invite_accepted(self, response, member_id: int) -> None: om = OrganizationMember.objects.get(id=member_id) assert om.user == self.user assert om.email is None @@ -298,22 +289,16 @@ def setup_u2f(self): new_options = settings.SENTRY_OPTIONS.copy() new_options["system.url-prefix"] = "https://testserver" with self.settings(SENTRY_OPTIONS=new_options): - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "u2f"}, - ) - - resp = self.client.post( - url, - data={ 
+ return self.get_success_response( + "me", + "u2f", + method="post", + **{ "deviceName": "device name", "challenge": "challenge", "response": "response", }, ) - assert resp.status_code == 204 - - return resp def test_cannot_accept_invite_pending_invite__2fa_required(self): om = self.get_om_and_init_invite() @@ -342,17 +327,14 @@ def test_accept_pending_invite__sms_enroll(self, send_text, validate_otp): new_options["sms.twilio-account"] = "twilio-account" with self.settings(SENTRY_OPTIONS=new_options): - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "sms"}, + self.get_success_response( + "me", "sms", method="post", **{"secret": "secret12", "phone": "1231234"} ) - - resp = self.client.post(url, data={"secret": "secret12", "phone": "1231234"}) - assert resp.status_code == 204 - - resp = self.client.post( - url, - data={ + resp = self.get_success_response( + "me", + "sms", + method="post", + **{ "secret": "secret12", "phone": "1231234", "otp": "123123", @@ -360,6 +342,7 @@ def test_accept_pending_invite__sms_enroll(self, send_text, validate_otp): "token": om.token, }, ) + assert validate_otp.call_count == 1 assert validate_otp.call_args == mock.call("123123") @@ -376,18 +359,13 @@ def test_accept_pending_invite__totp_enroll(self, validate_otp): om = self.get_om_and_init_invite() # setup totp - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "totp"}, - ) - - resp = self.client.get(url) - assert resp.status_code == 200 - - resp = self.client.post( - url, data={"secret": "secret12", "otp": "1234", "memberId": om.id, "token": om.token} + self.get_success_response("me", "totp") + resp = self.get_success_response( + "me", + "totp", + method="post", + **{"secret": "secret12", "otp": "1234", "memberId": om.id, "token": om.token}, ) - assert resp.status_code == 204 interface = Authenticator.objects.get_interface(user=self.user, interface_id="totp") assert interface @@ -447,18 +425,14 @@ def test_enroll_without_pending_invite__no_error(self, try_enroll, log): new_options = settings.SENTRY_OPTIONS.copy() new_options["system.url-prefix"] = "https://testserver" with self.settings(SENTRY_OPTIONS=new_options): - url = reverse( - "sentry-api-0-user-authenticator-enroll", - kwargs={"user_id": "me", "interface_id": "u2f"}, - ) - - resp = self.client.post( - url, - data={ + self.get_success_response( + "me", + "u2f", + method="post", + **{ "deviceName": "device name", "challenge": "challenge", "response": "response", }, ) - assert resp.status_code == 204 assert log.error.called is False diff --git a/tests/sentry/eventstream/kafka/test_postprocessworker.py b/tests/sentry/eventstream/kafka/test_postprocessworker.py index 501a38b7770e83..6c16b4aaebc2e9 100644 --- a/tests/sentry/eventstream/kafka/test_postprocessworker.py +++ b/tests/sentry/eventstream/kafka/test_postprocessworker.py @@ -5,7 +5,9 @@ from sentry import options from sentry.eventstream.kafka.postprocessworker import ( _CONCURRENCY_OPTION, + ErrorsPostProcessForwarderWorker, PostProcessForwarderWorker, + TransactionsPostProcessForwarderWorker, ) from sentry.eventstream.kafka.protocol import InvalidVersion from sentry.utils import json @@ -32,22 +34,50 @@ def kafka_message_payload(): ] -@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task") -def test_post_process_forwarder(dispatch_post_process_group_task, kafka_message_payload): - """ - Test that the post process forwarder calls dispatch_post_process_group_task 
with the correct arguments - """ - forwarder = PostProcessForwarderWorker(concurrency=1) +@pytest.fixture +def kafka_message_without_transaction_header(kafka_message_payload): + mock_message = Mock() + mock_message.headers = MagicMock(return_value=[("timestamp", b"12345")]) + mock_message.value = MagicMock(return_value=json.dumps(kafka_message_payload)) + mock_message.partition = MagicMock("1") + return mock_message + +@pytest.fixture +def kafka_message_with_transaction_header_false(kafka_message_payload): mock_message = Mock() + mock_message.headers = MagicMock( + return_value=[("timestamp", b"12345"), ("transaction_forwarder", b"0")] + ) mock_message.value = MagicMock(return_value=json.dumps(kafka_message_payload)) mock_message.partition = MagicMock("1") + return mock_message - future = forwarder.process_message(mock_message) + +@pytest.fixture +def kafka_message_with_transaction_header_true(kafka_message_payload): + mock_message = Mock() + mock_message.headers = MagicMock( + return_value=[("timestamp", b"12345"), ("transaction_forwarder", b"1")] + ) + mock_message.value = MagicMock(return_value=json.dumps(kafka_message_payload)) + mock_message.partition = MagicMock("1") + return mock_message + + +@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task") +def test_post_process_forwarder( + dispatch_post_process_group_task, kafka_message_without_transaction_header +): + """ + Tests that the post process forwarder calls dispatch_post_process_group_task with the correct arguments + """ + forwarder = PostProcessForwarderWorker(concurrency=1) + future = forwarder.process_message(kafka_message_without_transaction_header) forwarder.flush_batch([future]) - dispatch_post_process_group_task.assert_called_with( + dispatch_post_process_group_task.assert_called_once_with( event_id="fe0ee9a2bc3b415497bad68aaf70dc7f", project_id=1, group_id=43, @@ -66,7 +96,7 @@ def test_post_process_forwarder_bad_message_headers( dispatch_post_process_group_task, kafka_message_payload ): """ - Test that when bad message headers are received, post process forwarder still works if the payload is valid. + Tests that when bad message headers are received, the post process forwarder still works if the payload is valid. """ forwarder = PostProcessForwarderWorker(concurrency=1) @@ -80,7 +110,7 @@ def test_post_process_forwarder_bad_message_headers( forwarder.flush_batch([future]) - dispatch_post_process_group_task.assert_called_with( + dispatch_post_process_group_task.assert_called_once_with( event_id="fe0ee9a2bc3b415497bad68aaf70dc7f", project_id=1, group_id=43, @@ -95,7 +125,7 @@ def test_post_process_forwarder_bad_message_headers( def test_post_process_forwarder_bad_message(kafka_message_payload): """ - Test that exception is thrown during flush_batch calls when a bad message is received. + Tests that an exception is thrown during flush_batch calls when a bad message is received. """ forwarder = PostProcessForwarderWorker(concurrency=1) @@ -116,7 +146,7 @@ def test_post_process_forwarder_bad_message(kafka_message_payload): @pytest.mark.django_db def test_post_process_forwarder_concurrency(kafka_message_payload): """ - Test that the number of threads change when the option is changed. + Tests that the number of threads changes when the option is changed.
""" forwarder = PostProcessForwarderWorker(concurrency=1) @@ -128,3 +158,131 @@ def test_post_process_forwarder_concurrency(kafka_message_payload): assert forwarder._PostProcessForwarderWorker__current_concurrency == 5 forwarder.shutdown() + + +@pytest.mark.django_db +@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task") +def test_errors_post_process_forwarder_missing_headers( + dispatch_post_process_group_task, kafka_message_without_transaction_header +): + """ + Tests that the errors post process forwarder calls dispatch_post_process_group_task + when the header "transaction_forwarder" is missing. + """ + forwarder = ErrorsPostProcessForwarderWorker(concurrency=1) + future = forwarder.process_message(kafka_message_without_transaction_header) + assert future is not None + + forwarder.flush_batch([future]) + + dispatch_post_process_group_task.assert_called_once_with( + event_id="fe0ee9a2bc3b415497bad68aaf70dc7f", + project_id=1, + group_id=43, + primary_hash="311ee66a5b8e697929804ceb1c456ffe", + is_new=False, + is_regression=None, + is_new_group_environment=False, + ) + + forwarder.shutdown() + + +@pytest.mark.django_db +@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task") +def test_errors_post_process_forwarder_false_headers( + dispatch_post_process_group_task, kafka_message_with_transaction_header_false +): + """ + Test that the errors post process forwarder calls dispatch_post_process_group_task + when the header "transaction_forwarder" is set to False. + """ + forwarder = ErrorsPostProcessForwarderWorker(concurrency=1) + future = forwarder.process_message(kafka_message_with_transaction_header_false) + assert future is not None + + forwarder.flush_batch([future]) + + dispatch_post_process_group_task.assert_called_once_with( + event_id="fe0ee9a2bc3b415497bad68aaf70dc7f", + project_id=1, + group_id=43, + primary_hash="311ee66a5b8e697929804ceb1c456ffe", + is_new=False, + is_regression=None, + is_new_group_environment=False, + ) + + forwarder.shutdown() + + +@pytest.mark.django_db +def test_errors_post_process_forwarder_true_headers(kafka_message_with_transaction_header_true): + """ + Tests that the errors post process forwarder's process_message returns None + when the header "transaction_forwarder" is set to True. + """ + forwarder = ErrorsPostProcessForwarderWorker(concurrency=1) + future = forwarder.process_message(kafka_message_with_transaction_header_true) + + assert future is None + + forwarder.shutdown() + + +@pytest.mark.django_db +def test_transactions_post_process_forwarder_missing_headers( + kafka_message_without_transaction_header, +): + """ + Tests that the transactions post process forwarder's process_message returns None + when the header "transaction_forwarder" is missing. + """ + forwarder = TransactionsPostProcessForwarderWorker(concurrency=1) + future = forwarder.process_message(kafka_message_without_transaction_header) + assert future is None + + forwarder.shutdown() + + +@pytest.mark.django_db +def test_transactions_post_process_forwarder_false_headers( + kafka_message_with_transaction_header_false, +): + """ + Tests that the transactions post process forwarder's process_message returns None + when the header "transaction_forwarder" is set to False. 
+ """ + forwarder = TransactionsPostProcessForwarderWorker(concurrency=1) + future = forwarder.process_message(kafka_message_with_transaction_header_false) + assert future is None + + forwarder.shutdown() + + +@pytest.mark.django_db +@patch("sentry.eventstream.kafka.postprocessworker.dispatch_post_process_group_task") +def test_transactions_post_process_forwarder_true_headers( + dispatch_post_process_group_task, kafka_message_with_transaction_header_true +): + """ + Tests that the transactions post process forwarder calls dispatch_post_process_group_task + when the header "transaction_forwarder" is set to True. + """ + forwarder = TransactionsPostProcessForwarderWorker(concurrency=1) + future = forwarder.process_message(kafka_message_with_transaction_header_true) + + assert future is not None + forwarder.flush_batch([future]) + + dispatch_post_process_group_task.assert_called_with( + event_id="fe0ee9a2bc3b415497bad68aaf70dc7f", + project_id=1, + group_id=43, + primary_hash="311ee66a5b8e697929804ceb1c456ffe", + is_new=False, + is_regression=None, + is_new_group_environment=False, + ) + + forwarder.shutdown() diff --git a/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py b/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py index 0f4de224b3c975..d373c0ff2346f7 100644 --- a/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py +++ b/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py @@ -542,3 +542,148 @@ def test_offset_pagination(self): self.assert_alert_rule_serialized(self.two_alert_rule, result[0], skip_dates=True) self.assert_alert_rule_serialized(self.yet_another_alert_rule, result[1], skip_dates=True) + + +@freeze_time() +class AlertRuleCreateEndpointTestCrashRateAlert(APITestCase): + endpoint = "sentry-api-0-project-alert-rules" + method = "post" + + def setUp(self): + super().setUp() + self.valid_alert_rule = { + "aggregate": "percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate", + "query": "", + "timeWindow": "60", + "resolveThreshold": 90, + "thresholdType": 1, + "triggers": [ + { + "label": "critical", + "alertThreshold": 70, + "actions": [ + {"type": "email", "targetType": "team", "targetIdentifier": self.team.id} + ], + }, + { + "label": "warning", + "alertThreshold": 80, + "actions": [ + {"type": "email", "targetType": "team", "targetIdentifier": self.team.id}, + {"type": "email", "targetType": "user", "targetIdentifier": self.user.id}, + ], + }, + ], + "projects": [self.project.slug], + "owner": self.user.id, + "name": "JustAValidTestRule", + "dataset": "sessions", + "eventTypes": [], + } + # Login + self.create_member( + user=self.user, organization=self.organization, role="owner", teams=[self.team] + ) + self.login_as(self.user) + + @fixture + def organization(self): + return self.create_organization() + + @fixture + def project(self): + return self.create_project(organization=self.organization) + + @fixture + def user(self): + return self.create_user() + + def test_simple_crash_rate_alerts_for_sessions(self): + with self.feature(["organizations:incidents", "organizations:performance-view"]): + resp = self.get_valid_response( + self.organization.slug, self.project.slug, status_code=201, **self.valid_alert_rule + ) + assert "id" in resp.data + alert_rule = AlertRule.objects.get(id=resp.data["id"]) + assert resp.data == serialize(alert_rule, self.user) + + def test_simple_crash_rate_alerts_for_users(self): + self.valid_alert_rule.update( + { + "aggregate": "percentage(users_crashed, users) AS 
_crash_rate_alert_aggregate", + } + ) + with self.feature(["organizations:incidents", "organizations:performance-view"]): + resp = self.get_valid_response( + self.organization.slug, self.project.slug, status_code=201, **self.valid_alert_rule + ) + assert "id" in resp.data + alert_rule = AlertRule.objects.get(id=resp.data["id"]) + assert resp.data == serialize(alert_rule, self.user) + + def test_simple_crash_rate_alerts_for_sessions_drops_event_types(self): + self.valid_alert_rule["eventTypes"] = ["sessions", "events"] + with self.feature(["organizations:incidents", "organizations:performance-view"]): + resp = self.get_valid_response( + self.organization.slug, self.project.slug, status_code=201, **self.valid_alert_rule + ) + assert "id" in resp.data + alert_rule = AlertRule.objects.get(id=resp.data["id"]) + assert resp.data == serialize(alert_rule, self.user) + + def test_simple_crash_rate_alerts_for_sessions_with_invalid_time_window(self): + self.valid_alert_rule["timeWindow"] = "90" + with self.feature(["organizations:incidents", "organizations:performance-view"]): + resp = self.get_valid_response( + self.organization.slug, self.project.slug, status_code=400, **self.valid_alert_rule + ) + assert ( + resp.data["nonFieldErrors"][0] + == "Invalid Time Window: Allowed time windows for crash rate alerts are: " + "30min, 1h, 2h, 4h, 12h and 24h" + ) + + @patch( + "sentry.integrations.slack.utils.get_channel_id_with_timeout", + return_value=("#", None, True), + ) + @patch("sentry.integrations.slack.tasks.find_channel_id_for_alert_rule.apply_async") + @patch("sentry.integrations.slack.tasks.uuid4") + def test_crash_rate_alerts_kicks_off_slack_async_job( + self, mock_uuid4, mock_find_channel_id_for_alert_rule, mock_get_channel_id + ): + mock_uuid4.return_value = self.get_mock_uuid() + self.integration = Integration.objects.create( + provider="slack", + name="Team A", + external_id="TXXXXXXX1", + metadata={"access_token": "xoxp-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx"}, + ) + self.integration.add_organization(self.organization, self.user) + self.valid_alert_rule["triggers"] = [ + { + "label": "critical", + "alertThreshold": 50, + "actions": [ + { + "type": "slack", + "targetIdentifier": "my-channel", + "targetType": "specific", + "integration": self.integration.id, + } + ], + }, + ] + with self.feature(["organizations:incidents"]): + resp = self.get_valid_response( + self.organization.slug, self.project.slug, status_code=202, **self.valid_alert_rule + ) + resp.data["uuid"] = "abc123" + assert not AlertRule.objects.filter(name="JustAValidTestRule").exists() + kwargs = { + "organization_id": self.organization.id, + "uuid": "abc123", + "data": self.valid_alert_rule, + "user_id": self.user.id, + } + mock_find_channel_id_for_alert_rule.assert_called_once_with(kwargs=kwargs) diff --git a/tests/sentry/incidents/test_subscription_processor.py b/tests/sentry/incidents/test_subscription_processor.py index d3e7985711b186..5723fc33b8025c 100644 --- a/tests/sentry/incidents/test_subscription_processor.py +++ b/tests/sentry/incidents/test_subscription_processor.py @@ -8,6 +8,7 @@ from exam import fixture, patcher from freezegun import freeze_time +from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS, CRASH_RATE_ALERT_SESSION_COUNT_ALIAS from sentry.incidents.logic import ( CRITICAL_TRIGGER_LABEL, WARNING_TRIGGER_LABEL, @@ -36,12 +37,12 @@ update_alert_rule_stats, ) from sentry.models import Integration -from sentry.snuba.models import QuerySubscription, SnubaQueryEventType +from sentry.snuba.models import 
QueryDatasets, QuerySubscription, SnubaQueryEventType from sentry.testutils import SnubaTestCase, TestCase from sentry.testutils.helpers.datetime import iso_format from sentry.utils import json from sentry.utils.compat import map -from sentry.utils.compat.mock import Mock, call +from sentry.utils.compat.mock import Mock, call, patch from sentry.utils.dates import to_timestamp EMPTY = object() @@ -134,6 +135,48 @@ def trigger(self): def action(self): return self.trigger.alertruletriggeraction_set.get() + @fixture + def crash_rate_alert_rule(self): + rule = self.create_alert_rule( + projects=[self.project], + dataset=QueryDatasets.SESSIONS, + name="JustAValidRule", + query="", + aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate", + time_window=1, + threshold_type=AlertRuleThresholdType.BELOW, + threshold_period=1, + ) + trigger = create_alert_rule_trigger(rule, "critical", 80) + create_alert_rule_trigger_action( + trigger, + AlertRuleTriggerAction.Type.EMAIL, + AlertRuleTriggerAction.TargetType.USER, + str(self.user.id), + ) + return rule + + @fixture + def crash_rate_alert_critical_trigger(self): + return self.crash_rate_alert_rule.alertruletrigger_set.get() + + @fixture + def crash_rate_alert_critical_action(self): + return self.crash_rate_alert_critical_trigger.alertruletriggeraction_set.get() + + @fixture + def crash_rate_alert_warning_trigger(self): + return create_alert_rule_trigger(self.crash_rate_alert_rule, "warning", 90) + + @fixture + def crash_rate_alert_warning_action(self): + return create_alert_rule_trigger_action( + self.crash_rate_alert_warning_trigger, + AlertRuleTriggerAction.Type.EMAIL, + AlertRuleTriggerAction.TargetType.USER, + str(self.user.id), + ) + def build_subscription_update(self, subscription, time_delta=None, value=EMPTY): if time_delta is not None: timestamp = timezone.now() + time_delta @@ -169,6 +212,44 @@ def send_update(self, rule, value, time_delta=None, subscription=None): processor.process_update(message) return processor + def send_crash_rate_alert_update(self, rule, value, subscription, time_delta=None, count=EMPTY): + self.email_action_handler.reset_mock() + if time_delta is None: + time_delta = timedelta() + processor = SubscriptionProcessor(subscription) + + if time_delta is not None: + timestamp = timezone.now() + time_delta + else: + timestamp = timezone.now() + timestamp = timestamp.replace(tzinfo=pytz.utc, microsecond=0) + + with self.feature( + ["organizations:incidents", "organizations:performance-view"] + ), self.capture_on_commit_callbacks(execute=True): + processor.process_update( + { + "subscription_id": subscription.subscription_id + if subscription + else uuid4().hex, + "values": { + "data": [ + { + CRASH_RATE_ALERT_AGGREGATE_ALIAS: value, + CRASH_RATE_ALERT_SESSION_COUNT_ALIAS: randint(0, 100) + if count is EMPTY + else count, + } + ] + }, + "timestamp": timestamp, + "interval": 1, + "partition": 1, + "offset": 1, + } + ) + return processor + def assert_slack_calls(self, trigger_labels): expected = [f"{label}: some rule 2" for label in trigger_labels] actual = [ @@ -1015,6 +1096,226 @@ def test_multiple_triggers_resolve_separately(self): self.assert_trigger_exists_with_status(incident, other_trigger, TriggerStatus.RESOLVED) self.assert_actions_resolved_for_incident(incident, [self.action]) + def test_crash_rate_alert_for_sessions_with_auto_resolve_critical(self): + """ + Ensures that a Critical alert is triggered when `crash_free_percentage` falls + below the Critical threshold and then is
Resolved once `crash_free_percentage` goes above + the threshold (when no resolve_threshold is set) + """ + rule = self.crash_rate_alert_rule + trigger = self.crash_rate_alert_critical_trigger + action_critical = self.crash_rate_alert_critical_action + + # Send Critical Update + update_value = (1 - trigger.alert_threshold / 100) + 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-10), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + incident = self.assert_active_incident(rule) + self.assert_actions_fired_for_incident(incident, [action_critical]) + self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.ACTIVE) + + update_value = (1 - trigger.alert_threshold / 100) - 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-1), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + self.assert_no_active_incident(rule) + + def test_crash_rate_alert_for_sessions_with_auto_resolve_warning(self): + """ + Ensures that a Warning alert is triggered when `crash_free_percentage` falls + below the Warning threshold and then is Resolved once `crash_free_percentage` goes above + the threshold (when no resolve_threshold is set) + """ + rule = self.crash_rate_alert_rule + trigger_warning = self.crash_rate_alert_warning_trigger + action_warning = self.crash_rate_alert_warning_action + + # Send Warning Update + update_value = (1 - trigger_warning.alert_threshold / 100) + 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-3), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + + incident = self.assert_active_incident(rule) + self.assert_actions_fired_for_incident(incident, [action_warning]) + self.assert_trigger_exists_with_status(incident, trigger_warning, TriggerStatus.ACTIVE) + + update_value = (1 - trigger_warning.alert_threshold / 100) - 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-1), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + self.assert_no_active_incident(rule) + + def test_crash_rate_alert_for_sessions_with_critical_warning_then_resolved(self): + """ + Tests the behavior of going from Critical status to Warning status to Resolved + for Crash Rate Alerts + """ + rule = self.crash_rate_alert_rule + + trigger = self.crash_rate_alert_critical_trigger + trigger_warning = self.crash_rate_alert_warning_trigger + + action_critical = self.crash_rate_alert_critical_action + action_warning = self.crash_rate_alert_warning_action + + # Send Critical Update + update_value = (1 - trigger.alert_threshold / 100) + 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-10), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + incident = self.assert_active_incident(rule) + self.assert_actions_fired_for_incident(incident, [action_critical]) + self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.ACTIVE) + + # Send Warning Update + update_value = (1 - trigger_warning.alert_threshold / 100) + 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-3), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + + incident =
self.assert_active_incident(rule) + self.assert_actions_fired_for_incident(incident, [action_warning]) + self.assert_trigger_exists_with_status(incident, trigger_warning, TriggerStatus.ACTIVE) + + # Send update higher than warning threshold + update_value = (1 - trigger_warning.alert_threshold / 100) - 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-1), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + self.assert_no_active_incident(rule) + + def test_crash_rate_alert_for_sessions_with_triggers_lower_than_resolve_threshold(self): + """ + Ensures that when `crash_free_percentage` goes above the warning threshold but + stays below the resolve threshold, the incident is not resolved + """ + rule = self.crash_rate_alert_rule + rule.resolve_threshold = 95 + rule.save() + + trigger = self.crash_rate_alert_critical_trigger + trigger_warning = self.crash_rate_alert_warning_trigger + + action_critical = self.crash_rate_alert_critical_action + action_warning = self.crash_rate_alert_warning_action + + # Send Critical Update + update_value = (1 - trigger.alert_threshold / 100) + 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-10), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + incident = self.assert_active_incident(rule) + self.assert_actions_fired_for_incident(incident, [action_critical]) + self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.ACTIVE) + + # Send Warning Update + update_value = (1 - trigger_warning.alert_threshold / 100) + 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-3), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + + incident = self.assert_active_incident(rule) + self.assert_actions_fired_for_incident(incident, [action_warning]) + self.assert_trigger_exists_with_status(incident, trigger_warning, TriggerStatus.ACTIVE) + + # Send update higher than warning threshold but lower than resolve threshold + update_value = (1 - trigger_warning.alert_threshold / 100) - 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + time_delta=timedelta(minutes=-1), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + self.assert_active_incident(rule) + + def test_crash_rate_alert_for_sessions_with_no_sessions_data(self): + """ + Ensures that we skip the Crash Rate Alert processing if we have no session data + """ + rule = self.crash_rate_alert_rule + + self.send_crash_rate_alert_update( + rule=rule, + value=None, + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + self.metrics.incr.assert_has_calls( + [ + call("incidents.alert_rules.ignore_update_no_session_data"), + call("incidents.alert_rules.skipping_update_invalid_aggregation_value"), + ] + ) + + @patch("sentry.incidents.subscription_processor.CRASH_RATE_ALERT_MINIMUM_THRESHOLD", 30) + def test_crash_rate_alert_when_session_count_is_lower_than_minimum_threshold(self): + rule = self.crash_rate_alert_rule + trigger = self.crash_rate_alert_critical_trigger + + # Send Critical Update + update_value = (1 - trigger.alert_threshold / 100) + 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + count=10, + time_delta=timedelta(minutes=-10),
subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + self.assert_no_active_incident(rule) + self.metrics.incr.assert_has_calls( + [ + call("incidents.alert_rules.ignore_update_count_lower_than_min_threshold"), + call("incidents.alert_rules.skipping_update_invalid_aggregation_value"), + ] + ) + + @patch("sentry.incidents.subscription_processor.CRASH_RATE_ALERT_MINIMUM_THRESHOLD", 30) + def test_crash_rate_alert_when_session_count_is_higher_than_minimum_threshold(self): + rule = self.crash_rate_alert_rule + trigger = self.crash_rate_alert_critical_trigger + action_critical = self.crash_rate_alert_critical_action + + # Send Critical Update + update_value = (1 - trigger.alert_threshold / 100) + 0.05 + self.send_crash_rate_alert_update( + rule=rule, + value=update_value, + count=31, + time_delta=timedelta(minutes=-10), + subscription=rule.snuba_query.subscriptions.filter(project=self.project).get(), + ) + incident = self.assert_active_incident(rule) + self.assert_actions_fired_for_incident(incident, [action_critical]) + self.assert_trigger_exists_with_status(incident, trigger, TriggerStatus.ACTIVE) + def test_comparison_alert_above(self): rule = self.comparison_rule_above comparison_delta = timedelta(seconds=rule.comparison_delta) diff --git a/tests/sentry/integrations/jira/test_webhooks.py b/tests/sentry/integrations/jira/test_webhooks.py index 8d5e165f69ac67..1b7842a656b7fa 100644 --- a/tests/sentry/integrations/jira/test_webhooks.py +++ b/tests/sentry/integrations/jira/test_webhooks.py @@ -10,45 +10,38 @@ class JiraWebhooksTest(APITestCase): + def setUp(self): + super().setUp() + self.integration = Integration.objects.create( + provider="jira", + name="Example Jira", + metadata={ + "oauth_client_id": "oauth-client-id", + "shared_secret": "a-super-secret-key-from-atlassian", + "base_url": "https://example.atlassian.net", + "domain_name": "example.atlassian.net", + }, + ) + self.integration.add_organization(self.organization, self.user) + self.path = reverse("sentry-extensions-jira-issue-updated") + @patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound") def test_simple_assign(self, mock_sync_group_assignee_inbound): - org = self.organization - - integration = Integration.objects.create(provider="jira", name="Example Jira") - integration.add_organization(org, self.user) - - path = reverse("sentry-extensions-jira-issue-updated") - with patch( - "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration + "sentry.integrations.jira.webhooks.get_integration_from_jwt", + return_value=self.integration, ): data = StubService.get_stub_data("jira", "edit_issue_assignee_payload.json") - resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") + resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") assert resp.status_code == 200 mock_sync_group_assignee_inbound.assert_called_with( - integration, "jess@sentry.io", "APP-123", assign=True + self.integration, "jess@sentry.io", "APP-123", assign=True ) @override_settings(JIRA_USE_EMAIL_SCOPE=True) @patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound") @responses.activate def test_assign_use_email_api(self, mock_sync_group_assignee_inbound): - org = self.organization - - integration = Integration.objects.create( - provider="jira", - name="Example Jira", - metadata={ - "oauth_client_id": "oauth-client-id", - "shared_secret": "a-super-secret-key-from-atlassian", - "base_url": "https://example.atlassian.net", - 
"domain_name": "example.atlassian.net", - }, - ) - integration.add_organization(org, self.user) - - path = reverse("sentry-extensions-jira-issue-updated") - responses.add( responses.GET, "https://example.atlassian.net/rest/api/3/user/email", @@ -57,85 +50,62 @@ def test_assign_use_email_api(self, mock_sync_group_assignee_inbound): ) with patch( - "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration + "sentry.integrations.jira.webhooks.get_integration_from_jwt", + return_value=self.integration, ): data = StubService.get_stub_data("jira", "edit_issue_assignee_payload.json") data["issue"]["fields"]["assignee"]["emailAddress"] = "" - resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") + resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") assert resp.status_code == 200 assert mock_sync_group_assignee_inbound.called assert len(responses.calls) == 1 @patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound") def test_assign_missing_email(self, mock_sync_group_assignee_inbound): - org = self.organization - - integration = Integration.objects.create(provider="jira", name="Example Jira") - integration.add_organization(org, self.user) - - path = reverse("sentry-extensions-jira-issue-updated") - with patch( - "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration + "sentry.integrations.jira.webhooks.get_integration_from_jwt", + return_value=self.integration, ): data = StubService.get_stub_data("jira", "edit_issue_assignee_payload.json") data["issue"]["fields"]["assignee"]["emailAddress"] = "" - resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") + resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") assert resp.status_code == 200 assert not mock_sync_group_assignee_inbound.called @patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound") def test_simple_deassign(self, mock_sync_group_assignee_inbound): - org = self.organization - - integration = Integration.objects.create(provider="jira", name="Example Jira") - integration.add_organization(org, self.user) - - path = reverse("sentry-extensions-jira-issue-updated") - with patch( - "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration + "sentry.integrations.jira.webhooks.get_integration_from_jwt", + return_value=self.integration, ): data = StubService.get_stub_data("jira", "edit_issue_no_assignee_payload.json") - resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") + resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") assert resp.status_code == 200 mock_sync_group_assignee_inbound.assert_called_with( - integration, None, "APP-123", assign=False + self.integration, None, "APP-123", assign=False ) @patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound") def test_simple_deassign_assignee_missing(self, mock_sync_group_assignee_inbound): - org = self.organization - - integration = Integration.objects.create(provider="jira", name="Example Jira") - integration.add_organization(org, self.user) - - path = reverse("sentry-extensions-jira-issue-updated") - with patch( - "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration + "sentry.integrations.jira.webhooks.get_integration_from_jwt", + return_value=self.integration, ): data = StubService.get_stub_data("jira", "edit_issue_assignee_missing_payload.json") - 
resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") + resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") assert resp.status_code == 200 mock_sync_group_assignee_inbound.assert_called_with( - integration, None, "APP-123", assign=False + self.integration, None, "APP-123", assign=False ) @patch.object(IssueSyncMixin, "sync_status_inbound") def test_simple_status_sync_inbound(self, mock_sync_status_inbound): - org = self.organization - - integration = Integration.objects.create(provider="jira", name="Example Jira") - integration.add_organization(org, self.user) - - path = reverse("sentry-extensions-jira-issue-updated") - with patch( - "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration + "sentry.integrations.jira.webhooks.get_integration_from_jwt", + return_value=self.integration, ) as mock_get_integration_from_jwt: data = StubService.get_stub_data("jira", "edit_issue_status_payload.json") - resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") + resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") assert resp.status_code == 200 mock_get_integration_from_jwt.assert_called_with( "anexampletoken", "/extensions/jira/issue-updated/", "jira", {}, method="POST" @@ -160,30 +130,19 @@ def test_simple_status_sync_inbound(self, mock_sync_status_inbound): ) def test_missing_changelog(self): - org = self.organization - - integration = Integration.objects.create(provider="jira", name="Example Jira") - integration.add_organization(org, self.user) - - path = reverse("sentry-extensions-jira-issue-updated") - with patch( - "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration + "sentry.integrations.jira.webhooks.get_integration_from_jwt", + return_value=self.integration, ): data = StubService.get_stub_data("jira", "changelog_missing.json") - resp = self.client.post(path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") + resp = self.client.post(self.path, data=data, HTTP_AUTHORIZATION="JWT anexampletoken") assert resp.status_code == 200 def test_missing_body(self): - org = self.organization - - integration = Integration.objects.create(provider="jira", name="Example Jira") - integration.add_organization(org, self.user) - path = reverse("sentry-extensions-jira-installed") - with patch( - "sentry.integrations.jira.webhooks.get_integration_from_jwt", return_value=integration + "sentry.integrations.jira.webhooks.get_integration_from_jwt", + return_value=self.integration, ): resp = self.client.post(path, data={}, HTTP_AUTHORIZATION="JWT anexampletoken") assert resp.status_code == 400 diff --git a/tests/sentry/integrations/jira_server/__init__.py b/tests/sentry/integrations/jira_server/__init__.py index e69de29bb2d1d6..bd3d8fbee08868 100644 --- a/tests/sentry/integrations/jira_server/__init__.py +++ b/tests/sentry/integrations/jira_server/__init__.py @@ -0,0 +1,119 @@ +from sentry.models import ( + ExternalIssue, + Group, + GroupLink, + Identity, + IdentityProvider, + IdentityStatus, + Integration, + Organization, + User, +) + +EXAMPLE_PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY----- +MIICWwIBAAKBgQC1cd9t8sA03awggLiX2gjZxyvOVUPJksLly1E662tttTeR3Wm9 +eo6onNeI8HRD+O4wubUp4h4Chc7DtLDmFEPhUZ8Qkwztiifm99Xo3s0nUq4Pygp5 +AU09KXTEPbzHLh1dnXLcxVLmGDE4drh0NWmYsd/Zp7XNIZq2TRQQ3NTdVQIDAQAB +AoGAFwMyS0eWiR30TssEnn3Q0Y4pSCoYRuCOR4bZ7pcdMPTi72UdnCKHJWt/Cqc0 +l8piq1tiVsWO+NLvvnKUXRoE4cAyrGrpf1F0uP5zYW71SQALc9wwsjDzuj7BZEuK 
+fg35JSceLHWE1WtzPDX5Xg20YPnMrA/xe/RwuPjuBH0wSqECQQDizzmKdKCq0ejy +3OxEto5knqpSEgRcOk0HDsdgjwkwiZJOj5ECV2FKpNHuu2thGy/aDJyLlmUso8j0 +OpvLAzOvAkEAzMwAgGexTxKm8hy3ilvVn9EvhSKjaIakqY4ONK9LZ4zMiDHI0H6C +FXlwWX7CJM0YVFMubj8SB8rnIuvFDEBMOwJABHtRyMGbNyTktH/XD1iIIcbc2LhQ +a74fLYeGOws4hEQDpxfBJsmxO3dcSppbedS+slFTepKjNymZW/IYh/9tMwJAEL5E +9DqGBn7x4y1x2//yESTbC7lvPqZzY+FXS/tg4NBkEGZxkoolPHg3NTnlyXhzGsHK +M/04DicKipJYA85l7QJAJ3u67qZXecM/oWTtJToBDuyKGHfdY1564+RbyDEjJJRb +vz4O/8FQQ1sGjdEBMMrRBCHEG8o3/XDTrB97t45TeA== +-----END RSA PRIVATE KEY-----""" + +EXAMPLE_ISSUE_SEARCH = """ +{ + "expand": "names,schema", + "startAt": 0, + "maxResults": 50, + "total": 1, + "issues": [ + { + "expand": "", + "id": "10001", + "self": "http://www.example.com/jira/rest/api/2/issue/10001", + "key": "HSP-1", + "fields": { + "summary": "this is a test issue summary" + } + } + ], + "warningMessages": [ + "The value 'splat' does not exist for the field 'Foo'." + ] +} +""" + +EXAMPLE_USER_SEARCH_RESPONSE = """ +[ + {"name": "bob", "displayName": "Bobby", "emailAddress": "bob@example.org"} +] +""" + +EXAMPLE_PAYLOAD = { + "changelog": { + "items": [ + { + "from": "10101", + "field": "status", + "fromString": "In Progress", + "to": "10102", + "toString": "Done", + "fieldtype": "jira", + "fieldId": "status", + } + ], + "id": 12345, + }, + "issue": {"project": {"key": "APP", "id": "10000"}, "key": "APP-1"}, +} + + +def get_integration(organization: Organization, user: User) -> Integration: + integration = Integration.objects.create( + provider="jira_server", + name="Example Jira", + metadata={ + "verify_ssl": False, + "webhook_secret": "a long secret value", + "base_url": "https://jira.example.org", + }, + ) + identity_provider = IdentityProvider.objects.create( + external_id="jira.example.org:sentry-test", type="jira_server" + ) + identity = Identity.objects.create( + idp=identity_provider, + user=user, + scopes=(), + status=IdentityStatus.VALID, + data={ + "consumer_key": "sentry-test", + "private_key": EXAMPLE_PRIVATE_KEY, + "access_token": "access-token", + "access_token_secret": "access-token-secret", + }, + ) + integration.add_organization(organization, user, default_auth_id=identity.id) + return integration + + +def link_group(organization: Organization, integration: Integration, group: Group) -> None: + external_issue = ExternalIssue.objects.create( + key=EXAMPLE_PAYLOAD["issue"]["key"], + integration_id=integration.id, + organization_id=organization.id, + ) + + GroupLink.objects.create( + group_id=group.id, + project_id=group.project_id, + linked_type=GroupLink.LinkedType.issue, + relationship=GroupLink.Relationship.resolves, + linked_id=external_issue.id, + ) diff --git a/tests/sentry/integrations/jira_server/test_integration.py b/tests/sentry/integrations/jira_server/test_integration.py index 13c5ae7d6e4a76..c503b7f6ee0dcf 100644 --- a/tests/sentry/integrations/jira_server/test_integration.py +++ b/tests/sentry/integrations/jira_server/test_integration.py @@ -7,7 +7,7 @@ from sentry.testutils import IntegrationTestCase from sentry.utils import json, jwt -from .testutils import EXAMPLE_PRIVATE_KEY +from . 
import EXAMPLE_PRIVATE_KEY class JiraServerIntegrationTest(IntegrationTestCase): diff --git a/tests/sentry/integrations/jira_server/test_search.py b/tests/sentry/integrations/jira_server/test_search.py index 109735b39ff90a..b1555ed6ed2d56 100644 --- a/tests/sentry/integrations/jira_server/test_search.py +++ b/tests/sentry/integrations/jira_server/test_search.py @@ -4,37 +4,15 @@ from django.urls import reverse from exam import fixture -from sentry.models import Identity, IdentityProvider, IdentityStatus, Integration from sentry.testutils import APITestCase -from .testutils import EXAMPLE_ISSUE_SEARCH, EXAMPLE_PRIVATE_KEY, EXAMPLE_USER_SEARCH_RESPONSE +from . import EXAMPLE_ISSUE_SEARCH, EXAMPLE_USER_SEARCH_RESPONSE, get_integration class JiraSearchEndpointTest(APITestCase): @fixture def integration(self): - integration = Integration.objects.create( - provider="jira_server", - name="Example Jira", - metadata={"verify_ssl": False, "base_url": "https://jira.example.org"}, - ) - identity_provider = IdentityProvider.objects.create( - external_id="jira.example.org:sentry-test", type="jira_server" - ) - identity = Identity.objects.create( - idp=identity_provider, - user=self.user, - scopes=(), - status=IdentityStatus.VALID, - data={ - "consumer_key": "sentry-test", - "private_key": EXAMPLE_PRIVATE_KEY, - "access_token": "access-token", - "access_token_secret": "access-token-secret", - }, - ) - integration.add_organization(self.organization, self.user, default_auth_id=identity.id) - return integration + return get_integration(self.organization, self.user) @responses.activate def test_get_success_text_search(self): diff --git a/tests/sentry/integrations/jira_server/test_webhooks.py b/tests/sentry/integrations/jira_server/test_webhooks.py index 548c33aba0cdc3..9f7a70ffc9e3c3 100644 --- a/tests/sentry/integrations/jira_server/test_webhooks.py +++ b/tests/sentry/integrations/jira_server/test_webhooks.py @@ -1,122 +1,64 @@ import jwt import responses -from django.urls import reverse -from exam import fixture from requests.exceptions import ConnectionError from sentry.integrations.jira_server.integration import JiraServerIntegration -from sentry.models import ( - Identity, - IdentityProvider, - IdentityStatus, - Integration, - OrganizationIntegration, -) +from sentry.models import OrganizationIntegration from sentry.testutils import APITestCase from sentry.utils.compat.mock import patch -from .testutils import EXAMPLE_PRIVATE_KEY +from . 
import EXAMPLE_PAYLOAD, get_integration, link_group class JiraServerWebhookEndpointTest(APITestCase): endpoint = "sentry-extensions-jiraserver-issue-updated" method = "post" - @fixture - def integration(self): - integration = Integration.objects.create( - provider="jira_server", - name="Example Jira", - metadata={ - "verify_ssl": False, - "webhook_secret": "a long secret value", - "base_url": "https://jira.example.org", - }, - ) - identity_provider = IdentityProvider.objects.create( - external_id="jira.example.org:sentry-test", type="jira_server" - ) - identity = Identity.objects.create( - idp=identity_provider, - user=self.user, - scopes=(), - status=IdentityStatus.VALID, - data={ - "consumer_key": "sentry-test", - "private_key": EXAMPLE_PRIVATE_KEY, - "access_token": "access-token", - "access_token_secret": "access-token-secret", - }, - ) - integration.add_organization(self.organization, self.user, default_auth_id=identity.id) - return integration + def setUp(self): + super().setUp() + self.integration = get_integration(self.organization, self.user) @property def jwt_token(self): - integration = self.integration - return jwt.encode({"id": integration.external_id}, integration.metadata["webhook_secret"]) + return jwt.encode( + {"id": self.integration.external_id}, self.integration.metadata["webhook_secret"] + ) def test_post_empty_token(self): # Read the property to get side-effects in the database. - token = self.jwt_token - token = " " - path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token]) - resp = self.client.post(path) + _ = self.jwt_token - assert resp.status_code == 400 + self.get_error_response(" ", status_code=400) def test_post_missing_default_identity(self): org_integration = OrganizationIntegration.objects.get( organization_id=self.organization.id, integration_id=self.integration.id, ) - org_integration.update(default_auth_id=None) - org_integration.update(config={"sync_status_reverse": True}) - payload = { - "changelog": { - "items": [ - { - "from": "10101", - "field": "status", - "fromString": "In Progress", - "to": "10102", - "toString": "Done", - "fieldtype": "jira", - "fieldId": "status", - } - ], - "id": 12345, - }, - "issue": {"project": {"key": "APP", "id": "10000"}, "key": "APP-1"}, - } - self.get_error_response(self.jwt_token, **payload, status_code=400) + org_integration.update(default_auth_id=None, config={"sync_status_reverse": True}) + + link_group(self.organization, self.integration, self.group) + + with self.tasks(): + self.get_success_response(self.jwt_token, **EXAMPLE_PAYLOAD) def test_post_token_missing_id(self): integration = self.integration # No id key in the token token = jwt.encode({"no": integration.id}, integration.metadata["webhook_secret"]) - path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token]) - resp = self.client.post(path) - - assert resp.status_code == 400 + self.get_error_response(token, status_code=400) def test_post_token_missing_integration(self): integration = self.integration # Use the wrong id in the token. token = jwt.encode({"no": integration.id}, integration.metadata["webhook_secret"]) - path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token]) - resp = self.client.post(path) - - assert resp.status_code == 400 + self.get_error_response(token, status_code=400) def test_post_token_invalid_signature(self): integration = self.integration # Use the wrong id in the token. 
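# The token tests in this file each probe one failure mode of the same JWT
# check: a blank token, a token without an "id" claim, an "id" that matches
# no integration, and a signature made with the wrong secret. A minimal
# sketch of such a check, assuming PyJWT; `validate_webhook_token` is an
# illustrative helper, not the endpoint's actual code:

import jwt


def validate_webhook_token(token: str, secret: str) -> str:
    # Raises jwt.InvalidSignatureError (a subclass of InvalidTokenError)
    # when the token was signed with a different secret, and DecodeError
    # for a blank or malformed token.
    claims = jwt.decode(token, secret, algorithms=["HS256"])
    if "id" not in claims:
        # Covers the "missing id" cases; the endpoint answers 400.
        raise jwt.InvalidTokenError("token has no id claim")
    return claims["id"]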
token = jwt.encode({"id": integration.external_id}, "bad-secret") - path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token]) - resp = self.client.post(path) - - assert resp.status_code == 400 + self.get_error_response(token, status_code=400) @patch("sentry.integrations.jira.webhooks.sync_group_assignee_inbound") def test_post_update_assignee(self, mock_sync): @@ -127,10 +69,7 @@ def test_post_update_assignee(self, mock_sync): "changelog": {"items": [{"field": "assignee"}], "id": 12345}, "issue": {"fields": {"assignee": {"emailAddress": "bob@example.org"}}, "key": "APP-1"}, } - token = self.jwt_token - path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token]) - resp = self.client.post(path, data=payload) - assert resp.status_code == 200 + self.get_success_response(self.jwt_token, **payload) mock_sync.assert_called_with(self.integration, "bob@example.org", "APP-1", assign=True) @@ -139,30 +78,14 @@ def test_post_update_status(self, mock_sync): project = self.create_project() self.create_group(project=project) - payload = { - "changelog": { - "items": [ - { - "from": "10101", - "field": "status", - "fromString": "In Progress", - "to": "10102", - "toString": "Done", - "fieldtype": "jira", - "fieldId": "status", - } - ], - "id": 12345, - }, - "issue": {"project": {"key": "APP", "id": "10000"}, "key": "APP-1"}, - } - token = self.jwt_token - path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token]) - resp = self.client.post(path, data=payload) - assert resp.status_code == 200 + self.get_success_response(self.jwt_token, **EXAMPLE_PAYLOAD) mock_sync.assert_called_with( - "APP-1", {"changelog": payload["changelog"]["items"][0], "issue": payload["issue"]} + "APP-1", + { + "changelog": EXAMPLE_PAYLOAD["changelog"]["items"][0], + "issue": EXAMPLE_PAYLOAD["issue"], + }, ) @responses.activate @@ -172,31 +95,11 @@ def test_post_update_status_token_error(self): url="https://jira.example.org/rest/api/2/status", body=ConnectionError(), ) - project = self.create_project() - self.create_group(project=project) - integration = self.integration - installation = integration.get_installation(self.organization.id) + group = self.create_group(self.project) + installation = self.integration.get_installation(self.organization.id) installation.update_organization_config({"sync_status_reverse": True}) - payload = { - "changelog": { - "items": [ - { - "from": "10101", - "field": "status", - "fromString": "In Progress", - "to": "10102", - "toString": "Done", - "fieldtype": "jira", - "fieldId": "status", - } - ], - "id": 12345, - }, - "issue": {"project": {"key": "APP", "id": "10000"}, "key": "APP-1"}, - } - token = self.jwt_token - path = reverse("sentry-extensions-jiraserver-issue-updated", args=[token]) - resp = self.client.post(path, data=payload) + link_group(self.organization, self.integration, group) - assert resp.status_code == 400 + with self.tasks(): + self.get_success_response(self.jwt_token, **EXAMPLE_PAYLOAD) diff --git a/tests/sentry/integrations/jira_server/testutils.py b/tests/sentry/integrations/jira_server/testutils.py deleted file mode 100644 index b49d41595c8820..00000000000000 --- a/tests/sentry/integrations/jira_server/testutils.py +++ /dev/null @@ -1,44 +0,0 @@ -EXAMPLE_PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY----- -MIICWwIBAAKBgQC1cd9t8sA03awggLiX2gjZxyvOVUPJksLly1E662tttTeR3Wm9 -eo6onNeI8HRD+O4wubUp4h4Chc7DtLDmFEPhUZ8Qkwztiifm99Xo3s0nUq4Pygp5 -AU09KXTEPbzHLh1dnXLcxVLmGDE4drh0NWmYsd/Zp7XNIZq2TRQQ3NTdVQIDAQAB 
-AoGAFwMyS0eWiR30TssEnn3Q0Y4pSCoYRuCOR4bZ7pcdMPTi72UdnCKHJWt/Cqc0 -l8piq1tiVsWO+NLvvnKUXRoE4cAyrGrpf1F0uP5zYW71SQALc9wwsjDzuj7BZEuK -fg35JSceLHWE1WtzPDX5Xg20YPnMrA/xe/RwuPjuBH0wSqECQQDizzmKdKCq0ejy -3OxEto5knqpSEgRcOk0HDsdgjwkwiZJOj5ECV2FKpNHuu2thGy/aDJyLlmUso8j0 -OpvLAzOvAkEAzMwAgGexTxKm8hy3ilvVn9EvhSKjaIakqY4ONK9LZ4zMiDHI0H6C -FXlwWX7CJM0YVFMubj8SB8rnIuvFDEBMOwJABHtRyMGbNyTktH/XD1iIIcbc2LhQ -a74fLYeGOws4hEQDpxfBJsmxO3dcSppbedS+slFTepKjNymZW/IYh/9tMwJAEL5E -9DqGBn7x4y1x2//yESTbC7lvPqZzY+FXS/tg4NBkEGZxkoolPHg3NTnlyXhzGsHK -M/04DicKipJYA85l7QJAJ3u67qZXecM/oWTtJToBDuyKGHfdY1564+RbyDEjJJRb -vz4O/8FQQ1sGjdEBMMrRBCHEG8o3/XDTrB97t45TeA== ------END RSA PRIVATE KEY-----""" - -EXAMPLE_ISSUE_SEARCH = """ -{ - "expand": "names,schema", - "startAt": 0, - "maxResults": 50, - "total": 1, - "issues": [ - { - "expand": "", - "id": "10001", - "self": "http://www.example.com/jira/rest/api/2/issue/10001", - "key": "HSP-1", - "fields": { - "summary": "this is a test issue summary" - } - } - ], - "warningMessages": [ - "The value 'splat' does not exist for the field 'Foo'." - ] -} -""" - -EXAMPLE_USER_SEARCH_RESPONSE = """ -[ - {"name": "bob", "displayName": "Bobby", "emailAddress": "bob@example.org"} -] -""" diff --git a/tests/sentry/integrations/test_issues.py b/tests/sentry/integrations/test_issues.py index dfc441753a206b..90eda0f87b726f 100644 --- a/tests/sentry/integrations/test_issues.py +++ b/tests/sentry/integrations/test_issues.py @@ -44,7 +44,7 @@ def test_status_sync_inbound_resolve(self): installation = integration.get_installation(group.organization.id) - with self.feature("organizations:integrations-issue-sync"): + with self.feature("organizations:integrations-issue-sync"), self.tasks(): installation.sync_status_inbound( external_issue.key, {"project_id": "APP", "status": {"id": "12345", "category": "done"}}, @@ -87,7 +87,7 @@ def test_status_sync_inbound_unresolve(self): installation = integration.get_installation(group.organization.id) - with self.feature("organizations:integrations-issue-sync"): + with self.feature("organizations:integrations-issue-sync"), self.tasks(): installation.sync_status_inbound( external_issue.key, {"project_id": "APP", "status": {"id": "12345", "category": "in_progress"}}, diff --git a/tests/sentry/integrations/vsts/test_issues.py b/tests/sentry/integrations/vsts/test_issues.py index f2fe3f24b4d9c1..271aea9ed2e471 100644 --- a/tests/sentry/integrations/vsts/test_issues.py +++ b/tests/sentry/integrations/vsts/test_issues.py @@ -5,6 +5,7 @@ from django.test import RequestFactory from exam import fixture +from sentry.integrations.issues import ResolveSyncAction from sentry.integrations.vsts.integration import VstsIntegration from sentry.models import ( ExternalIssue, @@ -334,27 +335,42 @@ def test_get_issue_url(self): @responses.activate def test_should_resolve_active_to_resolved(self): - should_resolve = self.integration.should_resolve( - {"project": self.project_id_with_states, "old_state": "Active", "new_state": "Resolved"} + assert ( + self.integration.get_resolve_sync_action( + { + "project": self.project_id_with_states, + "old_state": "Active", + "new_state": "Resolved", + } + ) + == ResolveSyncAction.RESOLVE ) - assert should_resolve is True @responses.activate def test_should_resolve_resolved_to_active(self): - should_resolve = self.integration.should_resolve( - {"project": self.project_id_with_states, "old_state": "Resolved", "new_state": "Active"} + assert ( + self.integration.get_resolve_sync_action( + { + "project": self.project_id_with_states, + "old_state": 
"Resolved", + "new_state": "Active", + } + ) + == ResolveSyncAction.UNRESOLVE ) - assert should_resolve is False @responses.activate def test_should_resolve_new(self): - should_resolve = self.integration.should_resolve( - {"project": self.project_id_with_states, "old_state": None, "new_state": "New"} + assert ( + self.integration.get_resolve_sync_action( + {"project": self.project_id_with_states, "old_state": None, "new_state": "New"} + ) + == ResolveSyncAction.UNRESOLVE ) - assert should_resolve is False @responses.activate def test_should_resolve_done_status_failure(self): + """TODO(mgaeta): Should this be NOOP instead of UNRESOLVE when we lose connection?""" responses.reset() responses.add( responses.GET, @@ -364,38 +380,30 @@ def test_should_resolve_done_status_failure(self): "error": "The requested operation is not allowed. Your account is pending deletion." }, ) - should_resolve = self.integration.should_resolve( - {"project": self.project_id_with_states, "old_state": "Active", "new_state": "Resolved"} - ) - assert should_resolve is False - - @responses.activate - def test_should_unresolve_active_to_resolved(self): - should_unresolve = self.integration.should_unresolve( - {"project": self.project_id_with_states, "old_state": "Active", "new_state": "Resolved"} - ) - assert should_unresolve is False - @responses.activate - def test_should_unresolve_resolved_to_active(self): - should_unresolve = self.integration.should_unresolve( - {"project": self.project_id_with_states, "old_state": "Resolved", "new_state": "Active"} + assert ( + self.integration.get_resolve_sync_action( + { + "project": self.project_id_with_states, + "old_state": "Active", + "new_state": "Resolved", + } + ) + == ResolveSyncAction.UNRESOLVE ) - assert should_unresolve is True @responses.activate def test_should_not_unresolve_resolved_to_closed(self): - should_unresolve = self.integration.should_unresolve( - {"project": self.project_id_with_states, "old_state": "Resolved", "new_state": "Closed"} - ) - assert should_unresolve is False - - @responses.activate - def test_should_unresolve_new(self): - should_unresolve = self.integration.should_unresolve( - {"project": self.project_id_with_states, "old_state": None, "new_state": "New"} + assert ( + self.integration.get_resolve_sync_action( + { + "project": self.project_id_with_states, + "old_state": "Resolved", + "new_state": "Closed", + } + ) + == ResolveSyncAction.NOOP ) - assert should_unresolve is True class VstsIssueFormTest(VstsIssueBase): diff --git a/tests/sentry/integrations/vsts/test_webhooks.py b/tests/sentry/integrations/vsts/test_webhooks.py index 0d9eba70ec6722..cdf2d346394b12 100644 --- a/tests/sentry/integrations/vsts/test_webhooks.py +++ b/tests/sentry/integrations/vsts/test_webhooks.py @@ -161,19 +161,18 @@ def test_inbound_status_sync_resolve(self): # Change so that state is changing from unresolved to resolved work_item = self.set_workitem_state("Active", "Resolved") - with self.feature("organizations:integrations-issue-sync"): + with self.feature("organizations:integrations-issue-sync"), self.tasks(): resp = self.client.post( absolute_uri("/extensions/vsts/issue-updated/"), data=work_item, HTTP_SHARED_SECRET=self.shared_secret, ) - assert resp.status_code == 200 - group_ids = [g.id for g in groups] - assert ( - len(Group.objects.filter(id__in=group_ids, status=GroupStatus.RESOLVED)) - == num_groups - ) - assert len(Activity.objects.filter(group_id__in=group_ids)) == num_groups + assert resp.status_code == 200 + group_ids = [g.id for g in groups] + 
assert ( + len(Group.objects.filter(id__in=group_ids, status=GroupStatus.RESOLVED)) == num_groups + ) + assert len(Activity.objects.filter(group_id__in=group_ids)) == num_groups @responses.activate def test_inbound_status_sync_unresolve(self): @@ -195,19 +194,18 @@ def test_inbound_status_sync_unresolve(self): # Change so that state is changing from resolved to unresolved work_item = self.set_workitem_state("Resolved", "Active") - with self.feature("organizations:integrations-issue-sync"): + with self.feature("organizations:integrations-issue-sync"), self.tasks(): resp = self.client.post( absolute_uri("/extensions/vsts/issue-updated/"), data=work_item, HTTP_SHARED_SECRET=self.shared_secret, ) - assert resp.status_code == 200 - group_ids = [g.id for g in groups] - assert ( - len(Group.objects.filter(id__in=group_ids, status=GroupStatus.UNRESOLVED)) - == num_groups - ) - assert len(Activity.objects.filter(group_id__in=group_ids)) == num_groups + assert resp.status_code == 200 + group_ids = [g.id for g in groups] + assert ( + len(Group.objects.filter(id__in=group_ids, status=GroupStatus.UNRESOLVED)) == num_groups + ) + assert len(Activity.objects.filter(group_id__in=group_ids)) == num_groups @responses.activate def test_inbound_status_sync_new_workitem(self): diff --git a/tests/sentry/models/test_organization.py b/tests/sentry/models/test_organization.py index 84ba867ca6b220..784827118269c8 100644 --- a/tests/sentry/models/test_organization.py +++ b/tests/sentry/models/test_organization.py @@ -279,8 +279,7 @@ def is_pending_organization_member(self, user_id, member_id, was_booted=True): assert member.token is None assert member.token_expires_at is None - @mock.patch("sentry.utils.email.logger") - def test_handle_2fa_required__compliant_and_non_compliant_members(self, email_log): + def test_handle_2fa_required__compliant_and_non_compliant_members(self): compliant_user, compliant_member = self._create_user_and_member(has_2fa=True) non_compliant_user, non_compliant_member = self._create_user_and_member() @@ -292,7 +291,6 @@ def test_handle_2fa_required__compliant_and_non_compliant_members(self, email_lo assert len(mail.outbox) == 1 assert mail.outbox[0].to == [non_compliant_user.email] - assert email_log.info.call_count == 2 # mail.queued, mail.sent audit_logs = AuditLogEntry.objects.filter( event=AuditLogEntryEvent.MEMBER_PENDING, organization=self.org, actor=self.owner @@ -301,8 +299,7 @@ def test_handle_2fa_required__compliant_and_non_compliant_members(self, email_lo assert audit_logs[0].data["email"] == non_compliant_user.email assert audit_logs[0].target_user_id == non_compliant_user.id - @mock.patch("sentry.utils.email.logger") - def test_handle_2fa_required__compliant_members(self, email_log): + def test_handle_2fa_required__compliant_members(self): compliant = [] for num in range(0, 4): user, member = self._create_user_and_member(has_2fa=True) @@ -314,13 +311,12 @@ def test_handle_2fa_required__compliant_members(self, email_log): for user, member in compliant: self.is_organization_member(user.id, member.id) - assert len(mail.outbox) == email_log.info.call_count == 0 + assert len(mail.outbox) == 0 assert not AuditLogEntry.objects.filter( event=AuditLogEntryEvent.MEMBER_PENDING, organization=self.org, actor=self.owner ).exists() - @mock.patch("sentry.utils.email.logger") - def test_handle_2fa_required__non_compliant_members(self, email_log): + def test_handle_2fa_required__non_compliant_members(self): non_compliant = [] for num in range(0, 4): user, member = self._create_user_and_member() 
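# The test_organization.py hunks here drop the `sentry.utils.email.logger`
# mocks and their `email_log.info.call_count` bookkeeping, keeping only the
# `mail.outbox` assertions: the outbox checks the observable result (what was
# sent, and to whom) rather than how many log records the mailer happened to
# emit. A self-contained sketch of the outbox style, relying on the locmem
# email backend that Django's test runner configures by default:

from django.core import mail
from django.test import TestCase


class OutboxAssertionExample(TestCase):
    def test_outbox_records_sent_mail(self):
        mail.send_mail(
            "2FA required",
            "Please enroll a second factor.",
            "noreply@example.com",
            ["member@example.com"],
        )
        # Assert on behavior, not on the mailer's logging internals.
        assert len(mail.outbox) == 1
        assert mail.outbox[0].to == ["member@example.com"]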
@@ -333,13 +329,11 @@ def test_handle_2fa_required__non_compliant_members(self, email_log): self.is_pending_organization_member(user.id, member.id) assert len(mail.outbox) == len(non_compliant) - assert email_log.info.call_count == len(non_compliant) * 2 # mail.queued, mail.sent assert AuditLogEntry.objects.filter( event=AuditLogEntryEvent.MEMBER_PENDING, organization=self.org, actor=self.owner ).count() == len(non_compliant) - @mock.patch("sentry.utils.email.logger") - def test_handle_2fa_required__pending_member__ok(self, email_log): + def test_handle_2fa_required__pending_member__ok(self): user, member = self._create_user_and_member(has_member_email=True) member.user = None member.save() @@ -348,14 +342,13 @@ def test_handle_2fa_required__pending_member__ok(self, email_log): self.org.handle_2fa_required(self.request) self.is_pending_organization_member(user.id, member.id, was_booted=False) - assert len(mail.outbox) == email_log.info.call_count == 0 + assert len(mail.outbox) == 0 assert not AuditLogEntry.objects.filter( event=AuditLogEntryEvent.MEMBER_PENDING, organization=self.org, actor=self.owner ).exists() @mock.patch("sentry.tasks.auth.logger") - @mock.patch("sentry.utils.email.logger") - def test_handle_2fa_required__no_user_email__ok(self, email_log, auth_log): + def test_handle_2fa_required__no_user_email__ok(self, auth_log): user, member = self._create_user_and_member(has_user_email=False, has_member_email=True) assert not user.email assert member.email @@ -365,7 +358,6 @@ def test_handle_2fa_required__no_user_email__ok(self, email_log, auth_log): self.is_pending_organization_member(user.id, member.id) - assert email_log.info.call_count == 2 # mail.queued, mail.sent assert len(mail.outbox) == 1 assert mail.outbox[0].to == [member.email] @@ -376,8 +368,7 @@ def test_handle_2fa_required__no_user_email__ok(self, email_log, auth_log): ) @mock.patch("sentry.tasks.auth.logger") - @mock.patch("sentry.utils.email.logger") - def test_handle_2fa_required__no_email__warning(self, email_log, auth_log): + def test_handle_2fa_required__no_email__warning(self, auth_log): user, member = self._create_user_and_member(has_user_email=False) assert not user.email assert not member.email @@ -392,8 +383,7 @@ def test_handle_2fa_required__no_email__warning(self, email_log, auth_log): ) @mock.patch("sentry.tasks.auth.logger") - @mock.patch("sentry.utils.email.logger") - def test_handle_2fa_required__no_actor_and_api_key__ok(self, email_log, auth_log): + def test_handle_2fa_required__no_actor_and_api_key__ok(self, auth_log): user, member = self._create_user_and_member() with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): @@ -408,7 +398,6 @@ def test_handle_2fa_required__no_actor_and_api_key__ok(self, email_log, auth_log self.is_pending_organization_member(user.id, member.id) assert len(mail.outbox) == 1 - assert email_log.info.call_count == 2 # mail.queued, mail.sent assert ( AuditLogEntry.objects.filter( event=AuditLogEntryEvent.MEMBER_PENDING, @@ -420,8 +409,7 @@ def test_handle_2fa_required__no_actor_and_api_key__ok(self, email_log, auth_log ) @mock.patch("sentry.tasks.auth.logger") - @mock.patch("sentry.utils.email.logger") - def test_handle_2fa_required__no_ip_address__ok(self, email_log, auth_log): + def test_handle_2fa_required__no_ip_address__ok(self, auth_log): user, member = self._create_user_and_member() with self.options({"system.url-prefix": "http://example.com"}), self.tasks(): @@ -431,7 +419,6 @@ def test_handle_2fa_required__no_ip_address__ok(self, email_log, 
auth_log): self.is_pending_organization_member(user.id, member.id) assert len(mail.outbox) == 1 - assert email_log.info.call_count == 2 # mail.queued, mail.sent assert ( AuditLogEntry.objects.filter( event=AuditLogEntryEvent.MEMBER_PENDING, diff --git a/tests/sentry/models/test_release.py b/tests/sentry/models/test_release.py index e2bbb457197611..bf30a3afe314f1 100644 --- a/tests/sentry/models/test_release.py +++ b/tests/sentry/models/test_release.py @@ -1204,3 +1204,77 @@ def test_follows_semver_check_when_project_only_has_two_releases(self): ) is False ) + + +class ClearCommitsTestCase(TestCase): + def test_simple(self): + org = self.create_organization() + project = self.create_project(organization=org, name="foo") + group = self.create_group(project=project) + + repo = Repository.objects.create(organization_id=org.id, name="test/repo") + + author = CommitAuthor.objects.create( + name="foo bar baz", email="foo@example.com", organization_id=org.id + ) + + author2 = CommitAuthor.objects.create( + name="foo bar boo", email="baroo@example.com", organization_id=org.id + ) + + commit = Commit.objects.create( + organization_id=org.id, + repository_id=repo.id, + author=author, + date_added="2019-03-01 12:00:00", + message="fixes %s" % (group.qualified_short_id), + key="alksdflskdfjsldkfajsflkslk", + ) + commit2 = Commit.objects.create( + organization_id=org.id, + repository_id=repo.id, + author=author2, + date_added="2019-03-01 12:02:00", + message="i fixed something", + key="lskfslknsdkcsnlkdflksfdkls", + ) + + release = Release.objects.create(version="abcdabc", organization=org) + release.add_project(project) + release.set_commits( + [ + {"id": commit.key, "repository": repo.name}, + {"id": commit2.key, "repository": repo.name}, + ] + ) + # Confirm setup works + assert ReleaseCommit.objects.filter(commit=commit, release=release).exists() + assert ReleaseCommit.objects.filter(commit=commit2, release=release).exists() + + assert release.commit_count == 2 + assert release.authors == [str(author.id), str(author2.id)] + assert release.last_commit_id == commit.id + + assert ReleaseHeadCommit.objects.filter( + release_id=release.id, commit_id=commit.id, repository_id=repo.id + ).exists() + + # Now clear the release; + release.clear_commits() + assert not ReleaseCommit.objects.filter(commit=commit, release=release).exists() + assert not ReleaseCommit.objects.filter(commit=commit2, release=release).exists() + assert not ReleaseHeadCommit.objects.filter( + release_id=release.id, commit_id=commit.id, repository_id=repo.id + ).exists() + + assert release.commit_count == 0 + assert release.authors == [] + assert not release.last_commit_id + + # Commits should still exist + assert Commit.objects.filter( + id=commit.id, organization_id=org.id, repository_id=repo.id + ).exists() + assert Commit.objects.filter( + id=commit2.id, organization_id=org.id, repository_id=repo.id + ).exists() diff --git a/tests/sentry/notifications/test_notifications.py b/tests/sentry/notifications/test_notifications.py index 885a3b5aa6d664..b1ed9179e29741 100644 --- a/tests/sentry/notifications/test_notifications.py +++ b/tests/sentry/notifications/test_notifications.py @@ -6,6 +6,7 @@ import responses from django.core import mail from django.utils import timezone +from sentry_relay import parse_release from sentry.event_manager import EventManager from sentry.models import ( @@ -219,6 +220,7 @@ def test_sends_deployment_notification(self): """ release = self.create_release() + version_parsed = self.version_parsed = 
parse_release(release.version)["description"] url = f"/api/0/organizations/{self.organization.slug}/releases/{release.version}/deploys/" with self.tasks(): response = self.client.post( @@ -228,10 +230,10 @@ def test_sends_deployment_notification(self): msg = mail.outbox[0] # check the txt version - assert f"Version {release.version} was deployed to {self.environment.name} on" in msg.body + assert f"Version {version_parsed} was deployed to {self.environment.name} on" in msg.body # check the html version assert ( - f"Version {release.version} was deployed to {self.environment.name}\n \n" + f"Version {version_parsed} was deployed to {self.environment.name}\n \n" in msg.alternatives[0][0] ) @@ -239,7 +241,7 @@ def test_sends_deployment_notification(self): assert ( text - == f"Release {release.version} was deployed to {self.environment.name} for this project" + == f"Release {version_parsed} was deployed to {self.environment.name} for this project" ) assert ( attachment["actions"][0]["url"] diff --git a/tests/sentry/processing/realtime_metrics/test_redis.py b/tests/sentry/processing/realtime_metrics/test_redis.py index fe510ddafe29ef..b40bdaccf69215 100644 --- a/tests/sentry/processing/realtime_metrics/test_redis.py +++ b/tests/sentry/processing/realtime_metrics/test_redis.py @@ -4,8 +4,13 @@ import pytest -from sentry.processing import realtime_metrics # type: ignore -from sentry.processing.realtime_metrics.redis import RedisRealtimeMetricsStore # type: ignore +from sentry.processing import realtime_metrics +from sentry.processing.realtime_metrics.base import ( + BucketedCount, + BucketedDurations, + DurationHistogram, +) +from sentry.processing.realtime_metrics.redis import RedisRealtimeMetricsStore from sentry.utils import redis if TYPE_CHECKING: @@ -46,6 +51,13 @@ def test_default() -> None: realtime_metrics.increment_project_duration_counter(17, 1234, 55) +# TODO: group tests using classes + +# +# increment_project_event_counter() +# + + def test_increment_project_event_counter_simple( store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster ) -> None: @@ -80,6 +92,11 @@ def test_increment_project_event_counter_different_buckets( assert redis_cluster.get("symbolicate_event_low_priority:counter:10:17:1150") == "1" +# +# increment_project_duration_counter() +# + + def test_increment_project_duration_counter_simple( store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster ) -> None: @@ -112,3 +129,440 @@ def test_increment_project_duration_counter_different_buckets( assert redis_cluster.hget("symbolicate_event_low_priority:histogram:10:17:1140", "20") == "1" assert redis_cluster.hget("symbolicate_event_low_priority:histogram:10:17:1150", "40") == "1" + + +# +# get_lpq_projects() +# + + +def test_get_lpq_projects_unset(store: RedisRealtimeMetricsStore) -> None: + in_lpq = store.get_lpq_projects() + assert in_lpq == set() + + +def test_get_lpq_projects_empty( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1) + redis_cluster.srem("store.symbolicate-event-lpq-selected", 1) + + in_lpq = store.get_lpq_projects() + assert in_lpq == set() + + +def test_get_lpq_projects_filled( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1) + in_lpq = store.get_lpq_projects() + assert in_lpq == {1} + + +# +# add_project_to_lpq() +# + + +def test_add_project_to_lpq_unset( + store: 
RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + added = store.add_project_to_lpq(1) + assert added + in_lpq = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert in_lpq == {"1"} + + +def test_add_project_to_lpq_empty( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1) + redis_cluster.srem("store.symbolicate-event-lpq-selected", 1) + + added = store.add_project_to_lpq(1) + assert added + in_lpq = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert in_lpq == {"1"} + + +def test_add_project_to_lpq_dupe( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1) + + added = store.add_project_to_lpq(1) + assert not added + in_lpq = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert in_lpq == {"1"} + + +def test_add_project_to_lpq_filled( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 11) + + added = store.add_project_to_lpq(1) + assert added + in_lpq = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert in_lpq == {"1", "11"} + + +# +# remove_projects_from_lpq() +# + + +def test_remove_projects_from_lpq_unset( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + removed = store.remove_projects_from_lpq({1}) + assert removed == 0 + + remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert remaining == set() + + +def test_remove_projects_from_lpq_empty( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1) + redis_cluster.srem("store.symbolicate-event-lpq-selected", 1) + + removed = store.remove_projects_from_lpq({1}) + assert removed == 0 + remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert remaining == set() + + +def test_remove_projects_from_lpq_only_member( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1) + + removed = store.remove_projects_from_lpq({1}) + assert removed == 1 + + remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert remaining == set() + + +def test_remove_projects_from_lpq_nonmember( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 11) + + removed = store.remove_projects_from_lpq({1}) + assert removed == 0 + + remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert remaining == {"11"} + + +def test_remove_projects_from_lpq_subset( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1) + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 11) + + removed = store.remove_projects_from_lpq({1}) + assert removed == 1 + + remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert remaining == {"11"} + + +def test_remove_projects_from_lpq_all_members( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1) + 
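# The LPQ tests above and below boil down to Redis set semantics on a single
# key: SADD reports how many members were actually added (0 for a duplicate)
# and SREM how many were actually removed, which is what `add_project_to_lpq`
# and `remove_projects_from_lpq` surface as their return values. A sketch
# against plain redis-py, assuming a local Redis; the store's real cluster
# wiring via `sentry.utils.redis` is not reproduced here:

import redis

client = redis.Redis()
KEY = "store.symbolicate-event-lpq-selected"

client.delete(KEY)
assert client.sadd(KEY, 1) == 1       # newly added -> add_project_to_lpq() is True
assert client.sadd(KEY, 1) == 0       # duplicate   -> add_project_to_lpq() is False
assert client.srem(KEY, 1, 11) == 1   # only project 1 was a member
assert client.smembers(KEY) == set()  # the set is empty again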
redis_cluster.sadd("store.symbolicate-event-lpq-selected", 11) + + removed = store.remove_projects_from_lpq({1, 11}) + assert removed == 2 + + remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert remaining == set() + + +def test_remove_projects_from_lpq_no_members( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1) + + removed = store.remove_projects_from_lpq(set()) + assert removed == 0 + + remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected") + assert remaining == {"1"} + + +# +# projects() +# + + +def test_projects_unset(store: RedisRealtimeMetricsStore) -> None: + candidates = store.projects() + assert list(candidates) == [] + + +def test_projects_empty( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set( + "symbolicate_event_low_priority:counter:10:42:111", + 0, + ) + redis_cluster.delete("symbolicate_event_low_priority:counter:10:42:111") + + candidates = store.projects() + assert list(candidates) == [] + + +def test_projects_different_bucket( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:5:42:111", 0) + + candidates = store.projects() + assert list(candidates) == [] + + +def test_projects_negative_timestamp( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:-111", 0) + + candidates = store.projects() + assert list(candidates) == [42] + + +def test_projects_one_count( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0) + + candidates = store.projects() + assert list(candidates) == [42] + + +def test_projects_one_histogram( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111:0", 0, 123) + + candidates = store.projects() + assert list(candidates) == [42] + + +def test_projects_multiple_metric_types( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0) + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:53:111:20", 20, 456) + + candidates = store.projects() + assert list(candidates) == [42, 53] + + +def test_projects_mixed_buckets( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0) + redis_cluster.set("symbolicate_event_low_priority:counter:5:53:111", 0) + + candidates = store.projects() + assert list(candidates) == [42] + + +# +# get_counts_for_project() +# + + +def test_get_counts_for_project_unset(store: RedisRealtimeMetricsStore) -> None: + counts = store.get_counts_for_project(42) + assert list(counts) == [] + + +def test_get_counts_for_project_empty( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set( + "symbolicate_event_low_priority:counter:10:42:111", + 0, + ) + redis_cluster.delete("symbolicate_event_low_priority:counter:10:42:111") + + counts = store.get_counts_for_project(42) + assert list(counts) == [] + + +def test_get_counts_for_project_no_matching_keys( + store: RedisRealtimeMetricsStore, redis_cluster: 
redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:53:111", 0) + + counts = store.get_counts_for_project(42) + assert list(counts) == [] + + +def test_get_counts_for_project_negative_key( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:-111", 0) + + counts = store.get_counts_for_project(42) + assert list(counts) == [ + BucketedCount(timestamp=-111, count=0), + ] + + +def test_get_counts_for_project_negative_count( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", -10) + + counts = store.get_counts_for_project(42) + assert list(counts) == [ + BucketedCount(timestamp=111, count=-10), + ] + + +def test_get_counts_for_project_multiple_projects( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0) + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:222", 0) + redis_cluster.set("symbolicate_event_low_priority:counter:10:53:111", 0) + + counts = store.get_counts_for_project(42) + assert list(counts) == [ + BucketedCount(timestamp=111, count=0), + BucketedCount(timestamp=222, count=0), + ] + + +def test_get_counts_for_project_multi_metric( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0) + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:222:0", 0, 123) + + counts = store.get_counts_for_project(42) + assert list(counts) == [ + BucketedCount(timestamp=111, count=0), + ] + + +def test_get_counts_for_project_different_buckets( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0) + redis_cluster.set("symbolicate_event_low_priority:counter:5:42:111", 0) + + counts = store.get_counts_for_project(42) + assert list(counts) == [ + BucketedCount(timestamp=111, count=0), + ] + + +# +# get_durations_for_project() +# + + +def test_get_durations_for_project_unset(store: RedisRealtimeMetricsStore) -> None: + counts = store.get_durations_for_project(42) + assert list(counts) == [] + + +def test_get_durations_for_project_empty( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.hset( + "symbolicate_event_low_priority:histogram:10:42:111", + 0, + 123, + ) + redis_cluster.delete("symbolicate_event_low_priority:histogram:10:42:111") + + counts = store.get_durations_for_project(42) + assert list(counts) == [] + + +def test_get_durations_for_project_no_matching_keys( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:53:111", 0, 123) + + counts = store.get_durations_for_project(42) + assert list(counts) == [] + + +def test_get_durations_for_project_negative_timestamp( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:-111", 0, 123) + + counts = store.get_durations_for_project(42) + assert list(counts) == [ + DurationHistogram(timestamp=-111, histogram=BucketedDurations({0: 123})) + ] + + +def test_get_durations_for_project_negative_duration( + store: RedisRealtimeMetricsStore, 
redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", -20, 123) + + counts = store.get_durations_for_project(42) + assert list(counts) == [ + DurationHistogram(timestamp=111, histogram=BucketedDurations({-20: 123})) + ] + + +def test_get_durations_for_project_negative_count( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", 0, -123) + + counts = store.get_durations_for_project(42) + assert list(counts) == [ + DurationHistogram(timestamp=111, histogram=BucketedDurations({0: -123})) + ] + + +def test_get_durations_for_project_multi_key_multi_durations( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", 0, 123) + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", 10, 456) + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:222", 20, 123) + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:53:111", 0, 123) + + counts = store.get_durations_for_project(42) + assert list(counts) == [ + DurationHistogram(timestamp=111, histogram=BucketedDurations({0: 123, 10: 456})), + DurationHistogram(timestamp=222, histogram=BucketedDurations({20: 123})), + ] + + +def test_get_durations_for_project_multi_metric( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0) + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:222", 0, 123) + + counts = store.get_durations_for_project(42) + assert list(counts) == [DurationHistogram(timestamp=222, histogram=BucketedDurations({0: 123}))] + + +def test_get_durations_for_project_different_buckets( + store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster +) -> None: + redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", 0, 123) + redis_cluster.hset("symbolicate_event_low_priority:histogram:5:42:111", 20, 456) + + counts = store.get_durations_for_project(42) + assert list(counts) == [DurationHistogram(timestamp=111, histogram=BucketedDurations({0: 123}))] diff --git a/tests/sentry/rules/actions/test_notify_event_sentry_app.py b/tests/sentry/rules/actions/test_notify_event_sentry_app.py new file mode 100644 index 00000000000000..b4e29647eeac9b --- /dev/null +++ b/tests/sentry/rules/actions/test_notify_event_sentry_app.py @@ -0,0 +1,91 @@ +from sentry.rules.actions.notify_event_sentry_app import NotifyEventSentryAppAction +from sentry.tasks.sentry_apps import notify_sentry_app +from sentry.testutils.cases import RuleTestCase + +SENTRY_APP_ALERT_ACTION = "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction" + + +class NotifyEventSentryAppActionTest(RuleTestCase): + rule_cls = NotifyEventSentryAppAction + schema = { + "elements": [ + { + "type": "alert-rule-action", + "title": "Create Alert Rule UI Example Task", + "settings": { + "type": "alert-rule-settings", + "uri": "/test/", + "required_fields": [ + {"type": "text", "label": "Title", "name": "title"}, + {"type": "textarea", "label": "Description", "name": "description"}, + ], + }, + } + ] + } + schema_data = {"title": "foo", "description": "bar"} + + def test_applies_correctly_for_sentry_apps(self): + event = self.get_event() + + self.app = self.create_sentry_app( + organization=event.organization, + name="Test Application", 
+ is_alertable=True, + schema=self.schema, + ) + + self.install = self.create_sentry_app_installation( + slug="test-application", organization=event.organization + ) + + rule = self.get_rule( + data={ + "sentryAppInstallationUuid": self.install.uuid, + "settings": self.schema_data, + } + ) + + assert rule.id == SENTRY_APP_ALERT_ACTION + + futures = list(rule.after(event=event, state=self.get_state())) + assert len(futures) == 1 + assert futures[0].callback is notify_sentry_app + assert futures[0].kwargs["sentry_app"].id == self.app.id + assert futures[0].kwargs["schema_defined_settings"] == self.schema_data + + def test_sentry_app_actions(self): + event = self.get_event() + + self.project = self.create_project(organization=event.organization) + + self.app = self.create_sentry_app( + organization=event.organization, + name="Test Application", + is_alertable=True, + schema=self.schema, + ) + + self.install = self.create_sentry_app_installation( + slug="test-application", organization=event.organization + ) + + rule = self.get_rule( + data={ + "sentryAppInstallationUuid": self.install.uuid, + "settings": self.schema_data, + } + ) + + action_list = rule.get_custom_actions(self.project) + assert len(action_list) == 1 + + action = action_list[0] + alert_element = self.schema["elements"][0] + assert action["id"] == SENTRY_APP_ALERT_ACTION + assert action["service"] == self.app.slug + assert action["prompt"] == self.app.name + assert action["actionType"] == "sentryapp" + assert action["enabled"] + assert action["formFields"] == alert_element["settings"] + assert alert_element["title"] in action["label"] diff --git a/tests/sentry/search/events/test_builder.py b/tests/sentry/search/events/test_builder.py index 41101d7475c606..31c0cc8c29081e 100644 --- a/tests/sentry/search/events/test_builder.py +++ b/tests/sentry/search/events/test_builder.py @@ -6,7 +6,7 @@ from snuba_sdk.column import Column from snuba_sdk.conditions import Condition, Op, Or from snuba_sdk.function import Function -from snuba_sdk.orderby import Direction, OrderBy +from snuba_sdk.orderby import Direction, LimitBy, OrderBy from sentry.exceptions import InvalidSearchQuery from sentry.search.events.builder import QueryBuilder @@ -85,6 +85,19 @@ def test_simple_orderby(self): ) query.get_snql_query().validate() + def test_simple_limitby(self): + query = QueryBuilder( + dataset=Dataset.Discover, + params=self.params, + query="", + selected_columns=["message"], + orderby="message", + limitby=("message", 1), + limit=4, + ) + + assert query.limitby == LimitBy(Column("message"), 1) + def test_environment_filter(self): query = QueryBuilder( Dataset.Discover, diff --git a/tests/sentry/search/events/test_fields.py b/tests/sentry/search/events/test_fields.py index 53508f59a87d0f..727c8dc0e21d2d 100644 --- a/tests/sentry/search/events/test_fields.py +++ b/tests/sentry/search/events/test_fields.py @@ -169,6 +169,7 @@ def test_get_json_meta_type(field_alias, snuba_type, function, expected): r'to_other(release,"asdf @ \"qwer: (3,2)")', ("to_other", ["release", r'"asdf @ \"qwer: (3,2)"'], None), ), + ("identity(sessions)", ("identity", ["sessions"], None)), ], ) def test_parse_function(function, expected): @@ -360,14 +361,15 @@ def test_field_alias_with_aggregates(self): ] def test_aggregate_function_expansion(self): - fields = ["count_unique(user)", "count(id)", "min(timestamp)"] - result = resolve_field_list(fields, eventstore.Filter()) + fields = ["count_unique(user)", "count(id)", "min(timestamp)", "identity(sessions)"] + result = 
resolve_field_list(fields, eventstore.Filter(), functions_acl=["identity"]) # Automatic fields should be inserted, count() should have its column dropped. assert result["selected_columns"] == [] assert result["aggregations"] == [ ["uniq", "user", "count_unique_user"], ["count", None, "count_id"], ["min", "timestamp", "min_timestamp"], + ["identity", "sessions", "identity_sessions"], ] assert result["groupby"] == [] diff --git a/tests/sentry/security/test_utils.py b/tests/sentry/security/test_utils.py deleted file mode 100644 index 18cd614a76a5f1..00000000000000 --- a/tests/sentry/security/test_utils.py +++ /dev/null @@ -1,9 +0,0 @@ -from sentry.security.utils import is_valid_email_address - - -def test_is_valid_email_address_number_at_qqcom(): - assert is_valid_email_address("12345@qq.com") is False - - -def test_is_valid_email_address_normal_human_email_address(): - assert is_valid_email_address("dcramer@gmail.com") is True diff --git a/tests/sentry/snuba/test_discover.py b/tests/sentry/snuba/test_discover.py index 665bf243711ea1..372ecf5635a2d8 100644 --- a/tests/sentry/snuba/test_discover.py +++ b/tests/sentry/snuba/test_discover.py @@ -2871,7 +2871,7 @@ def test_conditions_with_timestamps(self): for t, ev in enumerate(events): val = ev[0] * 32 for i in range(ev[1]): - data = load_data("transaction", timestamp=before_now(seconds=3 * t + 1)) + data = load_data("transaction", timestamp=self.now - timedelta(seconds=3 * t + 1)) data["transaction"] = f"{val}" self.store_event(data=data, project_id=self.project.id) @@ -2879,8 +2879,8 @@ def test_conditions_with_timestamps(self): results = discover.query( selected_columns=["transaction", "count()"], query="event.type:transaction AND (timestamp:<{} OR timestamp:>{})".format( - iso_format(before_now(seconds=5)), - iso_format(before_now(seconds=3)), + iso_format(self.now - timedelta(seconds=5)), + iso_format(self.now - timedelta(seconds=3)), ), params={ "project_id": [self.project.id], @@ -2937,14 +2937,49 @@ def test_count_with_or(self): assert data[0]["transaction"] == "a" * 32 assert data[0]["count"] == 1 - def test_access_to_private_functions(self): - # using private functions directly without access should error - with pytest.raises(InvalidSearchQuery, match="array_join: no access to private function"): - discover.query( - selected_columns=["array_join(tags.key)"], + def test_array_join(self): + data = load_data("transaction", timestamp=before_now(seconds=3)) + data["measurements"] = { + "fp": {"value": 1000}, + "fcp": {"value": 1000}, + "lcp": {"value": 1000}, + } + self.store_event(data=data, project_id=self.project.id) + + for use_snql in [False, True]: + results = discover.query( + selected_columns=["array_join(measurements_key)"], query="", - params={"project_id": [self.project.id]}, + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + functions_acl=["array_join"], + use_snql=use_snql, ) + assert {"fcp", "fp", "lcp"} == { + row["array_join_measurements_key"] for row in results["data"] + } + + def test_access_to_private_functions(self): + for use_snql in [False, True]: + # using private functions directly without access should error + with pytest.raises( + InvalidSearchQuery, match="array_join: no access to private function" + ): + discover.query( + selected_columns=["array_join(tags.key)"], + query="", + params={ + "project_id": [self.project.id], + "start": self.two_min_ago, + "end": self.now, + }, + use_snql=use_snql, + ) + + # TODO: test the following with `use_snql=True` once histogram 
is using snql # using private functions in an aggregation without access should error with pytest.raises(InvalidSearchQuery, match="histogram: no access to private function"): @@ -5665,6 +5700,101 @@ def test_aggregate_function(self): assert "count_unique_user" in keys assert "time" in keys + def test_comparison_aggregate_function_invalid(self): + with pytest.raises( + InvalidSearchQuery, match="Only one column can be selected for comparison queries" + ): + discover.timeseries_query( + selected_columns=["count()", "count_unique(user)"], + query="", + params={ + "start": self.day_ago, + "end": self.day_ago + timedelta(hours=2), + "project_id": [self.project.id], + }, + rollup=3600, + comparison_delta=timedelta(days=1), + ) + + def test_comparison_aggregate_function(self): + self.store_event( + data={ + "timestamp": iso_format(self.day_ago + timedelta(hours=1)), + "user": {"id": 1}, + }, + project_id=self.project.id, + ) + + result = discover.timeseries_query( + selected_columns=["count()"], + query="", + params={ + "start": self.day_ago, + "end": self.day_ago + timedelta(hours=2), + "project_id": [self.project.id], + }, + rollup=3600, + comparison_delta=timedelta(days=1), + ) + assert len(result.data["data"]) == 3 + # Values should all be 0, since there is no comparison period data at all. + assert [0, 0, 0] == [val["count"] for val in result.data["data"] if "count" in val] + + self.store_event( + data={ + "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1)), + "user": {"id": 1}, + }, + project_id=self.project.id, + ) + self.store_event( + data={ + "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=2)), + "user": {"id": 2}, + }, + project_id=self.project.id, + ) + self.store_event( + data={ + "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=2, minutes=1)), + }, + project_id=self.project.id, + ) + + result = discover.timeseries_query( + selected_columns=["count()"], + query="", + params={ + "start": self.day_ago, + "end": self.day_ago + timedelta(hours=2, minutes=1), + "project_id": [self.project.id], + }, + rollup=3600, + comparison_delta=timedelta(days=1), + ) + assert len(result.data["data"]) == 3 + # In the second bucket we have 3 events in the current period and 2 in the comparison, so + # we get a result of 50% increase + assert [0, 50, 0] == [val["count"] for val in result.data["data"] if "count" in val] + + result = discover.timeseries_query( + selected_columns=["count_unique(user)"], + query="", + params={ + "start": self.day_ago, + "end": self.day_ago + timedelta(hours=2, minutes=2), + "project_id": [self.project.id], + }, + rollup=3600, + comparison_delta=timedelta(days=1), + ) + assert len(result.data["data"]) == 3 + # In the second bucket we have 1 unique user in the current period and 2 in the comparison, so + # we get a result of -50% + assert [0, -50, 0] == [ + val["count_unique_user"] for val in result.data["data"] if "count_unique_user" in val + ] + def test_count_miserable(self): event_data = load_data("transaction") # Half of duration so we don't get weird rounding differences when comparing the results diff --git a/tests/sentry/snuba/test_tasks.py b/tests/sentry/snuba/test_tasks.py index fa3f061b22e2ed..80b3a3c275d25e 100644 --- a/tests/sentry/snuba/test_tasks.py +++ b/tests/sentry/snuba/test_tasks.py @@ -199,6 +199,42 @@ def test_simple_transactions(self): assert snuba_filter.conditions == [] assert snuba_filter.aggregations == [["uniq", "user", "count_unique_user"]] + def test_simple_sessions(self): + snuba_filter = 
build_snuba_filter( + dataset=QueryDatasets.SESSIONS, + query="", + aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate", + environment=None, + event_types=[], + ) + assert snuba_filter + assert snuba_filter.aggregations == [ + [ + "if(greater(sessions,0),divide(sessions_crashed,sessions),null)", + None, + "_crash_rate_alert_aggregate", + ], + ["identity", "sessions", "_total_count"], + ] + + def test_simple_users(self): + snuba_filter = build_snuba_filter( + dataset=QueryDatasets.SESSIONS, + query="", + aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate", + environment=None, + event_types=[], + ) + assert snuba_filter + assert snuba_filter.aggregations == [ + [ + "if(greater(users,0),divide(users_crashed,users),null)", + None, + "_crash_rate_alert_aggregate", + ], + ["identity", "users", "_total_count"], + ] + def test_aliased_query_events(self): snuba_filter = build_snuba_filter( QueryDatasets.EVENTS, "release:latest", "count_unique(user)", None, None @@ -210,6 +246,52 @@ def test_aliased_query_events(self): ] assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", "count_unique_user"]] + def test_query_and_environment_sessions(self): + env = self.create_environment(self.project, name="development") + snuba_filter = build_snuba_filter( + dataset=QueryDatasets.SESSIONS, + query="release:ahmed@12.2", + aggregate="percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate", + environment=env, + event_types=[], + ) + assert snuba_filter + assert snuba_filter.aggregations == [ + [ + "if(greater(sessions,0),divide(sessions_crashed,sessions),null)", + None, + "_crash_rate_alert_aggregate", + ], + ["identity", "sessions", "_total_count"], + ] + assert snuba_filter.conditions == [ + ["release", "=", "ahmed@12.2"], + ["environment", "=", "development"], + ] + + def test_query_and_environment_users(self): + env = self.create_environment(self.project, name="development") + snuba_filter = build_snuba_filter( + dataset=QueryDatasets.SESSIONS, + query="release:ahmed@12.2", + aggregate="percentage(users_crashed, users) AS _crash_rate_alert_aggregate", + environment=env, + event_types=[], + ) + assert snuba_filter + assert snuba_filter.aggregations == [ + [ + "if(greater(users,0),divide(users_crashed,users),null)", + None, + "_crash_rate_alert_aggregate", + ], + ["identity", "users", "_total_count"], + ] + assert snuba_filter.conditions == [ + ["release", "=", "ahmed@12.2"], + ["environment", "=", "development"], + ] + def test_aliased_query_transactions(self): snuba_filter = build_snuba_filter( QueryDatasets.TRANSACTIONS, @@ -360,6 +442,15 @@ def test_event_types_no_discover(self): ) == "release:123" ) + assert ( + apply_dataset_query_conditions( + QueryDatasets.SESSIONS, + "release:123", + [], + False, + ) + == "release:123" + ) def test_event_types_discover(self): assert ( diff --git a/tests/sentry/tasks/test_low_priority_symbolication.py b/tests/sentry/tasks/test_low_priority_symbolication.py new file mode 100644 index 00000000000000..86a3063bacaabf --- /dev/null +++ b/tests/sentry/tasks/test_low_priority_symbolication.py @@ -0,0 +1,24 @@ +import pytest + +from sentry.processing import realtime_metrics +from sentry.tasks.low_priority_symbolication import _scan_for_suspect_projects, calculation_magic +from sentry.testutils.helpers.task_runner import TaskRunner +from sentry.utils import redis +from sentry.utils.compat import mock + + +@pytest.fixture +def redis_cluster() -> redis._RedisCluster: + return 
redis.redis_clusters.get("default") + + +@mock.patch("sentry.tasks.low_priority_symbolication.calculation_magic", lambda x, y: True) +def test_scan_for_suspect_projects() -> None: + realtime_metrics.increment_project_event_counter(17, 0) + with TaskRunner(): + _scan_for_suspect_projects() + assert realtime_metrics.get_lpq_projects() == {17} + + +def test_calculation_magic(): + assert not calculation_magic([], []) diff --git a/tests/sentry/tasks/test_sentry_apps.py b/tests/sentry/tasks/test_sentry_apps.py index caf63fdb9f8849..ce62dde8fda1e2 100644 --- a/tests/sentry/tasks/test_sentry_apps.py +++ b/tests/sentry/tasks/test_sentry_apps.py @@ -177,6 +177,40 @@ def test_send_alert_event(self, safe_urlopen): assert requests[0]["response_code"] == 200 assert requests[0]["event_type"] == "event_alert.triggered" + @patch("sentry.tasks.sentry_apps.safe_urlopen", return_value=MockResponseInstance) + def test_send_alert_event_with_additional_payload(self, safe_urlopen): + event = self.store_event(data={}, project_id=self.project.id) + settings = { + "alert_prefix": "[Not Good]", + "channel": "#ignored-errors", + "best_emoji": ":fire:", + } + rule_future = RuleFuture( + rule=self.rule, + kwargs={"sentry_app": self.sentry_app, "schema_defined_settings": settings}, + ) + + with self.tasks(): + notify_sentry_app(event, [rule_future]) + + payload = json.loads(faux(safe_urlopen).kwargs["data"]) + + assert payload["action"] == "triggered" + assert payload["data"]["triggered_rule"] == self.rule.label + assert payload["data"]["issue_alert"] == { + "id": self.rule.id, + "title": self.rule.label, + "sentry_app_id": self.sentry_app.id, + "settings": settings, + } + + buffer = SentryAppWebhookRequestsBuffer(self.sentry_app) + requests = buffer.get_requests() + + assert len(requests) == 1 + assert requests[0]["response_code"] == 200 + assert requests[0]["event_type"] == "event_alert.triggered" + @patch("sentry.tasks.sentry_apps.safe_urlopen", return_value=MockResponseInstance) class TestProcessResourceChange(TestCase): diff --git a/tests/sentry/utils/email/test_address.py b/tests/sentry/utils/email/test_address.py new file mode 100644 index 00000000000000..f456f46038d920 --- /dev/null +++ b/tests/sentry/utils/email/test_address.py @@ -0,0 +1,22 @@ +from sentry.testutils import TestCase +from sentry.utils.email.address import get_from_email_domain, is_valid_email_address + + +class GetFromEmailDomainTest(TestCase): + def test_get_from_email_domain(self): + with self.options({"mail.from": "matt@example.com"}): + assert get_from_email_domain() == "example.com" + + with self.options({"mail.from": "root@localhost"}): + assert get_from_email_domain() == "localhost" + + with self.options({"mail.from": "garbage"}): + assert get_from_email_domain() == "garbage" + + +class ValidEmailTest(TestCase): + def test_is_valid_email_address_number_at_qqcom(self): + assert is_valid_email_address("12345@qq.com") is False + + def test_is_valid_email_address_normal_human_email_address(self): + assert is_valid_email_address("dcramer@gmail.com") is True diff --git a/tests/sentry/utils/email/test_backend.py b/tests/sentry/utils/email/test_backend.py new file mode 100644 index 00000000000000..2c319161645052 --- /dev/null +++ b/tests/sentry/utils/email/test_backend.py @@ -0,0 +1,17 @@ +from sentry.testutils import TestCase +from sentry.utils.email.backend import get_mail_backend + + +class GetMailBackendTest(TestCase): + def test_get_mail_backend(self): + with self.options({"mail.backend": "smtp"}): + assert get_mail_backend() == 
"django.core.mail.backends.smtp.EmailBackend" + + with self.options({"mail.backend": "dummy"}): + assert get_mail_backend() == "django.core.mail.backends.dummy.EmailBackend" + + with self.options({"mail.backend": "console"}): + assert get_mail_backend() == "django.core.mail.backends.console.EmailBackend" + + with self.options({"mail.backend": "something.else"}): + assert get_mail_backend() == "something.else" diff --git a/tests/sentry/utils/email/test_list_resolver.py b/tests/sentry/utils/email/test_list_resolver.py new file mode 100644 index 00000000000000..145a81e6398458 --- /dev/null +++ b/tests/sentry/utils/email/test_list_resolver.py @@ -0,0 +1,28 @@ +import pytest + +from sentry.testutils import TestCase +from sentry.utils.email import ListResolver +from sentry.utils.email.message_builder import default_list_type_handlers + + +class ListResolverTestCase(TestCase): + resolver = ListResolver("namespace", default_list_type_handlers) + + def test_rejects_invalid_namespace(self): + with pytest.raises(AssertionError): + ListResolver("\x00", {}) + + def test_rejects_invalid_types(self): + with pytest.raises(ListResolver.UnregisteredTypeError): + self.resolver(object()) + + def test_generates_list_ids(self): + expected = "<{0.project.slug}.{0.organization.slug}.namespace>".format(self.event) + assert self.resolver(self.event.group) == expected + assert self.resolver(self.event.project) == expected + + def test_rejects_invalid_objects(self): + resolver = ListResolver("namespace", {object: lambda value: ("\x00",)}) + + with pytest.raises(AssertionError): + resolver(object()) diff --git a/tests/sentry/utils/email/tests.py b/tests/sentry/utils/email/test_message_builder.py similarity index 76% rename from tests/sentry/utils/email/tests.py rename to tests/sentry/utils/email/test_message_builder.py index c17cb538bcaf5b..5eecad9fbcff77 100644 --- a/tests/sentry/utils/email/tests.py +++ b/tests/sentry/utils/email/test_message_builder.py @@ -1,44 +1,13 @@ import functools -import pytest from django.core import mail from sentry import options -from sentry.models import GroupEmailThread, User, UserOption +from sentry.models import GroupEmailThread, User, UserEmail, UserOption from sentry.testutils import TestCase from sentry.utils.compat.mock import patch -from sentry.utils.email import ( - ListResolver, - MessageBuilder, - create_fake_email, - default_list_type_handlers, - get_from_email_domain, - get_mail_backend, - send_mail, -) - - -class ListResolverTestCase(TestCase): - resolver = ListResolver("namespace", default_list_type_handlers) - - def test_rejects_invalid_namespace(self): - with pytest.raises(AssertionError): - ListResolver("\x00", {}) - - def test_rejects_invalid_types(self): - with pytest.raises(ListResolver.UnregisteredTypeError): - self.resolver(object()) - - def test_generates_list_ids(self): - expected = "<{0.project.slug}.{0.organization.slug}.namespace>".format(self.event) - assert self.resolver(self.event.group) == expected - assert self.resolver(self.event.project) == expected - - def test_rejects_invalid_objects(self): - resolver = ListResolver("namespace", {object: lambda value: ("\x00",)}) - - with pytest.raises(AssertionError): - resolver(object()) +from sentry.utils.email import MessageBuilder +from sentry.utils.email.faker import create_fake_email class MessageBuilderTest(TestCase): @@ -115,8 +84,10 @@ def test_with_users(self): user_b = User.objects.create(email="bar@example.com") user_c = User.objects.create(email="baz@example.com") + alternate_email = 
"bazzer@example.com" + UserEmail.objects.create(user=user_c, email=alternate_email) UserOption.objects.create( - user=user_c, project=project, key="mail:email", value="bazzer@example.com" + user=user_c, project=project, key="mail:email", value=alternate_email ) msg = MessageBuilder( @@ -155,7 +126,7 @@ def test_fake_dont_send(self): assert len(mail.outbox) == 0 - @patch("sentry.utils.email.make_msgid") + @patch("sentry.utils.email.message_builder.make_msgid") def test_message_id(self, make_msgid): make_msgid.return_value = "abc123" @@ -180,7 +151,7 @@ def test_message_id(self, make_msgid): "text/html", ) - @patch("sentry.utils.email.make_msgid") + @patch("sentry.utils.email.message_builder.make_msgid") def test_add_groupemailthread(self, make_msgid): make_msgid.return_value = "abc123" @@ -211,7 +182,7 @@ def test_add_groupemailthread(self, make_msgid): assert thread.email == "foo@example.com" assert thread.group == self.group - @patch("sentry.utils.email.make_msgid") + @patch("sentry.utils.email.message_builder.make_msgid") def test_reply_reference(self, make_msgid): make_msgid.return_value = "abc123" @@ -327,47 +298,3 @@ def test_stripped_newline(self): assert len(mail.outbox) == 1 assert mail.outbox[0].subject == "Foo" - - -class MiscTestCase(TestCase): - def test_get_from_email_domain(self): - with self.options({"mail.from": "matt@example.com"}): - assert get_from_email_domain() == "example.com" - - with self.options({"mail.from": "root@localhost"}): - assert get_from_email_domain() == "localhost" - - with self.options({"mail.from": "garbage"}): - assert get_from_email_domain() == "garbage" - - def test_get_mail_backend(self): - with self.options({"mail.backend": "smtp"}): - assert get_mail_backend() == "django.core.mail.backends.smtp.EmailBackend" - - with self.options({"mail.backend": "dummy"}): - assert get_mail_backend() == "django.core.mail.backends.dummy.EmailBackend" - - with self.options({"mail.backend": "console"}): - assert get_mail_backend() == "django.core.mail.backends.console.EmailBackend" - - with self.options({"mail.backend": "something.else"}): - assert get_mail_backend() == "something.else" - - -class SendMail(TestCase): - @patch("django.core.mail.EmailMessage", autospec=True) - @patch("django.core.mail.get_connection", return_value="connection") - def test_send_mail_with_kwargs(self, get_connection, MockEmailMessage): - patch.object(MockEmailMessage.return_value, "send") - send_mail( - "subject", "my_message", "fake@example.com", ["a@b.com"], reply_to=["emusk@tesla.com"] - ) - MockEmailMessage.assert_called_once_with( - "subject", - "my_message", - "fake@example.com", - ["a@b.com"], - connection="connection", - reply_to=["emusk@tesla.com"], - ) - MockEmailMessage.return_value.send.assert_called_once_with(fail_silently=False) diff --git a/tests/sentry/utils/email/test_send_mail.py b/tests/sentry/utils/email/test_send_mail.py new file mode 100644 index 00000000000000..a113ca9d258823 --- /dev/null +++ b/tests/sentry/utils/email/test_send_mail.py @@ -0,0 +1,22 @@ +from sentry.testutils import TestCase +from sentry.utils.compat.mock import patch +from sentry.utils.email import send_mail + + +class SendMail(TestCase): + @patch("django.core.mail.EmailMessage", autospec=True) + @patch("django.core.mail.get_connection", return_value="connection") + def test_send_mail_with_kwargs(self, get_connection, MockEmailMessage): + patch.object(MockEmailMessage.return_value, "send") + send_mail( + "subject", "my_message", "fake@example.com", ["a@b.com"], reply_to=["emusk@tesla.com"] + ) 
+ MockEmailMessage.assert_called_once_with( + "subject", + "my_message", + "fake@example.com", + ["a@b.com"], + connection="connection", + reply_to=["emusk@tesla.com"], + ) + MockEmailMessage.return_value.send.assert_called_once_with(fail_silently=False) diff --git a/tests/snuba/api/endpoints/test_organization_events_stats.py b/tests/snuba/api/endpoints/test_organization_events_stats.py index b6622df018627c..f2356f786d70dd 100644 --- a/tests/snuba/api/endpoints/test_organization_events_stats.py +++ b/tests/snuba/api/endpoints/test_organization_events_stats.py @@ -17,6 +17,8 @@ class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase): + endpoint = "sentry-api-0-organization-events-stats" + def setUp(self): super().setUp() self.login_as(user=self.user) @@ -737,7 +739,7 @@ def test_multiple_yaxis_only_one_query(self, mock_query): assert mock_query.call_count == 1 - @mock.patch("sentry.snuba.discover.raw_query", return_value={"data": []}) + @mock.patch("sentry.snuba.discover.bulk_raw_query", return_value=[{"data": []}]) def test_invalid_interval(self, mock_query): with self.feature("organizations:discover-basic"): response = self.client.get( @@ -754,7 +756,7 @@ def test_invalid_interval(self, mock_query): assert response.status_code == 200 assert mock_query.call_count == 1 # Should've reset to the default for 24h - assert mock_query.mock_calls[0].kwargs["rollup"] == 300 + assert mock_query.mock_calls[0].args[0][0].rollup == 300 with self.feature("organizations:discover-basic"): response = self.client.get( @@ -771,7 +773,7 @@ def test_invalid_interval(self, mock_query): assert response.status_code == 200 assert mock_query.call_count == 2 # Should've reset to the default for 24h - assert mock_query.mock_calls[1].kwargs["rollup"] == 300 + assert mock_query.mock_calls[0].args[0][0].rollup == 300 def test_out_of_retention(self): with self.options({"system.event-retention-days": 10}): @@ -869,6 +871,61 @@ def test_without_zerofill(self): assert response.data["start"] == parse_date.parse(start).timestamp() assert response.data["end"] == parse_date.parse(end).timestamp() + def test_comparison(self): + self.store_event( + data={ + "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=1)), + }, + project_id=self.project.id, + ) + self.store_event( + data={ + "timestamp": iso_format(self.day_ago + timedelta(days=-1, minutes=2)), + }, + project_id=self.project.id, + ) + self.store_event( + data={ + "timestamp": iso_format(self.day_ago + timedelta(days=-1, hours=1, minutes=1)), + }, + project_id=self.project2.id, + ) + + response = self.get_success_response( + self.organization.slug, + start=iso_format(self.day_ago), + end=iso_format(self.day_ago + timedelta(hours=2)), + interval="1h", + comparisonDelta=int(timedelta(days=1).total_seconds()), + ) + + assert [attrs for time, attrs in response.data["data"]] == [ + [{"count": -50}], + [{"count": 100}], + ] + + def test_comparison_invalid(self): + response = self.get_error_response( + self.organization.slug, + start=iso_format(self.day_ago), + end=iso_format(self.day_ago + timedelta(hours=2)), + interval="1h", + comparisonDelta="17h", + ) + assert response.data["detail"] == "comparisonDelta must be an integer" + + start = before_now(days=85) + end = start + timedelta(days=7) + with self.options({"system.event-retention-days": 90}): + response = self.get_error_response( + self.organization.slug, + start=iso_format(start), + end=iso_format(end), + interval="1h", + comparisonDelta=int(timedelta(days=7).total_seconds()), + ) + assert 
response.data["detail"] == "Comparison period is outside retention window" + class OrganizationEventsStatsTopNEvents(APITestCase, SnubaTestCase): def setUp(self): @@ -1155,6 +1212,35 @@ def test_top_events_with_issue(self): assert other["order"] == 5 assert [{"count": 1}] in [attrs for _, attrs in other["data"]] + @mock.patch( + "sentry.snuba.discover.raw_query", + side_effect=[{"data": [{"group_id": 1}], "meta": []}, {"data": [], "meta": []}], + ) + def test_top_events_with_issue_check_query_conditions(self, mock_query): + """ "Intentionally separate from test_top_events_with_issue + + This is to test against a bug where the condition for issues wasn't included and we'd be missing data for + the interval since we'd cap out the max rows. This was not caught by the previous test since the results + would still be correct given the smaller interval & lack of data + """ + with self.feature(self.enabled_features): + self.client.get( + self.url, + data={ + "start": iso_format(self.day_ago), + "end": iso_format(self.day_ago + timedelta(hours=2)), + "interval": "1h", + "yAxis": "count()", + "orderby": ["-count()"], + "field": ["count()", "message", "issue"], + "topEvents": 5, + "query": "!event.type:transaction", + }, + format="json", + ) + + assert ["group_id", "IN", [1]] in mock_query.mock_calls[1].kwargs["conditions"] + def test_top_events_with_functions(self): with self.feature(self.enabled_features): response = self.client.get( @@ -1808,8 +1894,9 @@ def test_top_events_with_equations(self): assert other["order"] == 5 assert [{"count": 0.03}] in [attrs for _, attrs in other["data"]] + @mock.patch("sentry.snuba.discover.bulk_raw_query", return_value=[{"data": [], "meta": []}]) @mock.patch("sentry.snuba.discover.raw_query", return_value={"data": [], "meta": []}) - def test_invalid_interval(self, mock_query): + def test_invalid_interval(self, mock_raw_query, mock_bulk_query): with self.feature("organizations:discover-basic"): response = self.client.get( self.url, @@ -1825,7 +1912,7 @@ def test_invalid_interval(self, mock_query): }, ) assert response.status_code == 200 - assert mock_query.call_count == 1 + assert mock_bulk_query.call_count == 1 with self.feature("organizations:discover-basic"): response = self.client.get( @@ -1843,9 +1930,9 @@ def test_invalid_interval(self, mock_query): }, ) assert response.status_code == 200 - assert mock_query.call_count == 3 + assert mock_raw_query.call_count == 2 # Should've reset to the default for between 1 and 24h - assert mock_query.mock_calls[2].kwargs["rollup"] == 300 + assert mock_raw_query.mock_calls[1].kwargs["rollup"] == 300 with self.feature("organizations:discover-basic"): response = self.client.get( @@ -1863,9 +1950,9 @@ def test_invalid_interval(self, mock_query): }, ) assert response.status_code == 200 - assert mock_query.call_count == 5 + assert mock_raw_query.call_count == 4 # Should've left the interval alone since we're just below the limit - assert mock_query.mock_calls[4].kwargs["rollup"] == 1 + assert mock_raw_query.mock_calls[3].kwargs["rollup"] == 1 with self.feature("organizations:discover-basic"): response = self.client.get( @@ -1882,9 +1969,9 @@ def test_invalid_interval(self, mock_query): }, ) assert response.status_code == 200 - assert mock_query.call_count == 7 + assert mock_raw_query.call_count == 6 # Should've default to 24h's default of 5m - assert mock_query.mock_calls[6].kwargs["rollup"] == 300 + assert mock_raw_query.mock_calls[5].kwargs["rollup"] == 300 def test_top_events_timestamp_fields(self): with 
self.feature("organizations:discover-basic"): diff --git a/tests/snuba/api/endpoints/test_organization_events_v2.py b/tests/snuba/api/endpoints/test_organization_events_v2.py index cd74b27372fb14..83744174a021d7 100644 --- a/tests/snuba/api/endpoints/test_organization_events_v2.py +++ b/tests/snuba/api/endpoints/test_organization_events_v2.py @@ -5,7 +5,7 @@ from django.utils import timezone from pytz import utc -from sentry.discover.models import KeyTransaction, TeamKeyTransaction +from sentry.discover.models import TeamKeyTransaction from sentry.models import ( ApiKey, ProjectTeam, @@ -3914,178 +3914,6 @@ def test_compare_numeric_aggregate(self): assert len(response.data["data"]) == 1 assert response.data["data"][0]["compare_numeric_aggregate_p75_equals_0"] == 0 - def test_no_key_transactions(self): - transactions = [ - "/blah_transaction/", - "/foo_transaction/", - "/zoo_transaction/", - ] - - for transaction in transactions: - self.transaction_data["transaction"] = transaction - self.store_event(self.transaction_data, self.project.id) - - query = { - "project": [self.project.id], - # use the order by to ensure the result order - "orderby": "transaction", - "field": [ - "key_transaction", - "transaction", - "transaction.status", - "project", - "epm()", - "failure_rate()", - "percentile(transaction.duration, 0.95)", - ], - } - response = self.do_request(query) - - assert response.status_code == 200, response.content - data = response.data["data"] - assert len(data) == 3 - assert data[0]["key_transaction"] == 0 - assert data[0]["transaction"] == "/blah_transaction/" - assert data[1]["key_transaction"] == 0 - assert data[1]["transaction"] == "/foo_transaction/" - assert data[2]["key_transaction"] == 0 - assert data[2]["transaction"] == "/zoo_transaction/" - - def test_key_transactions_orderby(self): - transactions = ["/blah_transaction/"] - key_transactions = [ - "/foo_transaction/", - "/zoo_transaction/", - ] - - for transaction in transactions: - self.transaction_data["transaction"] = transaction - self.store_event(self.transaction_data, self.project.id) - - for transaction in key_transactions: - self.transaction_data["transaction"] = transaction - self.store_event(self.transaction_data, self.project.id) - KeyTransaction.objects.create( - owner=self.user, - organization=self.organization, - transaction=transaction, - project=self.project, - ) - - query = { - "project": [self.project.id], - "field": [ - "key_transaction", - "transaction", - "transaction.status", - "project", - "epm()", - "failure_rate()", - "percentile(transaction.duration, 0.95)", - ], - } - - # test ascending order - query["orderby"] = ["key_transaction", "transaction"] - response = self.do_request(query) - assert response.status_code == 200, response.content - data = response.data["data"] - assert len(data) == 3 - assert data[0]["key_transaction"] == 0 - assert data[0]["transaction"] == "/blah_transaction/" - assert data[1]["key_transaction"] == 1 - assert data[1]["transaction"] == "/foo_transaction/" - assert data[2]["key_transaction"] == 1 - assert data[2]["transaction"] == "/zoo_transaction/" - - # test descending order - query["orderby"] = ["-key_transaction", "-transaction"] - response = self.do_request(query) - assert response.status_code == 200, response.content - data = response.data["data"] - assert len(data) == 3 - assert data[0]["key_transaction"] == 1 - assert data[0]["transaction"] == "/zoo_transaction/" - assert data[1]["key_transaction"] == 1 - assert data[1]["transaction"] == "/foo_transaction/" - 
assert data[2]["key_transaction"] == 0 - assert data[2]["transaction"] == "/blah_transaction/" - - def test_key_transactions_query(self): - transactions = ["/blah_transaction/"] - key_transactions = [ - "/foo_transaction/", - "/zoo_transaction/", - ] - - for transaction in transactions: - self.transaction_data["transaction"] = transaction - self.store_event(self.transaction_data, self.project.id) - - for transaction in key_transactions: - self.transaction_data["transaction"] = transaction - self.store_event(self.transaction_data, self.project.id) - KeyTransaction.objects.create( - owner=self.user, - organization=self.organization, - transaction=transaction, - project=self.project, - ) - - query = { - "project": [self.project.id], - "orderby": "transaction", - "field": [ - "key_transaction", - "transaction", - "transaction.status", - "project", - "epm()", - "failure_rate()", - "percentile(transaction.duration, 0.95)", - ], - } - - # key transactions - query["query"] = "has:key_transaction" - response = self.do_request(query) - assert response.status_code == 200, response.content - data = response.data["data"] - assert len(data) == 2 - assert data[0]["key_transaction"] == 1 - assert data[0]["transaction"] == "/foo_transaction/" - assert data[1]["key_transaction"] == 1 - assert data[1]["transaction"] == "/zoo_transaction/" - - # key transactions - query["query"] = "key_transaction:true" - response = self.do_request(query) - assert response.status_code == 200, response.content - data = response.data["data"] - assert len(data) == 2 - assert data[0]["key_transaction"] == 1 - assert data[0]["transaction"] == "/foo_transaction/" - assert data[1]["key_transaction"] == 1 - assert data[1]["transaction"] == "/zoo_transaction/" - - # not key transactions - query["query"] = "!has:key_transaction" - response = self.do_request(query) - assert response.status_code == 200, response.content - data = response.data["data"] - assert len(data) == 1 - assert data[0]["key_transaction"] == 0 - assert data[0]["transaction"] == "/blah_transaction/" - - # not key transactions - query["query"] = "key_transaction:false" - response = self.do_request(query) - assert response.status_code == 200, response.content - data = response.data["data"] - assert len(data) == 1 - assert data[0]["key_transaction"] == 0 - assert data[0]["transaction"] == "/blah_transaction/" - def test_no_team_key_transactions(self): transactions = [ "/blah_transaction/", diff --git a/tests/snuba/sessions/test_sessions.py b/tests/snuba/sessions/test_sessions.py index c7048106ec3a20..e6ca0595e0fb85 100644 --- a/tests/snuba/sessions/test_sessions.py +++ b/tests/snuba/sessions/test_sessions.py @@ -11,7 +11,6 @@ _make_stats, get_project_releases_by_stability, get_project_releases_count, - get_release_health_data_overview, ) from sentry.testutils import SnubaTestCase, TestCase from sentry.testutils.cases import SessionMetricsTestCase @@ -362,7 +361,7 @@ def test_get_release_adoption_lowered(self): } def test_get_release_health_data_overview_users(self): - data = get_release_health_data_overview( + data = self.backend.get_release_health_data_overview( [ (self.project.id, self.session_release), (self.project.id, self.session_crashed_release), @@ -416,7 +415,7 @@ def test_get_release_health_data_overview_users(self): } def test_get_release_health_data_overview_sessions(self): - data = get_release_health_data_overview( + data = self.backend.get_release_health_data_overview( [ (self.project.id, self.session_release), (self.project.id, 
self.session_crashed_release), @@ -569,6 +568,101 @@ def test_fetching_release_sessions_time_bounds_for_different_release_with_no_ses "sessions_upper_bound": None, } + def test_get_crash_free_breakdown(self): + start = timezone.now() - timedelta(days=4) + data = self.backend.get_crash_free_breakdown( + project_id=self.project.id, + release=self.session_release, + start=start, + environments=["prod"], + ) + + # Last returned date is generated within function, should be close to now: + last_date = data[-1].pop("date") + assert timezone.now() - last_date < timedelta(seconds=1) + + assert data == [ + { + "crash_free_sessions": None, + "crash_free_users": None, + "date": start + timedelta(days=1), + "total_sessions": 0, + "total_users": 0, + }, + { + "crash_free_sessions": None, + "crash_free_users": None, + "date": start + timedelta(days=2), + "total_sessions": 0, + "total_users": 0, + }, + { + "crash_free_sessions": 100.0, + "crash_free_users": 100.0, + "total_sessions": 2, + "total_users": 1, + }, + ] + + data = self.backend.get_crash_free_breakdown( + project_id=self.project.id, + release=self.session_crashed_release, + start=start, + environments=["prod"], + ) + data[-1].pop("date") + assert data == [ + { + "crash_free_sessions": None, + "crash_free_users": None, + "date": start + timedelta(days=1), + "total_sessions": 0, + "total_users": 0, + }, + { + "crash_free_sessions": None, + "crash_free_users": None, + "date": start + timedelta(days=2), + "total_sessions": 0, + "total_users": 0, + }, + { + "crash_free_sessions": 0.0, + "crash_free_users": 0.0, + "total_sessions": 1, + "total_users": 1, + }, + ] + data = self.backend.get_crash_free_breakdown( + project_id=self.project.id, + release="non-existing", + start=start, + environments=["prod"], + ) + data[-1].pop("date") + assert data == [ + { + "crash_free_sessions": None, + "crash_free_users": None, + "date": start + timedelta(days=1), + "total_sessions": 0, + "total_users": 0, + }, + { + "crash_free_sessions": None, + "crash_free_users": None, + "date": start + timedelta(days=2), + "total_sessions": 0, + "total_users": 0, + }, + { + "crash_free_sessions": None, + "crash_free_users": None, + "total_sessions": 0, + "total_users": 0, + }, + ] + def test_basic_release_model_adoptions(self): """ Test that the basic (project,release) data is returned diff --git a/webpack.config.ts b/webpack.config.ts index 50e2088cf365f2..8324073d75ce6b 100644 --- a/webpack.config.ts +++ b/webpack.config.ts @@ -60,6 +60,7 @@ const WEBPACK_MODE: Configuration['mode'] = IS_PRODUCTION ? 'production' : 'deve const SENTRY_BACKEND_PORT = env.SENTRY_BACKEND_PORT; const SENTRY_WEBPACK_PROXY_HOST = env.SENTRY_WEBPACK_PROXY_HOST; const SENTRY_WEBPACK_PROXY_PORT = env.SENTRY_WEBPACK_PROXY_PORT; +const SENTRY_RELEASE_VERSION = env.SENTRY_RELEASE_VERSION; // Used by sentry devserver runner to force using webpack-dev-server const FORCE_WEBPACK_DEV_SERVER = !!env.FORCE_WEBPACK_DEV_SERVER; @@ -335,6 +336,7 @@ let appConfig: Configuration = { DEPLOY_PREVIEW_CONFIG: JSON.stringify(DEPLOY_PREVIEW_CONFIG), EXPERIMENTAL_SPA: JSON.stringify(SENTRY_EXPERIMENTAL_SPA), SPA_DSN: JSON.stringify(SENTRY_SPA_DSN), + SENTRY_RELEASE_VERSION: JSON.stringify(SENTRY_RELEASE_VERSION), }, }),
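+    // Hypothetical usage sketch: assuming the define block above is nested under +    // 'process.env' in webpack.DefinePlugin as the surrounding keys suggest, a build +    // run with e.g. SENTRY_RELEASE_VERSION=frontend@1.0 in the environment lets +    // client code read the inlined value as: +    //   const release: string | undefined = process.env.SENTRY_RELEASE_VERSION;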