diff --git a/.github/actions/install-frontend-dependencies/action.yml b/.github/actions/install-frontend-dependencies/action.yml index df1f3678bc..6d3706e34c 100644 --- a/.github/actions/install-frontend-dependencies/action.yml +++ b/.github/actions/install-frontend-dependencies/action.yml @@ -20,7 +20,7 @@ runs: # yamllint enable rule:line-length - uses: actions/setup-node@v4 with: - node-version: 18.16.0 + node-version: 20.15.1 cache: "yarn" cache-dependency-path: ${{ steps.yarn-lock-location.outputs.yarn-lock-location }} - name: Use cached frontend dependencies diff --git a/Tiltfile b/Tiltfile index f6e07226f1..df0e53fa29 100644 --- a/Tiltfile +++ b/Tiltfile @@ -158,7 +158,7 @@ cmd_button( helm_oncall_values = ["./dev/helm-local.yml", "./dev/helm-local.dev.yml"] if is_ci: helm_oncall_values = helm_oncall_values + ["./.github/helm-ci.yml"] -yaml = helm("helm/oncall", name=HELM_PREFIX, values=helm_oncall_values, set=twilio_values) +yaml = helm("helm/oncall", name=HELM_PREFIX, values=helm_oncall_values, set=twilio_values, namespace="default") k8s_yaml(yaml) diff --git a/dev/README.md b/dev/README.md index bef3aef6aa..c6f82c5abf 100644 --- a/dev/README.md +++ b/dev/README.md @@ -11,7 +11,7 @@ Related: [How to develop integrations](/engine/config_integrations/README.md) - [Tilt | Kubernetes for Prod, Tilt for Dev](https://tilt.dev/) - [tilt-dev/ctlptl: Making local Kubernetes clusters fun and easy to set up](https://github.com/tilt-dev/ctlptl) - [Kind](https://kind.sigs.k8s.io) -- [Node.js v18.x](https://nodejs.org/en/download) +- [Node.js v20.x](https://nodejs.org/en/download) - [Yarn](https://classic.yarnpkg.com/lang/en/docs/install/#mac-stable) ### Launch the environment diff --git a/engine/apps/alerts/constants.py b/engine/apps/alerts/constants.py index f5c9f19611..496836caaa 100644 --- a/engine/apps/alerts/constants.py +++ b/engine/apps/alerts/constants.py @@ -16,6 +16,8 @@ class ActionSource(IntegerChoices): NEXT_ESCALATION_DELAY = 5 +BUNDLED_NOTIFICATION_DELAY_SECONDS = 60 * 2 # 2 min + # AlertGroup states verbal class AlertGroupState(str, Enum): diff --git a/engine/apps/alerts/incident_appearance/renderers/base_renderer.py b/engine/apps/alerts/incident_appearance/renderers/base_renderer.py index d316123900..2207c354d5 100644 --- a/engine/apps/alerts/incident_appearance/renderers/base_renderer.py +++ b/engine/apps/alerts/incident_appearance/renderers/base_renderer.py @@ -1,10 +1,11 @@ import typing from abc import ABC, abstractmethod +from django.db.models import QuerySet from django.utils.functional import cached_property if typing.TYPE_CHECKING: - from apps.alerts.models import Alert, AlertGroup + from apps.alerts.models import Alert, AlertGroup, BundledNotification class AlertBaseRenderer(ABC): @@ -33,3 +34,11 @@ def __init__(self, alert_group: "AlertGroup", alert: typing.Optional["Alert"] = @abstractmethod def alert_renderer_class(self): raise NotImplementedError + + +class AlertGroupBundleBaseRenderer: + MAX_ALERT_GROUPS_TO_RENDER = 3 + MAX_CHANNELS_TO_RENDER = 1 + + def __init__(self, notifications: "QuerySet[BundledNotification]"): + self.notifications = notifications diff --git a/engine/apps/alerts/incident_appearance/renderers/sms_renderer.py b/engine/apps/alerts/incident_appearance/renderers/sms_renderer.py index fe533fc4e1..8044bb0ac8 100644 --- a/engine/apps/alerts/incident_appearance/renderers/sms_renderer.py +++ b/engine/apps/alerts/incident_appearance/renderers/sms_renderer.py @@ -1,4 +1,10 @@ -from apps.alerts.incident_appearance.renderers.base_renderer import 
AlertBaseRenderer, AlertGroupBaseRenderer +from django.db.models import Count + +from apps.alerts.incident_appearance.renderers.base_renderer import ( + AlertBaseRenderer, + AlertGroupBaseRenderer, + AlertGroupBundleBaseRenderer, +) from apps.alerts.incident_appearance.renderers.constants import DEFAULT_BACKUP_TITLE from apps.alerts.incident_appearance.templaters import AlertSmsTemplater from common.utils import str_or_backup @@ -24,3 +30,53 @@ def render(self): f"integration: {self.alert_group.channel.short_name}, " f"alerts registered: {self.alert_group.alerts.count()}." ) + + +class AlertGroupSMSBundleRenderer(AlertGroupBundleBaseRenderer): + def render(self) -> str: + """ + Renders SMS message for notification bundle: gets total count of unique alert groups and alert receive channels + in the bundle, renders text with `inside_organization_number` of 3 alert groups (MAX_ALERT_GROUPS_TO_RENDER) and + `short_name` of 1 alert receive channel (MAX_CHANNELS_TO_RENDER). If there are more unique alert groups / alert + receive channels to notify about, adds "and X more" to the SMS message + """ + + channels_and_alert_groups_count = self.notifications.aggregate( + channels_count=Count("alert_receive_channel", distinct=True), + alert_groups_count=Count("alert_group", distinct=True), + ) + alert_groups_count = channels_and_alert_groups_count["alert_groups_count"] + channels_count = channels_and_alert_groups_count["channels_count"] + + # get 3 unique alert groups from notifications + alert_groups_to_render = [] + for notification in self.notifications: + if notification.alert_group not in alert_groups_to_render: + alert_groups_to_render.append(notification.alert_group) + if len(alert_groups_to_render) == self.MAX_ALERT_GROUPS_TO_RENDER: + break + # render text for alert groups + alert_group_inside_organization_numbers = [ + alert_group.inside_organization_number for alert_group in alert_groups_to_render + ] + numbers_str = ", ".join(f"#{x}" for x in alert_group_inside_organization_numbers) + alert_groups_text = "Alert groups " if alert_groups_count > 1 else "Alert group " + alert_groups_text += numbers_str + + if alert_groups_count > self.MAX_ALERT_GROUPS_TO_RENDER: + alert_groups_text += f" and {alert_groups_count - self.MAX_ALERT_GROUPS_TO_RENDER} more" + + # render text for alert receive channels + channels_to_render = [alert_groups_to_render[i].channel for i in range(self.MAX_CHANNELS_TO_RENDER)] + channel_names = ", ".join([channel.short_name for channel in channels_to_render]) + channels_text = "integrations: " if channels_count > 1 else "integration: " + channels_text += channel_names + + if channels_count > self.MAX_CHANNELS_TO_RENDER: + channels_text += f" and {channels_count - self.MAX_CHANNELS_TO_RENDER} more" + + return ( + f"Grafana OnCall: {alert_groups_text} " + f"from stack: {channels_to_render[0].organization.stack_slug}, " + f"{channels_text}." 
+ ) diff --git a/engine/apps/alerts/migrations/0052_alter_channelfilter_filtering_term_type.py b/engine/apps/alerts/migrations/0052_alter_channelfilter_filtering_term_type.py new file mode 100644 index 0000000000..52af3902fc --- /dev/null +++ b/engine/apps/alerts/migrations/0052_alter_channelfilter_filtering_term_type.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.11 on 2024-07-04 20:20 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('alerts', '0051_remove_escalationpolicy_custom_button_trigger'), + ] + + operations = [ + migrations.AlterField( + model_name='channelfilter', + name='filtering_term_type', + field=models.IntegerField(choices=[(0, 'regex'), (1, 'jinja2'), (2, 'labels')], default=0), + ), + ] diff --git a/engine/apps/alerts/migrations/0053_channelfilter_filtering_labels.py b/engine/apps/alerts/migrations/0053_channelfilter_filtering_labels.py new file mode 100644 index 0000000000..84ec98d969 --- /dev/null +++ b/engine/apps/alerts/migrations/0053_channelfilter_filtering_labels.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.11 on 2024-07-08 18:20 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('alerts', '0052_alter_channelfilter_filtering_term_type'), + ] + + operations = [ + migrations.AddField( + model_name='channelfilter', + name='filtering_labels', + field=models.JSONField(default=None, null=True), + ), + ] diff --git a/engine/apps/alerts/migrations/0054_usernotificationbundle_bundlednotification_and_more.py b/engine/apps/alerts/migrations/0054_usernotificationbundle_bundlednotification_and_more.py new file mode 100644 index 0000000000..a4d4fcc85c --- /dev/null +++ b/engine/apps/alerts/migrations/0054_usernotificationbundle_bundlednotification_and_more.py @@ -0,0 +1,45 @@ +# Generated by Django 4.2.10 on 2024-06-20 11:00 + +import apps.base.models.user_notification_policy +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('base', '0005_drop_unused_dynamic_settings'), + ('user_management', '0022_alter_team_unique_together'), + ('alerts', '0053_channelfilter_filtering_labels'), + ] + + operations = [ + migrations.CreateModel( + name='UserNotificationBundle', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('important', models.BooleanField()), + ('notification_channel', models.PositiveSmallIntegerField(default=None, null=True, validators=[apps.base.models.user_notification_policy.validate_channel_choice])), + ('last_notified_at', models.DateTimeField(default=None, null=True)), + ('notification_task_id', models.CharField(default=None, max_length=100, null=True)), + ('eta', models.DateTimeField(default=None, null=True)), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notification_bundles', to='user_management.user')), + ], + ), + migrations.CreateModel( + name='BundledNotification', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('bundle_uuid', models.CharField(db_index=True, default=None, max_length=100, null=True)), + ('alert_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='alerts.alertgroup')), + ('alert_receive_channel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
to='alerts.alertreceivechannel')), + ('notification_bundle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to='alerts.usernotificationbundle')), + ('notification_policy', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='base.usernotificationpolicy')), + ], + ), + migrations.AddConstraint( + model_name='usernotificationbundle', + constraint=models.UniqueConstraint(fields=('user', 'important', 'notification_channel'), name='unique_user_notification_bundle'), + ), + ] diff --git a/engine/apps/alerts/models/__init__.py b/engine/apps/alerts/models/__init__.py index c537fc3119..51b4415844 100644 --- a/engine/apps/alerts/models/__init__.py +++ b/engine/apps/alerts/models/__init__.py @@ -15,3 +15,4 @@ from .maintainable_object import MaintainableObject # noqa: F401 from .resolution_note import ResolutionNote, ResolutionNoteSlackMessage # noqa: F401 from .user_has_notification import UserHasNotification # noqa: F401 +from .user_notification_bundle import BundledNotification, UserNotificationBundle # noqa: F401 diff --git a/engine/apps/alerts/models/channel_filter.py b/engine/apps/alerts/models/channel_filter.py index a8df73bb8d..969f6c66b5 100644 --- a/engine/apps/alerts/models/channel_filter.py +++ b/engine/apps/alerts/models/channel_filter.py @@ -20,7 +20,7 @@ from django.db.models.manager import RelatedManager from apps.alerts.models import Alert, AlertGroup, AlertReceiveChannel - from apps.labels.types import AlertLabels + from apps.labels.types import AlertLabels, LabelPair logger = logging.getLogger(__name__) @@ -45,6 +45,7 @@ class ChannelFilter(OrderedModel): """ alert_groups: "RelatedManager['AlertGroup']" + filtering_labels: typing.Optional[list["LabelPair"]] order_with_respect_to = ["alert_receive_channel_id", "is_default"] @@ -87,11 +88,14 @@ class ChannelFilter(OrderedModel): FILTERING_TERM_TYPE_REGEX = 0 FILTERING_TERM_TYPE_JINJA2 = 1 + FILTERING_TERM_TYPE_LABELS = 2 FILTERING_TERM_TYPE_CHOICES = [ (FILTERING_TERM_TYPE_REGEX, "regex"), (FILTERING_TERM_TYPE_JINJA2, "jinja2"), + (FILTERING_TERM_TYPE_LABELS, "labels"), ] filtering_term_type = models.IntegerField(choices=FILTERING_TERM_TYPE_CHOICES, default=FILTERING_TERM_TYPE_REGEX) + filtering_labels = models.JSONField(null=True, default=None) is_default = models.BooleanField(default=False) @@ -145,6 +149,15 @@ def check_filter( except re.error: logger.error(f"channel_filter={self.id} failed to parse regex={self.filtering_term}") return False + if self.filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_LABELS: + if not self.filtering_labels or alert_labels is None: + return False + for item in self.filtering_labels: + key = item["key"]["name"] + value = item["value"]["name"] + if key not in alert_labels or alert_labels[key] != value: + return False + return True return False @property @@ -164,6 +177,10 @@ def str_for_clients(self): return "default" if self.filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_JINJA2: return str(self.filtering_term) + elif self.filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_LABELS: + if not self.filtering_labels: + return "{}" + return ", ".join(f"{item['key']['name']}={item['value']['name']}" for item in self.filtering_labels) elif self.filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_REGEX or self.filtering_term_type is None: return str(self.filtering_term).replace("`", "") raise Exception("Unknown filtering term") diff --git a/engine/apps/alerts/models/user_has_notification.py 
b/engine/apps/alerts/models/user_has_notification.py
index 967a93cfe2..1dc51d0beb 100644
--- a/engine/apps/alerts/models/user_has_notification.py
+++ b/engine/apps/alerts/models/user_has_notification.py
@@ -16,3 +16,11 @@ class UserHasNotification(models.Model):
 
     class Meta:
         unique_together = ("user", "alert_group")
+
+    def update_active_task_id(self, task_id):
+        """
+        `active_notification_policy_id` keeps the celery task_id of the next scheduled `notify_user_task`
+        for the current user
+        """
+        self.active_notification_policy_id = task_id
+        self.save(update_fields=["active_notification_policy_id"])
diff --git a/engine/apps/alerts/models/user_notification_bundle.py b/engine/apps/alerts/models/user_notification_bundle.py
new file mode 100644
index 0000000000..4c7936378b
--- /dev/null
+++ b/engine/apps/alerts/models/user_notification_bundle.py
@@ -0,0 +1,87 @@
+import datetime
+import typing
+
+from django.db import models
+from django.utils import timezone
+
+from apps.alerts.constants import BUNDLED_NOTIFICATION_DELAY_SECONDS
+from apps.base.models import UserNotificationPolicy
+from apps.base.models.user_notification_policy import validate_channel_choice
+
+if typing.TYPE_CHECKING:
+    from django.db.models.manager import RelatedManager
+
+    from apps.alerts.models import AlertGroup, AlertReceiveChannel
+    from apps.user_management.models import User
+
+
+class UserNotificationBundle(models.Model):
+    user: "User"
+    notifications: "RelatedManager['BundledNotification']"
+
+    NOTIFICATION_CHANNELS_TO_BUNDLE = [
+        UserNotificationPolicy.NotificationChannel.SMS,
+    ]
+
+    user = models.ForeignKey("user_management.User", on_delete=models.CASCADE, related_name="notification_bundles")
+    important = models.BooleanField()
+    notification_channel = models.PositiveSmallIntegerField(
+        validators=[validate_channel_choice], null=True, default=None
+    )
+    last_notified_at = models.DateTimeField(default=None, null=True)
+    notification_task_id = models.CharField(max_length=100, null=True, default=None)
+    # estimated time of arrival of the scheduled send_bundled_notification task
+    eta = models.DateTimeField(default=None, null=True)
+
+    class Meta:
+        constraints = [
+            models.UniqueConstraint(
+                fields=["user", "important", "notification_channel"], name="unique_user_notification_bundle"
+            )
+        ]
+
+    def notified_recently(self) -> bool:
+        return (
+            timezone.now() - self.last_notified_at < timezone.timedelta(seconds=BUNDLED_NOTIFICATION_DELAY_SECONDS)
+            if self.last_notified_at
+            else False
+        )
+
+    def eta_is_valid(self) -> bool:
+        """
+        `eta` is the ETA of the scheduled send_bundled_notification task; it should never be earlier than the
+        current time (a 1-minute buffer is allowed).
+        If `eta` is None, there is no scheduled task.
+ """ + if not self.eta or self.eta + timezone.timedelta(minutes=1) >= timezone.now(): + return True + return False + + def get_notification_eta(self) -> datetime.datetime: + last_notified = self.last_notified_at if self.last_notified_at else timezone.now() + return last_notified + timezone.timedelta(seconds=BUNDLED_NOTIFICATION_DELAY_SECONDS) + + def append_notification(self, alert_group: "AlertGroup", notification_policy: "UserNotificationPolicy"): + self.notifications.create( + alert_group=alert_group, notification_policy=notification_policy, alert_receive_channel=alert_group.channel + ) + + @classmethod + def notification_is_bundleable(cls, notification_channel): + return notification_channel in cls.NOTIFICATION_CHANNELS_TO_BUNDLE + + +class BundledNotification(models.Model): + alert_group: "AlertGroup" + alert_receive_channel: "AlertReceiveChannel" + notification_policy: typing.Optional["UserNotificationPolicy"] + notification_bundle: "UserNotificationBundle" + + alert_group = models.ForeignKey("alerts.AlertGroup", on_delete=models.CASCADE) + alert_receive_channel = models.ForeignKey("alerts.AlertReceiveChannel", on_delete=models.CASCADE) + notification_policy = models.ForeignKey("base.UserNotificationPolicy", on_delete=models.SET_NULL, null=True) + notification_bundle = models.ForeignKey( + UserNotificationBundle, on_delete=models.CASCADE, related_name="notifications" + ) + created_at = models.DateTimeField(auto_now_add=True) + bundle_uuid = models.CharField(max_length=100, null=True, default=None, db_index=True) diff --git a/engine/apps/alerts/paging.py b/engine/apps/alerts/paging.py index 6a3cadd32e..b03f52d7a0 100644 --- a/engine/apps/alerts/paging.py +++ b/engine/apps/alerts/paging.py @@ -187,8 +187,7 @@ def unpage_user(alert_group: AlertGroup, user: User, from_user: User) -> None: user_has_notification = UserHasNotification.objects.filter( user=user, alert_group=alert_group ).select_for_update()[0] - user_has_notification.active_notification_policy_id = None - user_has_notification.save(update_fields=["active_notification_policy_id"]) + user_has_notification.update_active_task_id(task_id=None) except IndexError: return finally: diff --git a/engine/apps/alerts/tasks/notify_user.py b/engine/apps/alerts/tasks/notify_user.py index cdfd35d617..bbcec36d0b 100644 --- a/engine/apps/alerts/tasks/notify_user.py +++ b/engine/apps/alerts/tasks/notify_user.py @@ -1,23 +1,94 @@ -import time import typing from functools import partial +from uuid import uuid4 from celery.exceptions import Retry from django.conf import settings from django.db import transaction +from django.db.models import Count from django.utils import timezone from kombu.utils.uuid import uuid as celery_uuid from telegram.error import RetryAfter from apps.alerts.constants import NEXT_ESCALATION_DELAY -from apps.alerts.signals import user_notification_action_triggered_signal +from apps.alerts.tasks.send_update_log_report_signal import send_update_log_report_signal from apps.base.messaging import get_messaging_backend_from_id from apps.metrics_exporter.tasks import update_metrics_for_user from apps.phone_notifications.phone_backend import PhoneBackend from common.custom_celery_tasks import shared_dedicated_queue_retry_task +from .compare_escalations import compare_escalations from .task_logger import task_logger +if typing.TYPE_CHECKING: + from apps.alerts.models import AlertGroup, UserNotificationBundle + from apps.base.models import UserNotificationPolicy + from apps.user_management.models import User + + +def 
schedule_send_bundled_notification_task( + user_notification_bundle: "UserNotificationBundle", alert_group: "AlertGroup" +): + """Schedule a task to send bundled notifications""" + send_bundled_notification.apply_async( + (user_notification_bundle.id,), + eta=user_notification_bundle.eta, + task_id=user_notification_bundle.notification_task_id, + ) + task_logger.info( + f"Scheduled send_bundled_notification task {user_notification_bundle.notification_task_id}, " + f"user_notification_bundle: {user_notification_bundle.id}, alert_group {alert_group.id}, " + f"eta: {user_notification_bundle.eta}" + ) + + +def schedule_perform_notification_task( + log_record_pk: int, alert_group_pk: int, use_default_notification_policy_fallback: bool +): + task = perform_notification.apply_async((log_record_pk, use_default_notification_policy_fallback)) + task_logger.info( + f"Created perform_notification task {task} log_record={log_record_pk} " f"alert_group={alert_group_pk}" + ) + + +def build_notification_reason_for_log_record( + notification_policies: typing.List["UserNotificationPolicy"], reason: typing.Optional[str] +) -> str: + from apps.base.models import UserNotificationPolicy + + # Here we collect a brief overview of notification steps configured for user to send it to thread. + collected_steps_ids = [] + for next_notification_policy in notification_policies: + if next_notification_policy.step == UserNotificationPolicy.Step.NOTIFY: + if next_notification_policy.notify_by not in collected_steps_ids: + collected_steps_ids.append(next_notification_policy.notify_by) + + collected_steps = ", ".join( + UserNotificationPolicy.NotificationChannel(step_id).label for step_id in collected_steps_ids + ) + reason = ("Reason: " + reason + "\n") if reason is not None else "" + reason += ("Further notification plan: " + collected_steps) if len(collected_steps_ids) > 0 else "" + return reason + + +def update_metric_if_needed(user: "User", active_alert_group_ids: typing.List[int]): + from apps.base.models import UserNotificationPolicyLogRecord + + # get count of alert groups with only one personal log record with type "triggered" + alert_groups_with_one_log = ( + user.personal_log_records.filter( + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED, + alert_group_id__in=active_alert_group_ids, + ) + .values("alert_group") + .annotate(count=Count("alert_group")) + .filter(count=1) + .count() + ) + + if alert_groups_with_one_log > 0: + update_metrics_for_user.apply_async((user.id, alert_groups_with_one_log)) + @shared_dedicated_queue_retry_task( autoretry_for=(Exception,), retry_backoff=True, max_retries=1 if settings.DEBUG else None @@ -32,7 +103,7 @@ def notify_user_task( important=False, notify_anyway=False, ): - from apps.alerts.models import AlertGroup, UserHasNotification + from apps.alerts.models import AlertGroup, UserHasNotification, UserNotificationBundle from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord from apps.user_management.models import User @@ -44,6 +115,8 @@ def notify_user_task( countdown = 0 stop_escalation = False log_record = None + is_notification_bundled = False + user_notification_bundle = None with transaction.atomic(): try: @@ -51,8 +124,6 @@ def notify_user_task( except User.DoesNotExist: return f"notify_user_task: user {user_pk} doesn't exist" - organization = alert_group.channel.organization - if not user.is_notification_allowed: task_logger.info(f"notify_user_task: user {user.pk} notification is not allowed") 
UserNotificationPolicyLogRecord( @@ -82,21 +153,8 @@ def notify_user_task( f"notify_user_task: Failed to notify. No notification policies. user_id={user_pk} alert_group_id={alert_group_pk} important={important}" ) return - - # Here we collect a brief overview of notification steps configured for user to send it to thread. - collected_steps_ids = [] - for next_notification_policy in notification_policies: - if next_notification_policy.step == UserNotificationPolicy.Step.NOTIFY: - if next_notification_policy.notify_by not in collected_steps_ids: - collected_steps_ids.append(next_notification_policy.notify_by) - + reason = build_notification_reason_for_log_record(notification_policies, reason) notification_policy = notification_policies[0] - - collected_steps = ", ".join( - UserNotificationPolicy.NotificationChannel(step_id).label for step_id in collected_steps_ids - ) - reason = ("Reason: " + reason + "\n") if reason is not None else "" - reason += ("Further notification plan: " + collected_steps) if len(collected_steps_ids) > 0 else "" else: if notify_user_task.request.id != user_has_notification.active_notification_policy_id: task_logger.info( @@ -108,14 +166,14 @@ def notify_user_task( try: notification_policy = UserNotificationPolicy.objects.get(pk=previous_notification_policy_pk) - if notification_policy.user.organization != organization: + if notification_policy.user != user: notification_policy = UserNotificationPolicy.objects.get( order=notification_policy.order, user=user, important=important ) notification_policy = notification_policy.next() except UserNotificationPolicy.DoesNotExist: task_logger.info( - f"notify_user_taskLNotification policy {previous_notification_policy_pk} has been deleted" + f"notify_user_task: Notification policy {previous_notification_policy_pk} has been deleted" ) return reason = None @@ -138,27 +196,25 @@ def _create_notification_finished_user_notification_policy_log_record(): if notification_policy is None: stop_escalation = True log_record = _create_notification_finished_user_notification_policy_log_record() + task_logger.info(f"Personal escalation exceeded. User: {user.pk}, alert_group: {alert_group.pk}") else: if ( (alert_group.acknowledged and not notify_even_acknowledged) + or (alert_group.silenced and not notify_anyway) or alert_group.resolved or alert_group.wiped_at or alert_group.root_alert_group ): - return "Acknowledged, resolved, attached or wiped." - - if alert_group.silenced and not notify_anyway: task_logger.info( - f"notify_user_task: skip notification user {user.pk} because alert_group {alert_group.pk} is silenced" + f"notify_user_task: skip notification user {user.pk}, alert_group {alert_group.pk} is " + f"{alert_group.state} and/or attached or wiped" ) - return + return "Acknowledged, resolved, silenced, attached or wiped." 
if notification_policy.step == UserNotificationPolicy.Step.WAIT: - if notification_policy.wait_delay is not None: - delay_in_seconds = notification_policy.wait_delay.total_seconds() - else: - delay_in_seconds = 0 - countdown = delay_in_seconds + countdown = ( + notification_policy.wait_delay.total_seconds() if notification_policy.wait_delay is not None else 0 + ) log_record = _create_user_notification_policy_log_record( author=user, type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED, @@ -167,7 +223,7 @@ def _create_notification_finished_user_notification_policy_log_record(): slack_prevent_posting=prevent_posting_to_thread, notification_step=notification_policy.step, ) - task_logger.info(f"notify_user_task: Waiting {delay_in_seconds} to notify user {user.pk}") + task_logger.info(f"notify_user_task: Waiting {countdown} to notify user {user.pk}") elif notification_policy.step == UserNotificationPolicy.Step.NOTIFY: user_to_be_notified_in_slack = ( notification_policy.notify_by == UserNotificationPolicy.NotificationChannel.SLACK @@ -185,17 +241,49 @@ def _create_notification_finished_user_notification_policy_log_record(): notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_POSTING_TO_SLACK_IS_DISABLED, ) else: - log_record = _create_user_notification_policy_log_record( - author=user, - type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED, - notification_policy=notification_policy, - alert_group=alert_group, - reason=reason, - slack_prevent_posting=prevent_posting_to_thread, - notification_step=notification_policy.step, - notification_channel=notification_policy.notify_by, - ) - + if ( + settings.FEATURE_NOTIFICATION_BUNDLE_ENABLED + and UserNotificationBundle.notification_is_bundleable(notification_policy.notify_by) + ): + user_notification_bundle, _ = UserNotificationBundle.objects.select_for_update().get_or_create( + user=user, important=important, notification_channel=notification_policy.notify_by + ) + # check if notification needs to be bundled + if user_notification_bundle.notified_recently(): + user_notification_bundle.append_notification(alert_group, notification_policy) + # schedule send_bundled_notification task if it hasn't been scheduled or the task eta is + # outdated + eta_is_valid = user_notification_bundle.eta_is_valid() + if not eta_is_valid: + task_logger.warning( + f"ETA is not valid - {user_notification_bundle.eta}, " + f"user_notification_bundle {user_notification_bundle.id}, " + f"task_id {user_notification_bundle.notification_task_id}. 
" + f"Rescheduling the send_bundled_notification task" + ) + if not user_notification_bundle.notification_task_id or not eta_is_valid: + user_notification_bundle.notification_task_id = celery_uuid() + user_notification_bundle.eta = user_notification_bundle.get_notification_eta() + user_notification_bundle.save(update_fields=["notification_task_id", "eta"]) + + transaction.on_commit( + partial( + schedule_send_bundled_notification_task, user_notification_bundle, alert_group + ) + ) + is_notification_bundled = True + + if not is_notification_bundled: + log_record = _create_user_notification_policy_log_record( + author=user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED, + notification_policy=notification_policy, + alert_group=alert_group, + reason=reason, + slack_prevent_posting=prevent_posting_to_thread, + notification_step=notification_policy.step, + notification_channel=notification_policy.notify_by, + ) if log_record: # log_record is None if user notification policy step is unspecified # if this is the first notification step, and user hasn't been notified for this alert group - update metric if ( @@ -207,48 +295,41 @@ def _create_notification_finished_user_notification_policy_log_record(): ).exists() ): update_metrics_for_user.apply_async((user.id,)) - log_record.save() - if notify_user_task.request.retries == 0: - transaction.on_commit(partial(send_user_notification_signal.apply_async, (log_record.pk,))) - - def _create_perform_notification_task(log_record_pk, alert_group_pk, use_default_notification_policy_fallback): - task = perform_notification.apply_async((log_record_pk, use_default_notification_policy_fallback)) - task_logger.info( - f"Created perform_notification task {task} log_record={log_record_pk} " f"alert_group={alert_group_pk}" - ) - - def _update_user_has_notification_active_notification_policy_id(active_policy_id: typing.Optional[str]) -> None: - user_has_notification.active_notification_policy_id = active_policy_id - user_has_notification.save(update_fields=["active_notification_policy_id"]) - - def _reset_user_has_notification_active_notification_policy_id() -> None: - _update_user_has_notification_active_notification_policy_id(None) - - create_perform_notification_task = partial( - _create_perform_notification_task, - log_record.pk, - alert_group_pk, - using_fallback_default_notification_policy_step, - ) if using_fallback_default_notification_policy_step: # if we are using default notification policy, we're done escalating.. there's no further notification # policy steps in this case. 
Kick off the perform_notification task, create the
                # TYPE_PERSONAL_NOTIFICATION_FINISHED log record, and reset the active_notification_policy_id
-                transaction.on_commit(create_perform_notification_task)
+                transaction.on_commit(
+                    partial(
+                        schedule_perform_notification_task,
+                        log_record.pk,
+                        alert_group_pk,
+                        using_fallback_default_notification_policy_step,
+                    )
+                )
                _create_notification_finished_user_notification_policy_log_record()
-                _reset_user_has_notification_active_notification_policy_id()
+                user_has_notification.update_active_task_id(None)
            elif not stop_escalation:
-                if notification_policy.step != UserNotificationPolicy.Step.WAIT:
-                    transaction.on_commit(create_perform_notification_task)
+                # if the step is NOTIFY and the notification was not bundled, perform a regular notification
+                # and update the time when the user was notified
+                if notification_policy.step != UserNotificationPolicy.Step.WAIT and not is_notification_bundled:
+                    transaction.on_commit(
+                        partial(
+                            schedule_perform_notification_task,
+                            log_record.pk,
+                            alert_group_pk,
+                            using_fallback_default_notification_policy_step,
+                        )
+                    )
-                delay = NEXT_ESCALATION_DELAY
-                if countdown is not None:
-                    delay += countdown
-                task_id = celery_uuid()
+                    if user_notification_bundle:
+                        user_notification_bundle.last_notified_at = timezone.now()
+                        user_notification_bundle.save(update_fields=["last_notified_at"])
-                _update_user_has_notification_active_notification_policy_id(task_id)
+                task_id = celery_uuid()
+                user_has_notification.update_active_task_id(task_id)
                transaction.on_commit(
                    partial(
@@ -259,12 +340,12 @@ def _reset_user_has_notification_active_notification_policy_id() -> None:
                            "notify_anyway": notify_anyway,
                            "prevent_posting_to_thread": prevent_posting_to_thread,
                        },
-                        countdown=delay,
+                        countdown=countdown + NEXT_ESCALATION_DELAY,
                        task_id=task_id,
                    )
                )
            else:
-                _reset_user_has_notification_active_notification_policy_id()
+                user_has_notification.update_active_task_id(None)
 
 
 @shared_dedicated_queue_retry_task(
@@ -459,17 +540,136 @@ def perform_notification(log_record_pk, use_default_notification_policy_fallback
 
 
 @shared_dedicated_queue_retry_task(
-    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
+    autoretry_for=(Exception,), retry_backoff=True, max_retries=1 if settings.DEBUG else None
 )
-def send_user_notification_signal(log_record_pk):
-    start_time = time.time()
+def send_bundled_notification(user_notification_bundle_id: int):
+    """
+    The task filters bundled notifications, attached to the current user_notification_bundle, by active alert groups,
+    creates notification log records and updates the user_notification_bundle.
+    If there are no active alert groups, nothing else happens. If there is only one active alert group, a regular
+    notification is performed (the perform_notification task is called). Otherwise, the "send bundled notification"
+    method of the current notification channel is called.
+    """
+    from apps.alerts.models import AlertGroup, UserNotificationBundle
+    from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord
-    from apps.base.models import UserNotificationPolicyLogRecord
+    task_logger.info(f"Start send_bundled_notification for user_notification_bundle {user_notification_bundle_id}")
+    with transaction.atomic():
+        try:
+            user_notification_bundle = UserNotificationBundle.objects.filter(
+                pk=user_notification_bundle_id
+            ).select_for_update()[0]
+        except IndexError:
+            task_logger.info(
+                f"user_notification_bundle {user_notification_bundle_id} doesn't exist. 
" + f"The user associated with this notification bundle may have been deleted." + ) + return - task_logger.debug(f"LOG RECORD PK: {log_record_pk}") - task_logger.debug(f"LOG RECORD LAST: {UserNotificationPolicyLogRecord.objects.last()}") + if not compare_escalations(send_bundled_notification.request.id, user_notification_bundle.notification_task_id): + task_logger.info( + f"send_bundled_notification: notification_task_id mismatch. " + f"Duplication or non-active notification triggered. " + f"Active: {user_notification_bundle.notification_task_id}" + ) + return - log_record = UserNotificationPolicyLogRecord.objects.get(pk=log_record_pk) - user_notification_action_triggered_signal.send(sender=send_user_notification_signal, log_record=log_record) + notifications = user_notification_bundle.notifications.filter(bundle_uuid__isnull=True).select_related( + "alert_group" + ) - task_logger.debug("--- USER SIGNAL TOOK %s seconds ---" % (time.time() - start_time)) + log_records_to_create: typing.List["UserNotificationPolicyLogRecord"] = [] + skip_notification_ids: typing.List[int] = [] + active_alert_group_ids: typing.Set[int] = set() + log_record_notification_triggered = None + is_notification_allowed = user_notification_bundle.user.is_notification_allowed + + # create logs + for notification in notifications: + if notification.alert_group.status != AlertGroup.NEW: + task_logger.info(f"alert_group {notification.alert_group_id} is not active, skip notification") + skip_notification_ids.append(notification.id) + continue + elif not is_notification_allowed: + log_record_notification_failed = UserNotificationPolicyLogRecord( + author=user_notification_bundle.user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, + reason="notification is not allowed for user", + alert_group=notification.alert_group, + notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_FORBIDDEN, + ) + log_records_to_create.append(log_record_notification_failed) + active_alert_group_ids.add(notification.alert_group_id) + continue + + # collect notifications for active alert groups + active_alert_group_ids.add(notification.alert_group_id) + + log_record_notification_triggered = UserNotificationPolicyLogRecord( + author=user_notification_bundle.user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED, + alert_group=notification.alert_group, + notification_step=UserNotificationPolicy.Step.NOTIFY, + notification_channel=user_notification_bundle.notification_channel, + ) + log_records_to_create.append(log_record_notification_triggered) + + if len(log_records_to_create) == 1 and log_record_notification_triggered: + # perform regular notification + log_record_notification_triggered.save() + task_logger.info( + f"there is only one alert group in bundled notification, perform regular notification. 
" + f"alert_group {log_record_notification_triggered.alert_group_id}" + ) + transaction.on_commit( + partial( + schedule_perform_notification_task, + log_record_notification_triggered.pk, + log_record_notification_triggered.alert_group_id, + ) + ) + notifications.delete() + else: + UserNotificationPolicyLogRecord.objects.bulk_create(log_records_to_create, batch_size=5000) + + if not active_alert_group_ids or not is_notification_allowed: + task_logger.info( + f"no alert groups to notify about or notification is not allowed for user " + f"{user_notification_bundle.user_id}" + ) + notifications.delete() + else: + notifications.filter(id__in=skip_notification_ids).delete() + bundle_uuid = uuid4() + notifications.update(bundle_uuid=bundle_uuid) + task_logger.info( + f"perform bundled notification for alert groups with ids: {active_alert_group_ids}, " + f"bundle_uuid: {bundle_uuid}" + ) + if user_notification_bundle.notification_channel == UserNotificationPolicy.NotificationChannel.SMS: + PhoneBackend.notify_by_sms_bundle_async(user_notification_bundle.user, bundle_uuid) + + user_notification_bundle.notification_task_id = None + user_notification_bundle.last_notified_at = timezone.now() + user_notification_bundle.eta = None + user_notification_bundle.save(update_fields=["notification_task_id", "last_notified_at", "eta"]) + + for alert_group_id in active_alert_group_ids: + transaction.on_commit(partial(send_update_log_report_signal.apply_async, (None, alert_group_id))) + + # update metric + transaction.on_commit(partial(update_metric_if_needed, user_notification_bundle.user, active_alert_group_ids)) + + task_logger.info(f"Finished send_bundled_notification for user_notification_bundle {user_notification_bundle_id}") + + +# deprecated +@shared_dedicated_queue_retry_task( + autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None +) +def send_user_notification_signal(log_record_pk): + # Triggers user_notification_action_triggered_signal + # This signal is only connected to UserSlackRepresentative and triggers posting message to Slack about some + # FAILED notifications (see NotificationDeliveryStep and ERRORS_TO_SEND_IN_SLACK_CHANNEL). + # No need to call it here. 
+ pass diff --git a/engine/apps/alerts/tests/factories.py b/engine/apps/alerts/tests/factories.py index 2b72c80533..f07ef90046 100644 --- a/engine/apps/alerts/tests/factories.py +++ b/engine/apps/alerts/tests/factories.py @@ -13,6 +13,7 @@ Invitation, ResolutionNote, ResolutionNoteSlackMessage, + UserNotificationBundle, ) from common.utils import UniqueFaker @@ -85,3 +86,8 @@ class Meta: class InvitationFactory(factory.DjangoModelFactory): class Meta: model = Invitation + + +class UserNotificationBundleFactory(factory.DjangoModelFactory): + class Meta: + model = UserNotificationBundle diff --git a/engine/apps/alerts/tests/test_alert_group_renderer.py b/engine/apps/alerts/tests/test_alert_group_renderer.py index 911cd61fb1..8cc1249e9e 100644 --- a/engine/apps/alerts/tests/test_alert_group_renderer.py +++ b/engine/apps/alerts/tests/test_alert_group_renderer.py @@ -1,7 +1,9 @@ import pytest +from apps.alerts.incident_appearance.renderers.sms_renderer import AlertGroupSMSBundleRenderer from apps.alerts.incident_appearance.templaters import AlertSlackTemplater, AlertWebTemplater from apps.alerts.models import AlertGroup +from apps.base.models import UserNotificationPolicy from config_integrations import grafana @@ -163,3 +165,58 @@ def test_get_resolved_text( alert_group.resolve(resolved_by=source, resolved_by_user=user) assert alert_group.get_resolve_text() == expected_text.format(username=user.get_username_with_slack_verbal()) + + +@pytest.mark.django_db +def test_alert_group_sms_bundle_renderer( + make_organization_and_user, + make_alert_receive_channel, + make_alert_group, + make_user_notification_bundle, +): + organization, user = make_organization_and_user() + alert_receive_channel_1 = make_alert_receive_channel( + organization, + ) + alert_receive_channel_2 = make_alert_receive_channel( + organization, + ) + alert_group_1 = make_alert_group(alert_receive_channel_1) + alert_group_2 = make_alert_group(alert_receive_channel_1) + alert_group_3 = make_alert_group(alert_receive_channel_1) + alert_group_4 = make_alert_group(alert_receive_channel_2) + + notification_bundle = make_user_notification_bundle(user, UserNotificationPolicy.NotificationChannel.SMS) + + # render 1 alert group and 1 channel + notification_bundle.append_notification(alert_group_1, None) + renderer = AlertGroupSMSBundleRenderer(notification_bundle.notifications.all()) + message = renderer.render() + assert message == ( + f"Grafana OnCall: Alert group #{alert_group_1.inside_organization_number} " + f"from stack: {organization.stack_slug}, " + f"integration: {alert_receive_channel_1.short_name}." + ) + + # render 3 alert groups and 1 channel + notification_bundle.append_notification(alert_group_2, None) + notification_bundle.append_notification(alert_group_3, None) + renderer = AlertGroupSMSBundleRenderer(notification_bundle.notifications.all()) + message = renderer.render() + assert message == ( + f"Grafana OnCall: Alert groups #{alert_group_1.inside_organization_number}, " + f"#{alert_group_2.inside_organization_number}, #{alert_group_3.inside_organization_number} " + f"from stack: {organization.stack_slug}, " + f"integration: {alert_receive_channel_1.short_name}." 
+ ) + + # render 4 alert groups and 2 channels + notification_bundle.append_notification(alert_group_4, None) + renderer = AlertGroupSMSBundleRenderer(notification_bundle.notifications.all()) + message = renderer.render() + assert message == ( + f"Grafana OnCall: Alert groups #{alert_group_1.inside_organization_number}, " + f"#{alert_group_2.inside_organization_number}, #{alert_group_3.inside_organization_number} and 1 more " + f"from stack: {organization.stack_slug}, " + f"integrations: {alert_receive_channel_1.short_name} and 1 more." + ) diff --git a/engine/apps/alerts/tests/test_channel_filter.py b/engine/apps/alerts/tests/test_channel_filter.py index af9364534f..ba19e68894 100644 --- a/engine/apps/alerts/tests/test_channel_filter.py +++ b/engine/apps/alerts/tests/test_channel_filter.py @@ -101,3 +101,54 @@ def test_channel_filter_select_filter_labels( assert ChannelFilter.select_filter(alert_receive_channel, {"title": "Test Title", "value": 5}, labels) == ( custom_channel_filter if should_match else default_channel_filter ) + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "filtering_labels,labels,should_match", + [ + ([{"key": {"id": "1", "name": "foo"}, "value": {"id": "2", "name": "bar"}}], {"foo": "bar"}, True), + ([{"key": {"id": "1", "name": "foo"}, "value": {"id": "2", "name": "bar"}}], None, False), + (None, {"foo": "bar"}, False), + ([], {"foo": "bar"}, False), + ( + [ + {"key": {"id": "1", "name": "foo"}, "value": {"id": "2", "name": "bar"}}, + {"key": {"id": "3", "name": "bar"}, "value": {"id": "4", "name": "baz"}}, + ], + {"foo": "bar", "bar": "baz"}, + True, + ), + ( + [ + {"key": {"id": "1", "name": "foo"}, "value": {"id": "2", "name": "bar"}}, + {"key": {"id": "3", "name": "bar"}, "value": {"id": "4", "name": "bar"}}, + ], + {"foo": "bar", "bar": "baz"}, + False, + ), + ], +) +def test_channel_filter_using_filter_labels( + make_organization, + make_alert_receive_channel, + make_channel_filter, + filtering_labels, + labels, + should_match, +): + organization = make_organization() + alert_receive_channel = make_alert_receive_channel(organization) + default_channel_filter = make_channel_filter(alert_receive_channel, is_default=True) # default channel filter + custom_channel_filter = make_channel_filter( + alert_receive_channel, + filtering_labels=filtering_labels, + filtering_term_type=ChannelFilter.FILTERING_TERM_TYPE_LABELS, + is_default=False, + ) + + assert custom_channel_filter.filtering_labels == filtering_labels + + assert ChannelFilter.select_filter(alert_receive_channel, {"title": "Test Title", "value": 5}, labels) == ( + custom_channel_filter if should_match else default_channel_filter + ) diff --git a/engine/apps/alerts/tests/test_notify_user.py b/engine/apps/alerts/tests/test_notify_user.py index 7dd2b81210..e482f8964d 100644 --- a/engine/apps/alerts/tests/test_notify_user.py +++ b/engine/apps/alerts/tests/test_notify_user.py @@ -1,10 +1,11 @@ from unittest.mock import patch import pytest +from django.utils import timezone from telegram.error import RetryAfter from apps.alerts.models import AlertGroup -from apps.alerts.tasks.notify_user import notify_user_task, perform_notification +from apps.alerts.tasks.notify_user import notify_user_task, perform_notification, send_bundled_notification from apps.api.permissions import LegacyAccessControlRole from apps.base.models.user_notification_policy import UserNotificationPolicy from apps.base.models.user_notification_policy_log_record import UserNotificationPolicyLogRecord @@ -369,3 +370,207 @@ def 
test_perform_notification_use_default_notification_policy_fallback( perform_notification(log_record.pk, True) mock_notify_user.assert_called_once_with(user, alert_group, fallback_notification_policy) + + +@pytest.mark.django_db +def test_notify_user_task_notification_bundle_is_enabled( + make_organization_and_user, + make_user_for_organization, + make_user_notification_policy, + make_alert_receive_channel, + make_alert_group, + settings, +): + settings.FEATURE_NOTIFICATION_BUNDLE_ENABLED = True + organization, user_1 = make_organization_and_user() + user_2 = make_user_for_organization(organization) + make_user_notification_policy( + user=user_1, + step=UserNotificationPolicy.Step.NOTIFY, + notify_by=UserNotificationPolicy.NotificationChannel.SMS, # channel is in NOTIFICATION_CHANNELS_TO_BUNDLE + ) + make_user_notification_policy( + user=user_1, + step=UserNotificationPolicy.Step.NOTIFY, + notify_by=UserNotificationPolicy.NotificationChannel.SMS, + important=True, + ) + make_user_notification_policy( + user=user_2, + step=UserNotificationPolicy.Step.NOTIFY, + notify_by=UserNotificationPolicy.NotificationChannel.SLACK, # channel is not in NOTIFICATION_CHANNELS_TO_BUNDLE + ) + alert_receive_channel = make_alert_receive_channel(organization=organization) + alert_group_1 = make_alert_group(alert_receive_channel=alert_receive_channel) + alert_group_2 = make_alert_group(alert_receive_channel=alert_receive_channel) + assert not user_1.notification_bundles.exists() + # send 1st notification to user_1, check notification_bundle was created + # without scheduling send_bundled_notification task + notify_user_task(user_1.id, alert_group_1.id) + assert user_1.notification_bundles.count() == 1 + notification_bundle = user_1.notification_bundles.first() + assert notification_bundle.notification_task_id is None + assert not notification_bundle.notifications.exists() + # send 2nd notification to user_1, check bundled notification was attached to notification_bundle + # and send_bundled_notification was scheduled + notify_user_task(user_1.id, alert_group_2.id) + notification_bundle.refresh_from_db() + assert notification_bundle.notifications.count() == 1 + assert notification_bundle.notification_task_id is not None + # send important notification to user_1, check new notification_bundle was created + notify_user_task(user_1.id, alert_group_1.id, important=True) + assert user_1.notification_bundles.count() == 2 + important_notification_bundle = user_1.notification_bundles.get(important=True) + assert important_notification_bundle.notification_task_id is None + assert not important_notification_bundle.notifications.exists() + # send notification to user_2 (notification channel is not in NOTIFICATION_CHANNELS_TO_BUNDLE), + # check notification_bundle was not created + notify_user_task(user_2.id, alert_group_1.id) + assert not user_2.notification_bundles.exists() + + +@pytest.mark.django_db +def test_notify_user_task_notification_bundle_is_not_enabled( + make_organization_and_user, + make_user_notification_policy, + make_alert_receive_channel, + make_alert_group, + settings, +): + settings.FEATURE_NOTIFICATION_BUNDLE_ENABLED = False + organization, user = make_organization_and_user() + make_user_notification_policy( + user=user, + step=UserNotificationPolicy.Step.NOTIFY, + notify_by=UserNotificationPolicy.NotificationChannel.SMS, # channel is in NOTIFICATION_CHANNELS_TO_BUNDLE + ) + alert_receive_channel = make_alert_receive_channel(organization=organization) + alert_group = 
make_alert_group(alert_receive_channel=alert_receive_channel) + + # send notification, check notification_bundle was not created + notify_user_task(user.id, alert_group.id) + assert not user.notification_bundles.exists() + + +@pytest.mark.django_db +def test_send_bundle_notification( + make_organization_and_user, + make_user_notification_policy, + make_user_notification_bundle, + make_alert_receive_channel, + make_alert_group, + settings, + caplog, +): + settings.FEATURE_NOTIFICATION_BUNDLE_ENABLED = True + organization, user = make_organization_and_user() + notification_policy = make_user_notification_policy( + user=user, + step=UserNotificationPolicy.Step.NOTIFY, + notify_by=UserNotificationPolicy.NotificationChannel.SMS, # channel is in NOTIFICATION_CHANNELS_TO_BUNDLE + ) + alert_receive_channel = make_alert_receive_channel(organization=organization) + alert_group_1 = make_alert_group(alert_receive_channel=alert_receive_channel) + alert_group_2 = make_alert_group(alert_receive_channel=alert_receive_channel) + alert_group_3 = make_alert_group(alert_receive_channel=alert_receive_channel) + notification_bundle = make_user_notification_bundle( + user, UserNotificationPolicy.NotificationChannel.SMS, notification_task_id="test_task_id", eta=timezone.now() + ) + notification_bundle.append_notification(alert_group_1, notification_policy) + notification_bundle.append_notification(alert_group_2, notification_policy) + notification_bundle.append_notification(alert_group_3, notification_policy) + assert notification_bundle.notifications.filter(bundle_uuid__isnull=True).count() == 3 + alert_group_3.resolve() + with patch("apps.alerts.tasks.notify_user.compare_escalations", return_value=True): + # send notification for 2 active alert groups + send_bundled_notification(notification_bundle.id) + assert f"alert_group {alert_group_3.id} is not active, skip notification" in caplog.text + assert "perform bundled notification for alert groups with ids:" in caplog.text + # check bundle_uuid was set, notification for resolved alert group was deleted + assert notification_bundle.notifications.filter(bundle_uuid__isnull=True).count() == 0 + assert notification_bundle.notifications.all().count() == 2 + assert not notification_bundle.notifications.filter(alert_group=alert_group_3).exists() + + # send notification for 1 active alert group + notification_bundle.notifications.update(bundle_uuid=None) + alert_group_2.resolve() + send_bundled_notification(notification_bundle.id) + assert f"alert_group {alert_group_2.id} is not active, skip notification" in caplog.text + assert ( + f"there is only one alert group in bundled notification, perform regular notification. 
" + f"alert_group {alert_group_1.id}" + ) in caplog.text + # check all notifications were deleted + assert notification_bundle.notifications.all().count() == 0 + + # send notification for 0 active alert group + notification_bundle.append_notification(alert_group_1, notification_policy) + alert_group_1.resolve() + send_bundled_notification(notification_bundle.id) + assert f"alert_group {alert_group_1.id} is not active, skip notification" in caplog.text + assert f"no alert groups to notify about or notification is not allowed for user {user.id}" in caplog.text + # check all notifications were deleted + assert notification_bundle.notifications.all().count() == 0 + + +@pytest.mark.django_db +def test_send_bundle_notification_task_id_mismatch( + make_organization_and_user, + make_user_notification_bundle, + settings, + caplog, +): + settings.FEATURE_NOTIFICATION_BUNDLE_ENABLED = True + organization, user = make_organization_and_user() + notification_bundle = make_user_notification_bundle( + user, UserNotificationPolicy.NotificationChannel.SMS, notification_task_id="test_task_id", eta=timezone.now() + ) + send_bundled_notification(notification_bundle.id) + assert ( + f"send_bundled_notification: notification_task_id mismatch. " + f"Duplication or non-active notification triggered. " + f"Active: {notification_bundle.notification_task_id}" + ) in caplog.text + + +@pytest.mark.django_db +def test_notify_user_task_notification_bundle_eta_is_outdated( + make_organization_and_user, + make_user_for_organization, + make_user_notification_policy, + make_user_notification_bundle, + make_alert_receive_channel, + make_alert_group, + settings, +): + settings.FEATURE_NOTIFICATION_BUNDLE_ENABLED = True + organization, user = make_organization_and_user() + notification_policy = make_user_notification_policy( + user=user, + step=UserNotificationPolicy.Step.NOTIFY, + notify_by=UserNotificationPolicy.NotificationChannel.SMS, # channel is in NOTIFICATION_CHANNELS_TO_BUNDLE + ) + alert_receive_channel = make_alert_receive_channel(organization=organization) + alert_group_1 = make_alert_group(alert_receive_channel=alert_receive_channel) + alert_group_2 = make_alert_group(alert_receive_channel=alert_receive_channel) + now = timezone.now() + outdated_eta = now - timezone.timedelta(minutes=5) + test_task_id = "test_task_id" + notification_bundle = make_user_notification_bundle( + user, + UserNotificationPolicy.NotificationChannel.SMS, + eta=outdated_eta, + notification_task_id=test_task_id, + last_notified_at=now, + ) + notification_bundle.append_notification(alert_group_1, notification_policy) + assert not notification_bundle.eta_is_valid() + assert notification_bundle.notifications.count() == 1 + + # call notify_user_task and check that new notification task for notification_bundle was scheduled + notify_user_task(user.id, alert_group_2.id) + notification_bundle.refresh_from_db() + assert notification_bundle.eta_is_valid() + assert notification_bundle.notification_task_id != test_task_id + assert notification_bundle.last_notified_at == now + assert notification_bundle.notifications.count() == 2 diff --git a/engine/apps/api/serializers/channel_filter.py b/engine/apps/api/serializers/channel_filter.py index 7815a584ae..7ff0f0f12e 100644 --- a/engine/apps/api/serializers/channel_filter.py +++ b/engine/apps/api/serializers/channel_filter.py @@ -3,6 +3,7 @@ from rest_framework import serializers from apps.alerts.models import AlertReceiveChannel, ChannelFilter, EscalationChain +from apps.api.serializers.labels import 
LabelPairSerializer from apps.base.messaging import get_messaging_backend_from_id from apps.telegram.models import TelegramToOrganizationConnector from common.api_helpers.custom_fields import OrganizationFilteredPrimaryKeyRelatedField @@ -30,6 +31,7 @@ class ChannelFilterSerializer(EagerLoadingMixin, serializers.ModelSerializer): telegram_channel_details = serializers.SerializerMethodField() filtering_term_as_jinja2 = serializers.SerializerMethodField() filtering_term = serializers.CharField(required=False, allow_null=True, allow_blank=True) + filtering_labels = LabelPairSerializer(many=True, required=False) SELECT_RELATED = ["escalation_chain", "alert_receive_channel"] @@ -41,6 +43,7 @@ class Meta: "escalation_chain", "slack_channel", "created_at", + "filtering_labels", "filtering_term", "filtering_term_type", "telegram_channel", @@ -75,6 +78,10 @@ def validate(self, data): if filtering_term is not None: if not is_regex_valid(filtering_term): raise serializers.ValidationError(["Regular expression is incorrect"]) + elif filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_LABELS: + filtering_labels = data.get("filtering_labels") + if filtering_labels is None: + raise serializers.ValidationError(["Filtering labels field is required"]) else: raise serializers.ValidationError(["Expression type is incorrect"]) return data @@ -134,14 +141,21 @@ def validate_notification_backends(self, notification_backends): return notification_backends def get_filtering_term_as_jinja2(self, obj): - """ - Returns the regex filtering term as a jinja2, for the preview before migration from regex to jinja2""" + """Returns the regex or labels filtering term as a jinja2, for preview purposes.""" if obj.filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_JINJA2: return obj.filtering_term elif obj.filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_REGEX: # Four curly braces will result in two curly braces in the final string # rf"..." 
is a raw f string, to keep original filtering_term return rf'{{{{ payload | json_dumps | regex_search("{obj.filtering_term}") }}}}' + elif obj.filtering_labels and obj.filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_LABELS: + # required labels + labels = [ + f"labels.{item['key']['name']} and labels.{item['key']['name']} == '{item['value']['name']}'" + for item in obj.filtering_labels + ] + template = "{{{{ {conditions} }}}}".format(conditions=" and ".join(labels)) + return template class ChannelFilterCreateSerializer(ChannelFilterSerializer): @@ -156,6 +170,7 @@ class Meta: "escalation_chain", "slack_channel", "created_at", + "filtering_labels", "filtering_term", "filtering_term_type", "telegram_channel", diff --git a/engine/apps/api/tests/test_channel_filter.py b/engine/apps/api/tests/test_channel_filter.py index 2fd0944d94..9cd4e08806 100644 --- a/engine/apps/api/tests/test_channel_filter.py +++ b/engine/apps/api/tests/test_channel_filter.py @@ -554,6 +554,99 @@ def test_channel_filter_convert_from_regex_to_jinja2( assert bool(jinja2_channel_filter.is_satisfying(payload)) is True +@pytest.mark.django_db +def test_channel_filter_labels_filter( + make_organization_and_user_with_plugin_token, + make_alert_receive_channel, + make_channel_filter, + make_user_auth_headers, +): + organization, user, token = make_organization_and_user_with_plugin_token() + alert_receive_channel = make_alert_receive_channel(organization) + + default_channel_filter = make_channel_filter(alert_receive_channel, is_default=True) + filtering_labels = [ + {"key": {"id": "1", "name": "foo", "prescribed": True}, "value": {"id": "2", "name": "bar"}}, + {"key": {"id": "3", "name": "bar"}, "value": {"id": "4", "name": "baz", "prescribed": True}}, + ] + label_channel_filter = make_channel_filter( + alert_receive_channel, + is_default=False, + filtering_labels=filtering_labels, + filtering_term_type=ChannelFilter.FILTERING_TERM_TYPE_LABELS, + ) + + client = APIClient() + url = reverse("api-internal:channel_filter-detail", kwargs={"pk": label_channel_filter.public_primary_key}) + response = client.get(url, format="json", **make_user_auth_headers(user, token)) + response_data = response.json() + assert response.status_code == status.HTTP_200_OK + expected_jinja2_template = "{{ labels.foo and labels.foo == 'bar' and labels.bar and labels.bar == 'baz' }}" + assert response_data["filtering_term_as_jinja2"] == expected_jinja2_template + assert response_data["filtering_term_type"] == ChannelFilter.FILTERING_TERM_TYPE_LABELS + # returned labels key/value will have a prescribed=False if not set + for item in filtering_labels: + if "prescribed" not in item["key"]: + item["key"]["prescribed"] = False + if "prescribed" not in item["value"]: + item["value"]["prescribed"] = False + assert response_data["filtering_labels"] == filtering_labels + + url = reverse("api-internal:channel_filter-detail", kwargs={"pk": default_channel_filter.public_primary_key}) + response = client.get(url, format="json", **make_user_auth_headers(user, token)) + response_data = response.json() + assert response.status_code == status.HTTP_200_OK + assert response_data["filtering_labels"] is None + + +@pytest.mark.django_db +def test_update_channel_filter_labels_filter( + make_organization_and_user_with_plugin_token, + make_alert_receive_channel, + make_channel_filter, + make_user_auth_headers, +): + organization, user, token = make_organization_and_user_with_plugin_token() + alert_receive_channel = make_alert_receive_channel(organization) + + 
make_channel_filter(alert_receive_channel, is_default=True) + label_channel_filter = make_channel_filter(alert_receive_channel, is_default=False) + + client = APIClient() + filtering_labels = [{"key": {"id": "1", "name": "foo"}, "value": {"id": "2", "name": "bar"}}] + data = { + "filtering_labels": filtering_labels, + "filtering_term_type": ChannelFilter.FILTERING_TERM_TYPE_LABELS, + } + url = reverse("api-internal:channel_filter-detail", kwargs={"pk": label_channel_filter.public_primary_key}) + response = client.put(url, data=data, format="json", **make_user_auth_headers(user, token)) + response_data = response.json() + assert response.status_code == status.HTTP_200_OK + assert response_data["filtering_term_type"] == ChannelFilter.FILTERING_TERM_TYPE_LABELS + filtering_labels[0]["key"]["prescribed"] = False + filtering_labels[0]["value"]["prescribed"] = False + assert response_data["filtering_labels"] == filtering_labels + + empty_labels = { + "filtering_labels": [], + "filtering_term_type": ChannelFilter.FILTERING_TERM_TYPE_LABELS, + } + url = reverse("api-internal:channel_filter-detail", kwargs={"pk": label_channel_filter.public_primary_key}) + response = client.put(url, data=empty_labels, format="json", **make_user_auth_headers(user, token)) + response_data = response.json() + assert response_data["filtering_labels"] == [] + assert response.status_code == status.HTTP_200_OK + + invalid_data = { + "filtering_labels": "key1&key2=value2", + "filtering_term_type": ChannelFilter.FILTERING_TERM_TYPE_LABELS, + } + url = reverse("api-internal:channel_filter-detail", kwargs={"pk": label_channel_filter.public_primary_key}) + response = client.put(url, data=invalid_data, format="json", **make_user_auth_headers(user, token)) + response_data = response.json() + assert response.status_code == status.HTTP_400_BAD_REQUEST + + @pytest.mark.django_db def test_channel_filter_long_filtering_term( make_organization_and_user_with_plugin_token, diff --git a/engine/apps/exotel/status_callback.py b/engine/apps/exotel/status_callback.py index 4132075cde..90411f4fee 100644 --- a/engine/apps/exotel/status_callback.py +++ b/engine/apps/exotel/status_callback.py @@ -15,7 +15,7 @@ def get_call_status_callback_url(): def update_exotel_call_status(call_id: str, call_status: str, user_choice: Optional[str] = None): - from apps.base.models import UserNotificationPolicyLogRecord + from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord status_code = ExotelCallStatuses.DETERMINANT.get(call_status) if status_code is None: @@ -62,12 +62,8 @@ def update_exotel_call_status(call_id: str, call_status: str, user_choice: Optio author=phone_call_record.receiver, notification_policy=phone_call_record.notification_policy, alert_group=phone_call_record.represents_alert_group, - notification_step=phone_call_record.notification_policy.step - if phone_call_record.notification_policy - else None, - notification_channel=phone_call_record.notification_policy.notify_by - if phone_call_record.notification_policy - else None, + notification_step=UserNotificationPolicy.Step.NOTIFY, + notification_channel=UserNotificationPolicy.NotificationChannel.PHONE_CALL, ) log_record.save() logger.info( diff --git a/engine/apps/grafana_plugin/tasks/sync.py b/engine/apps/grafana_plugin/tasks/sync.py index 35754ee9d1..c33c85414c 100644 --- a/engine/apps/grafana_plugin/tasks/sync.py +++ b/engine/apps/grafana_plugin/tasks/sync.py @@ -20,7 +20,7 @@ # to make sure that orgs are synced every 30 minutes, SYNC_PERIOD should be a little 
lower
 SYNC_PERIOD = timezone.timedelta(minutes=25)
 INACTIVE_PERIOD = timezone.timedelta(minutes=55)
-CLEANUP_PERIOD = timezone.timedelta(hours=12)
+CLEANUP_PERIOD = timezone.timedelta(hours=13)


 @shared_dedicated_queue_retry_task(autoretry_for=(Exception,), retry_backoff=True, max_retries=0)
@@ -168,6 +168,7 @@ def cleanup_empty_deleted_integrations(organization_pk, dry_run=True):

 @shared_dedicated_queue_retry_task(autoretry_for=(Exception,), max_retries=0)
 def start_cleanup_organizations():
+    # TODO: Remove next release after tasks revoked
     cleanup_threshold = timezone.now() - INACTIVE_PERIOD
     organization_qs = Organization.objects.filter(last_time_synced__lte=cleanup_threshold)
     organization_pks = organization_qs.values_list("pk", flat=True)
@@ -176,3 +177,15 @@
     for idx, organization_pk in enumerate(organization_pks):
         countdown = idx % max_countdown  # Spread orgs evenly
         cleanup_organization_async.apply_async((organization_pk,), countdown=countdown)
+
+
+@shared_dedicated_queue_retry_task(autoretry_for=(Exception,), max_retries=0)
+def start_cleanup_deleted_integrations():
+    cleanup_threshold = timezone.now() - CLEANUP_PERIOD
+    channels_qs = AlertReceiveChannel.objects_with_deleted.filter(deleted_at__gte=cleanup_threshold)
+    organization_pks = set(channels_qs.values_list("organization_id", flat=True))
+    logger.debug(f"Found {len(organization_pks)} organizations")
+    for organization_pk in organization_pks:
+        cleanup_empty_deleted_integrations.apply_async(
+            (organization_pk, False),
+        )
diff --git a/engine/apps/metrics_exporter/helpers.py b/engine/apps/metrics_exporter/helpers.py
index 7baf85a607..db1164bcb7 100644
--- a/engine/apps/metrics_exporter/helpers.py
+++ b/engine/apps/metrics_exporter/helpers.py
@@ -327,8 +327,11 @@ def metrics_update_alert_groups_response_time_cache(integrations_response_time:
     cache.set(metric_alert_groups_response_time_key, metric_alert_groups_response_time, timeout=metrics_cache_timeout)


-def metrics_update_user_cache(user):
-    """Update "user_was_notified_of_alert_groups" metric cache."""
+def metrics_update_user_cache(user, counter=1):
+    """
+    Increase the "user_was_notified_of_alert_groups" metric cache by `counter`.
+    `counter` shows how many alert groups the user was notified of.
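+
+    Illustrative (hypothetical) usage: notifying a user about three alert groups
+    in a single bundle would be recorded with ``metrics_update_user_cache(user, counter=3)``.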
+ """ metrics_cache_timeout = get_metrics_cache_timeout(user.organization_id) metric_user_was_notified_key = get_metric_user_was_notified_of_alert_groups_key(user.organization_id) metric_user_was_notified: typing.Dict[int, UserWasNotifiedOfAlertGroupsMetricsDict] = cache.get( @@ -344,6 +347,6 @@ def metrics_update_user_cache(user): "id": user.organization.stack_id, "counter": 0, }, - )["counter"] += 1 + )["counter"] += counter cache.set(metric_user_was_notified_key, metric_user_was_notified, timeout=metrics_cache_timeout) diff --git a/engine/apps/metrics_exporter/tasks.py b/engine/apps/metrics_exporter/tasks.py index af1f6393a1..1663ddecc1 100644 --- a/engine/apps/metrics_exporter/tasks.py +++ b/engine/apps/metrics_exporter/tasks.py @@ -285,8 +285,8 @@ def update_metrics_for_alert_group(alert_group_id, organization_id, previous_sta @shared_dedicated_queue_retry_task( autoretry_for=(Exception,), retry_backoff=True, max_retries=1 if settings.DEBUG else 10 ) -def update_metrics_for_user(user_id): +def update_metrics_for_user(user_id, counter=1): from apps.user_management.models import User user = User.objects.get(id=user_id) - metrics_update_user_cache(user) + metrics_update_user_cache(user, counter) diff --git a/engine/apps/metrics_exporter/tests/test_update_metrics_cache.py b/engine/apps/metrics_exporter/tests/test_update_metrics_cache.py index 7292f953ea..84a5a34237 100644 --- a/engine/apps/metrics_exporter/tests/test_update_metrics_cache.py +++ b/engine/apps/metrics_exporter/tests/test_update_metrics_cache.py @@ -6,6 +6,7 @@ from apps.alerts.signals import alert_group_created_signal from apps.alerts.tasks import notify_user_task +from apps.alerts.tasks.notify_user import update_metric_if_needed from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord from apps.metrics_exporter.constants import NO_SERVICE_VALUE, SERVICE_LABEL from apps.metrics_exporter.helpers import ( @@ -607,6 +608,82 @@ def get_called_arg_index_and_compare_results(cache_was_updated=False): arg_idx = get_called_arg_index_and_compare_results() +@override_settings(CELERY_TASK_ALWAYS_EAGER=True) +@pytest.mark.django_db +def test_update_metric_if_needed( + mock_apply_async, + make_organization, + make_user_for_organization, + make_alert_receive_channel, + make_alert_group, + make_user_notification_policy_log_record, + make_user_was_notified_metrics_cache_params, + monkeypatch, + mock_get_metrics_cache, +): + organization = make_organization( + org_id=METRICS_TEST_ORG_ID, + stack_slug=METRICS_TEST_INSTANCE_SLUG, + stack_id=METRICS_TEST_INSTANCE_ID, + ) + alert_receive_channel = make_alert_receive_channel( + organization, + verbal_name=METRICS_TEST_INTEGRATION_NAME, + ) + user = make_user_for_organization(organization, username=METRICS_TEST_USER_USERNAME) + + alert_group_1 = make_alert_group(alert_receive_channel) + alert_group_2 = make_alert_group(alert_receive_channel) + alert_group_3 = make_alert_group(alert_receive_channel) + + # create 1 log record for alert_group_1, 2 log records for alert_group_2 and 3 log records for alert_group_3 + for idx, alert_group in enumerate([alert_group_1, alert_group_2, alert_group_3], start=1): + for _ in range(idx): + make_user_notification_policy_log_record( + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED, + author=user, + alert_group=alert_group, + ) + + metrics_cache = make_user_was_notified_metrics_cache_params(user.id, organization.id) + monkeypatch.setattr(cache, "get", metrics_cache) + + metric_user_was_notified_key = 
get_metric_user_was_notified_of_alert_groups_key(organization.id) + + expected_result_metric_user_was_notified = { + user.id: { + "org_id": organization.org_id, + "slug": organization.stack_slug, + "id": organization.stack_id, + "user_username": METRICS_TEST_USER_USERNAME, + "counter": 1, + } + } + + with patch("apps.metrics_exporter.tasks.cache.set") as mock_cache_set: + # check current metric value + assert cache.get(metric_user_was_notified_key, {}) == expected_result_metric_user_was_notified + + expected_result_metric_user_was_notified[user.id]["counter"] += 1 + update_metric_if_needed(user, [alert_group_1.id, alert_group_2.id, alert_group_3.id]) + + # check user_was_notified_of_alert_groups metric counter was increased by 1 + # (for alert_group_1 that has only one log record with type "triggered") + mock_cache_set.assert_called_once() + mock_cache_set_called_args = mock_cache_set.call_args_list + assert mock_cache_set_called_args[0].args[0] == metric_user_was_notified_key + assert mock_cache_set_called_args[0].args[1] == expected_result_metric_user_was_notified + # create new log record for alert_group_1 + make_user_notification_policy_log_record( + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED, + author=user, + alert_group=alert_group_1, + ) + # check metric was not updated + update_metric_if_needed(user, [alert_group_1.id, alert_group_2.id, alert_group_3.id]) + mock_cache_set.assert_called_once() + + @pytest.mark.django_db def test_metrics_add_integrations_to_cache(make_organization, make_alert_receive_channel): organization = make_organization( diff --git a/engine/apps/phone_notifications/migrations/0003_smsrecord_represents_bundle_uuid.py b/engine/apps/phone_notifications/migrations/0003_smsrecord_represents_bundle_uuid.py new file mode 100644 index 0000000000..cf5c89ea27 --- /dev/null +++ b/engine/apps/phone_notifications/migrations/0003_smsrecord_represents_bundle_uuid.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.10 on 2024-07-03 08:15 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('phone_notifications', '0002_bannedphonenumber'), + ] + + operations = [ + migrations.AddField( + model_name='smsrecord', + name='represents_bundle_uuid', + field=models.CharField(db_index=True, default=None, max_length=100, null=True), + ), + ] diff --git a/engine/apps/phone_notifications/models/sms.py b/engine/apps/phone_notifications/models/sms.py index 9763d706f9..2eabc9eec9 100644 --- a/engine/apps/phone_notifications/models/sms.py +++ b/engine/apps/phone_notifications/models/sms.py @@ -45,7 +45,7 @@ class Meta: notification_policy = models.ForeignKey( "base.UserNotificationPolicy", on_delete=models.SET_NULL, null=True, default=None ) - + represents_bundle_uuid = models.CharField(max_length=100, null=True, default=None, db_index=True) receiver = models.ForeignKey("user_management.User", on_delete=models.CASCADE, null=True, default=None) grafana_cloud_notification = models.BooleanField(default=False) created_at = models.DateTimeField(auto_now_add=True) diff --git a/engine/apps/phone_notifications/phone_backend.py b/engine/apps/phone_notifications/phone_backend.py index c3db109426..19d6bc88df 100644 --- a/engine/apps/phone_notifications/phone_backend.py +++ b/engine/apps/phone_notifications/phone_backend.py @@ -1,14 +1,15 @@ import logging -from typing import Optional +from typing import Optional, Tuple import requests from django.conf import settings from 
apps.alerts.incident_appearance.renderers.phone_call_renderer import AlertGroupPhoneCallRenderer -from apps.alerts.incident_appearance.renderers.sms_renderer import AlertGroupSmsRenderer +from apps.alerts.incident_appearance.renderers.sms_renderer import AlertGroupSMSBundleRenderer, AlertGroupSmsRenderer from apps.alerts.signals import user_notification_action_triggered_signal from apps.base.utils import live_settings from common.api_helpers.utils import create_engine_url +from common.custom_celery_tasks import shared_dedicated_queue_retry_task from common.utils import clean_markup from .exceptions import ( @@ -27,6 +28,19 @@ logger = logging.getLogger(__name__) +@shared_dedicated_queue_retry_task( + autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None +) +def notify_by_sms_bundle_async_task(user_id, bundle_uuid): + from apps.user_management.models import User + + user = User.objects.filter(id=user_id).first() + if not user: + return + phone_backend = PhoneBackend() + phone_backend.notify_by_sms_bundle(user, bundle_uuid) + + class PhoneBackend: def __init__(self): self.phone_provider: PhoneProvider = self._get_phone_provider() @@ -148,16 +162,90 @@ def notify_by_sms(self, user, alert_group, notification_policy): from apps.base.models import UserNotificationPolicyLogRecord - log_record_error_code = None - renderer = AlertGroupSmsRenderer(alert_group) message = renderer.render() + _, log_record_error_code = self._send_sms( + user=user, + alert_group=alert_group, + notification_policy=notification_policy, + message=message, + ) + + if log_record_error_code is not None: + log_record = UserNotificationPolicyLogRecord( + author=user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, + notification_policy=notification_policy, + alert_group=alert_group, + notification_error_code=log_record_error_code, + notification_step=notification_policy.step if notification_policy else None, + notification_channel=notification_policy.notify_by if notification_policy else None, + ) + log_record.save() + user_notification_action_triggered_signal.send(sender=PhoneBackend.notify_by_sms, log_record=log_record) + @staticmethod + def notify_by_sms_bundle_async(user, bundle_uuid): + notify_by_sms_bundle_async_task.apply_async((user.id, bundle_uuid)) + + def notify_by_sms_bundle(self, user, bundle_uuid): + """ + notify_by_sms_bundle sends an sms notification bundle to a user using configured phone provider. + It handles business logic - limits, cloud notifications and UserNotificationPolicyLogRecord creation. + It creates UserNotificationPolicyLogRecord for every notification in bundle, but only one SMSRecord. + SMS itself is handled by phone provider. 
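+
+    Illustrative flow, based on the code defined above in this module: callers use
+    PhoneBackend.notify_by_sms_bundle_async(user, bundle_uuid), which schedules
+    notify_by_sms_bundle_async_task; that task loads the user and calls this method.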
+ """ + + from apps.alerts.models import BundledNotification + from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord + + notifications = BundledNotification.objects.filter(bundle_uuid=bundle_uuid).select_related("alert_group") + + if not notifications: + logger.info("Notification bundle is empty, related alert groups might have been deleted") + return + renderer = AlertGroupSMSBundleRenderer(notifications) + message = renderer.render() + + _, log_record_error_code = self._send_sms(user=user, message=message, bundle_uuid=bundle_uuid) + + if log_record_error_code is not None: + log_records_to_create = [] + for notification in notifications: + log_record = UserNotificationPolicyLogRecord( + author=user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, + notification_policy=notification.notification_policy, + alert_group=notification.alert_group, + notification_error_code=log_record_error_code, + notification_step=UserNotificationPolicy.Step.NOTIFY, + notification_channel=UserNotificationPolicy.NotificationChannel.SMS, + ) + log_records_to_create.append(log_record) + if log_records_to_create: + if log_record_error_code in UserNotificationPolicyLogRecord.ERRORS_TO_SEND_IN_SLACK_CHANNEL: + # create last log record outside of the bulk_create to get it as an object to send + # the user_notification_action_triggered_signal + log_record = log_records_to_create.pop() + log_record.save() + user_notification_action_triggered_signal.send( + sender=PhoneBackend.notify_by_sms_bundle, log_record=log_record + ) + + UserNotificationPolicyLogRecord.objects.bulk_create(log_records_to_create, batch_size=5000) + + def _send_sms( + self, user, message, alert_group=None, notification_policy=None, bundle_uuid=None + ) -> Tuple[bool, Optional[int]]: + from apps.base.models import UserNotificationPolicyLogRecord + + log_record_error_code = None record = SMSRecord( represents_alert_group=alert_group, receiver=user, notification_policy=notification_policy, exceeded_limit=False, + represents_bundle_uuid=bundle_uuid, ) try: @@ -180,18 +268,7 @@ def notify_by_sms(self, user, alert_group, notification_policy): except NumberNotVerified: log_record_error_code = UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_PHONE_NUMBER_IS_NOT_VERIFIED - if log_record_error_code is not None: - log_record = UserNotificationPolicyLogRecord( - author=user, - type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, - notification_policy=notification_policy, - alert_group=alert_group, - notification_error_code=log_record_error_code, - notification_step=notification_policy.step if notification_policy else None, - notification_channel=notification_policy.notify_by if notification_policy else None, - ) - log_record.save() - user_notification_action_triggered_signal.send(sender=PhoneBackend.notify_by_sms, log_record=log_record) + return log_record_error_code is None, log_record_error_code def _notify_by_provider_sms(self, user, message) -> Optional[ProviderSMS]: """ diff --git a/engine/apps/phone_notifications/tests/test_phone_backend_sms.py b/engine/apps/phone_notifications/tests/test_phone_backend_sms.py index ff9b07aa99..f8b52006f6 100644 --- a/engine/apps/phone_notifications/tests/test_phone_backend_sms.py +++ b/engine/apps/phone_notifications/tests/test_phone_backend_sms.py @@ -1,7 +1,9 @@ from unittest import mock +from unittest.mock import patch import pytest from django.test import override_settings +from django.utils import timezone from apps.base.models import 
UserNotificationPolicy, UserNotificationPolicyLogRecord from apps.phone_notifications.exceptions import ( @@ -234,3 +236,62 @@ def test_notify_by_cloud_sms_handles_exceptions_from_cloud( ).count() == 1 ) + + +@pytest.mark.django_db +def test_notify_by_sms_bundle( + make_organization_and_user, + make_alert_receive_channel, + make_alert_group, + make_user_notification_bundle, + make_user_notification_policy, +): + organization, user = make_organization_and_user() + alert_receive_channel = make_alert_receive_channel(organization) + alert_group_1 = make_alert_group(alert_receive_channel) + alert_group_2 = make_alert_group(alert_receive_channel) + notification_policy = make_user_notification_policy( + user=user, + step=UserNotificationPolicy.Step.NOTIFY, + notify_by=UserNotificationPolicy.NotificationChannel.SMS, + ) + notification_bundle = make_user_notification_bundle( + user, UserNotificationPolicy.NotificationChannel.SMS, notification_task_id="test_task_id", eta=timezone.now() + ) + notification_bundle.append_notification(alert_group_1, notification_policy) + notification_bundle.append_notification(alert_group_2, notification_policy) + + bundle_uuid = "test_notifications_bundle" + + notification_bundle.notifications.update(bundle_uuid=bundle_uuid) + + assert not user.personal_log_records.exists() + assert not user.smsrecord_set.exists() + + with patch( + "apps.phone_notifications.phone_backend.PhoneBackend._notify_by_cloud_sms", side_effect=SMSLimitExceeded + ): + phone_backend = PhoneBackend() + phone_backend.notify_by_sms_bundle(user, bundle_uuid) + + # check that 2 error log records (1 for each bundled notification) and 1 sms record have been created + assert ( + user.personal_log_records.filter( + notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_SMS_LIMIT_EXCEEDED + ).count() + == notification_bundle.notifications.count() + == 2 + ) + assert user.smsrecord_set.count() == 1 + + with patch("apps.phone_notifications.phone_backend.PhoneBackend._notify_by_cloud_sms"): + phone_backend = PhoneBackend() + phone_backend.notify_by_sms_bundle(user, bundle_uuid) + + # check that 0 new error log records and 1 new sms record have been created + assert ( + user.personal_log_records.filter(notification_error_code__isnull=False).count() + == notification_bundle.notifications.count() + == 2 + ) + assert user.smsrecord_set.count() == 2 diff --git a/engine/apps/twilioapp/status_callback.py b/engine/apps/twilioapp/status_callback.py index 55de098af6..fcd16a5c1c 100644 --- a/engine/apps/twilioapp/status_callback.py +++ b/engine/apps/twilioapp/status_callback.py @@ -2,7 +2,8 @@ from django.urls import reverse -from apps.alerts.signals import user_notification_action_triggered_signal +from apps.alerts.models import BundledNotification +from apps.alerts.tasks import send_update_log_report_signal from apps.twilioapp.models import TwilioCallStatuses, TwilioPhoneCall, TwilioSMS, TwilioSMSstatuses from common.api_helpers.utils import create_engine_url @@ -20,7 +21,7 @@ def update_twilio_call_status(call_sid, call_status): Returns: """ - from apps.base.models import UserNotificationPolicyLogRecord + from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord if call_sid and call_status: logger.info(f"twilioapp.update_twilio_call_status: processing sid={call_sid} status={call_status}") @@ -68,19 +69,14 @@ def update_twilio_call_status(call_sid, call_status): author=phone_call_record.receiver, notification_policy=phone_call_record.notification_policy, 
alert_group=phone_call_record.represents_alert_group, - notification_step=phone_call_record.notification_policy.step - if phone_call_record.notification_policy - else None, - notification_channel=phone_call_record.notification_policy.notify_by - if phone_call_record.notification_policy - else None, + notification_step=UserNotificationPolicy.Step.NOTIFY, + notification_channel=UserNotificationPolicy.NotificationChannel.PHONE_CALL, ) log_record.save() logger.info( f"twilioapp.update_twilio_call_status: created log_record log_record_id={log_record.id} " f"type={log_record_type}" ) - user_notification_action_triggered_signal.send(sender=update_twilio_call_status, log_record=log_record) def get_error_code_by_twilio_status(status): @@ -106,7 +102,7 @@ def update_twilio_sms_status(message_sid, message_status): Returns: """ - from apps.base.models import UserNotificationPolicyLogRecord + from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord if message_sid and message_status: logger.info(f"twilioapp.update_twilio_message_status: processing sid={message_sid} status={message_status}") @@ -143,23 +139,44 @@ def update_twilio_sms_status(message_sid, message_status): log_record_type = UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED log_record_error_code = get_sms_error_code_by_twilio_status(status_code) if log_record_type is not None: - log_record = UserNotificationPolicyLogRecord( - type=log_record_type, - notification_error_code=log_record_error_code, - author=sms_record.receiver, - notification_policy=sms_record.notification_policy, - alert_group=sms_record.represents_alert_group, - notification_step=sms_record.notification_policy.step if sms_record.notification_policy else None, - notification_channel=sms_record.notification_policy.notify_by - if sms_record.notification_policy - else None, - ) - log_record.save() - logger.info( - f"twilioapp.update_twilio_sms_status: created log_record log_record_id={log_record.id} " - f"type={log_record_type}" - ) - user_notification_action_triggered_signal.send(sender=update_twilio_sms_status, log_record=log_record) + if sms_record.represents_bundle_uuid: + notifications = BundledNotification.objects.filter(bundle_uuid=sms_record.represents_bundle_uuid) + log_records_to_create = [] + for notification in notifications: + log_record = UserNotificationPolicyLogRecord( + type=log_record_type, + notification_error_code=log_record_error_code, + author=sms_record.receiver, + notification_policy=notification.notification_policy, + alert_group=notification.alert_group, + notification_step=UserNotificationPolicy.Step.NOTIFY, + notification_channel=UserNotificationPolicy.NotificationChannel.SMS, + ) + log_records_to_create.append(log_record) + # send send_update_log_report_signal with 10 seconds delay + send_update_log_report_signal.apply_async( + kwargs={"alert_group_pk": notification.alert_group_id}, countdown=10 + ) + UserNotificationPolicyLogRecord.objects.bulk_create(log_records_to_create, batch_size=5000) + logger.info( + f"twilioapp.update_twilio_sms_status: created log_records for sms bundle " + f"{sms_record.represents_bundle_uuid} type={log_record_type}" + ) + else: + log_record = UserNotificationPolicyLogRecord( + type=log_record_type, + notification_error_code=log_record_error_code, + author=sms_record.receiver, + notification_policy=sms_record.notification_policy, + alert_group=sms_record.represents_alert_group, + notification_step=UserNotificationPolicy.Step.NOTIFY, + 
notification_channel=UserNotificationPolicy.NotificationChannel.SMS, + ) + log_record.save() + logger.info( + f"twilioapp.update_twilio_sms_status: created log_record log_record_id={log_record.id} " + f"type={log_record_type}" + ) def get_sms_error_code_by_twilio_status(status): diff --git a/engine/apps/twilioapp/tests/test_sms_message.py b/engine/apps/twilioapp/tests/test_sms_message.py index bba035b5bb..9406dd8cf2 100644 --- a/engine/apps/twilioapp/tests/test_sms_message.py +++ b/engine/apps/twilioapp/tests/test_sms_message.py @@ -2,6 +2,7 @@ import pytest from django.urls import reverse +from django.utils import timezone from django.utils.datastructures import MultiValueDict from django.utils.http import urlencode from rest_framework.test import APIClient @@ -101,3 +102,64 @@ def test_update_status(mock_has_permission, mock_slack_api_call, make_twilio_sms assert response.data == "" twilio_sms.refresh_from_db() assert twilio_sms.status == TwilioSMSstatuses.DETERMINANT[status] + + +@mock.patch("apps.twilioapp.views.AllowOnlyTwilio.has_permission") +@pytest.mark.django_db +def test_update_status_for_bundled_notifications( + mock_has_permission, + mock_slack_api_call, + make_organization_and_user, + make_alert_receive_channel, + make_user_notification_policy, + make_user_notification_bundle, + make_alert_group, + make_sms_record, +): + """The test for SMSMessage status update via api for notification bundle""" + organization, user = make_organization_and_user() + alert_receive_channel = make_alert_receive_channel(organization) + alert_group_1 = make_alert_group(alert_receive_channel) + alert_group_2 = make_alert_group(alert_receive_channel) + notification_policy = make_user_notification_policy( + user=user, + step=UserNotificationPolicy.Step.NOTIFY, + notify_by=UserNotificationPolicy.NotificationChannel.SMS, + ) + + notification_bundle = make_user_notification_bundle( + user, UserNotificationPolicy.NotificationChannel.SMS, notification_task_id="test_task_id", eta=timezone.now() + ) + notification_bundle.append_notification(alert_group_1, notification_policy) + notification_bundle.append_notification(alert_group_2, notification_policy) + bundle_uuid = "test_notifications_bundle" + + notification_bundle.notifications.update(bundle_uuid=bundle_uuid) + sms_record = make_sms_record( + receiver=user, + represents_bundle_uuid=bundle_uuid, + notification_policy=notification_policy, + ) + twilio_sms = TwilioSMS.objects.create(sid="SMa12312312a123a123123c6dd2f1aee77", sms_record=sms_record) + + mock_has_permission.return_value = True + status = "delivered" + data = { + "MessageSid": twilio_sms.sid, + "MessageStatus": status, + "AccountSid": "Because of mock_has_permission there are may be any value", + } + assert user.personal_log_records.count() == 0 + + client = APIClient() + response = client.post( + path=reverse("twilioapp:sms_status_events"), + data=urlencode(MultiValueDict(data), doseq=True), + content_type="application/x-www-form-urlencoded", + ) + assert response.status_code == 204 + assert response.data == "" + twilio_sms.refresh_from_db() + assert twilio_sms.status == TwilioSMSstatuses.DETERMINANT[status] + + assert user.personal_log_records.count() == 2 diff --git a/engine/apps/zvonok/status_callback.py b/engine/apps/zvonok/status_callback.py index 3a265cb549..61e505aadb 100644 --- a/engine/apps/zvonok/status_callback.py +++ b/engine/apps/zvonok/status_callback.py @@ -10,7 +10,7 @@ def update_zvonok_call_status(call_id: str, call_status: str, user_choice: Optional[str] = None): - from 
apps.base.models import UserNotificationPolicyLogRecord + from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord status_code = ZvonokCallStatuses.DETERMINANT.get(call_status) if status_code is None: @@ -57,12 +57,8 @@ def update_zvonok_call_status(call_id: str, call_status: str, user_choice: Optio author=phone_call_record.receiver, notification_policy=phone_call_record.notification_policy, alert_group=phone_call_record.represents_alert_group, - notification_step=phone_call_record.notification_policy.step - if phone_call_record.notification_policy - else None, - notification_channel=phone_call_record.notification_policy.notify_by - if phone_call_record.notification_policy - else None, + notification_step=UserNotificationPolicy.Step.NOTIFY, + notification_channel=UserNotificationPolicy.NotificationChannel.PHONE_CALL, ) log_record.save() logger.info( diff --git a/engine/conftest.py b/engine/conftest.py index 899679fd23..c4de173052 100644 --- a/engine/conftest.py +++ b/engine/conftest.py @@ -37,6 +37,7 @@ InvitationFactory, ResolutionNoteFactory, ResolutionNoteSlackMessageFactory, + UserNotificationBundleFactory, ) from apps.api.permissions import ( ACTION_PREFIX, @@ -145,6 +146,7 @@ register(LabelValueFactory) register(AlertReceiveChannelAssociatedLabelFactory) register(GoogleOAuth2UserFactory) +register(UserNotificationBundleFactory) IS_RBAC_ENABLED = os.getenv("ONCALL_TESTING_RBAC_ENABLED", "True") == "True" @@ -1077,3 +1079,13 @@ def _make_google_oauth2_user_for_user(user): return GoogleOAuth2UserFactory(user=user) return _make_google_oauth2_user_for_user + + +@pytest.fixture +def make_user_notification_bundle(): + def _make_user_notification_bundle(user, notification_channel, important=False, **kwargs): + return UserNotificationBundleFactory( + user=user, notification_channel=notification_channel, important=important, **kwargs + ) + + return _make_user_notification_bundle diff --git a/engine/settings/base.py b/engine/settings/base.py index 6638d16bc6..fea5c687a4 100644 --- a/engine/settings/base.py +++ b/engine/settings/base.py @@ -72,6 +72,7 @@ # Enable labels feature for organizations from the list. 
Use OnCall organization ID, for this flag FEATURE_LABELS_ENABLED_PER_ORG = getenv_list("FEATURE_LABELS_ENABLED_PER_ORG", default=list()) FEATURE_ALERT_GROUP_SEARCH_ENABLED = getenv_boolean("FEATURE_ALERT_GROUP_SEARCH_ENABLED", default=False) +FEATURE_NOTIFICATION_BUNDLE_ENABLED = getenv_boolean("FEATURE_NOTIFICATION_BUNDLE_ENABLED", default=False) TWILIO_API_KEY_SID = os.environ.get("TWILIO_API_KEY_SID") TWILIO_API_KEY_SECRET = os.environ.get("TWILIO_API_KEY_SECRET") @@ -551,8 +552,8 @@ class BrokerTypes: "schedule": crontab(minute="*/30"), "args": (), }, - "start_cleanup_organizations": { - "task": "apps.grafana_plugin.tasks.sync.start_cleanup_organizations", + "start_cleanup_deleted_integrations": { + "task": "apps.grafana_plugin.tasks.sync.start_cleanup_deleted_integrations", "schedule": crontab(hour="4, 16", minute=35), "args": (), }, diff --git a/engine/settings/celery_task_routes.py b/engine/settings/celery_task_routes.py index 9d8e0537d2..ea866c1a7a 100644 --- a/engine/settings/celery_task_routes.py +++ b/engine/settings/celery_task_routes.py @@ -105,6 +105,7 @@ "apps.alerts.tasks.notify_ical_schedule_shift.notify_ical_schedule_shift": {"queue": "critical"}, "apps.alerts.tasks.notify_user.notify_user_task": {"queue": "critical"}, "apps.alerts.tasks.notify_user.perform_notification": {"queue": "critical"}, + "apps.alerts.tasks.notify_user.send_bundled_notification": {"queue": "critical"}, "apps.alerts.tasks.notify_user.send_user_notification_signal": {"queue": "critical"}, "apps.alerts.tasks.resolve_alert_group_by_source_if_needed.resolve_alert_group_by_source_if_needed": { "queue": "critical" @@ -129,6 +130,7 @@ "queue": "critical" }, "apps.mobile_app.fcm_relay.fcm_relay_async": {"queue": "critical"}, + "apps.phone_notifications.phone_backend.notify_by_sms_bundle_async_task": {"queue": "critical"}, "apps.schedules.tasks.drop_cached_ical.drop_cached_ical_for_custom_events_for_organization": {"queue": "critical"}, "apps.schedules.tasks.drop_cached_ical.drop_cached_ical_task": {"queue": "critical"}, # GRAFANA @@ -144,6 +146,7 @@ "apps.grafana_plugin.tasks.sync.cleanup_organization_async": {"queue": "long"}, "apps.grafana_plugin.tasks.sync.cleanup_empty_deleted_integrations": {"queue": "long"}, "apps.grafana_plugin.tasks.sync.start_cleanup_organizations": {"queue": "long"}, + "apps.grafana_plugin.tasks.sync.start_cleanup_deleted_integrations": {"queue": "long"}, "apps.grafana_plugin.tasks.sync.start_cleanup_deleted_organizations": {"queue": "long"}, "apps.grafana_plugin.tasks.sync.start_sync_organizations": {"queue": "long"}, "apps.grafana_plugin.tasks.sync.sync_organization_async": {"queue": "long"}, diff --git a/grafana-plugin/Dockerfile.dev b/grafana-plugin/Dockerfile.dev index bc35eef8cc..b84571c92a 100644 --- a/grafana-plugin/Dockerfile.dev +++ b/grafana-plugin/Dockerfile.dev @@ -1,4 +1,4 @@ -FROM node:18.16.0-alpine +FROM node:20.15.1-alpine WORKDIR /etc/app ENV PATH /etc/app/node_modules/.bin:$PATH diff --git a/grafana-plugin/package.json b/grafana-plugin/package.json index 7cd3135645..a6b3bcdf98 100644 --- a/grafana-plugin/package.json +++ b/grafana-plugin/package.json @@ -67,7 +67,7 @@ "@types/jest": "^29.5.0", "@types/lodash": "^4.14.194", "@types/lodash-es": "^4.17.6", - "@types/node": "^20.8.7", + "@types/node": "^20.14.10", "@types/query-string": "^6.3.0", "@types/react-copy-to-clipboard": "^5.0.4", "@types/react-dom": "^18.0.6", @@ -128,7 +128,7 @@ "webpack-livereload-plugin": "^3.0.2" }, "engines": { - "node": "18.x" + "node": "20.x" }, "dependencies": { "@dnd-kit/core": 
"^6.0.8", diff --git a/grafana-plugin/src/assets/style/utils.css b/grafana-plugin/src/assets/style/utils.css index d60a67422c..38795f9ae2 100644 --- a/grafana-plugin/src/assets/style/utils.css +++ b/grafana-plugin/src/assets/style/utils.css @@ -138,14 +138,6 @@ padding-bottom: 24px; } -/* ----- - * Icons - */ - -.icon-exclamation { - color: var(--error-text-color); -} - .loadingPlaceholder { margin-bottom: 0; margin-right: 4px; diff --git a/grafana-plugin/src/components/IntegrationCollapsibleTreeView/IntegrationCollapsibleTreeView.styles.ts b/grafana-plugin/src/components/IntegrationCollapsibleTreeView/IntegrationCollapsibleTreeView.styles.ts index edeed6f3a7..d1986cd750 100644 --- a/grafana-plugin/src/components/IntegrationCollapsibleTreeView/IntegrationCollapsibleTreeView.styles.ts +++ b/grafana-plugin/src/components/IntegrationCollapsibleTreeView/IntegrationCollapsibleTreeView.styles.ts @@ -1,6 +1,6 @@ import { css } from '@emotion/css'; import { GrafanaTheme2 } from '@grafana/data'; -import { COLORS } from 'styles/utils.styles'; +import { Colors } from 'styles/utils.styles'; export const getIntegrationCollapsibleTreeStyles = (theme: GrafanaTheme2) => { return { @@ -54,7 +54,7 @@ export const getIntegrationCollapsibleTreeStyles = (theme: GrafanaTheme2) => { position: absolute; top: 0; left: -30px; - color: ${COLORS.ALWAYS_GREY}; + color: ${Colors.ALWAYS_GREY}; width: 28px; height: 28px; text-align: center; @@ -70,7 +70,7 @@ export const getIntegrationCollapsibleTreeStyles = (theme: GrafanaTheme2) => { path { // this will overwrite all icons below to be gray - fill: ${COLORS.ALWAYS_GREY}; + fill: ${Colors.ALWAYS_GREY}; } `, diff --git a/grafana-plugin/src/components/IntegrationSendDemoAlertModal/IntegrationSendDemoAlertModal.tsx b/grafana-plugin/src/components/IntegrationSendDemoAlertModal/IntegrationSendDemoAlertModal.tsx index faf746c3de..541d974e0b 100644 --- a/grafana-plugin/src/components/IntegrationSendDemoAlertModal/IntegrationSendDemoAlertModal.tsx +++ b/grafana-plugin/src/components/IntegrationSendDemoAlertModal/IntegrationSendDemoAlertModal.tsx @@ -6,7 +6,7 @@ import CopyToClipboard from 'react-copy-to-clipboard'; import Emoji from 'react-emoji-render'; import { debounce } from 'throttle-debounce'; -import { MonacoEditor, MONACO_LANGUAGE } from 'components/MonacoEditor/MonacoEditor'; +import { MonacoEditor, MonacoLanguage } from 'components/MonacoEditor/MonacoEditor'; import { MONACO_EDITABLE_CONFIG } from 'components/MonacoEditor/MonacoEditor.config'; import { PluginLink } from 'components/PluginLink/PluginLink'; import { Text } from 'components/Text/Text'; @@ -74,7 +74,7 @@ export const IntegrationSendDemoAlertModal: React.FC = ({ labels, onClic } /> ) : null; + +interface LabelBadgesProps { + labels: Array; + maxCount?: number; +} + +export const LabelBadges: React.FC = ({ labels = [], maxCount = 3 }) => { + const renderer = (values: LabelBadgesProps['labels']) => { + return ( + + {values.map((label) => ( + + ))} + + ); + }; + + return ( + + {renderer(labels.slice(0, maxCount))} + + maxCount}> + +
{labels.length > maxCount ? `+ ${labels.length - maxCount}` : ``}
+
+
+
+ ); +}; diff --git a/grafana-plugin/src/components/ManualAlertGroup/ManualAlertGroup.tsx b/grafana-plugin/src/components/ManualAlertGroup/ManualAlertGroup.tsx index b2f39a5e0d..6a9f2fa898 100644 --- a/grafana-plugin/src/components/ManualAlertGroup/ManualAlertGroup.tsx +++ b/grafana-plugin/src/components/ManualAlertGroup/ManualAlertGroup.tsx @@ -1,5 +1,6 @@ import React, { FC, useCallback } from 'react'; +import { css } from '@emotion/css'; import { Button, Drawer, Field, HorizontalGroup, TextArea, useStyles2, VerticalGroup } from '@grafana/ui'; import { observer } from 'mobx-react'; import { Controller, FormProvider, useForm } from 'react-hook-form'; @@ -50,7 +51,6 @@ export const ManualAlertGroup: FC = observer(({ onCreate, const onSubmit = async (data: FormData) => { const transformedData = prepareForUpdate(selectedUserResponders, selectedTeamResponder, data); - const resp = await directPagingStore.createManualAlertRule(transformedData); if (!resp) { @@ -65,13 +65,14 @@ export const ManualAlertGroup: FC = observer(({ onCreate, onHide(); }; - const utils = useStyles2(getUtilStyles); + const utilStyles = useStyles2(getUtilStyles); + const styles = useStyles2(getStyles); return ( -
+ = observer(({ onCreate, )} /> + -
+ +
); @@ -281,11 +287,11 @@ export const AddRespondersPopup = observer( key="search" className={cx('responders-filters')} data-testid="add-responders-search-input" - value={searchTerm} + value={search} placeholder="Search" // @ts-ignore width={'unset'} - onChange={handleSetSearchTerm} + onChange={handleSetSearch} /> {isCreateMode && ( { onUpdate: () => void; } +interface FormFields { + name: string; +} + export const ApiTokenForm = observer((props: TokenCreationModalProps) => { const { onHide = () => {}, onUpdate = () => {} } = props; - const [name, setName] = useState(''); const [token, setToken] = useState(''); const store = useStore(); + const formMethods = useForm({ + mode: 'onChange', + }); - const onCreateTokenCallback = useCallback(async () => { - try { - const data = await store.apiTokenStore.create({ name }); - setToken(data.token); - onUpdate(); - } catch (error) { - openErrorNotification(get(error, 'response.data.detail', 'error creating token')); - } - }, [name]); + const { + control, + watch, + formState: { errors }, + handleSubmit, + } = formMethods; - const handleNameChange = useCallback((event) => { - setName(event.target.value); - }, []); + const name = watch('name'); return ( - - -
- {renderTokenInput()} - {renderCopyToClipboard()} -
- - {renderCurlExample()} - - - - {!token && ( - - )} - -
+ + + + +
+ {renderTokenInput()} + {renderCopyToClipboard()} +
+ + {renderCurlExample()} + + + + + + + + +
+ +
); function renderTokenInput() { - return token ? ( - - ) : ( - ( + + <> + {token ? ( + + ) : ( + + )} + + + )} /> ); } @@ -104,6 +125,16 @@ export const ApiTokenForm = observer((props: TokenCreationModalProps) => { ); } + + async function onCreateTokenCallback() { + try { + const data = await store.apiTokenStore.create({ name }); + setToken(data.token); + onUpdate(); + } catch (error) { + openErrorNotification(get(error, 'response.data.detail', 'error creating token')); + } + } }); function getCurlExample(token, onCallApiUrl) { diff --git a/grafana-plugin/src/containers/ColumnsSelectorWrapper/ColumnsModal.tsx b/grafana-plugin/src/containers/ColumnsSelectorWrapper/ColumnsModal.tsx index 7ef9376226..7ca7d07ddc 100644 --- a/grafana-plugin/src/containers/ColumnsSelectorWrapper/ColumnsModal.tsx +++ b/grafana-plugin/src/containers/ColumnsSelectorWrapper/ColumnsModal.tsx @@ -159,7 +159,7 @@ export const ColumnsModal: React.FC = observer( {values.slice(0, 2).map((val) => ( ))} -
{values.length > 2 ? `+ ${values.length - 2}` : ``}
+
{values.length > 2 ? `+ ${values.length - 2}` : ``}
); } diff --git a/grafana-plugin/src/containers/ColumnsSelectorWrapper/ColumnsSelectorWrapper.styles.ts b/grafana-plugin/src/containers/ColumnsSelectorWrapper/ColumnsSelectorWrapper.styles.ts index c6bc99cc3f..eee2f3aa14 100644 --- a/grafana-plugin/src/containers/ColumnsSelectorWrapper/ColumnsSelectorWrapper.styles.ts +++ b/grafana-plugin/src/containers/ColumnsSelectorWrapper/ColumnsSelectorWrapper.styles.ts @@ -21,9 +21,6 @@ export const getColumnsSelectorWrapperStyles = (theme: GrafanaTheme2) => { removalModal: css` max-width: 500px; `, - totalValuesCount: css` - margin-left: 16px; - `, valuesBlock: css` margin-bottom: 12px; `, diff --git a/grafana-plugin/src/containers/EscalationChainForm/EscalationChainForm.tsx b/grafana-plugin/src/containers/EscalationChainForm/EscalationChainForm.tsx index ec694f7b0f..a2fb59d4da 100644 --- a/grafana-plugin/src/containers/EscalationChainForm/EscalationChainForm.tsx +++ b/grafana-plugin/src/containers/EscalationChainForm/EscalationChainForm.tsx @@ -1,8 +1,9 @@ -import React, { ChangeEvent, FC, useCallback, useState } from 'react'; +import React, { FC } from 'react'; import { Button, Field, HorizontalGroup, Input, Modal } from '@grafana/ui'; import cn from 'classnames/bind'; import { observer } from 'mobx-react'; +import { Controller, FormProvider, useForm } from 'react-hook-form'; import { GSelect } from 'containers/GSelect/GSelect'; import { EscalationChain } from 'models/escalation_chain/escalation_chain.types'; @@ -25,6 +26,11 @@ interface EscalationChainFormProps { onSubmit: (id: EscalationChain['id']) => Promise; } +interface EscalationFormFields { + team: string; + name: string; +} + const cx = cn.bind(styles); export const EscalationChainForm: FC = observer((props) => { @@ -40,96 +46,131 @@ export const EscalationChainForm: FC = observer((props } = store; const user = userStore.currentUser; - const escalationChain = escalationChainId ? escalationChainStore.items[escalationChainId] : undefined; + const isCreateMode = mode === EscalationChainFormMode.Create; + const isCopyMode = mode === EscalationChainFormMode.Copy; + + const formMethods = useForm({ + mode: 'onChange', + defaultValues: { + team: escalationChain?.team || user.current_team, + name: isCopyMode ? `${escalationChain.name} copy` : '', + }, + }); - const [name, setName] = useState( - mode === EscalationChainFormMode.Copy ? `${escalationChain?.name} copy` : escalationChain?.name + const { + control, + setError, + getValues, + formState: { errors }, + handleSubmit, + } = formMethods; + + return ( + +
+ +
+ ( + + + {...field} + items={grafanaTeamItems} + fetchItemsFn={grafanaTeamStore.updateItems} + fetchItemFn={grafanaTeamStore.fetchItemById} + getSearchResult={grafanaTeamStore.getSearchResult} + displayField="name" + valueField="id" + allowClear + placeholder="Select a team" + className={cx('team-select')} + /> + + )} + /> + + ( + + + + )} + /> + + + + + + +
+
+
); - const [selectedTeam, setSelectedTeam] = useState(escalationChain?.team || user.current_team); - const [errors, setErrors] = useState<{ [key: string]: string }>({}); - const onSubmit = useCallback(async () => { + async function onSubmit() { let escalationChain: EscalationChain | void; - const isCreateMode = mode === EscalationChainFormMode.Create; - const isCopyMode = mode === EscalationChainFormMode.Copy; - - if (isCreateMode) { - escalationChain = await escalationChainStore.create({ name, team: selectedTeam }); - } else if (isCopyMode) { - escalationChain = await escalationChainStore.clone(escalationChainId, { name, team: selectedTeam }); - } else { - escalationChain = await escalationChainStore.update(escalationChainId, { - name, - team: selectedTeam, - }); - } - - if (!escalationChain) { - let verb: string; + const teamName = getValues('team'); + const escalationChainName = getValues('name'); + try { if (isCreateMode) { - verb = 'creating'; + escalationChain = await escalationChainStore.create({ + name: escalationChainName, + team: teamName, + }); } else if (isCopyMode) { - verb = 'copying'; + escalationChain = await escalationChainStore.clone(escalationChainId, { + name: escalationChainName, + team: teamName, + }); } else { - verb = 'updating'; + escalationChain = await escalationChainStore.update(escalationChainId, { + name: escalationChainName, + team: teamName, + }); } - openWarningNotification(`There was an issue ${verb} the escalation chain. Please try again`); - return; - } + if (!escalationChain) { + let verb: string; + + if (isCreateMode) { + verb = 'creating'; + } else if (isCopyMode) { + verb = 'copying'; + } else { + verb = 'updating'; + } + + openWarningNotification(`There was an issue ${verb} the escalation chain. Please try again`); + return; + } - try { await onSubmitProp(escalationChain.id); onHide(); } catch (err) { - setErrors({ - name: err.response.data.name || err.response.data.detail || err.response.data.non_field_errors, - }); + if (err?.response?.data) { + const keys = Object.keys(err.response.data); + keys.forEach((key: keyof EscalationFormFields) => { + const message = Array.isArray(err.response.data[key]) ? err.response.data[key][0] : err.response.data[key]; + setError('name', { message }); + }); + } } - }, [name, selectedTeam, mode, onSubmitProp]); - - const handleNameChange = useCallback((event: ChangeEvent) => { - setName(event.target.value); - }, []); - - return ( - -
- - - items={grafanaTeamItems} - fetchItemsFn={grafanaTeamStore.updateItems} - fetchItemFn={grafanaTeamStore.fetchItemById} - getSearchResult={grafanaTeamStore.getSearchResult} - displayField="name" - valueField="id" - allowClear - placeholder="Select a team" - className={cx('team-select')} - onChange={setSelectedTeam} - value={selectedTeam} - /> - - - - - - - - -
-
- ); + } }); diff --git a/grafana-plugin/src/containers/IntegrationContainers/CollapsedIntegrationRouteDisplay/CollapsedIntegrationRouteDisplay.module.scss b/grafana-plugin/src/containers/IntegrationContainers/CollapsedIntegrationRouteDisplay/CollapsedIntegrationRouteDisplay.module.scss index 4f4686cfb9..ebd2a9cf25 100644 --- a/grafana-plugin/src/containers/IntegrationContainers/CollapsedIntegrationRouteDisplay/CollapsedIntegrationRouteDisplay.module.scss +++ b/grafana-plugin/src/containers/IntegrationContainers/CollapsedIntegrationRouteDisplay/CollapsedIntegrationRouteDisplay.module.scss @@ -10,7 +10,7 @@ display: flex; white-space: nowrap; flex-direction: row; - gap: 4px; + gap: 8px; } &__item--large { diff --git a/grafana-plugin/src/containers/IntegrationContainers/CollapsedIntegrationRouteDisplay/CollapsedIntegrationRouteDisplay.tsx b/grafana-plugin/src/containers/IntegrationContainers/CollapsedIntegrationRouteDisplay/CollapsedIntegrationRouteDisplay.tsx index 6642c88f3c..63e402ed1e 100644 --- a/grafana-plugin/src/containers/IntegrationContainers/CollapsedIntegrationRouteDisplay/CollapsedIntegrationRouteDisplay.tsx +++ b/grafana-plugin/src/containers/IntegrationContainers/CollapsedIntegrationRouteDisplay/CollapsedIntegrationRouteDisplay.tsx @@ -7,9 +7,9 @@ import { observer } from 'mobx-react'; import { IntegrationBlock } from 'components/Integrations/IntegrationBlock'; import { PluginLink } from 'components/PluginLink/PluginLink'; import { Text } from 'components/Text/Text'; -import { TooltipBadge } from 'components/TooltipBadge/TooltipBadge'; import styles from 'containers/IntegrationContainers/CollapsedIntegrationRouteDisplay/CollapsedIntegrationRouteDisplay.module.scss'; import { RouteButtonsDisplay } from 'containers/IntegrationContainers/ExpandedIntegrationRouteDisplay/ExpandedIntegrationRouteDisplay'; +import { RouteHeading } from 'containers/IntegrationContainers/RouteHeading'; import { ChannelFilter } from 'models/channel_filter/channel_filter.types'; import { ApiSchemas } from 'network/oncall-api/api.types'; import { CommonIntegrationHelper } from 'pages/integration/CommonIntegration.helper'; @@ -70,34 +70,13 @@ export const CollapsedIntegrationRouteDisplay: React.FC -
- - {routeWording === 'Default' && Unmatched alerts routed to default route} - {routeWording !== 'Default' && - (channelFilter.filtering_term ? ( - - {channelFilter.filtering_term} - - ) : ( - <> -
- -
- - Routing template not set - - - ))} -
+
= observer( ({ alertReceiveChannelId, @@ -83,7 +104,12 @@ export const ExpandedIntegrationRouteDisplay: React.FC(undefined); + const [labels, setLabels] = useState>([]); + const [labelErrors, setLabelErrors] = useState([]); const [{ isEscalationCollapsed, isRefreshingEscalationChains, routeIdForDeletion }, setState] = useReducer( (state: ExpandedIntegrationRouteDisplayState, newState: Partial) => ({ @@ -105,7 +131,21 @@ export const ExpandedIntegrationRouteDisplay: React.FC { + if (channelFilter && !labels?.length) { + setLabels(channelFilter.filtering_labels); + } + + if (channelFilter && !routingOption) { + setRoutingOption( + (channelFilter.filtering_term_type === FilteringTermType.labels + ? QueryBuilderOptions[0] + : QueryBuilderOptions[1] + ).value + ); + } + }, [channelFilter]); + if (!channelFilter) { return null; } @@ -121,6 +161,7 @@ export const ExpandedIntegrationRouteDisplay: React.FC; } + const hasLabels = store.hasFeature(AppFeature.Labels); const escChainDisplayName = escalationChainStore.items[channelFilter.escalation_chain]?.name; const getTreeViewElements = () => { const configs: IntegrationCollapsibleItem[] = [ @@ -143,41 +184,73 @@ export const ExpandedIntegrationRouteDisplay: React.FC - Use routing template + {hasLabels ? 'Alerts matched by' : 'Use routing template'} - -
- + +
+ +
+ + + + + + + + Alerts by default will not match this route if no labels are being provided + + ) as unknown as string + } + /> + + + +
+ + + + + +
+ +
+
-
@@ -287,23 +360,24 @@ export const ExpandedIntegrationRouteDisplay: React.FC - - - - +
+ + +
setState({ routeIdForDeletion: channelFilterId })} openRouteTemplateEditor={() => handleEditRoutingTemplate(channelFilter, channelFilterId)} /> - - +
+
} content={ @@ -337,6 +411,52 @@ export const ExpandedIntegrationRouteDisplay: React.FC ); + async function onLabelsChange(labels: Array) { + setLabelErrors([]); + setLabels(labels); + + if (!areLabelsValid(labels)) { + return; + } + + await alertReceiveChannelStore.saveChannelFilter(channelFilterId, { + filtering_labels: labels, + filtering_term_type: FilteringTermType.labels, + }); + } + + async function onRouteOptionChange(option: string) { + if (option === RoutingOption.TEMPLATE) { + await alertReceiveChannelStore.saveChannelFilter(channelFilterId, { + filtering_term: channelFilter.filtering_term || '', + filtering_term_type: FilteringTermType.jinja2, + }); + } + + if (option === RoutingOption.LABELS) { + await alertReceiveChannelStore.saveChannelFilter(channelFilterId, { + filtering_labels: channelFilter.filtering_labels || [], + filtering_term_type: FilteringTermType.labels, + }); + } + + setRoutingOption(option); + } + + function shouldShowLabelAlert() { + if (!labels?.length) { + return true; + } + if (labels.length === 1) { + return !areLabelsValid(labels); + } + return false; + } + + function areLabelsValid(labels: Array): boolean { + return labels.every((v) => v.key?.id !== undefined && v.value?.id !== undefined); + } + async function onRouteDeleteConfirm() { setState({ routeIdForDeletion: undefined }); onRouteDelete(routeIdForDeletion); diff --git a/grafana-plugin/src/containers/IntegrationContainers/RouteHeading.tsx b/grafana-plugin/src/containers/IntegrationContainers/RouteHeading.tsx new file mode 100644 index 0000000000..f55317cee5 --- /dev/null +++ b/grafana-plugin/src/containers/IntegrationContainers/RouteHeading.tsx @@ -0,0 +1,103 @@ +import React from 'react'; + +import { css } from '@emotion/css'; +import { GrafanaTheme2 } from '@grafana/data'; +import { Icon, useStyles2 } from '@grafana/ui'; + +import { LabelBadges } from 'components/LabelsTooltipBadge/LabelsTooltipBadge'; +import { RenderConditionally } from 'components/RenderConditionally/RenderConditionally'; +import { Text } from 'components/Text/Text'; +import { TooltipBadge } from 'components/TooltipBadge/TooltipBadge'; +import { ChannelFilter, FilteringTermType } from 'models/channel_filter/channel_filter.types'; +import { CommonIntegrationHelper } from 'pages/integration/CommonIntegration.helper'; +import { AppFeature } from 'state/features'; +import { useStore } from 'state/useStore'; + +interface RouteHeadingProps { + className: string; + routeWording: string; + routeIndex: number; + channelFilter: ChannelFilter; + channelFilterIds: Array; +} + +export const RouteHeading: React.FC = ({ + className, + routeWording, + channelFilterIds, + channelFilter, + routeIndex, +}) => { + const styles = useStyles2(getStyles); + + return ( +
+ + + {routeWording === 'Default' && Unmatched alerts routed to default route} + {routeWording !== 'Default' && } +
+ ); +}; + +const RouteHeadingDisplay: React.FC<{ channelFilter: ChannelFilter }> = ({ channelFilter }) => { + const store = useStore(); + const styles = useStyles2(getStyles); + const hasLabels = store.hasFeature(AppFeature.Labels); + + if (channelFilter?.filtering_term || channelFilter?.filtering_labels) { + return ( + <> + + + {channelFilter.filtering_term} + + + + + + + + ); + } + + return ( + <> +
+ +
+ Routing template not set + + ); +}; + +const getStyles = (theme: GrafanaTheme2) => { + return { + badge: css` + margin-right: 4px; + `, + + iconExclamation: css` + color: ${theme.colors.error.main}; + `, + + routeHeading: css` + max-width: 80%; + display: block; + text-overflow: ellipsis; + overflow: hidden; + `, + }; +}; diff --git a/grafana-plugin/src/containers/IntegrationLabelsForm/IntegrationLabelsForm.tsx b/grafana-plugin/src/containers/IntegrationLabelsForm/IntegrationLabelsForm.tsx index df04222b10..bee1cb6227 100644 --- a/grafana-plugin/src/containers/IntegrationLabelsForm/IntegrationLabelsForm.tsx +++ b/grafana-plugin/src/containers/IntegrationLabelsForm/IntegrationLabelsForm.tsx @@ -16,7 +16,7 @@ import cn from 'classnames/bind'; import { observer } from 'mobx-react'; import { Collapse } from 'components/Collapse/Collapse'; -import { MonacoEditor, MONACO_LANGUAGE } from 'components/MonacoEditor/MonacoEditor'; +import { MonacoEditor, MonacoLanguage } from 'components/MonacoEditor/MonacoEditor'; import { PluginLink } from 'components/PluginLink/PluginLink'; import { RenderConditionally } from 'components/RenderConditionally/RenderConditionally'; import { Text } from 'components/Text/Text'; @@ -178,7 +178,7 @@ export const IntegrationLabelsForm = observer((props: IntegrationLabelsFormProps height="200px" data={{}} showLineNumbers={false} - language={MONACO_LANGUAGE.jinja2} + language={MonacoLanguage.jinja2} onChange={(value) => { setAlertGroupLabels({ ...alertGroupLabels, template: value }); }} diff --git a/grafana-plugin/src/containers/IntegrationTemplate/IntegrationTemplate.tsx b/grafana-plugin/src/containers/IntegrationTemplate/IntegrationTemplate.tsx index be11565390..a3adcd8cd5 100644 --- a/grafana-plugin/src/containers/IntegrationTemplate/IntegrationTemplate.tsx +++ b/grafana-plugin/src/containers/IntegrationTemplate/IntegrationTemplate.tsx @@ -17,8 +17,9 @@ import { } from 'components/CheatSheet/CheatSheet.config'; import { MonacoEditor } from 'components/MonacoEditor/MonacoEditor'; import { Text } from 'components/Text/Text'; +import { TemplatePage } from 'containers/TemplatePreview/TemplatePreview'; import { TemplateResult } from 'containers/TemplateResult/TemplateResult'; -import { TemplatesAlertGroupsList, TEMPLATE_PAGE } from 'containers/TemplatesAlertGroupsList/TemplatesAlertGroupsList'; +import { TemplatesAlertGroupsList } from 'containers/TemplatesAlertGroupsList/TemplatesAlertGroupsList'; import { WithPermissionControlTooltip } from 'containers/WithPermissionControl/WithPermissionControlTooltip'; import { AlertTemplatesDTO } from 'models/alert_templates/alert_templates'; import { ChannelFilter } from 'models/channel_filter/channel_filter.types'; @@ -192,7 +193,7 @@ export const IntegrationTemplate = observer((props: IntegrationTemplateProps) =>
{ return { gray: css` - color: ${COLORS.ALWAYS_GREY}; + color: ${Colors.ALWAYS_GREY}; `, }; }; diff --git a/grafana-plugin/src/containers/RouteLabelsDisplay/RouteLabelsDisplay.tsx b/grafana-plugin/src/containers/RouteLabelsDisplay/RouteLabelsDisplay.tsx new file mode 100644 index 0000000000..17743e8ea9 --- /dev/null +++ b/grafana-plugin/src/containers/RouteLabelsDisplay/RouteLabelsDisplay.tsx @@ -0,0 +1,98 @@ +import React from 'react'; + +import { ServiceLabels } from '@grafana/labels'; +import { Button, VerticalGroup } from '@grafana/ui'; + +import { splitToGroups } from 'models/label/label.helpers'; +import { components } from 'network/oncall-api/autogenerated-api.types'; +import { useStore } from 'state/useStore'; +import { GENERIC_ERROR } from 'utils/consts'; +import { openErrorNotification } from 'utils/utils'; + +interface RouteLabelsDisplayProps { + labels: Array; + labelErrors: any; + onChange: (value: Array) => void; +} + +const INPUT_WIDTH = 280; +const DUPLICATE_ERROR = 'Duplicate values are not allowed'; + +export const RouteLabelsDisplay: React.FC = ({ labels, labelErrors, onChange }) => { + const { labelsStore } = useStore(); + + const onLabelAdd = () => { + onChange([ + ...labels, + { + key: { id: undefined, name: undefined, prescribed: false }, + value: { id: undefined, name: undefined, prescribed: false }, + }, + ]); + }; + + const onLoadKeys = async (search?: string) => { + let result = undefined; + + try { + result = await labelsStore.loadKeys(search); + } catch (error) { + openErrorNotification('There was an error processing your request. Please try again'); + } + + return splitToGroups(result); + }; + + const onLoadValuesForKey = async (key: string, search?: string) => { + let result = undefined; + + try { + const { values } = await labelsStore.loadValuesForKey(key, search); + result = values; + } catch (error) { + openErrorNotification('There was an error processing your request. 
Please try again'); + } + + return splitToGroups(result); + }; + + return ( + + { + if (res?.response?.status === 409) { + openErrorNotification(DUPLICATE_ERROR); + } else { + openErrorNotification(GENERIC_ERROR); + } + }} + renderValue={(option, index, renderValueDefault) => renderValueDefault(option, index)} + onDataUpdate={(value) => { + onChange([...value]); + }} + getIsKeyEditable={(option) => !option.prescribed} + getIsValueEditable={(option) => !option.prescribed} + /> + + + + ); +}; + +const getIsAddBtnDisabled = (labels: Array = []) => { + const lastItem = labels.at(-1); + return lastItem && (lastItem.key?.id === undefined || lastItem.value?.id === undefined); +}; diff --git a/grafana-plugin/src/containers/ScheduleSlot/ScheduleSlot.tsx b/grafana-plugin/src/containers/ScheduleSlot/ScheduleSlot.tsx index 81161a8372..a585f5ee49 100644 --- a/grafana-plugin/src/containers/ScheduleSlot/ScheduleSlot.tsx +++ b/grafana-plugin/src/containers/ScheduleSlot/ScheduleSlot.tsx @@ -5,7 +5,7 @@ import { GrafanaTheme2 } from '@grafana/data'; import { Button, HorizontalGroup, Icon, Tooltip, useStyles2, VerticalGroup } from '@grafana/ui'; import dayjs from 'dayjs'; import { observer } from 'mobx-react'; -import { COLORS, getLabelCss } from 'styles/utils.styles'; +import { Colors, getLabelCss } from 'styles/utils.styles'; import NonExistentUserName from 'components/NonExistentUserName/NonExistentUserName'; import { RenderConditionally } from 'components/RenderConditionally/RenderConditionally'; @@ -535,7 +535,7 @@ const getStyles = (theme: GrafanaTheme2) => { return { root: css` height: 28px; - background: ${COLORS.GRAY_8}; + background: ${Colors.GRAY_8}; border-radius: 2px; position: relative; display: flex; diff --git a/grafana-plugin/src/containers/TemplatePreview/TemplatePreview.tsx b/grafana-plugin/src/containers/TemplatePreview/TemplatePreview.tsx index 20abf9bebb..93113f5b68 100644 --- a/grafana-plugin/src/containers/TemplatePreview/TemplatePreview.tsx +++ b/grafana-plugin/src/containers/TemplatePreview/TemplatePreview.tsx @@ -27,14 +27,14 @@ interface TemplatePreviewProps { alertReceiveChannelId: ApiSchemas['AlertReceiveChannel']['id']; alertGroupId?: ApiSchemas['AlertGroup']['pk']; outgoingWebhookId?: ApiSchemas['Webhook']['id']; - templatePage: TEMPLATE_PAGE; + templatePage: TemplatePage; } interface ConditionalResult { isResult?: boolean; value?: string; } -export enum TEMPLATE_PAGE { +export enum TemplatePage { Integrations, Webhooks, } @@ -62,7 +62,7 @@ export const TemplatePreview = observer((props: TemplatePreviewProps) => { const handleTemplateBodyChange = useDebouncedCallback(async () => { try { - const data = await (templatePage === TEMPLATE_PAGE.Webhooks + const data = await (templatePage === TemplatePage.Webhooks ? outgoingWebhookStore.renderPreview(outgoingWebhookId, templateName, templateBody, payload) : alertGroupId ? 
AlertGroupHelper.renderPreview(alertGroupId, templateName, templateBody) diff --git a/grafana-plugin/src/containers/TemplateResult/TemplateResult.tsx b/grafana-plugin/src/containers/TemplateResult/TemplateResult.tsx index 5cd6f088f6..ddbf24c9ec 100644 --- a/grafana-plugin/src/containers/TemplateResult/TemplateResult.tsx +++ b/grafana-plugin/src/containers/TemplateResult/TemplateResult.tsx @@ -7,7 +7,7 @@ import { TemplateForEdit } from 'components/AlertTemplates/CommonAlertTemplatesF import { Block } from 'components/GBlock/Block'; import { Text } from 'components/Text/Text'; import styles from 'containers/IntegrationTemplate/IntegrationTemplate.module.scss'; -import { TemplatePreview, TEMPLATE_PAGE } from 'containers/TemplatePreview/TemplatePreview'; +import { TemplatePreview, TemplatePage } from 'containers/TemplatePreview/TemplatePreview'; import { ApiSchemas } from 'network/oncall-api/api.types'; const cx = cn.bind(styles); @@ -23,7 +23,7 @@ interface ResultProps { error?: string; onSaveAndFollowLink?: (link: string) => void; templateIsRoute?: boolean; - templatePage?: TEMPLATE_PAGE; + templatePage?: TemplatePage; } export const TemplateResult = (props: ResultProps) => { @@ -37,7 +37,7 @@ export const TemplateResult = (props: ResultProps) => { error, isAlertGroupExisting, onSaveAndFollowLink, - templatePage = TEMPLATE_PAGE.Integrations, + templatePage = TemplatePage.Integrations, } = props; return ( @@ -91,7 +91,7 @@ export const TemplateResult = (props: ResultProps) => {
- ← Select {templatePage === TEMPLATE_PAGE.Webhooks ? 'event' : 'alert group'} or "Use custom payload" + ← Select {templatePage === TemplatePage.Webhooks ? 'event' : 'alert group'} or "Use custom payload"
diff --git a/grafana-plugin/src/containers/TemplatesAlertGroupsList/TemplatesAlertGroupsList.tsx b/grafana-plugin/src/containers/TemplatesAlertGroupsList/TemplatesAlertGroupsList.tsx index 0b1d0e554e..93f4ec8252 100644 --- a/grafana-plugin/src/containers/TemplatesAlertGroupsList/TemplatesAlertGroupsList.tsx +++ b/grafana-plugin/src/containers/TemplatesAlertGroupsList/TemplatesAlertGroupsList.tsx @@ -4,10 +4,11 @@ import { Button, HorizontalGroup, Icon, IconButton, Badge, LoadingPlaceholder } import cn from 'classnames/bind'; import { debounce } from 'lodash-es'; -import { MonacoEditor, MONACO_LANGUAGE } from 'components/MonacoEditor/MonacoEditor'; +import { MonacoEditor, MonacoLanguage } from 'components/MonacoEditor/MonacoEditor'; import { MONACO_EDITABLE_CONFIG } from 'components/MonacoEditor/MonacoEditor.config'; import { Text } from 'components/Text/Text'; import { TooltipBadge } from 'components/TooltipBadge/TooltipBadge'; +import { TemplatePage } from 'containers/TemplatePreview/TemplatePreview'; import { AlertTemplatesDTO } from 'models/alert_templates/alert_templates'; import { AlertGroupHelper } from 'models/alertgroup/alertgroup.helpers'; import { OutgoingWebhookResponse } from 'models/outgoing_webhook/outgoing_webhook.types'; @@ -18,13 +19,8 @@ import styles from './TemplatesAlertGroupsList.module.css'; const cx = cn.bind(styles); -export enum TEMPLATE_PAGE { - Integrations, - Webhooks, -} - interface TemplatesAlertGroupsListProps { - templatePage: TEMPLATE_PAGE; + templatePage: TemplatePage; templates: AlertTemplatesDTO[]; alertReceiveChannelId?: ApiSchemas['AlertReceiveChannel']['id']; outgoingwebhookId?: ApiSchemas['Webhook']['id']; @@ -58,12 +54,12 @@ export const TemplatesAlertGroupsList = (props: TemplatesAlertGroupsListProps) = useEffect(() => { (async () => { - if (templatePage === TEMPLATE_PAGE.Webhooks) { + if (templatePage === TemplatePage.Webhooks) { if (outgoingwebhookId !== 'new') { const res = await store.outgoingWebhookStore.getLastResponses(outgoingwebhookId); setOutgoingWebhookLastResponses(res); } - } else if (templatePage === TEMPLATE_PAGE.Integrations) { + } else if (templatePage === TemplatePage.Integrations) { const result = await AlertGroupHelper.getAlertGroupsForIntegration(alertReceiveChannelId); setAlertGroupsList(result.slice(0, 30)); onLoadAlertGroupsList(result.length > 0); @@ -140,7 +136,7 @@ export const TemplatesAlertGroupsList = (props: TemplatesAlertGroupsListProps) = value={null} disabled={true} useAutoCompleteList={false} - language={MONACO_LANGUAGE.json} + language={MonacoLanguage.json} data={templates} monacoOptions={{ ...MONACO_EDITABLE_CONFIG, @@ -168,7 +164,7 @@ export const TemplatesAlertGroupsList = (props: TemplatesAlertGroupsListProps) =
- {templatePage === TEMPLATE_PAGE.Webhooks ? renderOutgoingWebhookLastResponses() : renderAlertGroupList()} + {templatePage === TemplatePage.Webhooks ? renderOutgoingWebhookLastResponses() : renderAlertGroupList()}
)} @@ -263,7 +259,7 @@ export const TemplatesAlertGroupsList = (props: TemplatesAlertGroupsListProps) = onChange={getChangeHandler()} showLineNumbers useAutoCompleteList={false} - language={MONACO_LANGUAGE.json} + language={MonacoLanguage.json} monacoOptions={MONACO_EDITABLE_CONFIG} />
@@ -301,7 +297,7 @@ export const TemplatesAlertGroupsList = (props: TemplatesAlertGroupsListProps) = height="100%" onChange={getChangeHandler()} useAutoCompleteList={false} - language={MONACO_LANGUAGE.json} + language={MonacoLanguage.json} monacoOptions={{ ...MONACO_EDITABLE_CONFIG, readOnly: true, diff --git a/grafana-plugin/src/containers/WebhooksTemplateEditor/WebhooksTemplateEditor.tsx b/grafana-plugin/src/containers/WebhooksTemplateEditor/WebhooksTemplateEditor.tsx index 36a7d98b21..ce14b3c319 100644 --- a/grafana-plugin/src/containers/WebhooksTemplateEditor/WebhooksTemplateEditor.tsx +++ b/grafana-plugin/src/containers/WebhooksTemplateEditor/WebhooksTemplateEditor.tsx @@ -9,8 +9,9 @@ import { genericTemplateCheatSheet, webhookPayloadCheatSheet } from 'components/ import { MonacoEditor } from 'components/MonacoEditor/MonacoEditor'; import { Text } from 'components/Text/Text'; import styles from 'containers/IntegrationTemplate/IntegrationTemplate.module.scss'; +import { TemplatePage } from 'containers/TemplatePreview/TemplatePreview'; import { TemplateResult } from 'containers/TemplateResult/TemplateResult'; -import { TemplatesAlertGroupsList, TEMPLATE_PAGE } from 'containers/TemplatesAlertGroupsList/TemplatesAlertGroupsList'; +import { TemplatesAlertGroupsList } from 'containers/TemplatesAlertGroupsList/TemplatesAlertGroupsList'; import { WithPermissionControlTooltip } from 'containers/WithPermissionControl/WithPermissionControlTooltip'; import { ApiSchemas } from 'network/oncall-api/api.types'; import { UserActions } from 'utils/authorization/authorization'; @@ -117,7 +118,7 @@ export const WebhooksTemplateEditor: React.FC = ({
= ({ )} { if (!key) { return { key: undefined, values: [] }; } diff --git a/grafana-plugin/src/models/channel_filter/channel_filter.types.ts b/grafana-plugin/src/models/channel_filter/channel_filter.types.ts index ea8bf9f30a..f55651ae81 100644 --- a/grafana-plugin/src/models/channel_filter/channel_filter.types.ts +++ b/grafana-plugin/src/models/channel_filter/channel_filter.types.ts @@ -2,10 +2,12 @@ import { EscalationChain } from 'models/escalation_chain/escalation_chain.types' import { SlackChannel } from 'models/slack_channel/slack_channel.types'; import { TelegramChannel, TelegramChannelDetails } from 'models/telegram_channel/telegram_channel.types'; import { ApiSchemas } from 'network/oncall-api/api.types'; +import { components } from 'network/oncall-api/autogenerated-api.types'; export enum FilteringTermType { regex, jinja2, + labels, } export interface ChannelFilter { @@ -15,6 +17,7 @@ export interface ChannelFilter { slack_channel?: SlackChannel; telegram_channel?: TelegramChannel['id']; telegram_channel_details?: TelegramChannelDetails; + filtering_labels?: Array; created_at: string; filtering_term: string; filtering_term_as_jinja2: string; diff --git a/grafana-plugin/src/models/label/label.helpers.ts b/grafana-plugin/src/models/label/label.helpers.ts index a0c8f1d2cb..fb2d9a23e2 100644 --- a/grafana-plugin/src/models/label/label.helpers.ts +++ b/grafana-plugin/src/models/label/label.helpers.ts @@ -1,6 +1,15 @@ import { ApiSchemas } from 'network/oncall-api/api.types'; -export const splitToGroups = (labels: Array | Array) => { +export interface SplitGroupsResult { + name: string; + id: string; + expanded: boolean; + options: Array | Array; +} + +export const splitToGroups = ( + labels: Array | Array +): SplitGroupsResult[] => { return labels?.reduce( (memo, option) => { memo.find(({ name }) => name === (option.prescribed ? 
'System' : 'User added')).options.push(option); diff --git a/grafana-plugin/src/models/label/label.ts b/grafana-plugin/src/models/label/label.ts index fabef1de11..0ca89ce35d 100644 --- a/grafana-plugin/src/models/label/label.ts +++ b/grafana-plugin/src/models/label/label.ts @@ -3,6 +3,7 @@ import { action, makeObservable } from 'mobx'; import { BaseStore } from 'models/base_store'; import { makeRequest } from 'network/network'; import { ApiSchemas } from 'network/oncall-api/api.types'; +import { components } from 'network/oncall-api/autogenerated-api.types'; import { onCallApi } from 'network/oncall-api/http-client'; import { RootStore } from 'state/rootStore'; import { WithGlobalNotification } from 'utils/decorators'; @@ -17,7 +18,7 @@ export class LabelStore extends BaseStore { } @action.bound - public async loadKeys(search = '') { + public async loadKeys(search = ''): Promise> { const { data } = await onCallApi().GET('/labels/keys/', undefined); const filtered = data.filter((k) => k.name.toLowerCase().includes(search.toLowerCase())); @@ -26,11 +27,10 @@ export class LabelStore extends BaseStore { } @action.bound - async loadValuesForKey(key: ApiSchemas['LabelKey']['id'], search = '') { - if (!key) { - return []; - } - + async loadValuesForKey( + key: ApiSchemas['LabelKey']['id'], + search = '' + ): Promise { const result = await makeRequest(`${this.path}id/${key}`, { params: { search }, }); diff --git a/grafana-plugin/src/models/user/user.helpers.tsx b/grafana-plugin/src/models/user/user.helpers.tsx index 501b8e1bcd..fecb1db2c6 100644 --- a/grafana-plugin/src/models/user/user.helpers.tsx +++ b/grafana-plugin/src/models/user/user.helpers.tsx @@ -34,7 +34,7 @@ export class UserHelper { /** * NOTE: if is_currently_oncall=all the backend will not paginate the results, it will send back an array of ALL users */ - static async search(f: any = { searchTerm: '' }, page = 1) { + static async search(f: { search: string; is_currently_oncall?: boolean } | string = { search: '' }, page = 1) { const filters = typeof f === 'string' ? 
{ search: f } : f; // for GSelect compatibility return (await onCallApi().GET('/users/', { params: { query: { ...filters, page } } })).data; } diff --git a/grafana-plugin/src/models/user/user.ts b/grafana-plugin/src/models/user/user.ts index df5afbdb90..aad9e64a44 100644 --- a/grafana-plugin/src/models/user/user.ts +++ b/grafana-plugin/src/models/user/user.ts @@ -3,7 +3,6 @@ import dayjs from 'dayjs'; import { get } from 'lodash-es'; import { action, computed, runInAction, makeAutoObservable } from 'mobx'; -import { RemoteFiltersType } from 'containers/RemoteFilters/RemoteFilters.types'; import { ActionKey } from 'models/loader/action-keys'; import { NotificationPolicyType } from 'models/notification_policy/notification_policy'; import { makeRequest } from 'network/network'; @@ -38,7 +37,7 @@ export class UserStore { } async fetchItems( - f: RemoteFiltersType | string = { searchTerm: '', type: undefined, used: undefined }, + f: { search: ''; type?: string; used?: boolean; mine?: boolean } | string = { search: '' }, page = 1, invalidateFn?: () => boolean ): Promise { diff --git a/grafana-plugin/src/pages/incident/Incident.tsx b/grafana-plugin/src/pages/incident/Incident.tsx index 6c4a60c7ce..3176ca45c2 100644 --- a/grafana-plugin/src/pages/incident/Incident.tsx +++ b/grafana-plugin/src/pages/incident/Incident.tsx @@ -27,7 +27,7 @@ import CopyToClipboard from 'react-copy-to-clipboard'; import Emoji from 'react-emoji-render'; import { RouteComponentProps, withRouter } from 'react-router-dom'; import reactStringReplace from 'react-string-replace'; -import { COLORS, getLabelBackgroundTextColorObject } from 'styles/utils.styles'; +import { Colors, getLabelBackgroundTextColorObject } from 'styles/utils.styles'; import { OnCallPluginExtensionPoints } from 'types'; import errorSVG from 'assets/img/error.svg'; @@ -987,7 +987,7 @@ const getStyles = (theme: GrafanaTheme2) => { incidentsContent: css` > div:not(:last-child) { - border-bottom: 1px solid ${COLORS.BORDER}; + border-bottom: 1px solid ${Colors.BORDER}; padding-bottom: 16px; } diff --git a/grafana-plugin/src/pages/integration/CommonIntegration.helper.ts b/grafana-plugin/src/pages/integration/CommonIntegration.helper.ts index ac09abc8b7..f00177a834 100644 --- a/grafana-plugin/src/pages/integration/CommonIntegration.helper.ts +++ b/grafana-plugin/src/pages/integration/CommonIntegration.helper.ts @@ -1,4 +1,4 @@ -import { ChannelFilter } from 'models/channel_filter/channel_filter.types'; +import { ChannelFilter, FilteringTermType } from 'models/channel_filter/channel_filter.types'; export const CommonIntegrationHelper = { getRouteConditionWording(channelFilters: Array, routeIndex: number): 'Default' | 'Else' | 'If' { @@ -10,12 +10,22 @@ export const CommonIntegrationHelper = { return routeIndex ? 'Else' : 'If'; }, - getRouteConditionTooltipWording(channelFilters: Array, routeIndex: number) { + getRouteConditionTooltipWording( + channelFilters: Array, + routeIndex: number, + filteringTermType: FilteringTermType + ) { const totalCount = Object.keys(channelFilters).length; if (routeIndex === totalCount - 1) { - return 'If the alert payload does not match to the previous routes, it will go to this default route.'; + return 'If the alert payload does not match any of the previous routes, it will stick to the default route.'; } - return 'If Routing Template is True for incoming alert payload, it will be go only to this route. 
Alert will be grouped based on Grouping Template and escalated'; + + if (filteringTermType === FilteringTermType.labels) { + return 'Alerts will be grouped if they match these labels and then escalated'; + } + + // Templating + return 'Alerts will be grouped based on the evaluation of the route template and then escalated'; }, }; diff --git a/grafana-plugin/src/pages/integration/Integration.module.scss b/grafana-plugin/src/pages/integration/Integration.module.scss index c94139a357..8584a2294f 100644 --- a/grafana-plugin/src/pages/integration/Integration.module.scss +++ b/grafana-plugin/src/pages/integration/Integration.module.scss @@ -238,3 +238,18 @@ $LARGE-MARGIN: 24px; display: inline-flex; gap: 4px; } + +/* ----- + * Icons + */ + +.icon-exclamation { + color: var(--error-text-color); +} + +.route-heading { + max-width: 80%; + display: block; + text-overflow: ellipsis; + overflow: hidden; +} diff --git a/grafana-plugin/src/pages/integrations/Integrations.styles.tsx b/grafana-plugin/src/pages/integrations/Integrations.styles.tsx index 031a0150a3..fdd31f9da5 100644 --- a/grafana-plugin/src/pages/integrations/Integrations.styles.tsx +++ b/grafana-plugin/src/pages/integrations/Integrations.styles.tsx @@ -1,6 +1,6 @@ import { css } from '@emotion/css'; import { GrafanaTheme2 } from '@grafana/data'; -import { COLORS } from 'styles/utils.styles'; +import { Colors } from 'styles/utils.styles'; export const getIntegrationsStyles = (theme: GrafanaTheme2) => { return { @@ -56,7 +56,7 @@ export const getIntegrationsStyles = (theme: GrafanaTheme2) => { flex-direction: row; &:hover { - background: ${theme.isLight ? 'rgba(244, 245, 245)' : COLORS.GRAY_9}; + background: ${theme.isLight ? 'rgba(244, 245, 245)' : Colors.GRAY_9}; } `, diff --git a/grafana-plugin/src/pages/users/Users.tsx b/grafana-plugin/src/pages/users/Users.tsx index cf18d7895e..c65105b9e9 100644 --- a/grafana-plugin/src/pages/users/Users.tsx +++ b/grafana-plugin/src/pages/users/Users.tsx @@ -19,7 +19,6 @@ import { PluginLink } from 'components/PluginLink/PluginLink'; import { Text } from 'components/Text/Text'; import { TooltipBadge } from 'components/TooltipBadge/TooltipBadge'; import { RemoteFilters } from 'containers/RemoteFilters/RemoteFilters'; -import { RemoteFiltersType } from 'containers/RemoteFilters/RemoteFilters.types'; import { UserSettings } from 'containers/UserSettings/UserSettings'; import { WithPermissionControlTooltip } from 'containers/WithPermissionControl/WithPermissionControlTooltip'; import { UserHelper } from 'models/user/user.helpers'; @@ -46,7 +45,7 @@ interface UsersState extends PageBaseState { isWrongTeam: boolean; userPkToEdit?: ApiSchemas['User']['pk'] | 'new'; - filters: RemoteFiltersType; + filters: { search: ''; type: undefined; used: undefined; mine: undefined }; } @observer @@ -62,7 +61,7 @@ class Users extends React.Component { this.state = { isWrongTeam: false, userPkToEdit: undefined, - filters: { searchTerm: '', type: undefined, used: undefined, mine: undefined }, + filters: { search: '', type: undefined, used: undefined, mine: undefined }, errorData: initErrorDataState(), }; @@ -246,7 +245,7 @@ class Users extends React.Component { ); } - handleFiltersChange = (filters: RemoteFiltersType, _isOnMount: boolean) => { + handleFiltersChange = (filters: UsersState['filters'], _isOnMount: boolean) => { const { filtersStore } = this.props.store; const currentTablePage = filtersStore.currentTablePageNum[PAGE.Users]; diff --git a/grafana-plugin/src/styles/utils.styles.ts 
b/grafana-plugin/src/styles/utils.styles.ts index 13a1786af6..997ddc6760 100644 --- a/grafana-plugin/src/styles/utils.styles.ts +++ b/grafana-plugin/src/styles/utils.styles.ts @@ -116,7 +116,7 @@ export const bem = (...args: string[]) => return (out += '-'); }, ''); -export enum COLORS { +export enum Colors { ALWAYS_GREY = '#ccccdc', GRAY_8 = '#595959', GRAY_9 = '#434343', diff --git a/grafana-plugin/yarn.lock b/grafana-plugin/yarn.lock index f28148c9fd..01e8bcbaa5 100644 --- a/grafana-plugin/yarn.lock +++ b/grafana-plugin/yarn.lock @@ -3388,10 +3388,10 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.63.tgz#1788fa8da838dbb5f9ea994b834278205db6ca2b" integrity sha512-fAtCfv4jJg+ExtXhvCkCqUKZ+4ok/JQk01qDKhL5BDDoS3AxKXhV5/MAVUZyQnSEd2GT92fkgZl0pz0Q0AzcIQ== -"@types/node@^20.8.7": - version "20.11.16" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.16.tgz#4411f79411514eb8e2926f036c86c9f0e4ec6708" - integrity sha512-gKb0enTmRCzXSSUJDq6/sPcqrfCv2mkkG6Jt/clpn5eiCbKTY+SgZUxo+p8ZKMof5dCp9vHQUAB7wOUTod22wQ== +"@types/node@^20.14.10": + version "20.14.10" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.14.10.tgz#a1a218290f1b6428682e3af044785e5874db469a" + integrity sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ== dependencies: undici-types "~5.26.4" @@ -13469,16 +13469,7 @@ string-template@~0.2.1: resolved "https://registry.yarnpkg.com/string-template/-/string-template-0.2.1.tgz#42932e598a352d01fc22ec3367d9d84eec6c9add" integrity sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw== -"string-width-cjs@npm:string-width@^4.2.0": - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: +"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -13596,7 +13587,7 @@ stringify-object@^3.3.0: is-obj "^1.0.1" is-regexp "^1.0.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1": +"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -13617,13 +13608,6 @@ strip-ansi@^5.2.0: dependencies: ansi-regex "^4.1.0" -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - strip-ansi@^7.0.1: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -14964,7 +14948,8 @@ wordwrap@^1.0.0: resolved 
"https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: + name wrap-ansi-cjs version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -14982,15 +14967,6 @@ wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" diff --git a/tools/scripts/oncall_reports.py b/tools/scripts/oncall_reports.py index 1c908ce8e1..0084bff844 100644 --- a/tools/scripts/oncall_reports.py +++ b/tools/scripts/oncall_reports.py @@ -1,14 +1,16 @@ # requires requests (pip install requests) -# This script will output 3 .csv files: +# This script will output 4 .csv files: # - oncall.escalation_chains.csv: escalation chains names and their respective serialized steps # - oncall.orphaned_schedules.csv: schedules ID and name for schedules not linked to any escalation chain +# - oncall.teams.csv: teams alert groups count, mean time to acknowledge and mean time to resolve # - oncall.users.csv: users information in the speficied period -# (team, notification policies, hours on-call, # acknowledged, # resolved) +# (team, notification policies, hours on-call, has shifts scheduled, # acknowledged, # resolved) # You can run it like this: -# $ ONCALL_API_TOKEN= DAYS=7 python oncall.reports.py +# $ ONCALL_API_TOKEN= DAYS=7 python oncall_reports.py +import collections import csv import os @@ -24,10 +26,12 @@ # number of days to consider (default: last 30 days) NUM_LAST_DAYS = int(os.environ.get("DAYS", 30)) +REQUIRED_PERSONAL_NOTIFICATION_METHODS = ["phone_call", "mobile_app"] # output CSV filenames with the data ESCALATION_CHAINS_OUTPUT_FILE_NAME = "oncall.escalation_chains.csv" ORPHANED_SCHEDULES_OUTPUT_FILE_NAME = "oncall.orphaned_schedules.csv" +TEAMS_OUTPUT_FILE_NAME = "oncall.teams.csv" USERS_OUTPUT_FILE_NAME = "oncall.users.csv" @@ -38,7 +42,11 @@ users = {} teams = {} escalation_chains = {} +integrations = {} schedules = {} +ag_per_team = collections.defaultdict(int) +ttr_acc = collections.defaultdict(int) +tta_acc = collections.defaultdict(int) end_date = datetime.now(timezone.utc).replace(hour=0, minute=0, microsecond=0) start_date = end_date - timedelta(days=NUM_LAST_DAYS) @@ -99,6 +107,7 @@ def _serialize_step(p): "acknowledged_count": 0, "resolved_count": 0, hours_field_name: 0, + "shifts_scheduled": False, } page += 1 total_pages = int(response_data.get("total_pages")) @@ -121,6 +130,18 @@ def _serialize_step(p): users[u][key] = policy +# fetch integrations +# https://grafana.com/docs/grafana-cloud/alerting-and-irm/oncall/oncall-api-reference/integrations/#list-integrations +# GET {{API_URL}}/api/v1/integrations/ +print("Fetching integrations data...") +url = ONCALL_API_BASE_URL + "/api/v1/integrations/" +r = 
requests.get(url, params={"perpage": 100}, headers=headers) # TODO: handle pagination +r.raise_for_status() +results = r.json().get("results") +for i in results: + integrations[i["id"]] = i + + # get on-call schedule time # https://grafana.com/docs/grafana-cloud/alerting-and-irm/oncall/oncall-api-reference/schedules/#export-a-schedules-final-shifts @@ -173,6 +194,14 @@ def _serialize_step(p): users[ack_by]["acknowledged_count"] += 1 if resolved_by: users[resolved_by]["resolved_count"] += 1 + team_id = integrations.get(ag["integration_id"], {}).get("team_id", None) + ag_per_team[team_id] += 1 + if ag["acknowledged_at"]: + acknowledged_at = datetime.fromisoformat(ag["acknowledged_at"].replace('Z', '+00:00')) + tta_acc[team_id] += (acknowledged_at - created_at).total_seconds() + if ag["resolved_at"]: + resolved_at = datetime.fromisoformat(ag["resolved_at"].replace('Z', '+00:00')) + ttr_acc[team_id] += (resolved_at - created_at).total_seconds() page += 1 @@ -204,6 +233,27 @@ def _serialize_step(p): orphaned_schedules.remove(schedule_id) +# check shifts from non-orphaned schedules, flag users shifts +# https://grafana.com/docs/grafana-cloud/alerting-and-irm/oncall/oncall-api-reference/on_call_shifts/#list-oncall-shifts +# GET {{API_URL}}/api/v1/on_call_shifts/?schedule_id= + +print("Checking shifts from non-orphaned schedules...") +for schedule_id in schedules: + if schedule_id in orphaned_schedules: + continue + url = ONCALL_API_BASE_URL + "/api/v1/on_call_shifts/" + r = requests.get(url, params={"schedule_id": schedule_id}, headers=headers) + r.raise_for_status() + results = r.json().get("results") + for shift in results: + on_call_users = shift.get("users", []) + list({u for r in shift.get("rolling_users", []) for u in r}) + for user_id in on_call_users: + if user_id not in users: + print("Warning: user {} from schedule {} not found".format(user_id, schedule_id)) + else: + users[user_id]["shifts_scheduled"] = True + + # write orphaned schedules report with open(ORPHANED_SCHEDULES_OUTPUT_FILE_NAME, "w") as fp: fieldnames = ["schedule_id", "name"] @@ -223,10 +273,31 @@ def _serialize_step(p): csv_writer.writerow(chain_info) +# write teams report +with open(TEAMS_OUTPUT_FILE_NAME, "w") as fp: + fieldnames = ["team", "alert_group_count", "mtta", "mttr"] + csv_writer = csv.DictWriter(fp, fieldnames) + csv_writer.writeheader() + for team_id, ag_count in ag_per_team.items(): + team_name = teams[team_id] if team_id else "(None)" + csv_writer.writerow({ + "team": team_name, + "alert_group_count": ag_count, + "mtta": tta_acc[team_id] / ag_count, + "mttr": ttr_acc[team_id] / ag_count, + }) + + # write users report with open(USERS_OUTPUT_FILE_NAME, "w") as fp: - fieldnames = ["username", "email", "teams", "important", "default", hours_field_name, "acknowledged_count", "resolved_count"] + fieldnames = ["username", "email", "teams", "important", "default", "warning", hours_field_name, "shifts_scheduled", "acknowledged_count", "resolved_count"] csv_writer = csv.DictWriter(fp, fieldnames) csv_writer.writeheader() for user_info in users.values(): + warnings = [] + for method in REQUIRED_PERSONAL_NOTIFICATION_METHODS: + expected = "notify_by_{}".format(method) + if expected not in user_info["important"] and method not in user_info["default"]: + warnings.append("Missing {}".format(method)) + user_info["warning"] = ','.join(warnings) csv_writer.writerow(user_info)