From eb5f97cca98c04cc64ddd1393111041ddfeec1ca Mon Sep 17 00:00:00 2001
From: Matvey Kukuy
Date: Wed, 16 Oct 2024 13:52:57 +0300
Subject: [PATCH] feat: Storing information about who authored the link
 between Alert and an Incident (#2195)

---
 keep-ui/app/alerts/alert-sidebar.tsx          |  6 +-
 keep-ui/app/alerts/models.tsx                 |  3 +
 .../app/incidents/[id]/incident-alerts.tsx    | 14 +++
 keep/api/core/db.py                           | 54 +++++++++--
 keep/api/models/alert.py                      | 11 +++
 keep/api/models/db/alert.py                   | 62 ++++++++++--
 .../versions/2024-10-14-08-34_83c1020be97d.py | 95 +++++++++++++++++++
 keep/api/routes/incidents.py                  | 16 ++--
 keep/api/utils/enrichment_helpers.py          | 25 +++--
 keep/api/utils/pagination.py                  |  6 +-
 tests/test_incidents.py                       | 10 ++
 11 files changed, 270 insertions(+), 32 deletions(-)
 create mode 100644 keep/api/models/db/migrations/versions/2024-10-14-08-34_83c1020be97d.py

diff --git a/keep-ui/app/alerts/alert-sidebar.tsx b/keep-ui/app/alerts/alert-sidebar.tsx
index 135a97ff4..e34252a69 100644
--- a/keep-ui/app/alerts/alert-sidebar.tsx
+++ b/keep-ui/app/alerts/alert-sidebar.tsx
@@ -1,4 +1,5 @@
 import { Fragment } from "react";
+import Image from "next/image";
 import { Dialog, Transition } from "@headlessui/react";
 import { AlertDto } from "./models";
 import { Button, Title, Card, Badge } from "@tremor/react";
@@ -83,10 +84,11 @@ const AlertSidebar = ({ isOpen, toggle, alert }: AlertSidebarProps) => {
                 Severity: {alert.severity}
 
-                Source:{" "}
-                {alert.source![0]}
 
diff --git a/keep-ui/app/alerts/models.tsx b/keep-ui/app/alerts/models.tsx
index c7322c034..afab6f3d4 100644
--- a/keep-ui/app/alerts/models.tsx
+++ b/keep-ui/app/alerts/models.tsx
@@ -62,6 +62,9 @@
   note?: string;
   isNoisy?: boolean;
   enriched_fields: string[];
   incident?: string;
+
+  // From AlertWithIncidentLinkMetadataDto
+  is_created_by_ai?: boolean;
 }
 
 interface Option {
diff --git a/keep-ui/app/incidents/[id]/incident-alerts.tsx b/keep-ui/app/incidents/[id]/incident-alerts.tsx
index e4946ac37..0177cc705 100644
--- a/keep-ui/app/incidents/[id]/incident-alerts.tsx
+++ b/keep-ui/app/incidents/[id]/incident-alerts.tsx
@@ -103,6 +103,20 @@ export default function IncidentAlerts({ incident }: Props) {
       minSize: 100,
       header: "Status",
     }),
+    columnHelper.accessor("is_created_by_ai", {
+      id: "is_created_by_ai",
+      header: "🔗",
+      minSize: 50,
+      cell: (context) => (
+        <>
+          {context.getValue() ? (
+            🤖
+          ) : (
+            👨‍💻
+          )}
+        </>
+      ),
+    }),
     columnHelper.accessor("lastReceived", {
       id: "lastReceived",
       header: "Last Received",
diff --git a/keep/api/core/db.py b/keep/api/core/db.py
index 7bb3b7663..15b3b9dd3 100644
--- a/keep/api/core/db.py
+++ b/keep/api/core/db.py
@@ -1231,7 +1231,8 @@ def get_last_alerts(
     if with_incidents:
         query = query.add_columns(AlertToIncident.incident_id.label("incident"))
         query = query.outerjoin(
-            AlertToIncident, AlertToIncident.alert_id == Alert.id,
+            AlertToIncident,
+            and_(AlertToIncident.alert_id == Alert.id, AlertToIncident.deleted_at == NULL_FOR_DELETED_AT),
         )
 
     if provider_id:
@@ -1693,7 +1694,10 @@ def get_rule_distribution(tenant_id, minute=False):
         )
         .join(Incident, Rule.id == Incident.rule_id)
         .join(AlertToIncident, Incident.id == AlertToIncident.incident_id)
-        .filter(AlertToIncident.timestamp >= seven_days_ago)
+        .filter(
+            AlertToIncident.deleted_at == NULL_FOR_DELETED_AT,
+            AlertToIncident.timestamp >= seven_days_ago
+        )
         .filter(Rule.tenant_id == tenant_id)  # Filter by tenant_id
         .group_by(
             "rule_id", "rule_name", "incident_id", "rule_fingerprint", "time"
         )
@@ -2387,6 +2391,7 @@ def is_alert_assigned_to_incident(
             .where(AlertToIncident.alert_id == alert_id)
             .where(AlertToIncident.incident_id == incident_id)
             .where(AlertToIncident.tenant_id == tenant_id)
+            .where(AlertToIncident.deleted_at == NULL_FOR_DELETED_AT)
         ).first()
     return assigned is not None
 
@@ -2829,17 +2834,19 @@ def get_incidents_count(
     )
 
 
-def get_incident_alerts_by_incident_id(
+def get_incident_alerts_and_links_by_incident_id(
     tenant_id: str,
     incident_id: UUID | str,
     limit: Optional[int] = None,
     offset: Optional[int] = None,
     session: Optional[Session] = None,
-) -> (List[Alert], int):
+    include_unlinked: bool = False,
+) -> tuple[List[tuple[Alert, AlertToIncident]], int]:
     with existed_or_new_session(session) as session:
         query = (
             session.query(
                 Alert,
+                AlertToIncident,
             )
             .join(AlertToIncident, AlertToIncident.alert_id == Alert.id)
             .join(Incident, AlertToIncident.incident_id == Incident.id)
@@ -2849,6 +2856,10 @@ def get_incident_alerts_by_incident_id(
             )
             .order_by(col(Alert.timestamp).desc())
         )
+        if not include_unlinked:
+            query = query.filter(
+                AlertToIncident.deleted_at == NULL_FOR_DELETED_AT,
+            )
 
         total_count = query.count()
 
@@ -2858,12 +2869,20 @@ def get_incident_alerts_by_incident_id(
 
         return query.all(), total_count
 
 
+def get_incident_alerts_by_incident_id(*args, **kwargs) -> tuple[List[Alert], int]:
+    """
+    Unpacking (List[(Alert, AlertToIncident)], int) to (List[Alert], int).
+ """ + alerts_and_links, total_alerts = get_incident_alerts_and_links_by_incident_id(*args, **kwargs) + alerts = [alert_and_link[0] for alert_and_link in alerts_and_links] + return alerts, total_alerts + def get_future_incidents_by_incident_id( incident_id: str, limit: Optional[int] = None, offset: Optional[int] = None, -) -> (List[Incident], int): +) -> tuple[List[Incident], int]: with Session(engine) as session: query = ( session.query( @@ -2936,6 +2955,7 @@ def add_alerts_to_incident_by_incident_id( tenant_id: str, incident_id: str | UUID, alert_ids: List[UUID], + is_created_by_ai: bool = False, session: Optional[Session] = None, ) -> Optional[Incident]: with existed_or_new_session(session) as session: @@ -2947,13 +2967,14 @@ def add_alerts_to_incident_by_incident_id( if not incident: return None - return add_alerts_to_incident(tenant_id, incident, alert_ids, session) + return add_alerts_to_incident(tenant_id, incident, alert_ids, is_created_by_ai, session) def add_alerts_to_incident( tenant_id: str, incident: Incident, alert_ids: List[UUID], + is_created_by_ai: bool = False, session: Optional[Session] = None, ) -> Optional[Incident]: logger.info( @@ -2967,6 +2988,7 @@ def add_alerts_to_incident( existing_alert_ids = set( session.exec( select(AlertToIncident.alert_id).where( + AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, AlertToIncident.tenant_id == tenant_id, AlertToIncident.incident_id == incident.id, col(AlertToIncident.alert_id).in_(alert_ids), @@ -2993,7 +3015,7 @@ def add_alerts_to_incident( alert_to_incident_entries = [ AlertToIncident( - alert_id=alert_id, incident_id=incident.id, tenant_id=tenant_id + alert_id=alert_id, incident_id=incident.id, tenant_id=tenant_id, is_created_by_ai=is_created_by_ai ) for alert_id in new_alert_ids ] @@ -3014,6 +3036,7 @@ def add_alerts_to_incident( select(func.min(Alert.timestamp), func.max(Alert.timestamp)) .join(AlertToIncident, AlertToIncident.alert_id == Alert.id) .where( + AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, AlertToIncident.tenant_id == tenant_id, AlertToIncident.incident_id == incident.id, ) @@ -3036,6 +3059,7 @@ def get_incident_unique_fingerprint_count(tenant_id: str, incident_id: str) -> i .select_from(AlertToIncident) .join(Alert, AlertToIncident.alert_id == Alert.id) .where( + AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, Alert.tenant_id == tenant_id, AlertToIncident.incident_id == incident_id, ) @@ -3053,6 +3077,7 @@ def get_last_alerts_for_incidents( ) .join(AlertToIncident, Alert.id == AlertToIncident.alert_id) .filter( + AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, AlertToIncident.incident_id.in_(incident_ids), ) .order_by(Alert.timestamp.desc()) @@ -3085,11 +3110,13 @@ def remove_alerts_to_incident_by_incident_id( deleted = ( session.query(AlertToIncident) .where( + AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, AlertToIncident.tenant_id == tenant_id, AlertToIncident.incident_id == incident.id, col(AlertToIncident.alert_id).in_(alert_ids), - ) - .delete() + ).update({ + "deleted_at": datetime.now(datetime.now().astimezone().tzinfo), + }) ) session.commit() @@ -3104,6 +3131,7 @@ def remove_alerts_to_incident_by_incident_id( select(func.distinct(service_field)) .join(AlertToIncident, Alert.id == AlertToIncident.alert_id) .filter( + AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, AlertToIncident.incident_id == incident_id, service_field.in_(alerts_data_for_incident["services"]), ) @@ -3116,6 +3144,7 @@ def remove_alerts_to_incident_by_incident_id( select(col(Alert.provider_type).distinct()) 
                .join(AlertToIncident, Alert.id == AlertToIncident.alert_id)
                .filter(
+                    AlertToIncident.deleted_at == NULL_FOR_DELETED_AT,
                     AlertToIncident.incident_id == incident_id,
                     col(Alert.provider_type).in_(alerts_data_for_incident["sources"]),
                 )
@@ -3522,7 +3551,10 @@ def get_workflow_executions_for_incident_or_alert(
             Alert, WorkflowToAlertExecution.alert_fingerprint == Alert.fingerprint
         )
         .join(AlertToIncident, Alert.id == AlertToIncident.alert_id)
-        .where(AlertToIncident.incident_id == incident_id)
+        .where(
+            AlertToIncident.deleted_at == NULL_FOR_DELETED_AT,
+            AlertToIncident.incident_id == incident_id
+        )
     )
 
     # Combine both queries
@@ -3564,6 +3596,7 @@ def is_all_incident_alerts_resolved(incident: Incident, session: Optional[Sessio
         .outerjoin(AlertEnrichment, Alert.fingerprint == AlertEnrichment.alert_fingerprint)
         .join(AlertToIncident, AlertToIncident.alert_id == Alert.id)
         .where(
+            AlertToIncident.deleted_at == NULL_FOR_DELETED_AT,
             AlertToIncident.incident_id == incident.id,
         )
         .group_by(Alert.fingerprint)
@@ -3620,6 +3653,7 @@ def is_edge_incident_alert_resolved(incident: Incident, direction: Callable, ses
         .outerjoin(AlertEnrichment, Alert.fingerprint == AlertEnrichment.alert_fingerprint)
         .join(AlertToIncident, AlertToIncident.alert_id == Alert.id)
         .where(
+            AlertToIncident.deleted_at == NULL_FOR_DELETED_AT,
             AlertToIncident.incident_id == incident.id
         )
         .group_by(Alert.fingerprint)
diff --git a/keep/api/models/alert.py b/keep/api/models/alert.py
index e616e29f1..c49a89bfb 100644
--- a/keep/api/models/alert.py
+++ b/keep/api/models/alert.py
@@ -331,6 +331,17 @@ class Config:
         }
 
 
+class AlertWithIncidentLinkMetadataDto(AlertDto):
+    is_created_by_ai: bool = False
+
+    @classmethod
+    def from_db_instance(cls, db_alert, db_alert_to_incident):
+        return cls(
+            is_created_by_ai=db_alert_to_incident.is_created_by_ai,
+            **db_alert.event,
+        )
+
+
 class DeleteRequestBody(BaseModel):
     fingerprint: str
     lastReceived: str
diff --git a/keep/api/models/db/alert.py b/keep/api/models/db/alert.py
index 6f2422070..171c6f612 100644
--- a/keep/api/models/db/alert.py
+++ b/keep/api/models/db/alert.py
@@ -44,10 +44,15 @@
 
 datetime_column_type = DateTime
 
+# We want to include the deleted_at field in the primary key,
+# but we also want to allow it to be nullable.
+# MySQL doesn't allow nullable fields in primary keys, so:
+NULL_FOR_DELETED_AT = datetime(1000, 1, 1, 0, 0)
+
 class AlertToIncident(SQLModel, table=True):
     tenant_id: str = Field(foreign_key="tenant.id")
-    alert_id: UUID = Field(foreign_key="alert.id", primary_key=True)
     timestamp: datetime = Field(default_factory=datetime.utcnow)
+
+    alert_id: UUID = Field(foreign_key="alert.id", primary_key=True)
     incident_id: UUID = Field(
         sa_column=Column(
             UUIDType(binary=False),
@@ -55,7 +60,17 @@ class AlertToIncident(SQLModel, table=True):
             primary_key=True,
         )
     )
-
+    alert: "Alert" = Relationship(back_populates="alert_to_incident_link")
+    incident: "Incident" = Relationship(back_populates="alert_to_incident_link")
+
+    is_created_by_ai: bool = Field(default=False)
+
+    deleted_at: datetime = Field(
+        default_factory=None,
+        nullable=True,
+        primary_key=True,
+        default=NULL_FOR_DELETED_AT,
+    )
 
 class Incident(SQLModel, table=True):
     id: UUID = Field(default_factory=uuid4, primary_key=True)
@@ -82,7 +97,24 @@ class Incident(SQLModel, table=True):
     # map of attributes to values
     alerts: List["Alert"] = Relationship(
-        back_populates="incidents", link_model=AlertToIncident
+        back_populates="incidents", link_model=AlertToIncident,
+        # primaryjoin is used to filter out deleted links for various DB dialects
+        sa_relationship_kwargs={
+            "primaryjoin": f"""and_(AlertToIncident.incident_id == Incident.id,
+                or_(
+                    AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S.%f')}',
+                    AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S')}'
+                ))""",
+            "uselist": True,
+            "overlaps": "alert,incident",
+        }
+
     )
+    alert_to_incident_link: List[AlertToIncident] = Relationship(
+        back_populates="incident",
+        sa_relationship_kwargs={
+            "overlaps": "alerts,incidents"
+        }
+    )
 
     is_predicted: bool = Field(default=False)
@@ -150,9 +182,6 @@ class Alert(SQLModel, table=True):
     event: dict = Field(sa_column=Column(JSON))
     fingerprint: str = Field(index=True)  # Add the fingerprint field with an index
 
-    incidents: List["Incident"] = Relationship(
-        back_populates="alerts", link_model=AlertToIncident
-    )
     # alert_hash is different than fingerprint, it is a hash of the alert itself
     # and it is used for deduplication.
     # alert can be different but have the same fingerprint
     # (e.g. different "firing" and "resolved" will have the same fingerprint but not the same alert_hash)
@@ -166,6 +195,27 @@ class Alert(SQLModel, table=True):
         }
     )
 
+    incidents: List["Incident"] = Relationship(
+        back_populates="alerts",
+        link_model=AlertToIncident,
+        sa_relationship_kwargs={
+            # primaryjoin is used to filter out deleted links for various DB dialects
+            "primaryjoin": f"""and_(AlertToIncident.alert_id == Alert.id,
+                or_(
+                    AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S.%f')}',
+                    AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S')}'
+                ))""",
+            "uselist": True,
+            "overlaps": "alert,incident",
+        }
+    )
+    alert_to_incident_link: List[AlertToIncident] = Relationship(
+        back_populates="alert",
+        sa_relationship_kwargs={
+            "overlaps": "alerts,incidents"
+        }
+    )
+
     class Config:
         arbitrary_types_allowed = True
diff --git a/keep/api/models/db/migrations/versions/2024-10-14-08-34_83c1020be97d.py b/keep/api/models/db/migrations/versions/2024-10-14-08-34_83c1020be97d.py
new file mode 100644
index 000000000..01efc9e3b
--- /dev/null
+++ b/keep/api/models/db/migrations/versions/2024-10-14-08-34_83c1020be97d.py
@@ -0,0 +1,95 @@
+"""Alert To Incident link history
+
+Revision ID: 83c1020be97d
+Revises: bf756df80e9d
+Create Date: 2024-10-14 08:34:46.608806
+
+"""
+
+from sqlalchemy import inspect
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.sql import expression
+from contextlib import contextmanager
+
+# revision identifiers, used by Alembic.
+revision = "83c1020be97d"
+down_revision = "bf756df80e9d"
+branch_labels = None
+depends_on = None
+
+
+@contextmanager
+def drop_and_restore_f_keys(table_name, conn):
+    inspector = inspect(conn)
+    existing_f_keys = inspector.get_foreign_keys(table_name, schema=None)
+
+    print(f"Existing foreign keys: {existing_f_keys}")
+
+    # Drop all foreign keys
+    for fk in existing_f_keys:
+        op.drop_constraint(fk['name'], table_name, type_='foreignkey')
+        print(f"Dropped foreign key: {fk['name']}")
+    try:
+        yield
+    finally:
+        # Restore all foreign keys
+        for fk in existing_f_keys:
+            op.create_foreign_key(
+                fk['name'],
+                table_name,
+                fk['referred_table'],
+                fk['constrained_columns'],
+                fk['referred_columns'],
+                ondelete=fk['options'].get('ondelete')
+            )
+            print(f"Restored foreign key: {fk['name']}")
+
+
+def upgrade() -> None:
+    with op.batch_alter_table("alerttoincident", schema=None) as batch_op:
+        batch_op.add_column(sa.Column(
+            "is_created_by_ai",
+            sa.Boolean(),
+            nullable=False,
+            server_default=expression.false()
+        ))
+        batch_op.add_column(sa.Column(
+            "deleted_at",
+            sa.DateTime(),
+            nullable=False,
+            server_default="1000-01-01 00:00:00",
+        ))
+
+    conn = op.get_bind()
+
+    with drop_and_restore_f_keys("alerttoincident", conn):
+
+        with op.batch_alter_table("alerttoincident", schema=None) as batch_op:
+            inspector = inspect(conn)
+            existing_primary_key = inspector.get_pk_constraint('alerttoincident', schema=None)
+            batch_op.drop_constraint(existing_primary_key['name'], type_="primary")
+
+        with op.batch_alter_table("alerttoincident", schema=None) as batch_op:
+            batch_op.create_primary_key(
+                "alerttoincident_pkey", ["alert_id", "incident_id", "deleted_at"]
+            )
+
+
+def downgrade() -> None:
+    conn = op.get_bind()
+    inspector = inspect(conn)
+
+    existing_primary_key = inspector.get_pk_constraint('alerttoincident', schema=None)
+
+    with op.batch_alter_table("alerttoincident", schema=None) as batch_op:
+        batch_op.drop_column("deleted_at")
+        batch_op.drop_column("is_created_by_ai")
+
+    with drop_and_restore_f_keys("alerttoincident", conn):
+        with op.batch_alter_table("alerttoincident", schema=None) as batch_op:
+            batch_op.drop_constraint(existing_primary_key['name'], type_="primary")
+        with op.batch_alter_table("alerttoincident", schema=None) as batch_op:
+            batch_op.create_primary_key(
+                "alerttoincident_pkey", ["alert_id", "incident_id"]
+            )
diff --git a/keep/api/routes/incidents.py b/keep/api/routes/incidents.py
index b7835faa1..9160d9e62 100644
--- a/keep/api/routes/incidents.py
+++ b/keep/api/routes/incidents.py
@@ -18,6 +18,7 @@
     delete_incident_by_id,
     get_future_incidents_by_incident_id,
     get_incident_alerts_by_incident_id,
+    get_incident_alerts_and_links_by_incident_id,
     get_incident_by_id,
     get_incident_unique_fingerprint_count,
     get_last_incidents,
@@ -45,7 +46,7 @@
 from keep.api.utils.enrichment_helpers import convert_db_alerts_to_dto_alerts
 from keep.api.utils.import_ee import mine_incidents_and_create_objects
 from keep.api.utils.pagination import (
-    AlertPaginatedResultsDto,
+    AlertWithIncidentLinkMetadataPaginatedResultsDto,
     IncidentsPaginatedResultsDto,
     WorkflowExecutionsPaginatedResultsDto,
 )
@@ -355,10 +356,11 @@ def get_incident_alerts(
     incident_id: UUID,
     limit: int = 25,
     offset: int = 0,
+    include_unlinked: bool = False,
     authenticated_entity: AuthenticatedEntity = Depends(
         IdentityManagerFactory.get_auth_verifier(["read:incidents"])
     ),
-) -> AlertPaginatedResultsDto:
+) -> AlertWithIncidentLinkMetadataPaginatedResultsDto:
     tenant_id = authenticated_entity.tenant_id
     logger.info(
         "Fetching incident",
@@ -378,14 +380,15 @@ def get_incident_alerts(
             "tenant_id": tenant_id,
         },
     )
-    db_alerts, total_count = get_incident_alerts_by_incident_id(
+    db_alerts_and_links, total_count = get_incident_alerts_and_links_by_incident_id(
         tenant_id=tenant_id,
         incident_id=incident_id,
         limit=limit,
         offset=offset,
+        include_unlinked=include_unlinked,
     )
 
-    enriched_alerts_dto = convert_db_alerts_to_dto_alerts(db_alerts)
+    enriched_alerts_dto = convert_db_alerts_to_dto_alerts(db_alerts_and_links)
     logger.info(
         "Fetched alerts from DB",
         extra={
             "tenant_id": tenant_id,
         },
     )
@@ -393,7 +396,7 @@ def get_incident_alerts(
 
-    return AlertPaginatedResultsDto(
+    return AlertWithIncidentLinkMetadataPaginatedResultsDto(
         limit=limit, offset=offset, count=total_count, items=enriched_alerts_dto
     )
@@ -492,6 +495,7 @@ def get_incident_workflows(
 async def add_alerts_to_incident(
     incident_id: UUID,
     alert_ids: List[UUID],
+    is_created_by_ai: bool = False,
     authenticated_entity: AuthenticatedEntity = Depends(
         IdentityManagerFactory.get_auth_verifier(["write:incident"])
     ),
@@ -509,7 +513,7 @@ async def add_alerts_to_incident(
     if not incident:
         raise HTTPException(status_code=404, detail="Incident not found")
 
-    add_alerts_to_incident_by_incident_id(tenant_id, incident_id, alert_ids)
+    add_alerts_to_incident_by_incident_id(tenant_id, incident_id, alert_ids, is_created_by_ai)
     try:
         logger.info("Pushing enriched alert to elasticsearch")
         elastic_client = ElasticClient(tenant_id)
diff --git a/keep/api/utils/enrichment_helpers.py b/keep/api/utils/enrichment_helpers.py
index 05f36d25b..86e9795be 100644
--- a/keep/api/utils/enrichment_helpers.py
+++ b/keep/api/utils/enrichment_helpers.py
@@ -3,8 +3,8 @@
 
 from opentelemetry import trace
 
-from keep.api.models.alert import AlertDto, AlertStatus
-from keep.api.models.db.alert import Alert
+from keep.api.models.alert import AlertDto, AlertStatus, AlertWithIncidentLinkMetadataDto
+from keep.api.models.db.alert import Alert, AlertToIncident
 
 tracer = trace.get_tracer(__name__)
 logger = logging.getLogger(__name__)
@@ -76,7 +76,10 @@ def calculated_start_firing_time(
         return alert.lastReceived
 
 
-def convert_db_alerts_to_dto_alerts(alerts: list[Alert], with_incidents: bool = False) -> list[AlertDto]:
+def convert_db_alerts_to_dto_alerts(
+    alerts: list[Alert | tuple[Alert, AlertToIncident]],
+    with_incidents: bool = False
+    ) -> list[AlertDto | AlertWithIncidentLinkMetadataDto]:
     """
     Enriches the alerts with the enrichment data.
 
@@ -85,19 +88,29 @@ def convert_db_alerts_to_dto_alerts(alerts: list[Alert], with_incidents: bool =
         with_incidents (bool): enrich with incidents data
 
     Returns:
-        list[AlertDto]: The enriched alerts.
+        list[AlertDto | AlertWithIncidentLinkMetadataDto]: The enriched alerts.
     """
     alerts_dto = []
     with tracer.start_as_current_span("alerts_enrichment"):
         # enrich the alerts with the enrichment data
-        for alert in alerts:
+        for _object in alerts:
+
+            # We may have an Alert only or an Alert with an AlertToIncident
+            if isinstance(_object, Alert):
+                alert, alert_to_incident = _object, None
+            else:
+                alert, alert_to_incident = _object
+
             if alert.alert_enrichment:
                 alert.event.update(alert.alert_enrichment.enrichments)
             if with_incidents:
                 if alert.incidents:
                     alert.event["incident"] = ",".join(str(incident.id) for incident in alert.incidents)
             try:
-                alert_dto = AlertDto(**alert.event)
+                if alert_to_incident is not None:
+                    alert_dto = AlertWithIncidentLinkMetadataDto.from_db_instance(alert, alert_to_incident)
+                else:
+                    alert_dto = AlertDto(**alert.event)
                 if alert.alert_enrichment:
                     parse_and_enrich_deleted_and_assignees(
                         alert_dto, alert.alert_enrichment.enrichments
diff --git a/keep/api/utils/pagination.py b/keep/api/utils/pagination.py
index c7390b33e..7f6db3160 100644
--- a/keep/api/utils/pagination.py
+++ b/keep/api/utils/pagination.py
@@ -2,11 +2,10 @@
 
 from pydantic import BaseModel
 
-from keep.api.models.alert import AlertDto, IncidentDto
+from keep.api.models.alert import AlertDto, IncidentDto, AlertWithIncidentLinkMetadataDto
 from keep.api.models.db.workflow import *  # pylint: disable=unused-wildcard-import
 from typing import Optional
 from keep.api.models.workflow import WorkflowDTO, WorkflowExecutionDTO
-
 
 class PaginatedResultsDto(BaseModel):
     limit: int = 25
     offset: int = 0
@@ -22,6 +21,9 @@ class AlertPaginatedResultsDto(PaginatedResultsDto):
     items: list[AlertDto]
 
 
+class AlertWithIncidentLinkMetadataPaginatedResultsDto(PaginatedResultsDto):
+    items: list[AlertWithIncidentLinkMetadataDto]
+
 class WorkflowExecutionsPaginatedResultsDto(PaginatedResultsDto):
     items: list[WorkflowExecutionDTO]
     passCount: int = 0
diff --git a/tests/test_incidents.py b/tests/test_incidents.py
index 7337d51da..0ffff3d06 100644
--- a/tests/test_incidents.py
+++ b/tests/test_incidents.py
@@ -13,6 +13,7 @@
     get_incident_by_id,
     get_last_incidents,
     remove_alerts_to_incident_by_incident_id,
+    get_incident_alerts_by_incident_id
 )
 from keep.api.core.db_utils import get_json_extract_field
 from keep.api.core.dependencies import SINGLE_TENANT_UUID
@@ -52,6 +53,8 @@ def test_add_remove_alert_to_incidents(db_session, setup_stress_alerts_no_elasti
 
     incident = get_incident_by_id(SINGLE_TENANT_UUID, incident.id)
 
+    assert len(incident.alerts) == 100
+
     assert sorted(incident.affected_services) == sorted(
         ["service_{}".format(i) for i in range(10)]
     )
@@ -84,6 +87,13 @@ def test_add_remove_alert_to_incidents(db_session, setup_stress_alerts_no_elasti
         SINGLE_TENANT_UUID, incident.id, [a.id for a in service_0]
     )
 
+    # Removing shouldn't impact links between alert and incident if include_unlinked=True
+    assert len(get_incident_alerts_by_incident_id(
+        incident_id=incident.id,
+        tenant_id=incident.tenant_id,
+        include_unlinked=True
+    )[0]) == 100
+
     incident = get_incident_by_id(SINGLE_TENANT_UUID, incident.id)
 
     assert len(incident.alerts) == 90
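
A note on the recurring `AlertToIncident.deleted_at == NULL_FOR_DELETED_AT` predicate: `deleted_at` is part of the composite primary key, and MySQL rejects nullable primary-key columns, so a far-past sentinel timestamp stands in for NULL and "unlinking" becomes an UPDATE rather than a DELETE. A minimal, self-contained sketch of the pattern with a toy `Link` model on SQLite (not Keep's actual classes):

from datetime import datetime

from sqlmodel import Field, Session, SQLModel, create_engine, select

# Sentinel meaning "not deleted"; a real NULL is not allowed in a PK on MySQL.
NULL_FOR_DELETED_AT = datetime(1000, 1, 1, 0, 0)


class Link(SQLModel, table=True):
    alert_id: int = Field(primary_key=True)
    incident_id: int = Field(primary_key=True)
    # Part of the PK, so the same pair can be re-linked after an unlink.
    deleted_at: datetime = Field(default=NULL_FOR_DELETED_AT, primary_key=True)


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Link(alert_id=1, incident_id=1))
    session.commit()

    # "Unlink": stamp the row instead of deleting it, mirroring
    # remove_alerts_to_incident_by_incident_id above.
    session.query(Link).filter(
        Link.alert_id == 1,
        Link.incident_id == 1,
        Link.deleted_at == NULL_FOR_DELETED_AT,
    ).update({"deleted_at": datetime.utcnow()})
    session.commit()

    # Active links only: every read-side query filters on the sentinel.
    active = session.exec(
        select(Link).where(Link.deleted_at == NULL_FOR_DELETED_AT)
    ).all()
    assert active == []

    # History survives, so include_unlinked-style queries can still see the row.
    assert len(session.exec(select(Link)).all()) == 1

    # And the pair can be linked again without violating the composite PK.
    session.add(Link(alert_id=1, incident_id=1))
    session.commit()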
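
The doubled `or_()` branches inside the `primaryjoin` strings compare `deleted_at` against two textual renderings of the same sentinel because, as the in-code comment notes, dialects differ on whether DATETIME values round-trip with microseconds. A stdlib-only illustration of the two literals being matched:

from datetime import datetime

NULL_FOR_DELETED_AT = datetime(1000, 1, 1, 0, 0)

# The two forms the relationship join treats as "not deleted":
print(NULL_FOR_DELETED_AT.strftime("%Y-%m-%d %H:%M:%S.%f"))  # 1000-01-01 00:00:00.000000
print(NULL_FOR_DELETED_AT.strftime("%Y-%m-%d %H:%M:%S"))     # 1000-01-01 00:00:00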
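
`convert_db_alerts_to_dto_alerts` now accepts either bare `Alert` rows or the `(Alert, AlertToIncident)` tuples returned by `get_incident_alerts_and_links_by_incident_id`, so existing callers keep working. A stripped-down sketch of that dispatch with stand-in dataclasses (not the real models):

from dataclasses import dataclass
from typing import Optional, Union


@dataclass
class Alert:
    name: str


@dataclass
class AlertToIncident:
    is_created_by_ai: bool


def split_link(
    item: Union[Alert, tuple],
) -> tuple[Alert, Optional[AlertToIncident]]:
    # Mirrors the isinstance() dispatch in convert_db_alerts_to_dto_alerts:
    # a bare Alert carries no link metadata, a tuple carries it alongside.
    if isinstance(item, Alert):
        return item, None
    alert, link = item
    return alert, link


print(split_link(Alert("cpu high")))
print(split_link((Alert("cpu high"), AlertToIncident(is_created_by_ai=True))))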
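
Seen from a client, the change is one extra query parameter on each side of the link. A hypothetical call sequence against the incidents routes (base URL, API key, route prefix, and IDs are placeholders, not taken from this patch; assumes the `requests` package):

import requests

BASE = "http://localhost:8080"  # assumed local Keep API
HEADERS = {"Authorization": "Bearer <api-key>"}  # placeholder credential

incident_id = "00000000-0000-0000-0000-000000000000"  # placeholder
alert_ids = ["11111111-1111-1111-1111-111111111111"]  # placeholder

# Link alerts, recording that an AI correlation (not a user) authored the link.
requests.post(
    f"{BASE}/incidents/{incident_id}/alerts",
    params={"is_created_by_ai": "true"},
    json=alert_ids,
    headers=HEADERS,
)

# Read the alerts back; include_unlinked=True also returns alerts whose
# link was since soft-deleted (deleted_at stamped, row retained).
resp = requests.get(
    f"{BASE}/incidents/{incident_id}/alerts",
    params={"include_unlinked": "true", "limit": 25, "offset": 0},
    headers=HEADERS,
)
for item in resp.json()["items"]:
    print(item.get("name"), item.get("is_created_by_ai"))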