diff --git a/keep-ui/app/incidents/[id]/incident-alert-menu.tsx b/keep-ui/app/incidents/[id]/incident-alert-menu.tsx
index 03d7c615e..5ab2604ed 100644
--- a/keep-ui/app/incidents/[id]/incident-alert-menu.tsx
+++ b/keep-ui/app/incidents/[id]/incident-alert-menu.tsx
@@ -1,10 +1,10 @@
-import { Icon } from "@tremor/react";
 import { AlertDto } from "app/alerts/models";
 import { useSession } from "next-auth/react";
 import { toast } from "react-toastify";
 import { useApiUrl } from "utils/hooks/useConfig";
 import { useIncidentAlerts } from "utils/hooks/useIncidents";
-import { LinkSlashIcon } from "@heroicons/react/24/outline";
+import { Icon } from "@tremor/react";
+import LinkSlashIcon from "@heroicons/react/24/outline/LinkSlashIcon";
 
 interface Props {
   incidentId: string;
diff --git a/keep-ui/app/incidents/incident-table-component.tsx b/keep-ui/app/incidents/incident-table-component.tsx
index 47dbe6e86..33d356512 100644
--- a/keep-ui/app/incidents/incident-table-component.tsx
+++ b/keep-ui/app/incidents/incident-table-component.tsx
@@ -51,8 +51,8 @@ const SortableHeaderCell = ({ header, children }: SortableHeaderCellProps) => {
                 column.getNextSortingOrder() === "asc"
                   ? "Sort ascending"
                   : column.getNextSortingOrder() === "desc"
-                    ? "Sort descending"
-                    : "Clear sort"
+                  ? "Sort descending"
+                  : "Clear sort"
               }
               icon={
                 column.getIsSorted()
@@ -77,10 +77,10 @@ export const IncidentTableComponent = (props: Props) => {
   return (
     <Table>
       <TableHead>
-        {table.getHeaderGroups().map((headerGroup) => (
+        {table.getHeaderGroups().map((headerGroup, index) => (
           <TableRow
             className="border-b border-tremor-border dark:border-dark-tremor-border"
-            key={headerGroup.id}
+            key={`header-${index}`}
           >
             {headerGroup.headers.map((header) => {
               return (
diff --git a/keep-ui/utils/hooks/usePusher.ts b/keep-ui/utils/hooks/usePusher.ts
index 57828a2a3..ab9c05a47 100644
--- a/keep-ui/utils/hooks/usePusher.ts
+++ b/keep-ui/utils/hooks/usePusher.ts
@@ -13,9 +13,6 @@ export const useWebsocket = () => {
   const { data: session } = useSession();
   let channelName = `private-${session?.tenantId}`;
 
-  console.log("useWebsocket: Initializing with config:", configData);
-  console.log("useWebsocket: Session:", session);
-
   if (
     PUSHER === null &&
     configData !== undefined &&
@@ -23,11 +20,9 @@ export const useWebsocket = () => {
     configData.PUSHER_DISABLED === false
   ) {
     channelName = `private-${session?.tenantId}`;
-    console.log("useWebsocket: Creating new Pusher instance");
     try {
       const isRelativeHost =
         configData.PUSHER_HOST && !configData.PUSHER_HOST.includes("://");
-      console.log("useWebsocket: isRelativeHost:", isRelativeHost);
       PUSHER = new Pusher(configData.PUSHER_APP_KEY, {
         wsHost: isRelativeHost
           ? window.location.hostname
@@ -50,46 +45,23 @@ export const useWebsocket = () => {
           },
         },
       });
-      console.log("useWebsocket: Pusher instance created successfully");
-
-      PUSHER.connection.bind("connected", () => {
-        console.log("useWebsocket: Pusher connected successfully");
-      });
 
-      PUSHER.connection.bind("error", (err: any) => {
-        console.error("useWebsocket: Pusher connection error:", err);
-      });
-
-      PUSHER.subscribe(channelName)
-        .bind("pusher:subscription_succeeded", () => {
-          console.log(
-            `useWebsocket: Successfully subscribed to ${channelName}`
-          );
-        })
-        .bind("pusher:subscription_error", (err: any) => {
-          console.error(
-            `useWebsocket: Subscription error for ${channelName}:`,
-            err
-          );
-        });
+      PUSHER.subscribe(channelName);
     } catch (error) {
       console.error("useWebsocket: Error creating Pusher instance:", error);
     }
   }
 
   const subscribe = useCallback(() => {
-    console.log(`useWebsocket: Subscribing to ${channelName}`);
     return PUSHER?.subscribe(channelName);
   }, [channelName]);
 
   const unsubscribe = useCallback(() => {
-    console.log(`useWebsocket: Unsubscribing from ${channelName}`);
     return PUSHER?.unsubscribe(channelName);
   }, [channelName]);
 
   const bind = useCallback(
     (event: any, callback: any) => {
-      console.log(`useWebsocket: Binding to event ${event} on ${channelName}`);
       return PUSHER?.channel(channelName)?.bind(event, callback);
     },
     [channelName]
@@ -97,27 +69,12 @@ export const useWebsocket = () => {
 
   const unbind = useCallback(
     (event: any, callback: any) => {
-      console.log(
-        `useWebsocket: Unbinding from event ${event} on ${channelName}`
-      );
       return PUSHER?.channel(channelName)?.unbind(event, callback);
     },
     [channelName]
   );
 
-  const trigger = useCallback(
-    (event: any, data: any) => {
-      console.log(
-        `useWebsocket: Triggering event ${event} on ${channelName} with data:`,
-        data
-      );
-      return PUSHER?.channel(channelName).trigger(event, data);
-    },
-    [channelName]
-  );
-
   const channel = useCallback(() => {
-    console.log(`useWebsocket: Getting channel ${channelName}`);
     return PUSHER?.channel(channelName);
   }, [channelName]);
 
@@ -126,7 +83,6 @@ export const useWebsocket = () => {
     unsubscribe,
     bind,
     unbind,
-    trigger,
     channel,
   };
 };
@@ -136,40 +92,25 @@ export const useAlertPolling = () => {
   const [pollAlerts, setPollAlerts] = useState(0);
   const lastPollTimeRef = useRef(0);
 
-  console.log("useAlertPolling: Initializing");
-
   const handleIncoming = useCallback((incoming: any) => {
-    console.log("useAlertPolling: Received incoming data:", incoming);
     const currentTime = Date.now();
     const timeSinceLastPoll = currentTime - lastPollTimeRef.current;
 
-    console.log(
-      `useAlertPolling: Time since last poll: ${timeSinceLastPoll}ms`
-    );
-
     if (timeSinceLastPoll < POLLING_INTERVAL) {
-      console.log("useAlertPolling: Ignoring poll due to short interval");
       setPollAlerts(0);
     } else {
-      console.log("useAlertPolling: Updating poll alerts");
       lastPollTimeRef.current = currentTime;
       const newPollValue = Math.floor(Math.random() * 10000);
-      console.log(`useAlertPolling: New poll value: ${newPollValue}`);
       setPollAlerts(newPollValue);
     }
   }, []);
 
   useEffect(() => {
-    console.log("useAlertPolling: Setting up event listener for 'poll-alerts'");
     bind("poll-alerts", handleIncoming);
     return () => {
-      console.log(
-        "useAlertPolling: Cleaning up event listener for 'poll-alerts'"
-      );
       unbind("poll-alerts", handleIncoming);
     };
   }, [bind, unbind, handleIncoming]);
 
-  console.log("useAlertPolling: Current poll alerts value:", pollAlerts);
   return { data: pollAlerts };
 };
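
Note on the hook above: `handleIncoming` is a plain time-based throttle, where any `poll-alerts` event arriving within `POLLING_INTERVAL` of the last accepted one is dropped. A minimal sketch of the same idea in Python (an illustrative stand-in for the React hook, not Keep code):

```python
import time

POLLING_INTERVAL = 5.0  # seconds; the hook uses a millisecond constant


class PollThrottle:
    """Accept at most one poll trigger per POLLING_INTERVAL."""

    def __init__(self) -> None:
        # -inf so the very first event always passes (mirrors lastPollTimeRef)
        self.last_poll_time = -float("inf")

    def handle_incoming(self) -> bool:
        now = time.monotonic()
        if now - self.last_poll_time < POLLING_INTERVAL:
            return False  # too soon since the last accepted poll: ignore
        self.last_poll_time = now
        return True  # caller should refresh (the hook sets a random poll value)


throttle = PollThrottle()
assert throttle.handle_incoming() is True   # first event passes
assert throttle.handle_incoming() is False  # immediate repeat is dropped
```
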
diff --git a/keep/api/core/db.py b/keep/api/core/db.py
index 503e0e018..0b8f5a203 100644
--- a/keep/api/core/db.py
+++ b/keep/api/core/db.py
@@ -19,7 +19,7 @@
 import validators
 from dotenv import find_dotenv, load_dotenv
 from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
-from sqlalchemy import and_, case, desc, literal, null, union, update, func, case
+from sqlalchemy import and_, case, desc, func, literal, null, union, update
 from sqlalchemy.dialects.mysql import insert as mysql_insert
 from sqlalchemy.dialects.postgresql import insert as pg_insert
 from sqlalchemy.dialects.sqlite import insert as sqlite_insert
@@ -1016,7 +1016,11 @@ def get_enrichment_with_session(session, tenant_id, fingerprint, refresh=False):
 
 
 def get_alerts_with_filters(
-    tenant_id, provider_id=None, filters=None, time_delta=1, with_incidents=False,
+    tenant_id,
+    provider_id=None,
+    filters=None,
+    time_delta=1,
+    with_incidents=False,
 ) -> list[Alert]:
     with Session(engine) as session:
         # Create the query
@@ -1190,7 +1194,7 @@ def get_last_alerts(
     Returns:
         List[Alert]: A list of Alert objects including the first time the alert was triggered.
     """
-    with (Session(engine) as session):
+    with Session(engine) as session:
         # Subquery that selects the max and min timestamp for each fingerprint.
         subquery = (
             session.query(
@@ -1254,7 +1258,10 @@ def get_last_alerts(
             query = query.add_columns(AlertToIncident.incident_id.label("incident"))
             query = query.outerjoin(
                 AlertToIncident,
-                and_(AlertToIncident.alert_id == Alert.id,  AlertToIncident.deleted_at == NULL_FOR_DELETED_AT),
+                and_(
+                    AlertToIncident.alert_id == Alert.id,
+                    AlertToIncident.deleted_at == NULL_FOR_DELETED_AT,
+                ),
             )
 
         if provider_id:
@@ -1718,7 +1725,7 @@ def get_rule_distribution(tenant_id, minute=False):
             .join(AlertToIncident, Incident.id == AlertToIncident.incident_id)
             .filter(
                 AlertToIncident.deleted_at == NULL_FOR_DELETED_AT,
-                AlertToIncident.timestamp >= seven_days_ago
+                AlertToIncident.timestamp >= seven_days_ago,
             )
             .filter(Rule.tenant_id == tenant_id)  # Filter by tenant_id
             .group_by(
@@ -2888,6 +2895,7 @@ def get_incident_alerts_and_links_by_incident_id(
                 Incident.id == incident_id,
             )
             .order_by(col(Alert.timestamp).desc())
+            .options(joinedload(Alert.alert_enrichment))
         )
         if not include_unlinked:
             query = query.filter(
@@ -2896,16 +2904,19 @@ def get_incident_alerts_and_links_by_incident_id(
 
     total_count = query.count()
 
-    if limit and offset:
+    if limit is not None and offset is not None:
         query = query.limit(limit).offset(offset)
 
     return query.all(), total_count
 
+
 def get_incident_alerts_by_incident_id(*args, **kwargs) -> tuple[List[Alert], int]:
     """
     Unpacking (List[(Alert, AlertToIncident)], int) to (List[Alert], int).
     """
-    alerts_and_links, total_alerts = get_incident_alerts_and_links_by_incident_id(*args, **kwargs)
+    alerts_and_links, total_alerts = get_incident_alerts_and_links_by_incident_id(
+        *args, **kwargs
+    )
     alerts = [alert_and_link[0] for alert_and_link in alerts_and_links]
     return alerts, total_alerts
 
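
The `limit is not None and offset is not None` check above fixes a truthiness bug: `offset=0` (the first page) is falsy, so the old `if limit and offset:` silently skipped pagination for page zero and returned every row. A standalone sketch of the difference (hypothetical `paginate_*` helpers, not Keep code):

```python
def paginate_buggy(items, limit=None, offset=None):
    if limit and offset:  # offset=0 is falsy, so page zero skips the slice
        return items[offset : offset + limit]
    return items


def paginate_fixed(items, limit=None, offset=None):
    if limit is not None and offset is not None:
        return items[offset : offset + limit]
    return items


rows = list(range(10))
assert paginate_buggy(rows, limit=3, offset=0) == rows        # bug: whole list
assert paginate_fixed(rows, limit=3, offset=0) == [0, 1, 2]   # first page only
```
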
@@ -2931,28 +2942,27 @@ def get_future_incidents_by_incident_id(
 
 
 def get_all_same_alert_ids(
-    tenant_id: str,
-    alert_ids: List[str | UUID],
-    session: Optional[Session] = None
+    tenant_id: str, alert_ids: List[str | UUID], session: Optional[Session] = None
 ):
     with existed_or_new_session(session) as session:
-        fingerprints_subquery = session.query(Alert.fingerprint).where(
-            Alert.tenant_id == tenant_id,
-            col(Alert.id).in_(alert_ids)
-        ).subquery()
+        fingerprints_subquery = (
+            session.query(Alert.fingerprint)
+            .where(Alert.tenant_id == tenant_id, col(Alert.id).in_(alert_ids))
+            .subquery()
+        )
         query = session.scalars(
-            select(Alert.id)
-            .where(
-            Alert.tenant_id == tenant_id,
-                col(Alert.fingerprint).in_(fingerprints_subquery)
-        ))
+            select(Alert.id).where(
+                Alert.tenant_id == tenant_id,
+                col(Alert.fingerprint).in_(fingerprints_subquery),
+            )
+        )
         return query.all()
 
 
 def get_alerts_data_for_incident(
     alert_ids: List[str | UUID],
     existed_fingerprints: Optional[List[str]] = None,
-    session: Optional[Session] = None
+    session: Optional[Session] = None,
 ) -> dict:
     """
     Function to prepare aggregated data for incidents from the given list of alert_ids
@@ -3023,7 +3033,9 @@ def add_alerts_to_incident_by_incident_id(
 
         if not incident:
             return None
-        return add_alerts_to_incident(tenant_id, incident, alert_ids, is_created_by_ai, session)
+        return add_alerts_to_incident(
+            tenant_id, incident, alert_ids, is_created_by_ai, session
+        )
 
 
 def add_alerts_to_incident(
@@ -3067,27 +3079,38 @@ def add_alerts_to_incident(
             )
 
             new_alert_ids = [
-                alert_id for alert_id in all_alert_ids if alert_id not in existing_alert_ids
+                alert_id for alert_id in alert_ids if alert_id not in existing_alert_ids
             ]
 
             if not new_alert_ids:
                 return incident
 
-            alerts_data_for_incident = get_alerts_data_for_incident(new_alert_ids, existing_fingerprints, session)
+            alerts_data_for_incident = get_alerts_data_for_incident(
+                new_alert_ids, existing_fingerprints, session
+            )
 
             incident.sources = list(
-                set(incident.sources if incident.sources else []) | set(alerts_data_for_incident["sources"])
+                set(incident.sources if incident.sources else [])
+                | set(alerts_data_for_incident["sources"])
             )
             incident.affected_services = list(
-                set(incident.affected_services if incident.affected_services else []) | set(alerts_data_for_incident["services"])
+                set(incident.affected_services if incident.affected_services else [])
+                | set(alerts_data_for_incident["services"])
             )
             # If the incident already has alerts, use the max of the existing and new severities; otherwise use the new alerts' max severity
-            incident.severity = max(incident.severity, alerts_data_for_incident["max_severity"].order) if incident.alerts_count else alerts_data_for_incident["max_severity"].order
+            incident.severity = (
+                max(incident.severity, alerts_data_for_incident["max_severity"].order)
+                if incident.alerts_count
+                else alerts_data_for_incident["max_severity"].order
+            )
             incident.alerts_count += alerts_data_for_incident["count"]
 
             alert_to_incident_entries = [
                 AlertToIncident(
-                    alert_id=alert_id, incident_id=incident.id, tenant_id=tenant_id, is_created_by_ai=is_created_by_ai
+                    alert_id=alert_id,
+                    incident_id=incident.id,
+                    tenant_id=tenant_id,
+                    is_created_by_ai=is_created_by_ai,
                 )
                 for alert_id in new_alert_ids
             ]
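
For readers tracing the merge logic in this hunk: sources and affected services are merged by set union, severity only takes `max()` with the existing value when the incident already has alerts, and the alert count is incremented. A toy sketch of the same rules, with plain dicts standing in for the ORM objects (values illustrative; `sorted()` is only there to make the assertions deterministic):

```python
incident = {"sources": ["grafana"], "affected_services": ["db"], "severity": 3, "alerts_count": 2}
new_data = {"sources": ["sentry"], "services": ["api", "db"], "max_severity": 4, "count": 5}

incident["sources"] = sorted(set(incident["sources"]) | set(new_data["sources"]))
incident["affected_services"] = sorted(
    set(incident["affected_services"]) | set(new_data["services"])
)
# max() with the current severity only if the incident already had alerts
incident["severity"] = (
    max(incident["severity"], new_data["max_severity"])
    if incident["alerts_count"]
    else new_data["max_severity"]
)
incident["alerts_count"] += new_data["count"]

assert incident["severity"] == 4
assert incident["alerts_count"] == 7
assert incident["affected_services"] == ["api", "db"]
```
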
@@ -3187,14 +3210,19 @@ def remove_alerts_to_incident_by_incident_id(
                 AlertToIncident.tenant_id == tenant_id,
                 AlertToIncident.incident_id == incident.id,
                 col(AlertToIncident.alert_id).in_(all_alert_ids),
-            ).update({
-                "deleted_at": datetime.now(datetime.now().astimezone().tzinfo),
-            })
+            )
+            .update(
+                {
+                    "deleted_at": datetime.now(datetime.now().astimezone().tzinfo),
+                }
+            )
         )
         session.commit()
 
        # Get aggregated data for the alerts that were just removed
-        alerts_data_for_incident = get_alerts_data_for_incident(all_alert_ids, session=session)
+        alerts_data_for_incident = get_alerts_data_for_incident(
+            all_alert_ids, session=session
+        )
 
         service_field = get_json_extract_field(session, Alert.event, "service")
 
@@ -3701,7 +3729,7 @@ def get_workflow_executions_for_incident_or_alert(
             .join(AlertToIncident, Alert.id == AlertToIncident.alert_id)
             .where(
                 AlertToIncident.deleted_at == NULL_FOR_DELETED_AT,
-                AlertToIncident.incident_id == incident_id
+                AlertToIncident.incident_id == incident_id,
             )
         )
 
@@ -3811,37 +3839,35 @@ def is_edge_incident_alert_resolved(
                 AlertEnrichment, Alert.fingerprint == AlertEnrichment.alert_fingerprint
             )
             .join(AlertToIncident, AlertToIncident.alert_id == Alert.id)
-            .where(
-                AlertToIncident.incident_id == incident.id
-            )
+            .where(AlertToIncident.incident_id == incident.id)
             .group_by(Alert.fingerprint)
             .having(func.max(Alert.timestamp))
             .order_by(direction(Alert.timestamp))
         ).first()
-        
-        return (
-            enriched_status == AlertStatus.RESOLVED.value or
-            (enriched_status is None and status == AlertStatus.RESOLVED.value)
+
+        return enriched_status == AlertStatus.RESOLVED.value or (
+            enriched_status is None and status == AlertStatus.RESOLVED.value
         )
 
+
 def get_alerts_metrics_by_provider(
     tenant_id: str,
-    start_date: Optional[datetime] = None, 
+    start_date: Optional[datetime] = None,
     end_date: Optional[datetime] = None,
-    fields: Optional[List[str]] = []
+    fields: Optional[List[str]] = None,
 ) -> Dict[str, Dict[str, Any]]:
-    
+    fields = fields or []  # guard against the shared mutable-default pitfall
     dynamic_field_sums = [
         func.sum(
             case(
                 [
                     (
-                        func.json_extract(Alert.event, f'$.{field}').isnot(None) & 
-                        (func.json_extract(Alert.event, f'$.{field}') != False), 
-                        1
+                        func.json_extract(Alert.event, f"$.{field}").isnot(None)
+                        & (func.json_extract(Alert.event, f"$.{field}") != False),
+                        1,
                     )
-                ], 
-                else_=0
+                ],
+                else_=0,
             )
         ).label(f"{field}_count")
         for field in fields
@@ -3853,8 +3879,10 @@ def get_alerts_metrics_by_provider(
                 Alert.provider_type,
                 Alert.provider_id,
                 func.count(Alert.id).label("total_alerts"),
-                func.sum(case([(AlertToIncident.alert_id.isnot(None), 1)], else_=0)).label("correlated_alerts"),
-                *dynamic_field_sums
+                func.sum(
+                    case([(AlertToIncident.alert_id.isnot(None), 1)], else_=0)
+                ).label("correlated_alerts"),
+                *dynamic_field_sums,
             )
             .outerjoin(AlertToIncident, Alert.id == AlertToIncident.alert_id)
             .filter(
@@ -3865,18 +3893,19 @@ def get_alerts_metrics_by_provider(
         # Add timestamp filter only if both start_date and end_date are provided
         if start_date and end_date:
             query = query.filter(
-                Alert.timestamp >= start_date,
-                Alert.timestamp <= end_date
+                Alert.timestamp >= start_date, Alert.timestamp <= end_date
             )
 
         results = query.group_by(Alert.provider_id, Alert.provider_type).all()
-        
+
     return {
         f"{row.provider_id}_{row.provider_type}": {
             "total_alerts": row.total_alerts,
             "correlated_alerts": row.correlated_alerts,
             "provider_type": row.provider_type,
-            **{f"{field}_count": getattr(row, f"{field}_count") for field in fields}  # Add field-specific counts
+            **{
+                f"{field}_count": getattr(row, f"{field}_count") for field in fields
+            },  # Add field-specific counts
         }
         for row in results
     }
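
The `func.sum(case(...))` expressions above use the standard conditional-count idiom: each row matching the condition contributes 1, everything else 0, so a single pass over `alert` produces all per-field counts. A self-contained sketch that compiles such an expression to SQL (SQLAlchemy 1.4 list-style `case()`, as in the diff; table and column names are illustrative, not Keep's schema):

```python
from sqlalchemy import JSON, Column, Integer, MetaData, Table, case, func, select

alerts = Table("alert", MetaData(), Column("id", Integer), Column("event", JSON))

field = "service"  # any JSON key whose non-null occurrences we want to count
conditional_count = func.sum(
    case(
        [(func.json_extract(alerts.c.event, f"$.{field}").isnot(None), 1)],
        else_=0,
    )
).label(f"{field}_count")

# SELECT sum(CASE WHEN json_extract(...) IS NOT NULL THEN 1 ELSE 0 END) AS service_count ...
print(select(conditional_count).select_from(alerts))
```
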
diff --git a/keep/api/models/db/alert.py b/keep/api/models/db/alert.py
index 473574e0a..31d179842 100644
--- a/keep/api/models/db/alert.py
+++ b/keep/api/models/db/alert.py
@@ -47,6 +47,7 @@
 # but we also want to allow it to be nullable. MySQL doesn't allow nullable fields in primary keys, so:
 NULL_FOR_DELETED_AT = datetime(1000, 1, 1, 0, 0)
 
+
 class AlertToIncident(SQLModel, table=True):
     tenant_id: str = Field(foreign_key="tenant.id")
     timestamp: datetime = Field(default_factory=datetime.utcnow)
@@ -59,18 +60,23 @@ class AlertToIncident(SQLModel, table=True):
             primary_key=True,
         )
     )
-    alert: "Alert" = Relationship(back_populates="alert_to_incident_link")
-    incident: "Incident" = Relationship(back_populates="alert_to_incident_link")
-    
+    # alert: "Alert" = Relationship(
+    #     back_populates="alert_to_incident_link",
+    # )
+    # incident: "Incident" = Relationship(
+    #     back_populates="alert_to_incident_link",
+    # )
+
     is_created_by_ai: bool = Field(default=False)
 
     deleted_at: datetime = Field(
         default_factory=None,
-        nullable=True, 
+        nullable=True,
         primary_key=True,
         default=NULL_FOR_DELETED_AT,
     )
 
+
 class Incident(SQLModel, table=True):
     id: UUID = Field(default_factory=uuid4, primary_key=True)
     tenant_id: str = Field(foreign_key="tenant.id")
@@ -96,25 +102,23 @@ class Incident(SQLModel, table=True):
 
     # map of attributes to values
     alerts: List["Alert"] = Relationship(
-        back_populates="incidents", link_model=AlertToIncident,
+        back_populates="incidents",
+        link_model=AlertToIncident,
         # primaryjoin is used to filter out deleted links for various DB dialects
-        sa_relationship_kwargs={
-            "primaryjoin": f"""and_(AlertToIncident.incident_id == Incident.id,
-                or_(
-                    AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S.%f')}',
-                    AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S')}'
-                ))""",
-            "uselist": True,
-            "overlaps": "alert,incident",
-        }
-        
-    )
-    alert_to_incident_link: List[AlertToIncident] = Relationship(
-        back_populates="incident",
-        sa_relationship_kwargs={
-            "overlaps": "alerts,incidents"
-        }
+        # sa_relationship_kwargs={
+        #     "primaryjoin": f"""and_(AlertToIncident.incident_id == Incident.id,
+        #         or_(
+        #             AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S.%f')}',
+        #             AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S')}'
+        #         ))""",
+        #     "uselist": True,
+        #     "overlaps": "alert,incident",
+        # },
     )
+    # alert_to_incident_link: List[AlertToIncident] = Relationship(
+    #     back_populates="incident",
+    #     sa_relationship_kwargs={"overlaps": "alerts,incidents"},
+    # )
 
     is_predicted: bool = Field(default=False)
     is_confirmed: bool = Field(default=False)
@@ -222,25 +226,22 @@ class Alert(SQLModel, table=True):
     )
 
     incidents: List["Incident"] = Relationship(
-        back_populates="alerts", 
+        back_populates="alerts",
         link_model=AlertToIncident,
-        sa_relationship_kwargs={
-            # primaryjoin is used to filter out deleted links for various DB dialects
-            "primaryjoin": f"""and_(AlertToIncident.alert_id == Alert.id,
-                or_(
-                    AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S.%f')}',
-                    AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S')}'
-                ))""",
-            "uselist": True,
-            "overlaps": "alert,incident",
-        }
-    )
-    alert_to_incident_link: List[AlertToIncident] = Relationship(
-        back_populates="alert",
-        sa_relationship_kwargs={
-            "overlaps": "alerts,incidents"
-        }
+        # sa_relationship_kwargs={
+        #     # primaryjoin is used to filter out deleted links for various DB dialects
+        #     "primaryjoin": f"""and_(AlertToIncident.alert_id == Alert.id,
+        #         or_(
+        #             AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S.%f')}',
+        #             AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S')}'
+        #         ))""",
+        #     "uselist": True,
+        #     "overlaps": "alert,incident",
+        # },
     )
+    # alert_to_incident_link: List[AlertToIncident] = Relationship(
+    #     back_populates="alert", sa_relationship_kwargs={"overlaps": "alerts,incidents"}
+    # )
 
     class Config:
         arbitrary_types_allowed = True
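
Background on the `NULL_FOR_DELETED_AT` sentinel these models and the `db.py` queries rely on: `deleted_at` is part of the composite primary key, and MySQL rejects NULL in primary-key columns, so "not deleted" is encoded as `datetime(1000, 1, 1)` rather than NULL. A minimal sketch of the resulting filter convention (plain Python, illustrative data):

```python
from datetime import datetime

NULL_FOR_DELETED_AT = datetime(1000, 1, 1, 0, 0)  # sentinel meaning "not deleted"

links = [
    {"alert_id": 1, "deleted_at": NULL_FOR_DELETED_AT},       # active link
    {"alert_id": 2, "deleted_at": datetime(2024, 7, 1, 12)},  # soft-deleted link
]

# Queries compare against the sentinel instead of using IS NULL
active = [link for link in links if link["deleted_at"] == NULL_FOR_DELETED_AT]
assert [link["alert_id"] for link in active] == [1]
```
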
diff --git a/keep/api/routes/incidents.py b/keep/api/routes/incidents.py
index ed87d105a..a8a8afad3 100644
--- a/keep/api/routes/incidents.py
+++ b/keep/api/routes/incidents.py
@@ -12,6 +12,7 @@
 
 from keep.api.arq_pool import get_pool
 from keep.api.core.db import (
+    DestinationIncidentNotFound,
     add_alerts_to_incident_by_incident_id,
     add_audit,
     change_incident_status_by_id,
@@ -19,18 +20,16 @@
     create_incident_from_dto,
     delete_incident_by_id,
     get_future_incidents_by_incident_id,
-    get_incident_alerts_by_incident_id,
     get_incident_alerts_and_links_by_incident_id,
+    get_incident_alerts_by_incident_id,
     get_incident_by_id,
     get_incident_unique_fingerprint_count,
     get_incidents_meta_for_tenant,
     get_last_incidents,
     get_workflow_executions_for_incident_or_alert,
+    merge_incidents_to_id,
     remove_alerts_to_incident_by_incident_id,
     update_incident_from_dto_by_id,
-    get_incidents_meta_for_tenant,
-    merge_incidents_to_id,
-    DestinationIncidentNotFound,
 )
 from keep.api.core.dependencies import get_pusher_client
 from keep.api.core.elastic import ElasticClient
@@ -40,11 +39,11 @@
     IncidentDto,
     IncidentDtoIn,
     IncidentListFilterParamsDto,
-    MergeIncidentsRequestDto,
     IncidentSeverity,
     IncidentSorting,
     IncidentStatus,
     IncidentStatusChangeDto,
+    MergeIncidentsRequestDto,
     MergeIncidentsResponseDto,
 )
 from keep.api.models.db.alert import AlertActionType, AlertAudit
@@ -385,7 +384,7 @@ def merge_incidents(
             message = "No incidents merged"
         else:
             message = f"{pluralize(len(merged_ids), 'incident')} merged into {command.destination_incident_id} successfully"
-        
+
         if skipped_ids:
             message += f", {pluralize(len(skipped_ids), 'incident')} were skipped"
         if failed_ids:
@@ -569,7 +568,9 @@ async def add_alerts_to_incident(
     if not incident:
         raise HTTPException(status_code=404, detail="Incident not found")
 
-    add_alerts_to_incident_by_incident_id(tenant_id, incident_id, alert_ids, is_created_by_ai)
+    add_alerts_to_incident_by_incident_id(
+        tenant_id, incident_id, alert_ids, is_created_by_ai
+    )
     try:
         logger.info("Pushing enriched alert to elasticsearch")
         elastic_client = ElasticClient(tenant_id)
@@ -596,7 +597,11 @@ async def add_alerts_to_incident(
     except Exception:
         logger.exception("Failed to push alert to elasticsearch")
         pass
-    __update_client_on_incident_change(pusher_client, tenant_id, incident_id)
+
+    try:
+        __update_client_on_incident_change(pusher_client, tenant_id, incident_id)
+    except Exception:
+        logger.exception("Failed to push incident to pusher")
 
     incident_dto = IncidentDto.from_db_incident(incident)
 
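
The new `try/except` around `__update_client_on_incident_change` makes the websocket push best-effort: a Pusher failure is logged instead of turning an otherwise successful mutation into a 500. A generic sketch of the pattern (the `notify_best_effort` helper is hypothetical, not Keep's API):

```python
import logging

logger = logging.getLogger(__name__)


def notify_best_effort(notify, *args, **kwargs) -> None:
    """Fire a side-channel notification without letting it fail the request."""
    try:
        notify(*args, **kwargs)
    except Exception:
        # Same shape as the diff: log the traceback, swallow the error
        logger.exception("Failed to push incident to pusher")


def flaky_push() -> None:
    raise RuntimeError("pusher is down")


notify_best_effort(flaky_push)  # logs, does not raise; the API call still succeeds
```
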
diff --git a/keep/providers/github_provider/github_provider.py b/keep/providers/github_provider/github_provider.py
index c4d524a8d..f0a0952da 100644
--- a/keep/providers/github_provider/github_provider.py
+++ b/keep/providers/github_provider/github_provider.py
@@ -87,10 +87,10 @@ def _query(
         self.logger.debug(f"Previous stargazers: {previous_stars_count}")
         self.logger.debug(f"New stargazers: {stars_count - int(previous_stars_count)}")
 
-        stargazers_with_dates = []
+        stargazers_with_dates = list(repo.get_stargazers_with_dates())
+
        # If we have the last stargazer's login name, use it as an index
         if last_stargazer:
-            stargazers_with_dates = list(repo.get_stargazers_with_dates())
             last_stargazer_index = next(
                 (
                     i
@@ -107,9 +107,11 @@ def _query(
                 ]
        # If we don't, use the previous stars count as an index
         elif previous_stars_count and int(previous_stars_count) > 0:
-            stargazers_with_dates = list(repo.get_stargazers_with_dates())[
-                int(previous_stars_count) :
-            ]
+            stargazers_with_dates = stargazers_with_dates[int(previous_stars_count) :]
+
+        # Save the last stargazer's login so we can resume from it next iteration
+        if stargazers_with_dates:
+            last_stargazer = stargazers_with_dates[-1].user.login
 
         # Iterate new stargazers if there are any
         for stargazer in stargazers_with_dates:
@@ -121,13 +123,6 @@
             )
             self.logger.debug(f"New stargazer: {stargazer.user.login}")
 
-        # Save last stargazer name so we can use it next iteration
-        last_stargazer = (
-            new_stargazers[-1]["username"]
-            if len(new_stargazers) >= 1
-            else last_stargazer
-        )
-
         return {
             "stars": stars_count,
             "new_stargazers": new_stargazers,