From 8c51fc4f30545f964434ca1a13a160e83b492892 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Tue, 22 Oct 2024 12:18:38 +0530 Subject: [PATCH 1/8] feat: new metric widgets Signed-off-by: 35C4n0r --- keep-ui/app/dashboard/GridItem.tsx | 60 ++- keep-ui/app/dashboard/GridLayout.tsx | 11 +- keep-ui/app/dashboard/WidgetModal.tsx | 404 ++++++++++-------- keep-ui/app/dashboard/[id]/dashboard.tsx | 46 +- keep-ui/app/dashboard/types.tsx | 69 +-- .../utils/hooks/useDashboardMetricWidgets.ts | 64 +++ keep/api/core/db.py | 370 ++++++++++++---- keep/api/routes/dashboard.py | 33 +- 8 files changed, 726 insertions(+), 331 deletions(-) create mode 100644 keep-ui/utils/hooks/useDashboardMetricWidgets.ts diff --git a/keep-ui/app/dashboard/GridItem.tsx b/keep-ui/app/dashboard/GridItem.tsx index a1ef8a5ea..802056a52 100644 --- a/keep-ui/app/dashboard/GridItem.tsx +++ b/keep-ui/app/dashboard/GridItem.tsx @@ -1,7 +1,7 @@ import React, { useState } from "react"; -import { Card } from "@tremor/react"; +import { AreaChart, Card } from "@tremor/react"; import MenuButton from "./MenuButton"; -import { WidgetData } from "./types"; +import {WidgetData, WidgetType} from "./types"; import AlertQuality from "@/app/alerts/alert-quality-table"; import { useSearchParams } from "next/navigation"; @@ -55,12 +55,14 @@ const GridItem: React.FC = ({ } const getColor = () => { let color = "#000000"; + if (item.widgetType === WidgetType.PRESET && item.thresholds && item.preset) { for (let i = item.thresholds.length - 1; i >= 0; i--) { if (item.preset && item.preset.alerts_count >= item.thresholds[i].value) { color = item.thresholds[i].color; break; } } + } return color; }; @@ -80,11 +82,13 @@ const GridItem: React.FC = ({ }; return ( - +
      {/* For the table view we need to interact with the table filter and pagination, so we are dragging the widget here */}
@@ -111,16 +115,40 @@ const GridItem: React.FC = ({
)} -
- -
- -
- ); -}; + { item.metric && ( +
+
+ + `${Intl.NumberFormat().format(number).toString()}` + } + startEndOnly + connectNulls + showLegend={false} + showTooltip={true} + xAxisLabel="24H Activity Data" + /> +
+
+ + )} + + +
+ +
+ +
+ ); + }; -export default GridItem; + export default GridItem; diff --git a/keep-ui/app/dashboard/GridLayout.tsx b/keep-ui/app/dashboard/GridLayout.tsx index 647fc3f3b..d78df13a3 100644 --- a/keep-ui/app/dashboard/GridLayout.tsx +++ b/keep-ui/app/dashboard/GridLayout.tsx @@ -4,6 +4,7 @@ import GridItemContainer from "./GridItemContainer"; import { LayoutItem, WidgetData } from "./types"; import "react-grid-layout/css/styles.css"; import { Preset } from "app/alerts/models"; +import {MetricsWidget} from "@/utils/hooks/useDashboardMetricWidgets"; const ResponsiveGridLayout = WidthProvider(Responsive); @@ -15,6 +16,7 @@ interface GridLayoutProps { onDelete: (id: string) => void; presets: Preset[]; onSave: (updateItem: WidgetData) => void; + metrics: MetricsWidget[]; } const GridLayout: React.FC = ({ @@ -25,6 +27,7 @@ const GridLayout: React.FC = ({ onDelete, onSave, presets, + metrics }) => { const layouts = { lg: layout }; @@ -52,14 +55,18 @@ const GridLayout: React.FC = ({ draggableHandle=".grid-item__widget" > {data.map((item) => { - //Fixing the static hardcode db value. + //Updating the static hardcode db value. if (item.preset) { const preset = presets?.find((p) => p?.id === item?.preset?.id); item.preset = { ...item.preset, alerts_count: preset?.alerts_count ?? 0, }; - + } else if (item.metric) { + const metric = metrics?.find(m => m?.id === item?.metric?.id); + if (metric) { + item.metric = {...metric} + } } return (
diff --git a/keep-ui/app/dashboard/WidgetModal.tsx b/keep-ui/app/dashboard/WidgetModal.tsx index a264c869a..ff331379f 100644 --- a/keep-ui/app/dashboard/WidgetModal.tsx +++ b/keep-ui/app/dashboard/WidgetModal.tsx @@ -1,32 +1,30 @@ -import React, { useState, useEffect, ChangeEvent, FormEvent } from "react"; +import React, {ChangeEvent, useEffect, useState} from "react"; import Modal from "@/components/ui/Modal"; -import { Button, Subtitle, TextInput, Select, SelectItem, Icon } from "@tremor/react"; -import { Trashcan } from "components/icons"; -import { Threshold, WidgetData, GenericsMertics } from "./types"; -import { Preset } from "app/alerts/models"; -import { useForm, Controller, get } from "react-hook-form"; +import {Button, Icon, Select, SelectItem, Subtitle, TextInput} from "@tremor/react"; +import {Trashcan} from "components/icons"; +import {GenericsMetrics, Threshold, WidgetData, WidgetType} from "./types"; +import {Preset} from "app/alerts/models"; +import {Controller, get, useForm, useWatch} from "react-hook-form"; +import {MetricsWidget} from "@/utils/hooks/useDashboardMetricWidgets"; interface WidgetForm { widgetName: string; selectedPreset: string; thresholds: Threshold[]; - selectedWidgetType: string; + widgetType: WidgetType; + selectedMetricWidget: string; selectedGenericMetrics: string; } interface WidgetModalProps { isOpen: boolean; onClose: () => void; - onAddWidget: ( - preset: Preset | null, - thresholds: Threshold[], - name: string, - widgetType?: string, - genericMetrics?: GenericsMertics | null, + onAddWidget: (name: string, widgetType: WidgetType, preset?: Preset, thresholds?: Threshold[], metric?: MetricsWidget, genericMetrics?: GenericsMetrics, ) => void; onEditWidget: (updatedWidget: WidgetData) => void; presets: Preset[]; editingItem?: WidgetData | null; + metricWidgets: MetricsWidget[]; } const GENERIC_METRICS = [ @@ -35,85 +33,100 @@ const GENERIC_METRICS = [ label: "Alert Quality", widgetType: "table", meta: { - defaultFilters: {"fields":"severity"}, + defaultFilters: {"fields": "severity"}, }, }, -] as GenericsMertics[]; +] as GenericsMetrics[]; -const WidgetModal: React.FC = ({ isOpen, onClose, onAddWidget, onEditWidget, presets, editingItem }) => { +const WidgetModal: React.FC = ({ + isOpen, + onClose, + onAddWidget, + onEditWidget, + presets, + editingItem, + metricWidgets + }) => { const [thresholds, setThresholds] = useState([ - { value: 0, color: '#22c55e' }, // Green - { value: 20, color: '#ef4444' } // Red + {value: 0, color: '#22c55e'}, // Green + {value: 20, color: '#ef4444'} // Red ]); - const { control, handleSubmit, setValue, formState: { errors }, reset } = useForm({ + const {control, handleSubmit, setValue, formState: {errors}, reset} = useForm({ defaultValues: { widgetName: '', selectedPreset: '', thresholds: thresholds, - selectedWidgetType: '', + widgetType: WidgetType.PRESET, selectedGenericMetrics: '' } }); - const [currentWidgetType, setCurrentWidgetType] = useState(''); + const widgetType = useWatch({ + control, + name: 'widgetType', + }); useEffect(() => { if (editingItem) { setValue("widgetName", editingItem.name); + setValue('widgetType', editingItem.widgetType); + + if (editingItem.thresholds) { + setThresholds(editingItem.thresholds); + } setValue("selectedPreset", editingItem?.preset?.id ?? ""); - setValue("selectedWidgetType", editingItem?.widgetType ?? ""); + setValue('selectedMetricWidget', editingItem?.metric?.id ?? ""); setValue("selectedGenericMetrics", editingItem?.genericMetrics?.key ?? 
""); - setThresholds(editingItem.thresholds); } else { reset({ widgetName: '', selectedPreset: '', + selectedMetricWidget: '', + selectedGenericMetrics: '', thresholds: thresholds, - selectedWidgetType: "", + widgetType: WidgetType.PRESET, }); } }, [editingItem, setValue, reset]); const handleThresholdChange = (index: number, field: 'value' | 'color', e: ChangeEvent) => { const value = field === 'value' ? e.target.value : e.target.value; - const updatedThresholds = thresholds.map((t, i) => i === index ? { ...t, [field]: value } : t); + const updatedThresholds = thresholds.map((t, i) => i === index ? {...t, [field]: value} : t); setThresholds(updatedThresholds); }; const handleThresholdBlur = () => { setThresholds(prevThresholds => { return prevThresholds - .map(t => ({ - ...t, - value: parseInt(t.value.toString(), 10) || 0 - })) - .sort((a, b) => a.value - b.value); + .map(t => ({ + ...t, + value: parseInt(t.value.toString(), 10) || 0 + })) + .sort((a, b) => a.value - b.value); }); }; const handleAddThreshold = () => { const maxThreshold = Math.max(...thresholds.map(t => t.value), 0); - setThresholds([...thresholds, { value: maxThreshold + 10, color: '#000000' }]); + setThresholds([...thresholds, {value: maxThreshold + 10, color: '#000000'}]); }; const handleRemoveThreshold = (index: number) => { setThresholds(thresholds.filter((_, i) => i !== index)); }; - const deepClone = (obj: GenericsMertics|undefined) => { - if(!obj){ - return null; - } - try{ - return JSON.parse(JSON.stringify(obj)) as GenericsMertics; - }catch(e){ - return null; + const deepClone = (obj: GenericsMetrics | undefined) => { + if (!obj) { + return obj; } + return JSON.parse(JSON.stringify(obj)) as GenericsMetrics; + }; const onSubmit = (data: WidgetForm) => { - const preset = presets.find(p => p.id === data.selectedPreset) ?? 
null; + const preset = presets.find(p => p.id === data.selectedPreset); + const metric = metricWidgets.find(p => p.id === data.selectedMetricWidget); if (preset || data.selectedGenericMetrics) { const formattedThresholds = thresholds.map(t => ({ ...t, @@ -124,31 +137,51 @@ const WidgetModal: React.FC = ({ isOpen, onClose, onAddWidget, let updatedWidget: WidgetData = { ...editingItem, name: data.widgetName, - widgetType: data.selectedWidgetType || "preset", //backwards compatibility + widgetType: data.widgetType || WidgetType.PRESET, // backwards compatibility preset, thresholds: formattedThresholds, - genericMetrics: editingItem.genericMetrics || null, + genericMetrics: editingItem.genericMetrics, }; onEditWidget(updatedWidget); } else { onAddWidget( - preset, - formattedThresholds, - data.widgetName, - data.selectedWidgetType, - deepClone(GENERIC_METRICS.find((g) => g.key === data.selectedGenericMetrics)) + data.widgetName, + data.widgetType, + preset, + formattedThresholds, + undefined, + deepClone(GENERIC_METRICS.find((g) => g.key === data.selectedGenericMetrics)) ); // cleanup form setThresholds([ - { value: 0, color: '#22c55e' }, // Green - { value: 20, color: '#ef4444' } // Red + {value: 0, color: '#22c55e'}, // Green + {value: 20, color: '#ef4444'} // Red ]); reset({ widgetName: '', selectedPreset: '', thresholds: thresholds, selectedGenericMetrics: '', - selectedWidgetType: '', + widgetType: WidgetType.PRESET, + }); + } + onClose(); + } + if (metric) { + if (editingItem) { + const updatedWidget: WidgetData = { + ...editingItem, + name: data.widgetName, + widgetType: data.widgetType, + }; + onEditWidget(updatedWidget); + } else { + onAddWidget(data.widgetName, data.widgetType, undefined, undefined, metric, undefined); + reset({ + widgetName: '', + selectedPreset: '', + widgetType: WidgetType.PRESET, + thresholds: thresholds, }); } onClose(); @@ -156,135 +189,156 @@ const WidgetModal: React.FC = ({ isOpen, onClose, onAddWidget, }; return ( - -
-
- Widget Name - ( - - )} - /> -
-
- Widget Type - { - setCurrentWidgetType(field.value); - return - }} - /> -
- {currentWidgetType === 'preset' ? ( - <> -
- Preset - ( - - )} - /> -
-
-
- Thresholds - -
-
- {thresholds.map((threshold, index) => ( -
- handleThresholdChange(index, 'value', e)} - onBlur={handleThresholdBlur} - placeholder="Threshold value" - required - /> - handleThresholdChange(index, 'color', e)} - className="w-10 h-10 p-1 border" - required - /> - {thresholds.length > 1 && ( - + + +
+ Widget Name + ( + )} -
- ))} + />
-
- - ): currentWidgetType === 'generic_metrics' && <>
- Generic Metrics - ( - - )} - /> + Widget Type + { + return + }} + />
- } - - - + {widgetType === WidgetType.PRESET ? ( + <> +
+ Preset + ( + + )} + /> +
+
+
+ Thresholds + +
+
+ {thresholds.map((threshold, index) => ( +
+ handleThresholdChange(index, 'value', e)} + onBlur={handleThresholdBlur} + placeholder="Threshold value" + required + /> + handleThresholdChange(index, 'color', e)} + className="w-10 h-10 p-1 border" + required + /> + {thresholds.length > 1 && ( + + )} +
+ ))} +
+
+ + ) : widgetType === WidgetType.GENERICS_METRICS ? <> +
+ Generic Metrics + ( + + )} + /> +
+ : +
+ Widget + ( + + )} + /> +
} + + + ); }; diff --git a/keep-ui/app/dashboard/[id]/dashboard.tsx b/keep-ui/app/dashboard/[id]/dashboard.tsx index 627b9773e..850cbb048 100644 --- a/keep-ui/app/dashboard/[id]/dashboard.tsx +++ b/keep-ui/app/dashboard/[id]/dashboard.tsx @@ -1,20 +1,20 @@ 'use client'; -import { useParams } from 'next/navigation'; -import { useState, useEffect, ChangeEvent } from 'react'; +import {useParams} from 'next/navigation'; +import {ChangeEvent, useEffect, useState} from 'react'; import GridLayout from '../GridLayout'; -import { usePresets } from "utils/hooks/usePresets"; import WidgetModal from '../WidgetModal'; -import { Button, Card, TextInput, Subtitle, Icon } from '@tremor/react'; -import { LayoutItem, WidgetData, Threshold, GenericsMertics } from '../types'; -import { Preset } from 'app/alerts/models'; -import { FiSave, FiEdit2 } from 'react-icons/fi'; -import { useSession } from 'next-auth/react'; -import { useDashboards } from 'utils/hooks/useDashboards'; -import { getApiURL } from 'utils/apiUrl'; +import {Button, Card, Icon, Subtitle, TextInput} from '@tremor/react'; +import {GenericsMetrics, LayoutItem, Threshold, WidgetData, WidgetType} from '../types'; +import {Preset} from 'app/alerts/models'; +import {FiEdit2, FiSave} from 'react-icons/fi'; +import {useSession} from 'next-auth/react'; +import {useDashboards} from 'utils/hooks/useDashboards'; +import {getApiURL} from 'utils/apiUrl'; import './../styles.css'; -import { toast } from 'react-toastify'; -import { GenericFilters } from '@/components/filters/GenericFilters'; -import { useDashboardPreset } from 'utils/hooks/useDashboardPresets'; +import {toast} from 'react-toastify'; +import {GenericFilters} from '@/components/filters/GenericFilters'; +import {useDashboardPreset} from 'utils/hooks/useDashboardPresets'; +import {MetricsWidget, useDashboardMetricWidgets} from '@/utils/hooks/useDashboardMetricWidgets'; const DASHBOARD_FILTERS = [ { @@ -33,6 +33,7 @@ const DashboardPage = () => { const [isModalOpen, setIsModalOpen] = useState(false); const [layout, setLayout] = useState([]); const [widgetData, setWidgetData] = useState([]); + const {widgets: allMetricWidgets} = useDashboardMetricWidgets(); const [editingItem, setEditingItem] = useState(null); const [dashboardName, setDashboardName] = useState(decodeURIComponent(id)); const [isEditingName, setIsEditingName] = useState(false); @@ -54,16 +55,16 @@ const DashboardPage = () => { }; const closeModal = () => setIsModalOpen(false); - const handleAddWidget = (preset: Preset|null, thresholds: Threshold[], name: string, widgetType?: string, genericMetrics?: GenericsMertics|null) => { + const handleAddWidget = (name: string, widgetType: WidgetType, preset?: Preset, thresholds?: Threshold[], metric?: MetricsWidget, genericMetrics?: GenericsMetrics) => { const uniqueId = `w-${Date.now()}`; const newItem: LayoutItem = { i: uniqueId, x: (layout.length % 12) * 2, y: Math.floor(layout.length / 12) * 2, - w: genericMetrics ? 12 : 3, - h: genericMetrics ? 20 : 3, - minW: genericMetrics ? 10 : 2, - minH: genericMetrics ? 15 : 2, + w: widgetType === WidgetType.GENERICS_METRICS ? 12 : widgetType === WidgetType.METRIC ? 6 : 3, + h: widgetType === WidgetType.GENERICS_METRICS ? 20 : widgetType === WidgetType.METRIC ? 8 : 3, + minW: widgetType === WidgetType.GENERICS_METRICS ? 10 : 2, + minH: widgetType === WidgetType.GENERICS_METRICS ? 15 : widgetType === WidgetType.METRIC ? 
7 : 3, static: false }; const newWidget: WidgetData = { @@ -71,8 +72,9 @@ const DashboardPage = () => { thresholds, preset, name, - widgetType: widgetType || 'preset', - genericMetrics: genericMetrics || null, + widgetType, + genericMetrics, + metric }; setLayout((prevLayout) => [...prevLayout, newItem]); setWidgetData((prevData) => [...prevData, newWidget]); @@ -80,13 +82,13 @@ const DashboardPage = () => { const handleEditWidget = (id: string, update?: WidgetData) => { let itemToEdit = widgetData.find(d => d.i === id) || null; + console.log(itemToEdit, update) if(itemToEdit && update){ setEditingItem({...itemToEdit, ...update}); }else { setEditingItem(itemToEdit); } setIsModalOpen(true); - }; const handleSaveEdit = (updatedItem: WidgetData) => { @@ -211,6 +213,7 @@ const DashboardPage = () => { onDelete={handleDeleteWidget} onSave={handleSaveEdit} presets={allPresets} + metrics={allMetricWidgets} /> )} @@ -221,6 +224,7 @@ const DashboardPage = () => { onEditWidget={handleSaveEdit} presets={allPresets} editingItem={editingItem} + metricWidgets={allMetricWidgets} />
); diff --git a/keep-ui/app/dashboard/types.tsx b/keep-ui/app/dashboard/types.tsx index 05ca0cd95..be80ac842 100644 --- a/keep-ui/app/dashboard/types.tsx +++ b/keep-ui/app/dashboard/types.tsx @@ -1,35 +1,44 @@ -import { Preset } from "app/alerts/models"; +import {Preset} from "app/alerts/models"; +import {MetricsWidget} from "@/utils/hooks/useDashboardMetricWidgets"; + export interface LayoutItem { - i: string; - x: number; - y: number; - w: number; - h: number; - minW?: number; - minH?: number; - static: boolean; - } + i: string; + x: number; + y: number; + w: number; + h: number; + minW?: number; + minH?: number; + static: boolean; +} - export interface GenericsMertics { - key: string; - label: string; - widgetType: "table" | "chart"; - meta: { - defaultFilters: { - [key: string]: string|string[]; - }, - } +export interface GenericsMetrics { + key: string; + label: string; + widgetType: "table" | "chart"; + meta: { + defaultFilters: { + [key: string]: string | string[]; + }, } +} - export interface WidgetData extends LayoutItem { - thresholds: Threshold[]; - preset: Preset | null; - name: string; - widgetType?:string; - genericMetrics?: GenericsMertics| null; - } +export enum WidgetType { + PRESET = 'PRESET', + METRIC = 'METRIC', + GENERICS_METRICS = 'GENERICS_METRICS' +} - export interface Threshold { - value: number; - color: string; - } +export interface WidgetData extends LayoutItem { + thresholds?: Threshold[]; + preset?: Preset; + name: string; + widgetType: WidgetType; + genericMetrics?: GenericsMetrics; + metric?: MetricsWidget; +} + +export interface Threshold { + value: number; + color: string; +} diff --git a/keep-ui/utils/hooks/useDashboardMetricWidgets.ts b/keep-ui/utils/hooks/useDashboardMetricWidgets.ts new file mode 100644 index 000000000..9bac84342 --- /dev/null +++ b/keep-ui/utils/hooks/useDashboardMetricWidgets.ts @@ -0,0 +1,64 @@ +import {useSession} from "next-auth/react"; +import {getApiURL} from "@/utils/apiUrl"; +import useSWR from "swr"; +import {fetcher} from "@/utils/fetcher"; + +export interface MetricsWidget { + id: string; + name: string; + data: DistributionData[]; +} + +interface DistributionData { + hour: string; + number: number +} + +interface DashboardDistributionData { + mttr: DistributionData[]; + ipd: DistributionData[]; + apd: DistributionData[]; + wpd: DistributionData[]; + +} + +export const useDashboardMetricWidgets = () => { + const {data: session} = useSession(); + const apiUrl = getApiURL(); + const {data, error, mutate} = useSWR( + session ? `${apiUrl}/dashboard/metric-widgets` : null, + (url: string) => fetcher(url, session!.accessToken) + ) + + const useGetData = () => { + return useSWR( + session ? 
`${apiUrl}/dashboard/metric-widgets` : null, + (url: string) => fetcher(url, session!.accessToken)) + } + let widgets: MetricsWidget[] = [] + if (data) { + widgets = [ + { + id: "mttr", + name: "MTTR", + data: data.mttr + }, + { + id: "apd", + "name": "Alerts/Day", + data: data.apd + }, + { + id: "ipd", + name: "Incidents/Day", + data: data.ipd + }, + { + id: "wpd", + name: "Workflows/Day", + data: data.wpd + } + ]; + } + return {widgets, useGetData}; +} \ No newline at end of file diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 0d1c0e3c5..d1ca4a72c 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -1016,7 +1016,11 @@ def get_enrichment_with_session(session, tenant_id, fingerprint, refresh=False): def get_alerts_with_filters( - tenant_id, provider_id=None, filters=None, time_delta=1, with_incidents=False, + tenant_id, + provider_id=None, + filters=None, + time_delta=1, + with_incidents=False, ) -> list[Alert]: with Session(engine) as session: # Create the query @@ -1190,7 +1194,7 @@ def get_last_alerts( Returns: List[Alert]: A list of Alert objects including the first time the alert was triggered. """ - with (Session(engine) as session): + with Session(engine) as session: # Subquery that selects the max and min timestamp for each fingerprint. subquery = ( session.query( @@ -1254,7 +1258,10 @@ def get_last_alerts( query = query.add_columns(AlertToIncident.incident_id.label("incident")) query = query.outerjoin( AlertToIncident, - and_(AlertToIncident.alert_id == Alert.id, AlertToIncident.deleted_at == NULL_FOR_DELETED_AT), + and_( + AlertToIncident.alert_id == Alert.id, + AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, + ), ) if provider_id: @@ -1718,7 +1725,7 @@ def get_rule_distribution(tenant_id, minute=False): .join(AlertToIncident, Incident.id == AlertToIncident.incident_id) .filter( AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, - AlertToIncident.timestamp >= seven_days_ago + AlertToIncident.timestamp >= seven_days_ago, ) .filter(Rule.tenant_id == tenant_id) # Filter by tenant_id .group_by( @@ -2092,8 +2099,11 @@ def get_linked_providers(tenant_id: str) -> List[Tuple[str, str, datetime]]: return providers -def get_provider_distribution(tenant_id: str) -> dict: - """Returns hits per hour and the last alert timestamp for each provider, limited to the last 24 hours.""" +def get_provider_distribution(tenant_id: str, aggregate_all: bool = False) -> dict: + """ + Returns hits per hour and the last alert timestamp for each provider in the past 24 hours. + If aggregate_all is True, returns combined distribution across all providers. 
+ """ with Session(engine) as session: twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) time_format = "%Y-%m-%d %H" @@ -2107,62 +2117,235 @@ def get_provider_distribution(tenant_id: str) -> dict: elif session.bind.dialect.name == "sqlite": timestamp_format = func.strftime(time_format, Alert.timestamp) - # Adjusted query to include max timestamp + if aggregate_all: + # Query for combined alert distribution across all providers + query = ( + session.query( + timestamp_format.label("time"), + func.count().label("hits"), + func.max(Alert.timestamp).label("last_alert_timestamp"), + ) + .filter( + Alert.tenant_id == tenant_id, + Alert.timestamp >= twenty_four_hours_ago, + ) + .group_by("time") + .order_by("time") + ) + + results = query.all() + + combined_distribution = { + "alert_per_hour": [{"hour": i, "number": 0} for i in range(24)], + "last_alert_received": None, + } + + for time, hits, last_alert_timestamp in results: + last_alert_timestamp = ( + datetime.fromisoformat(last_alert_timestamp) + if isinstance(last_alert_timestamp, str) + else last_alert_timestamp + ) + + if ( + combined_distribution["last_alert_received"] is None + or last_alert_timestamp + > combined_distribution["last_alert_received"] + ): + combined_distribution["last_alert_received"] = last_alert_timestamp + + time = datetime.strptime(time, time_format) + index = int((time - twenty_four_hours_ago).total_seconds() // 3600) + + if 0 <= index < 24: + combined_distribution["alert_per_hour"][index]["number"] += hits + + return combined_distribution + + else: + # Query for alert distribution grouped by provider + query = ( + session.query( + Alert.provider_id, + Alert.provider_type, + timestamp_format.label("time"), + func.count().label("hits"), + func.max(Alert.timestamp).label("last_alert_timestamp"), + ) + .filter( + Alert.tenant_id == tenant_id, + Alert.timestamp >= twenty_four_hours_ago, + ) + .group_by(Alert.provider_id, Alert.provider_type, "time") + .order_by(Alert.provider_id, Alert.provider_type, "time") + ) + + results = query.all() + + provider_distribution = {} + + for provider_id, provider_type, time, hits, last_alert_timestamp in results: + provider_key = f"{provider_id}_{provider_type}" + last_alert_timestamp = ( + datetime.fromisoformat(last_alert_timestamp) + if isinstance(last_alert_timestamp, str) + else last_alert_timestamp + ) + + if provider_key not in provider_distribution: + provider_distribution[provider_key] = { + "provider_id": provider_id, + "provider_type": provider_type, + "alert_last_24_hours": [ + {"hour": i, "number": 0} for i in range(24) + ], + "last_alert_received": last_alert_timestamp, + } + else: + provider_distribution[provider_key]["last_alert_received"] = max( + provider_distribution[provider_key]["last_alert_received"], + last_alert_timestamp, + ) + + time = datetime.strptime(time, time_format) + index = int((time - twenty_four_hours_ago).total_seconds() // 3600) + + if 0 <= index < 24: + provider_distribution[provider_key]["alert_last_24_hours"][index][ + "number" + ] += hits + + return provider_distribution + + +def get_combined_workflow_execution_distribution(tenant_id: str): + """ + Returns counts of WorkflowExecutions started in the past 24 hours, combined across all workflows. 
+ """ + with Session(engine) as session: + twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) + time_format = "%Y-%m-%d %H" + + # Database-specific timestamp formatting + if session.bind.dialect.name == "mysql": + timestamp_format = func.date_format(WorkflowExecution.started, time_format) + elif session.bind.dialect.name == "postgresql": + timestamp_format = func.to_char(WorkflowExecution.started, "YYYY-MM-DD HH") + elif session.bind.dialect.name == "sqlite": + timestamp_format = func.strftime(time_format, WorkflowExecution.started) + + # Query for combined execution count across all workflows query = ( session.query( - Alert.provider_id, - Alert.provider_type, timestamp_format.label("time"), - func.count().label("hits"), - func.max(Alert.timestamp).label( - "last_alert_timestamp" - ), # Include max timestamp + func.count().label("executions"), ) .filter( - Alert.tenant_id == tenant_id, - Alert.timestamp >= twenty_four_hours_ago, + WorkflowExecution.tenant_id == tenant_id, + WorkflowExecution.started >= twenty_four_hours_ago, ) - .group_by(Alert.provider_id, Alert.provider_type, "time") - .order_by(Alert.provider_id, Alert.provider_type, "time") + .group_by("time") + .order_by("time") ) results = query.all() - provider_distribution = {} + executions_per_hour = [{"hour": i, "number": 0} for i in range(24)] - for provider_id, provider_type, time, hits, last_alert_timestamp in results: - provider_key = f"{provider_id}_{provider_type}" - last_alert_timestamp = ( - datetime.fromisoformat(last_alert_timestamp) - if isinstance(last_alert_timestamp, str) - else last_alert_timestamp + for time, executions in results: + time = datetime.strptime(time, time_format) + index = int((time - twenty_four_hours_ago).total_seconds() // 3600) + + if 0 <= index < 24: + executions_per_hour[index]["number"] += executions + + return executions_per_hour + + +def get_incidents_created_distribution(tenant_id): + with Session(engine) as session: + twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) + time_format = "%Y-%m-%d %H" + + # Database-specific timestamp formatting + if session.bind.dialect.name == "mysql": + timestamp_format = func.date_format(Incident.creation_time, time_format) + elif session.bind.dialect.name == "postgresql": + timestamp_format = func.to_char(Incident.creation_time, "YYYY-MM-DD HH") + elif session.bind.dialect.name == "sqlite": + timestamp_format = func.strftime(time_format, Incident.creation_time) + + query = ( + session.query( + timestamp_format.label("time"), func.count().label("incidents") ) + .filter( + Incident.tenant_id == tenant_id, + Incident.creation_time >= twenty_four_hours_ago, + ) + .group_by("time") + .order_by("time") + ) - if provider_key not in provider_distribution: - provider_distribution[provider_key] = { - "provider_id": provider_id, - "provider_type": provider_type, - "alert_last_24_hours": [ - {"hour": i, "number": 0} for i in range(24) - ], - "last_alert_received": last_alert_timestamp, # Initialize with the first seen timestamp - } - else: - # Update the last alert timestamp if the current one is more recent - provider_distribution[provider_key]["last_alert_received"] = max( - provider_distribution[provider_key]["last_alert_received"], - last_alert_timestamp, - ) + results = query.all() + incidents_per_hour = [{"hour": i, "number": 0} for i in range(24)] + for time, incidents in results: time = datetime.strptime(time, time_format) index = int((time - twenty_four_hours_ago).total_seconds() // 3600) if 0 <= index < 24: - 
provider_distribution[provider_key]["alert_last_24_hours"][index][ - "number" - ] += hits + incidents_per_hour[index]["number"] += incidents + + return incidents_per_hour + + +def calc_incidents_mttr(tenant_id: str): + with Session(engine) as session: + twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) + time_format = "%Y-%m-%d %H" + + # Database-specific timestamp formatting + if session.bind.dialect.name == "mysql": + timestamp_format = func.date_format(Incident.creation_time, time_format) + elif session.bind.dialect.name == "postgresql": + timestamp_format = func.to_char(Incident.creation_time, "YYYY-MM-DD HH") + elif session.bind.dialect.name == "sqlite": + timestamp_format = func.strftime(time_format, Incident.creation_time) + + query = ( + session.query( + timestamp_format.label("time"), + Incident.start_time, + Incident.end_time, + func.count().label("incidents"), + ) + .filter( + Incident.tenant_id == tenant_id, + Incident.creation_time >= twenty_four_hours_ago, + Incident.status == IncidentStatus.RESOLVED.value, + ) + .group_by("time", Incident.start_time, Incident.end_time) + .order_by("time") + ) - return provider_distribution + results = query.all() + mttr = [{"hour": i, "number": 0, "mttr": 0} for i in range(24)] + + for time, start_time, end_time, incidents in results: + time = datetime.strptime(time, time_format) + index = int((time - twenty_four_hours_ago).total_seconds() // 3600) + if start_time and end_time: + resolution_time = int( + (end_time - start_time).total_seconds() / 3600 + ) # MTTR in hours + mttr[index]["number"] += incidents + mttr[index]["mttr"] += resolution_time * incidents + + for hour_data in mttr: + if hour_data["number"] > 0: + hour_data["mttr"] /= hour_data["number"] # Average MTTR for the hour + return mttr def get_presets( @@ -2901,11 +3084,14 @@ def get_incident_alerts_and_links_by_incident_id( return query.all(), total_count + def get_incident_alerts_by_incident_id(*args, **kwargs) -> tuple[List[Alert], int]: """ Unpacking (List[(Alert, AlertToIncident)], int) to (List[Alert], int). 
""" - alerts_and_links, total_alerts = get_incident_alerts_and_links_by_incident_id(*args, **kwargs) + alerts_and_links, total_alerts = get_incident_alerts_and_links_by_incident_id( + *args, **kwargs + ) alerts = [alert_and_link[0] for alert_and_link in alerts_and_links] return alerts, total_alerts @@ -2931,21 +3117,20 @@ def get_future_incidents_by_incident_id( def get_all_same_alert_ids( - tenant_id: str, - alert_ids: List[str | UUID], - session: Optional[Session] = None + tenant_id: str, alert_ids: List[str | UUID], session: Optional[Session] = None ): with existed_or_new_session(session) as session: - fingerprints_subquery = session.query(Alert.fingerprint).where( - Alert.tenant_id == tenant_id, - col(Alert.id).in_(alert_ids) - ).subquery() + fingerprints_subquery = ( + session.query(Alert.fingerprint) + .where(Alert.tenant_id == tenant_id, col(Alert.id).in_(alert_ids)) + .subquery() + ) query = session.scalars( - select(Alert.id) - .where( - Alert.tenant_id == tenant_id, - col(Alert.fingerprint).in_(fingerprints_subquery) - )) + select(Alert.id).where( + Alert.tenant_id == tenant_id, + col(Alert.fingerprint).in_(fingerprints_subquery), + ) + ) return query.all() @@ -3016,7 +3201,9 @@ def add_alerts_to_incident_by_incident_id( if not incident: return None - return add_alerts_to_incident(tenant_id, incident, alert_ids, is_created_by_ai, session) + return add_alerts_to_incident( + tenant_id, incident, alert_ids, is_created_by_ai, session + ) def add_alerts_to_incident( @@ -3049,7 +3236,9 @@ def add_alerts_to_incident( ) new_alert_ids = [ - alert_id for alert_id in all_alert_ids if alert_id not in existing_alert_ids + alert_id + for alert_id in all_alert_ids + if alert_id not in existing_alert_ids ] if not new_alert_ids: @@ -3060,16 +3249,21 @@ def add_alerts_to_incident( ) incident.sources = list( - set(incident.sources if incident.sources else []) | set(alerts_data_for_incident["sources"]) + set(incident.sources if incident.sources else []) + | set(alerts_data_for_incident["sources"]) ) incident.affected_services = list( - set(incident.affected_services if incident.affected_services else []) | set(alerts_data_for_incident["services"]) + set(incident.affected_services if incident.affected_services else []) + | set(alerts_data_for_incident["services"]) ) incident.alerts_count += alerts_data_for_incident["count"] alert_to_incident_entries = [ AlertToIncident( - alert_id=alert_id, incident_id=incident.id, tenant_id=tenant_id, is_created_by_ai=is_created_by_ai + alert_id=alert_id, + incident_id=incident.id, + tenant_id=tenant_id, + is_created_by_ai=is_created_by_ai, ) for alert_id in new_alert_ids ] @@ -3170,9 +3364,12 @@ def remove_alerts_to_incident_by_incident_id( AlertToIncident.tenant_id == tenant_id, AlertToIncident.incident_id == incident.id, col(AlertToIncident.alert_id).in_(all_alert_ids), - ).update({ - "deleted_at": datetime.now(datetime.now().astimezone().tzinfo), - }) + ) + .update( + { + "deleted_at": datetime.now(datetime.now().astimezone().tzinfo), + } + ) ) session.commit() @@ -3609,7 +3806,7 @@ def get_workflow_executions_for_incident_or_alert( .join(AlertToIncident, Alert.id == AlertToIncident.alert_id) .where( AlertToIncident.deleted_at == NULL_FOR_DELETED_AT, - AlertToIncident.incident_id == incident_id + AlertToIncident.incident_id == incident_id, ) ) @@ -3719,37 +3916,35 @@ def is_edge_incident_alert_resolved( AlertEnrichment, Alert.fingerprint == AlertEnrichment.alert_fingerprint ) .join(AlertToIncident, AlertToIncident.alert_id == Alert.id) - .where( - 
AlertToIncident.incident_id == incident.id - ) + .where(AlertToIncident.incident_id == incident.id) .group_by(Alert.fingerprint) .having(func.max(Alert.timestamp)) .order_by(direction(Alert.timestamp)) ).first() - - return ( - enriched_status == AlertStatus.RESOLVED.value or - (enriched_status is None and status == AlertStatus.RESOLVED.value) + + return enriched_status == AlertStatus.RESOLVED.value or ( + enriched_status is None and status == AlertStatus.RESOLVED.value ) + def get_alerts_metrics_by_provider( tenant_id: str, - start_date: Optional[datetime] = None, + start_date: Optional[datetime] = None, end_date: Optional[datetime] = None, - fields: Optional[List[str]] = [] + fields: Optional[List[str]] = [], ) -> Dict[str, Dict[str, Any]]: - + dynamic_field_sums = [ func.sum( case( [ ( - func.json_extract(Alert.event, f'$.{field}').isnot(None) & - (func.json_extract(Alert.event, f'$.{field}') != False), - 1 + func.json_extract(Alert.event, f"$.{field}").isnot(None) + & (func.json_extract(Alert.event, f"$.{field}") != False), + 1, ) - ], - else_=0 + ], + else_=0, ) ).label(f"{field}_count") for field in fields @@ -3761,8 +3956,10 @@ def get_alerts_metrics_by_provider( Alert.provider_type, Alert.provider_id, func.count(Alert.id).label("total_alerts"), - func.sum(case([(AlertToIncident.alert_id.isnot(None), 1)], else_=0)).label("correlated_alerts"), - *dynamic_field_sums + func.sum( + case([(AlertToIncident.alert_id.isnot(None), 1)], else_=0) + ).label("correlated_alerts"), + *dynamic_field_sums, ) .outerjoin(AlertToIncident, Alert.id == AlertToIncident.alert_id) .filter( @@ -3773,18 +3970,19 @@ def get_alerts_metrics_by_provider( # Add timestamp filter only if both start_date and end_date are provided if start_date and end_date: query = query.filter( - Alert.timestamp >= start_date, - Alert.timestamp <= end_date + Alert.timestamp >= start_date, Alert.timestamp <= end_date ) results = query.group_by(Alert.provider_id, Alert.provider_type).all() - + return { f"{row.provider_id}_{row.provider_type}": { "total_alerts": row.total_alerts, "correlated_alerts": row.correlated_alerts, "provider_type": row.provider_type, - **{f"{field}_count": getattr(row, f"{field}_count") for field in fields} # Add field-specific counts + **{ + f"{field}_count": getattr(row, f"{field}_count") for field in fields + }, # Add field-specific counts } for row in results } diff --git a/keep/api/routes/dashboard.py b/keep/api/routes/dashboard.py index 32e437947..9364320ea 100644 --- a/keep/api/routes/dashboard.py +++ b/keep/api/routes/dashboard.py @@ -7,7 +7,13 @@ from fastapi import APIRouter, Depends, HTTPException from pydantic import BaseModel -from keep.api.core.db import create_dashboard as create_dashboard_db +from keep.api.core.db import ( + create_dashboard as create_dashboard_db, + get_provider_distribution, + get_incidents_created_distribution, + get_combined_workflow_execution_distribution, + calc_incidents_mttr, +) from keep.api.core.db import delete_dashboard as delete_dashboard_db from keep.api.core.db import get_dashboards as get_dashboards_db from keep.api.core.db import update_dashboard as update_dashboard_db @@ -136,3 +142,28 @@ def delete_dashboard( if not dashboard: raise HTTPException(status_code=404, detail="Dashboard not found") return {"ok": True} + + +@router.get("/metric-widgets") +def get_metric_widgets( + mttr: bool = True, + apd: bool = True, + ipd: bool = True, + wpd: bool = True, + authenticated_entity: AuthenticatedEntity = Depends( + 
IdentityManagerFactory.get_auth_verifier(["read:dashboards"]) + ), +): + data = {} + tenant_id = authenticated_entity.tenant_id + if apd: + data["apd"] = get_provider_distribution( + tenant_id=tenant_id, aggregate_all=True + )["alert_per_hour"] + if ipd: + data["ipd"] = get_incidents_created_distribution(tenant_id=tenant_id) + if wpd: + data["wpd"] = get_combined_workflow_execution_distribution(tenant_id=tenant_id) + if mttr: + data["mttr"] = calc_incidents_mttr(tenant_id=tenant_id) + return data From ac3f3fbf6fc1e7f9b5e54cad755f8bae0ce918da Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Tue, 22 Oct 2024 12:29:38 +0530 Subject: [PATCH 2/8] fix: CI build Signed-off-by: 35C4n0r --- keep-ui/app/dashboard/EditGridItemModal.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keep-ui/app/dashboard/EditGridItemModal.tsx b/keep-ui/app/dashboard/EditGridItemModal.tsx index 79e9b9ae1..35dfe709e 100644 --- a/keep-ui/app/dashboard/EditGridItemModal.tsx +++ b/keep-ui/app/dashboard/EditGridItemModal.tsx @@ -14,7 +14,7 @@ const EditGridItemModal: React.FC = ({ isOpen, onClose, const [thresholds, setThresholds] = useState([]); useEffect(() => { - if (item) { + if (item?.thresholds) { setThresholds(item.thresholds); } }, [item]); From 138fe56b25dd2c1c868794d12eb6518619705be6 Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Tue, 22 Oct 2024 13:33:02 +0530 Subject: [PATCH 3/8] fix: fixes #2262 Signed-off-by: 35C4n0r --- keep/api/core/db.py | 10 ++++++++-- keep/api/routes/incidents.py | 11 ++++++++--- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index d1ca4a72c..387ee3a69 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -3744,7 +3744,10 @@ def get_alerts_fields(tenant_id: str) -> List[AlertField]: def change_incident_status_by_id( - tenant_id: str, incident_id: UUID | str, status: IncidentStatus + tenant_id: str, + incident_id: UUID | str, + status: IncidentStatus, + end_time: datetime | None = None, ) -> bool: with Session(engine) as session: stmt = ( @@ -3753,7 +3756,10 @@ def change_incident_status_by_id( Incident.tenant_id == tenant_id, Incident.id == incident_id, ) - .values(status=status.value) + .values( + status=status.value, + end_time=end_time, + ) ) updated = session.execute(stmt) session.commit() diff --git a/keep/api/routes/incidents.py b/keep/api/routes/incidents.py index bf2921bd8..b779157f6 100644 --- a/keep/api/routes/incidents.py +++ b/keep/api/routes/incidents.py @@ -515,7 +515,9 @@ async def add_alerts_to_incident( if not incident: raise HTTPException(status_code=404, detail="Incident not found") - add_alerts_to_incident_by_incident_id(tenant_id, incident_id, alert_ids, is_created_by_ai) + add_alerts_to_incident_by_incident_id( + tenant_id, incident_id, alert_ids, is_created_by_ai + ) try: logger.info("Pushing enriched alert to elasticsearch") elastic_client = ElasticClient(tenant_id) @@ -708,7 +710,10 @@ def change_incident_status( # We need to do something only if status really changed if not change.status == incident.status: - result = change_incident_status_by_id(tenant_id, incident_id, change.status) + end_time = (None, datetime.utcnow())[change.status == IncidentStatus.RESOLVED] + result = change_incident_status_by_id( + tenant_id, incident_id, change.status, end_time + ) if not result: raise HTTPException( status_code=500, detail="Error changing incident status" @@ -724,7 +729,7 @@ def change_incident_status( ), authenticated_entity=authenticated_entity, ) - + incident.end_time = end_time 
incident.status = change.status new_incident_dto = IncidentDto.from_db_incident(incident) From 1cd8d4db9f722b410ad8c4a7dfe8bab809b37c1f Mon Sep 17 00:00:00 2001 From: 35C4n0r Date: Sun, 27 Oct 2024 14:15:59 +0530 Subject: [PATCH 4/8] feat: add timestamp filter Signed-off-by: 35C4n0r --- keep/api/core/db.py | 241 ++++++++++++++++++++++------------ keep/api/models/time_stamp.py | 23 +++- keep/api/routes/dashboard.py | 18 ++- keep/api/routes/preset.py | 18 +-- 4 files changed, 188 insertions(+), 112 deletions(-) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 387ee3a69..ffff14f39 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -44,6 +44,7 @@ from keep.api.models.db.tenant import * # pylint: disable=unused-wildcard-import from keep.api.models.db.topology import * # pylint: disable=unused-wildcard-import from keep.api.models.db.workflow import * # pylint: disable=unused-wildcard-import +from keep.api.models.time_stamp import TimeStampFilter logger = logging.getLogger(__name__) @@ -2099,7 +2100,14 @@ def get_linked_providers(tenant_id: str) -> List[Tuple[str, str, datetime]]: return providers -def get_provider_distribution(tenant_id: str, aggregate_all: bool = False) -> dict: +def get_provider_distribution( + tenant_id: str, + aggregate_all: bool = False, + timestamp_filter: TimeStampFilter = None, +) -> ( + list[dict[str, int | Any]] + | dict[str, dict[str, datetime | list[dict[str, int]] | Any]] +): """ Returns hits per hour and the last alert timestamp for each provider in the past 24 hours. If aggregate_all is True, returns combined distribution across all providers. @@ -2108,6 +2116,16 @@ def get_provider_distribution(tenant_id: str, aggregate_all: bool = False) -> di twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) time_format = "%Y-%m-%d %H" + filters = [Alert.tenant_id == tenant_id] + + if timestamp_filter: + if timestamp_filter.lower_timestamp: + filters.append(Alert.timestamp >= timestamp_filter.lower_timestamp) + if timestamp_filter.upper_timestamp: + filters.append(Alert.timestamp <= timestamp_filter.upper_timestamp) + else: + filters.append(Alert.timestamp >= twenty_four_hours_ago) + if session.bind.dialect.name == "mysql": timestamp_format = func.date_format(Alert.timestamp, time_format) elif session.bind.dialect.name == "postgresql": @@ -2121,46 +2139,32 @@ def get_provider_distribution(tenant_id: str, aggregate_all: bool = False) -> di # Query for combined alert distribution across all providers query = ( session.query( - timestamp_format.label("time"), - func.count().label("hits"), - func.max(Alert.timestamp).label("last_alert_timestamp"), - ) - .filter( - Alert.tenant_id == tenant_id, - Alert.timestamp >= twenty_four_hours_ago, + timestamp_format.label("time"), func.count().label("hits") ) + .filter(*filters) .group_by("time") .order_by("time") ) results = query.all() - combined_distribution = { - "alert_per_hour": [{"hour": i, "number": 0} for i in range(24)], - "last_alert_received": None, - } + results = {str(time): hits for time, hits in results} - for time, hits, last_alert_timestamp in results: - last_alert_timestamp = ( - datetime.fromisoformat(last_alert_timestamp) - if isinstance(last_alert_timestamp, str) - else last_alert_timestamp + # Create a complete list of timestamps within the specified range + distribution = [] + current_time = timestamp_filter.lower_timestamp.replace( + minute=0, second=0, microsecond=0 + ) + while current_time <= timestamp_filter.upper_timestamp: + timestamp_str = current_time.strftime(time_format) + 
distribution.append( + { + "timestamp": timestamp_str + ":00", + "number": results.get(timestamp_str, 0), + } ) - - if ( - combined_distribution["last_alert_received"] is None - or last_alert_timestamp - > combined_distribution["last_alert_received"] - ): - combined_distribution["last_alert_received"] = last_alert_timestamp - - time = datetime.strptime(time, time_format) - index = int((time - twenty_four_hours_ago).total_seconds() // 3600) - - if 0 <= index < 24: - combined_distribution["alert_per_hour"][index]["number"] += hits - - return combined_distribution + current_time += timedelta(hours=1) + return distribution else: # Query for alert distribution grouped by provider @@ -2172,10 +2176,7 @@ def get_provider_distribution(tenant_id: str, aggregate_all: bool = False) -> di func.count().label("hits"), func.max(Alert.timestamp).label("last_alert_timestamp"), ) - .filter( - Alert.tenant_id == tenant_id, - Alert.timestamp >= twenty_four_hours_ago, - ) + .filter(*filters) .group_by(Alert.provider_id, Alert.provider_type, "time") .order_by(Alert.provider_id, Alert.provider_type, "time") ) @@ -2218,7 +2219,9 @@ def get_provider_distribution(tenant_id: str, aggregate_all: bool = False) -> di return provider_distribution -def get_combined_workflow_execution_distribution(tenant_id: str): +def get_combined_workflow_execution_distribution( + tenant_id: str, timestamp_filter: TimeStampFilter = None +): """ Returns counts of WorkflowExecutions started in the past 24 hours, combined across all workflows. """ @@ -2226,6 +2229,20 @@ def get_combined_workflow_execution_distribution(tenant_id: str): twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) time_format = "%Y-%m-%d %H" + filters = [WorkflowExecution.tenant_id == tenant_id] + + if timestamp_filter: + if timestamp_filter.lower_timestamp: + filters.append( + WorkflowExecution.started >= timestamp_filter.lower_timestamp + ) + if timestamp_filter.upper_timestamp: + filters.append( + WorkflowExecution.started <= timestamp_filter.upper_timestamp + ) + else: + filters.append(WorkflowExecution.started >= twenty_four_hours_ago) + # Database-specific timestamp formatting if session.bind.dialect.name == "mysql": timestamp_format = func.date_format(WorkflowExecution.started, time_format) @@ -2240,33 +2257,51 @@ def get_combined_workflow_execution_distribution(tenant_id: str): timestamp_format.label("time"), func.count().label("executions"), ) - .filter( - WorkflowExecution.tenant_id == tenant_id, - WorkflowExecution.started >= twenty_four_hours_ago, - ) + .filter(*filters) .group_by("time") .order_by("time") ) - results = query.all() - - executions_per_hour = [{"hour": i, "number": 0} for i in range(24)] - - for time, executions in results: - time = datetime.strptime(time, time_format) - index = int((time - twenty_four_hours_ago).total_seconds() // 3600) + results = {str(time): executions for time, executions in query.all()} - if 0 <= index < 24: - executions_per_hour[index]["number"] += executions + distribution = [] + current_time = timestamp_filter.lower_timestamp.replace( + minute=0, second=0, microsecond=0 + ) + while current_time <= timestamp_filter.upper_timestamp: + timestamp_str = current_time.strftime(time_format) + distribution.append( + { + "timestamp": timestamp_str + ":00", + "number": results.get(timestamp_str, 0), + } + ) + current_time += timedelta(hours=1) - return executions_per_hour + return distribution -def get_incidents_created_distribution(tenant_id): +def get_incidents_created_distribution( + tenant_id: str, 
timestamp_filter: TimeStampFilter = None +): with Session(engine) as session: twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) time_format = "%Y-%m-%d %H" + filters = [Incident.tenant_id == tenant_id] + + if timestamp_filter: + if timestamp_filter.lower_timestamp: + filters.append( + Incident.creation_time >= timestamp_filter.lower_timestamp + ) + if timestamp_filter.upper_timestamp: + filters.append( + Incident.creation_time <= timestamp_filter.upper_timestamp + ) + else: + filters.append(Incident.creation_time >= twenty_four_hours_ago) + # Database-specific timestamp formatting if session.bind.dialect.name == "mysql": timestamp_format = func.date_format(Incident.creation_time, time_format) @@ -2279,32 +2314,51 @@ def get_incidents_created_distribution(tenant_id): session.query( timestamp_format.label("time"), func.count().label("incidents") ) - .filter( - Incident.tenant_id == tenant_id, - Incident.creation_time >= twenty_four_hours_ago, - ) + .filter(*filters) .group_by("time") .order_by("time") ) - results = query.all() - incidents_per_hour = [{"hour": i, "number": 0} for i in range(24)] - - for time, incidents in results: - time = datetime.strptime(time, time_format) - index = int((time - twenty_four_hours_ago).total_seconds() // 3600) + results = {str(time): incidents for time, incidents in query.all()} - if 0 <= index < 24: - incidents_per_hour[index]["number"] += incidents + distribution = [] + current_time = timestamp_filter.lower_timestamp.replace( + minute=0, second=0, microsecond=0 + ) + while current_time <= timestamp_filter.upper_timestamp: + timestamp_str = current_time.strftime(time_format) + distribution.append( + { + "timestamp": timestamp_str + ":00", + "number": results.get(timestamp_str, 0), + } + ) + current_time += timedelta(hours=1) - return incidents_per_hour + return distribution -def calc_incidents_mttr(tenant_id: str): +def calc_incidents_mttr(tenant_id: str, timestamp_filter: TimeStampFilter = None): with Session(engine) as session: twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24) time_format = "%Y-%m-%d %H" + filters = [ + Incident.tenant_id == tenant_id, + Incident.status == IncidentStatus.RESOLVED.value, + ] + if timestamp_filter: + if timestamp_filter.lower_timestamp: + filters.append( + Incident.creation_time >= timestamp_filter.lower_timestamp + ) + if timestamp_filter.upper_timestamp: + filters.append( + Incident.creation_time <= timestamp_filter.upper_timestamp + ) + else: + filters.append(Incident.creation_time >= twenty_four_hours_ago) + # Database-specific timestamp formatting if session.bind.dialect.name == "mysql": timestamp_format = func.date_format(Incident.creation_time, time_format) @@ -2320,32 +2374,45 @@ def calc_incidents_mttr(tenant_id: str): Incident.end_time, func.count().label("incidents"), ) - .filter( - Incident.tenant_id == tenant_id, - Incident.creation_time >= twenty_four_hours_ago, - Incident.status == IncidentStatus.RESOLVED.value, - ) + .filter(*filters) .group_by("time", Incident.start_time, Incident.end_time) .order_by("time") ) + results = {} + for time, start_time, end_time, incidents in query.all(): + if start_time and end_time: + resolution_time = ( + end_time - start_time + ).total_seconds() / 3600 # in hours + time_str = str(time) + if time_str not in results: + results[time_str] = {"number": 0, "mttr": 0} + + results[time_str]["number"] += incidents + results[time_str]["mttr"] += resolution_time * incidents + + distribution = [] + current_time = timestamp_filter.lower_timestamp.replace( + 
minute=0, second=0, microsecond=0
+        )
+        while current_time <= timestamp_filter.upper_timestamp:
+            timestamp_str = current_time.strftime(time_format)
+            if timestamp_str in results and results[timestamp_str]["number"] > 0:
+                avg_mttr = (
+                    results[timestamp_str]["mttr"] / results[timestamp_str]["number"]
+                )
+            else:
+                avg_mttr = 0
-        results = query.all()
-        mttr = [{"hour": i, "number": 0, "mttr": 0} for i in range(24)]
+            distribution.append(
+                {
+                    "timestamp": timestamp_str + ":00",
+                    "mttr": avg_mttr,
+                }
+            )
+            current_time += timedelta(hours=1)
-        for time, start_time, end_time, incidents in results:
-            time = datetime.strptime(time, time_format)
-            index = int((time - twenty_four_hours_ago).total_seconds() // 3600)
-            if start_time and end_time:
-                resolution_time = int(
-                    (end_time - start_time).total_seconds() / 3600
-                )  # MTTR in hours
-                mttr[index]["number"] += incidents
-                mttr[index]["mttr"] += resolution_time * incidents
-
-        for hour_data in mttr:
-            if hour_data["number"] > 0:
-                hour_data["mttr"] /= hour_data["number"]  # Average MTTR for the hour
-        return mttr
+        return distribution
 
 
 def get_presets(
diff --git a/keep/api/models/time_stamp.py b/keep/api/models/time_stamp.py
index 67c910538..afe9b0c13 100644
--- a/keep/api/models/time_stamp.py
+++ b/keep/api/models/time_stamp.py
@@ -1,10 +1,27 @@
+import json
 from typing import Optional
+
+from fastapi import Query, HTTPException
 from pydantic import BaseModel, Field
 from datetime import datetime
+
+
 class TimeStampFilter(BaseModel):
-    lower_timestamp: Optional[datetime] = Field(None, alias='start')
-    upper_timestamp: Optional[datetime] = Field(None, alias='end')
+    lower_timestamp: Optional[datetime] = Field(None, alias="start")
+    upper_timestamp: Optional[datetime] = Field(None, alias="end")
 
     class Config:
         allow_population_by_field_name = True
-
\ No newline at end of file
+
+
+# Function to handle the time_stamp query parameter and parse it
+def _get_time_stamp_filter(time_stamp: Optional[str] = Query(None)) -> TimeStampFilter:
+    if time_stamp:
+        try:
+            # Parse the JSON string
+            time_stamp_dict = json.loads(time_stamp)
+            # Return the TimeStampFilter object; Pydantic maps the 'start'/'end' aliases to lower_timestamp/upper_timestamp
+            return TimeStampFilter(**time_stamp_dict)
+        except (json.JSONDecodeError, TypeError):
+            raise HTTPException(status_code=400, detail="Invalid time_stamp format")
+    return TimeStampFilter()
diff --git a/keep/api/routes/dashboard.py b/keep/api/routes/dashboard.py
index 9364320ea..4092608f7 100644
--- a/keep/api/routes/dashboard.py
+++ b/keep/api/routes/dashboard.py
@@ -17,6 +17,7 @@ from keep.api.core.db import delete_dashboard as delete_dashboard_db
 from keep.api.core.db import get_dashboards as get_dashboards_db
 from keep.api.core.db import update_dashboard as update_dashboard_db
+from keep.api.models.time_stamp import TimeStampFilter, _get_time_stamp_filter
 from keep.identitymanager.authenticatedentity import AuthenticatedEntity
 from keep.identitymanager.identitymanagerfactory import IdentityManagerFactory
 
 
@@ -146,6 +147,7 @@ def delete_dashboard(
 
 @router.get("/metric-widgets")
 def get_metric_widgets(
+    time_stamp: TimeStampFilter = Depends(_get_time_stamp_filter),
     mttr: bool = True,
     apd: bool = True,
     ipd: bool = True,
@@ -158,12 +160,18 @@ def get_metric_widgets(
     tenant_id = authenticated_entity.tenant_id
     if apd:
         data["apd"] = get_provider_distribution(
-            tenant_id=tenant_id, aggregate_all=True
-        )["alert_per_hour"]
+            tenant_id=tenant_id, aggregate_all=True, timestamp_filter=time_stamp
+        )
     if ipd:
-        data["ipd"] = get_incidents_created_distribution(tenant_id=tenant_id)
+        data["ipd"] = get_incidents_created_distribution(
+            tenant_id=tenant_id, timestamp_filter=time_stamp
+        )
     if wpd:
-        data["wpd"] = get_combined_workflow_execution_distribution(tenant_id=tenant_id)
+        data["wpd"] = get_combined_workflow_execution_distribution(
+            tenant_id=tenant_id, timestamp_filter=time_stamp
+        )
     if mttr:
-        data["mttr"] = calc_incidents_mttr(tenant_id=tenant_id)
+        data["mttr"] = calc_incidents_mttr(
+            tenant_id=tenant_id, timestamp_filter=time_stamp
+        )
     return data
diff --git a/keep/api/routes/preset.py b/keep/api/routes/preset.py
index cfa701681..a4f87cc08 100644
--- a/keep/api/routes/preset.py
+++ b/keep/api/routes/preset.py
@@ -1,16 +1,13 @@
-import json
 import logging
 import os
 import uuid
 from datetime import datetime
-from typing import Optional
 
 from fastapi import (
     APIRouter,
     BackgroundTasks,
     Depends,
     HTTPException,
-    Query,
     Request,
     Response,
 )
@@ -34,7 +31,7 @@
     Tag,
     TagDto,
 )
-from keep.api.models.time_stamp import TimeStampFilter
+from keep.api.models.time_stamp import TimeStampFilter, _get_time_stamp_filter
 from keep.api.tasks.process_event_task import process_event
 from keep.api.tasks.process_topology_task import process_topology
 from keep.contextmanager.contextmanager import ContextManager
@@ -177,19 +174,6 @@ def pull_data_from_providers(
     )
 
 
-# Function to handle the time_stamp query parameter and parse it
-def _get_time_stamp_filter(time_stamp: Optional[str] = Query(None)) -> TimeStampFilter:
-    if time_stamp:
-        try:
-            # Parse the JSON string
-            time_stamp_dict = json.loads(time_stamp)
-            # Return the TimeStampFilter object, Pydantic will map 'from' -> lower_timestamp and 'to' -> upper_timestamp
-            return TimeStampFilter(**time_stamp_dict)
-        except (json.JSONDecodeError, TypeError):
-            raise HTTPException(status_code=400, detail="Invalid time_stamp format")
-    return TimeStampFilter()
-
-
 @router.get(
     "",
     description="Get all presets for tenant",

From 7ea45e5345491608470c14ac0f3c9c543f4fdf9e Mon Sep 17 00:00:00 2001
From: 35C4n0r
Date: Sun, 27 Oct 2024 14:39:52 +0530
Subject: [PATCH 5/8] feat: add timestamp filter

Signed-off-by: 35C4n0r
---
 keep/api/routes/dashboard.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/keep/api/routes/dashboard.py b/keep/api/routes/dashboard.py
index 4092608f7..cfcb61d4a 100644
--- a/keep/api/routes/dashboard.py
+++ b/keep/api/routes/dashboard.py
@@ -1,7 +1,7 @@
 import json
 import logging
 import os
-from datetime import datetime
+from datetime import datetime, timedelta
 from typing import Dict, List, Optional
 
 from fastapi import APIRouter, Depends, HTTPException
@@ -158,6 +158,11 @@ def get_metric_widgets(
 ):
     data = {}
     tenant_id = authenticated_entity.tenant_id
+    if not time_stamp.lower_timestamp or not time_stamp.upper_timestamp:
+        time_stamp = TimeStampFilter(
+            upper_timestamp=datetime.utcnow(),
+            lower_timestamp=datetime.utcnow() - timedelta(hours=24),
+        )
     if apd:
         data["apd"] = get_provider_distribution(
             tenant_id=tenant_id, aggregate_all=True, timestamp_filter=time_stamp

From 5cf37b3a6634fb4460c5a60f1ce92b82a32f9d1f Mon Sep 17 00:00:00 2001
From: Jay Kumar
Date: Sun, 27 Oct 2024 09:12:44 +0000
Subject: [PATCH 6/8] feat: integrate timestamp filter

---
 keep-ui/app/dashboard/GridItem.tsx            |  6 ++---
 keep-ui/app/dashboard/[id]/dashboard.tsx      |  2 +-
 .../utils/hooks/useDashboardMetricWidgets.ts  | 24 ++++++++++---------
 3 files changed, 17 insertions(+), 15 deletions(-)

diff --git a/keep-ui/app/dashboard/GridItem.tsx b/keep-ui/app/dashboard/GridItem.tsx
index 802056a52..6c85e2c43 100644
--- a/keep-ui/app/dashboard/GridItem.tsx
+++ b/keep-ui/app/dashboard/GridItem.tsx
@@ -118,11 +118,11 @@ const GridItem: React.FC = ({
       { item.metric && (
-
+
                    `${Intl.NumberFormat().format(number).toString()}`
@@ -131,7 +131,7 @@ const GridItem: React.FC = ({
                   connectNulls
                   showLegend={false}
                   showTooltip={true}
-                  xAxisLabel="24H Activity Data"
+                  xAxisLabel="Timestamp"
                 />
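
The AreaChart touched above renders the hourly buckets that /dashboard/metric-widgets returns for the apd, ipd, wpd and mttr series. For reference, here is a minimal TypeScript sketch of the request contract the surrounding patches rely on: the time range travels as a time_stamp query parameter holding JSON with "start"/"end" keys (the TimeStampFilter aliases), and the backend falls back to the last 24 hours when it is omitted. The helper below is illustrative only; fetchMetricWidgets and the hard-coded 24-hour window are not part of this patch set.

// Illustrative sketch only (not part of this patch): mirrors what the
// useDashboardMetricWidgets hook does through SWR, using plain fetch.
interface MetricBucket {
  timestamp: string; // "YYYY-MM-DD HH:00" bucket produced by the db.py helpers
  number?: number;   // apd / ipd / wpd series
  mttr?: number;     // mttr series, in hours
}

async function fetchMetricWidgets(
  apiUrl: string,
  accessToken: string
): Promise<Record<string, MetricBucket[]>> {
  // "start" and "end" are the aliases TimeStampFilter maps onto
  // lower_timestamp / upper_timestamp on the backend.
  const timeStamp = JSON.stringify({
    start: new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(),
    end: new Date().toISOString(),
  });
  const params = new URLSearchParams({ time_stamp: timeStamp });
  const res = await fetch(`${apiUrl}/dashboard/metric-widgets?${params}`, {
    headers: { Authorization: `Bearer ${accessToken}` },
  });
  // Response keys: apd, ipd, wpd, mttr -> arrays of hourly buckets.
  return res.json();
}
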
diff --git a/keep-ui/app/dashboard/[id]/dashboard.tsx b/keep-ui/app/dashboard/[id]/dashboard.tsx
index f67bece23..77b060624 100644
--- a/keep-ui/app/dashboard/[id]/dashboard.tsx
+++ b/keep-ui/app/dashboard/[id]/dashboard.tsx
@@ -33,7 +33,7 @@ const DashboardPage = () => {
   const [isModalOpen, setIsModalOpen] = useState(false);
   const [layout, setLayout] = useState([]);
   const [widgetData, setWidgetData] = useState([]);
-  const {widgets: allMetricWidgets} = useDashboardMetricWidgets();
+  const {widgets: allMetricWidgets} = useDashboardMetricWidgets(true);
   const [editingItem, setEditingItem] = useState(null);
   const [dashboardName, setDashboardName] = useState(decodeURIComponent(id));
   const [isEditingName, setIsEditingName] = useState(false);
diff --git a/keep-ui/utils/hooks/useDashboardMetricWidgets.ts b/keep-ui/utils/hooks/useDashboardMetricWidgets.ts
index 9bac84342..d7d24ecc8 100644
--- a/keep-ui/utils/hooks/useDashboardMetricWidgets.ts
+++ b/keep-ui/utils/hooks/useDashboardMetricWidgets.ts
@@ -1,7 +1,8 @@
-import {useSession} from "next-auth/react";
-import {getApiURL} from "@/utils/apiUrl";
+ import {useSession} from "next-auth/react";
+ import { useApiUrl } from "./useConfig";
 import useSWR from "swr";
 import {fetcher} from "@/utils/fetcher";
+import { usePathname, useSearchParams } from "next/navigation";
 
 export interface MetricsWidget {
   id: string;
@@ -22,19 +23,20 @@ interface DashboardDistributionData {
 }
 
 
-export const useDashboardMetricWidgets = () => {
+export const useDashboardMetricWidgets = (useFilters?: boolean) => {
   const {data: session} = useSession();
-  const apiUrl = getApiURL();
+  const apiUrl = useApiUrl();
+  const searchParams = useSearchParams();
+  const filters = searchParams?.toString();
+
   const {data, error, mutate} = useSWR(
-    session ? `${apiUrl}/dashboard/metric-widgets` : null,
+    session ? `${apiUrl}/dashboard/metric-widgets${
+      useFilters && filters ? `?${filters}` : ""
+    }` : null,
     (url: string) => fetcher(url, session!.accessToken)
   )
+  console.log(filters)
 
-  const useGetData = () => {
-    return useSWR(
-      session ? `${apiUrl}/dashboard/metric-widgets` : null,
-      (url: string) => fetcher(url, session!.accessToken))
-  }
   let widgets: MetricsWidget[] = []
   if (data) {
     widgets = [
@@ -60,5 +62,5 @@ export const useDashboardMetricWidgets = () => {
       }
     ];
   }
-  return {widgets, useGetData};
+  return {widgets};
 }
\ No newline at end of file

From 53583da3011a545742675ed09fe9af376cd9a105 Mon Sep 17 00:00:00 2001
From: Jay Kumar <70096901+35C4n0r@users.noreply.github.com>
Date: Mon, 28 Oct 2024 14:59:00 +0530
Subject: [PATCH 7/8] Update keep/api/routes/incidents.py

Co-authored-by: Vladimir Filonov
Signed-off-by: Jay Kumar <70096901+35C4n0r@users.noreply.github.com>
---
 keep/api/routes/incidents.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/keep/api/routes/incidents.py b/keep/api/routes/incidents.py
index b779157f6..c23597213 100644
--- a/keep/api/routes/incidents.py
+++ b/keep/api/routes/incidents.py
@@ -710,7 +710,7 @@ def change_incident_status(
 
     # We need to do something only if status really changed
     if not change.status == incident.status:
-        end_time = (None, datetime.utcnow())[change.status == IncidentStatus.RESOLVED]
+        end_time = datetime.utcnow() if change.status == IncidentStatus.RESOLVED else None
         result = change_incident_status_by_id(
             tenant_id, incident_id, change.status, end_time
         )

From e988c5ea0d4b51a6ce9e19bf7eb3ad45db661f30 Mon Sep 17 00:00:00 2001
From: 35C4n0r
Date: Tue, 29 Oct 2024 19:55:19 +0530
Subject: [PATCH 8/8] feat: update docs

Signed-off-by: 35C4n0r
---
 keep/api/core/db.py | 76 +++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 73 insertions(+), 3 deletions(-)

diff --git a/keep/api/core/db.py b/keep/api/core/db.py
index 2501b7d01..1b18a0c47 100644
--- a/keep/api/core/db.py
+++ b/keep/api/core/db.py
@@ -2109,8 +2109,23 @@ def get_provider_distribution(
     | dict[str, dict[str, datetime | list[dict[str, int]] | Any]]
 ):
     """
-    Returns hits per hour and the last alert timestamp for each provider in the past 24 hours.
-    If aggregate_all is True, returns combined distribution across all providers.
+    Calculate the distribution of alerts received over time, per provider, for a specific tenant.
+
+    Args:
+        tenant_id (str): ID of the tenant whose alerts are being queried.
+        aggregate_all (bool): If True, return a single combined distribution across all providers.
+        timestamp_filter (TimeStampFilter, optional): Filter to specify the time range.
+            - lower_timestamp (datetime): Start of the time range.
+            - upper_timestamp (datetime): End of the time range.
+
+    Returns:
+        Hourly alert counts ('timestamp' in "YYYY-MM-DD HH:00" format, 'number' of alerts) per
+        provider together with each provider's last alert timestamp, or a single combined
+        series across all providers when aggregate_all is True.
+
+    Notes:
+        - If no timestamp_filter is provided, defaults to the last 24 hours.
+        - Supports MySQL, PostgreSQL, and SQLite for timestamp formatting.
     """
     with Session(engine) as session:
         twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24)
@@ -2223,7 +2238,23 @@ def get_combined_workflow_execution_distribution(
     tenant_id: str, timestamp_filter: TimeStampFilter = None
 ):
     """
-    Returns counts of WorkflowExecutions started in the past 24 hours, combined across all workflows.
+    Calculate the distribution of WorkflowExecutions started over time, combined across all workflows for a specific tenant.
+
+    Args:
+        tenant_id (str): ID of the tenant whose workflow executions are being analyzed.
+        timestamp_filter (TimeStampFilter, optional): Filter to specify the time range.
+            - lower_timestamp (datetime): Start of the time range.
+            - upper_timestamp (datetime): End of the time range.
+
+    Returns:
+        List[dict]: A list of dictionaries representing the hourly distribution of workflow executions.
+            Each dictionary contains:
+            - 'timestamp' (str): Timestamp of the hour in "YYYY-MM-DD HH:00" format.
+            - 'number' (int): Number of workflow executions started in that hour.
+
+    Notes:
+        - If no timestamp_filter is provided, defaults to the last 24 hours.
+        - Supports MySQL, PostgreSQL, and SQLite for timestamp formatting.
     """
     with Session(engine) as session:
         twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24)
@@ -2284,6 +2315,25 @@ def get_combined_workflow_execution_distribution(
 def get_incidents_created_distribution(
     tenant_id: str, timestamp_filter: TimeStampFilter = None
 ):
+    """
+    Calculate the distribution of incidents created over time for a specific tenant.
+
+    Args:
+        tenant_id (str): ID of the tenant whose incidents are being queried.
+        timestamp_filter (TimeStampFilter, optional): Filter to specify the time range.
+            - lower_timestamp (datetime): Start of the time range.
+            - upper_timestamp (datetime): End of the time range.
+
+    Returns:
+        List[dict]: A list of dictionaries representing the hourly distribution of incidents.
+            Each dictionary contains:
+            - 'timestamp' (str): Timestamp of the hour in "YYYY-MM-DD HH:00" format.
+            - 'number' (int): Number of incidents created in that hour.
+
+    Notes:
+        - If no timestamp_filter is provided, defaults to the last 24 hours.
+        - Supports MySQL, PostgreSQL, and SQLite for timestamp formatting.
+    """
     with Session(engine) as session:
         twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24)
         time_format = "%Y-%m-%d %H"
@@ -2339,6 +2389,26 @@ def get_incidents_created_distribution(
 
 
 def calc_incidents_mttr(tenant_id: str, timestamp_filter: TimeStampFilter = None):
+    """
+    Calculate the Mean Time to Resolve (MTTR) for incidents over time for a specific tenant.
+
+    Args:
+        tenant_id (str): ID of the tenant whose incidents are being analyzed.
+        timestamp_filter (TimeStampFilter, optional): Filter to specify the time range.
+            - lower_timestamp (datetime): Start of the time range.
+            - upper_timestamp (datetime): End of the time range.
+
+    Returns:
+        List[dict]: A list of dictionaries representing the hourly MTTR of incidents.
+            Each dictionary contains:
+            - 'timestamp' (str): Timestamp of the hour in "YYYY-MM-DD HH:00" format.
+            - 'mttr' (float): Mean Time to Resolve incidents in that hour (in hours).
+
+    Notes:
+        - If no timestamp_filter is provided, defaults to the last 24 hours.
+        - Only includes resolved incidents.
+        - Supports MySQL, PostgreSQL, and SQLite for timestamp formatting.
+    """
     with Session(engine) as session:
         twenty_four_hours_ago = datetime.utcnow() - timedelta(hours=24)
         time_format = "%Y-%m-%d %H"