diff --git a/.gitignore b/.gitignore index 4ff2fd406..78deb1a61 100644 --- a/.gitignore +++ b/.gitignore @@ -212,3 +212,4 @@ ee/experimental/ai_temp/* oauth2.cfg scripts/keep_slack_bot.py keepnew.db +providers_cache.json diff --git a/docker/Dockerfile.api b/docker/Dockerfile.api index 9e000384e..2be394d30 100644 --- a/docker/Dockerfile.api +++ b/docker/Dockerfile.api @@ -1,4 +1,4 @@ -FROM python:3.11.6-slim as base +FROM python:3.11.10-slim-bullseye as base ENV PYTHONFAULTHANDLER=1 \ PYTHONHASHSEED=random \ @@ -17,12 +17,16 @@ ENV PIP_DEFAULT_TIMEOUT=100 \ RUN pip install "poetry==$POETRY_VERSION" RUN python -m venv /venv COPY pyproject.toml poetry.lock ./ -RUN poetry export -f requirements.txt --output requirements.txt --without-hashes && /venv/bin/python -m pip install --upgrade -r requirements.txt +RUN poetry export -f requirements.txt --output requirements.txt --without-hashes --only main && \ + /venv/bin/python -m pip install --upgrade -r requirements.txt && \ + pip uninstall -y poetry COPY keep keep COPY ee keep/ee COPY examples examples -COPY README.md README.md -RUN poetry build && /venv/bin/pip install --use-deprecated=legacy-resolver dist/*.whl +RUN /venv/bin/pip install --use-deprecated=legacy-resolver . && \ + rm -rf /root/.cache/pip && \ + find /venv -type d -name "__pycache__" -exec rm -r {} + && \ + find /venv -type f -name "*.pyc" -delete FROM base as final ENV PATH="/venv/bin:${PATH}" @@ -30,12 +34,14 @@ ENV VIRTUAL_ENV="/venv" ENV EE_PATH="ee" COPY --from=builder /venv /venv COPY --from=builder /app/examples /examples +# Build the providers cache +RUN keep provider build_cache # as per Openshift guidelines, https://docs.openshift.com/container-platform/4.11/openshift_images/create-images.html#use-uid_create-images -RUN chgrp -R 0 /app && chmod -R g=u /app -RUN chown -R keep:keep /app -RUN chown -R keep:keep /venv +RUN chgrp -R 0 /app && chmod -R g=u /app && \ + chown -R keep:keep /app && \ + chown -R keep:keep /venv USER keep ENTRYPOINT ["/venv/lib/python3.11/site-packages/keep/entrypoint.sh"] -CMD ["gunicorn", "keep.api.api:get_app", "--bind" , "0.0.0.0:8080" , "--workers", "4" , "-k" , "uvicorn.workers.UvicornWorker", "-c", "/venv/lib/python3.11/site-packages/keep/api/config.py"] +CMD ["gunicorn", "keep.api.api:get_app", "--bind" , "0.0.0.0:8080" , "--workers", "4" , "-k" , "uvicorn.workers.UvicornWorker", "-c", "/venv/lib/python3.11/site-packages/keep/api/config.py", "--preload"] diff --git a/ee/experimental/__init__.py b/ee/experimental/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/ee/experimental/ai_temp/.gitkeep b/ee/experimental/ai_temp/.gitkeep deleted file mode 100644 index e69de29bb..000000000 diff --git a/ee/experimental/generative_utils.py b/ee/experimental/generative_utils.py deleted file mode 100644 index 5689eb7c0..000000000 --- a/ee/experimental/generative_utils.py +++ /dev/null @@ -1,239 +0,0 @@ -import logging -import os - -import numpy as np -from openai import OpenAI - -from keep.api.core.db import get_incident_by_id - -from keep.api.models.db.alert import Incident - -logger = logging.getLogger(__name__) - -SUMMARY_GENERATOR_VERBOSE_NAME = "Summary generator v0.1" -NAME_GENERATOR_VERBOSE_NAME = "Name generator v0.1" -MAX_SUMMARY_LENGTH = 900 -MAX_NAME_LENGTH = 75 - -def generate_incident_summary( - incident: Incident, - use_n_alerts_for_summary: int = -1, - generate_summary: str = None, - max_summary_length: int = None, -) -> str: - if "OPENAI_API_KEY" not in os.environ: - logger.error( - "OpenAI API key is not set. 
Incident summary generation is not available.", - extra={"algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, - "incident_id": incident.id, "tenant_id": incident.tenant_id} - ) - return "" - - if not generate_summary: - generate_summary = os.environ.get("GENERATE_INCIDENT_SUMMARY", "True") - - if generate_summary == "False": - logger.info(f"Incident summary generation is disabled. Aborting.", - extra={"algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - return "" - - if incident.user_summary: - return "" - - if not max_summary_length: - max_summary_length = os.environ.get( - "MAX_SUMMARY_LENGTH", MAX_SUMMARY_LENGTH) - - try: - client = OpenAI(api_key=os.environ["OPENAI_API_KEY"]) - - incident = get_incident_by_id(incident.tenant_id, incident.id) - - description_strings = np.unique( - [f'{alert.event["name"]}' for alert in incident.alerts] - ).tolist() - - if use_n_alerts_for_summary > 0: - incident_description = "\n".join( - description_strings[:use_n_alerts_for_summary] - ) - else: - incident_description = "\n".join(description_strings) - - timestamps = [alert.timestamp for alert in incident.alerts] - incident_start = min(timestamps).replace(microsecond=0) - incident_end = max(timestamps).replace(microsecond=0) - - model = os.environ.get("OPENAI_MODEL", "gpt-4o-mini") - - summary = ( - client.chat.completions.create( - model=model, - messages=[ - { - "role": "system", - "content": f"""You are a very skilled DevOps specialist who can summarize any incident based on alert descriptions. - When provided with information, summarize it in a 2-3 sentences explaining what happened and when. - ONLY SUMMARIZE WHAT YOU SEE. In the end add information about potential scenario of the incident. - When provided with information, answer with max a {int(max_summary_length * 0.9)} symbols excerpt - describing incident thoroughly. - - EXAMPLE: - An incident occurred between 2022-11-17 14:11:04 and 2022-11-22 22:19:04, involving a - total of 200 alerts. The alerts indicated critical and warning issues such as high CPU and memory - usage in pods and nodes, as well as stuck Kubernetes Daemonset rollout. Potential incident scenario: - Kubernetes Daemonset rollout stuck due to high CPU and memory usage in pods and nodes. This caused a - long tail of alerts on various topics.""", - }, - { - "role": "user", - "content": f"""Here are alerts of an incident for summarization:\n{incident_description}\n This incident started on - {incident_start}, ended on {incident_end}, included {incident.alerts_count} alerts.""", - }, - ], - ) - .choices[0] - .message.content - ) - - logger.info(f"Generated incident summary with length {len(summary)} symbols", - extra={"algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - - if len(summary) > max_summary_length: - logger.info(f"Generated incident summary is too long. Applying smart truncation", - extra={"algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - - summary = ( - client.chat.completions.create( - model=model, - messages=[ - { - "role": "system", - "content": f"""You are a very skilled DevOps specialist who can summarize any incident based on a description. - When provided with information, answer with max a {int(max_summary_length * 0.9)} symbols excerpt describing - incident thoroughly. 
- """, - }, - { - "role": "user", - "content": f"""Here is the description of an incident for summarization:\n{summary}""", - }, - ], - ) - .choices[0] - .message.content - ) - - logger.info(f"Generated new incident summary with length {len(summary)} symbols", - extra={"algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - - if len(summary) > max_summary_length: - logger.info(f"Generated incident summary is too long. Applying hard truncation", - extra={"algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - summary = summary[: max_summary_length] - - return summary - except Exception as e: - logger.error(f"Error in generating incident summary: {e}", - extra={"algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - return "" - - -def generate_incident_name(incident: Incident, generate_name: str = None, max_name_length: int = None, use_n_alerts_for_name: int = -1) -> str: - if "OPENAI_API_KEY" not in os.environ: - logger.error( - "OpenAI API key is not set. Incident name generation is not available.", - extra={"algorithm": NAME_GENERATOR_VERBOSE_NAME, - "incident_id": incident.id, "tenant_id": incident.tenant_id} - ) - return "" - - if not generate_name: - generate_name = os.environ.get("GENERATE_INCIDENT_NAME", "True") - - if generate_name == "False": - logger.info(f"Incident name generation is disabled. Aborting.", - extra={"algorithm": NAME_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - return "" - - if incident.user_generated_name: - return "" - - if not max_name_length: - max_name_length = os.environ.get( - "MAX_NAME_LENGTH", MAX_NAME_LENGTH) - - try: - client = OpenAI(api_key=os.environ["OPENAI_API_KEY"]) - - incident = get_incident_by_id(incident.tenant_id, incident.id) - - description_strings = np.unique( - [f'{alert.event["name"]}' for alert in incident.alerts]).tolist() - - if use_n_alerts_for_name > 0: - incident_description = "\n".join( - description_strings[:use_n_alerts_for_name]) - else: - incident_description = "\n".join(description_strings) - - timestamps = [alert.timestamp for alert in incident.alerts] - incident_start = min(timestamps).replace(microsecond=0) - - model = os.environ.get("OPENAI_MODEL", "gpt-4o-mini") - - name = client.chat.completions.create(model=model, messages=[ - { - "role": "system", - "content": f"""You are a very skilled DevOps specialist who can name any incident based on alert descriptions. - When provided with information, output a short descriptive name of incident that could cause these alerts. - Add information about start time to the name. ONLY USE WHAT YOU SEE. Answer with max a {int(max_name_length * 0.9)} - symbols excerpt. - - EXAMPLE: - Kubernetes rollout stuck (started on 2022.11.17 14:11)""" - }, - { - "role": "user", - "content": f"""This incident started on {incident_start}. - Here are alerts of an incident:\n{incident_description}\n""" - } - ]).choices[0].message.content - - logger.info(f"Generated incident name with length {len(name)} symbols", - extra={"incident_id": incident.id, "tenant_id": incident.tenant_id}) - - if len(name) > max_name_length: - logger.info(f"Generated incident name is too long. 
Applying smart truncation", - extra={"algorithm": NAME_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - - name = client.chat.completions.create(model=model, messages=[ - { - "role": "system", - "content": f"""You are a very skilled DevOps specialist who can name any incident based on a description. - Add information about start time to the name.When provided with information, answer with max a - {int(max_name_length * 0.9)} symbols. - - EXAMPLE: - Kubernetes rollout stuck (started on 2022.11.17 14:11)""" - }, - { - "role": "user", - "content": f"""This incident started on {incident_start}. - Here is the description of an incident to name:\n{name}.""" - } - ]).choices[0].message.content - - logger.info(f"Generated new incident name with length {len(name)} symbols", - extra={"algorithm": NAME_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - - if len(name) > max_name_length: - logger.info(f"Generated incident name is too long. Applying hard truncation", - extra={"algorithm": NAME_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - name = name[: max_name_length] - - return name - except Exception as e: - logger.error(f"Error in generating incident name: {e}", - extra={"algorithm": NAME_GENERATOR_VERBOSE_NAME, "incident_id": incident.id, "tenant_id": incident.tenant_id}) - return "" diff --git a/ee/experimental/graph_utils.py b/ee/experimental/graph_utils.py deleted file mode 100644 index 368e747f9..000000000 --- a/ee/experimental/graph_utils.py +++ /dev/null @@ -1,106 +0,0 @@ -import logging - -import numpy as np -import networkx as nx - -from typing import List, Tuple - -from keep.api.core.db import get_pmi_values_from_temp_file - -logger = logging.getLogger(__name__) - -def detect_knee_1d_auto_increasing(y: List[float]) -> Tuple[int, float]: - """ - This function detects the knee point in an increasing 1D curve. Knee point is the point where a curve - starts to flatten out (https://en.wikipedia.org/wiki/Knee_of_a_curve). - - Parameters: - y (List[float]): a list of float values - - Returns: - tuple: knee_index, knee_y - """ - - def detect_knee_1d(y: List[float], curve: str, direction: str = 'increasing') -> Tuple[int, float, List[float]]: - x = np.arange(len(y)) - - x_norm = (x - np.min(x)) / (np.max(x) - np.min(x)) - y_norm = (y - np.min(y)) / (np.max(y) - np.min(y)) - - diff_curve = y_norm - x_norm - - if curve == 'concave': - knee_index = np.argmax(diff_curve) - else: - knee_index = np.argmin(diff_curve) - - knee_y = y[knee_index] - - return knee_index, knee_y, diff_curve - - knee_index_concave, knee_y_concave, diff_curve_concave = detect_knee_1d(y, 'concave') - knee_index_convex, knee_y_convex, diff_curve_convex = detect_knee_1d(y, 'convex') - max_diff_concave = np.max(np.abs(diff_curve_concave)) - max_diff_convex = np.max(np.abs(diff_curve_convex)) - - if max_diff_concave > max_diff_convex: - return knee_index_concave, knee_y_concave - else: - return knee_index_convex, knee_y_convex - - -def create_graph(tenant_id: str, fingerprints: List[str], pmi_values: np.ndarray, fingerprint2idx: dict, pmi_threshold: float = 0., delete_nodes: bool = False, knee_threshold: float = 0.8) -> nx.Graph: - """ - This function creates a graph from a list of fingerprints. The graph is created based on the PMI values between - the fingerprints. The edges are created between the fingerprints that have a PMI value greater than the threshold. 
- The nodes are removed if the knee point of the PMI values of the edges connected to the node is less than the threshold. - - Parameters: - tenant_id (str): tenant id - fingerprints (List[str]): a list of fingerprints - pmi_threshold (float): PMI threshold - knee_threshold (float): knee threshold - - Returns: - nx.Graph: a graph - """ - graph = nx.Graph() - - if len(fingerprints) == 1: - graph.add_node(fingerprints[0]) - return graph - - logger.info(f'Creating alert graph edges', extra={'tenant_id': tenant_id}) - - for idx_i, fingerprint_i in enumerate(fingerprints): - if fingerprint_i not in fingerprint2idx: - continue - - for idx_j in range(idx_i + 1, len(fingerprints)): - fingerprint_j = fingerprints[idx_j] - - if fingerprint_j not in fingerprint2idx: - continue - - weight = pmi_values[fingerprint2idx[fingerprint_i], fingerprint2idx[fingerprint_j]] - - if weight > pmi_threshold: - graph.add_edge(fingerprint_i, fingerprint_j, weight=weight) - - if delete_nodes: - nodes_to_delete = [] - logger.info(f'Preparing candidate nodes for deletion', extra={'tenant_id': tenant_id}) - - for node in graph.nodes: - weights = sorted([edge['weight'] for edge in graph[node].values()]) - - knee_index, knee_statistic = detect_knee_1d_auto_increasing(weights) - - if knee_statistic < knee_threshold: - nodes_to_delete.append(node) - - logger.info(f'Removing nodes from graph, {len(nodes_to_delete)} nodes will be removed, {len(graph.nodes) - len(nodes_to_delete)} nodes will be left', - extra={'tenant_id': tenant_id}) - graph.remove_nodes_from(nodes_to_delete) - - return graph \ No newline at end of file diff --git a/ee/experimental/incident_utils.py b/ee/experimental/incident_utils.py deleted file mode 100644 index 6593ce830..000000000 --- a/ee/experimental/incident_utils.py +++ /dev/null @@ -1,514 +0,0 @@ -import logging -import os -import math - -import networkx as nx -import numpy as np - -from tqdm import tqdm -from datetime import datetime, timedelta -from typing import Dict, List, Set, Tuple, Any -from arq.connections import ArqRedis - -from ee.experimental.graph_utils import create_graph -from ee.experimental.statistical_utils import get_alert_pmi_matrix -from ee.experimental.generative_utils import generate_incident_summary, generate_incident_name, \ - SUMMARY_GENERATOR_VERBOSE_NAME, NAME_GENERATOR_VERBOSE_NAME - -from keep.api.arq_pool import get_pool -from keep.api.core.dependencies import get_pusher_client -from keep.api.models.db.alert import Alert, Incident -from keep.api.core.db import ( - add_alerts_to_incident_by_incident_id, - create_incident_from_dict, - get_incident_by_id, - get_last_incidents, - query_alerts, - update_incident_summary, - update_incident_name, - write_pmi_matrix_to_temp_file, - get_pmi_values_from_temp_file, - get_tenant_config, - write_tenant_config, -) - -logger = logging.getLogger(__name__) - -ALGORITHM_VERBOSE_NAME = "Correlation algorithm v0.2" -USE_N_HISTORICAL_ALERTS_MINING = 10e4 -USE_N_HISTORICAL_ALERTS_PMI = 10e4 -USE_N_HISTORICAL_INCIDENTS = 10e4 -MIN_ALERT_NUMBER = 100 -INCIDENT_VALIDITY_THRESHOLD = 3600 -ALERT_VALIDITY_THRESHOLD = 3600 -# We assume that incident / alert validity threshold is greater than a size of a batch -STRIDE_DENOMINATOR = 4 -DEFAULT_TEMP_DIR_LOCATION = "./ee/experimental/ai_temp" -PMI_SLIDING_WINDOW = 3600 - -def calculate_pmi_matrix( - ctx: dict | None, # arq context - tenant_id: str, - upper_timestamp: datetime = None, - use_n_historical_alerts: int = None, - sliding_window: int = None, - stride: int = None, - temp_dir: str = None, - 
offload_config: Dict = None, - min_alert_number: int = None, -) -> dict: - logger.info("Calculating PMI coefficients for alerts", extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - - if not upper_timestamp: - upper_timestamp = os.environ.get("PMI_ALERT_UPPER_TIMESTAMP", datetime.now()) - - if not use_n_historical_alerts: - use_n_historical_alerts = os.environ.get( - "PMI_USE_N_HISTORICAL_ALERTS", USE_N_HISTORICAL_ALERTS_PMI) - - if not sliding_window: - sliding_window = os.environ.get("PMI_SLIDING_WINDOW", PMI_SLIDING_WINDOW) - - if not stride: - stride = os.environ.get("PMI_STRIDE", int(sliding_window // STRIDE_DENOMINATOR)) - - if not temp_dir: - temp_dir = os.environ.get("AI_TEMP_FOLDER", DEFAULT_TEMP_DIR_LOCATION) - temp_dir = f"{temp_dir}/{tenant_id}" - os.makedirs(temp_dir, exist_ok=True) - - if not offload_config: - offload_config = os.environ.get("PMI_OFFLOAD_CONFIG", {}) - - if "temp_dir" in offload_config: - offload_config["temp_dir"] = f'{offload_config["temp_dir"]}/{tenant_id}' - os.makedirs(offload_config["temp_dir"], exist_ok=True) - - if not min_alert_number: - min_alert_number = os.environ.get("MIN_ALERT_NUMBER", MIN_ALERT_NUMBER) - - alerts = query_alerts( - tenant_id, limit=use_n_historical_alerts, upper_timestamp=upper_timestamp, sort_ascending=True) - - if len(alerts) < min_alert_number: - logger.info("Not enough alerts to mine incidents", extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - return {"status": "failed", "message": "Not enough alerts to mine incidents"} - - pmi_matrix, pmi_columns = get_alert_pmi_matrix( - alerts, "fingerprint", sliding_window, stride, offload_config) - - return {"status": "success", "pmi_matrix": pmi_matrix, "pmi_columns": pmi_columns} - - -def update_existing_incident(incident: Incident, alerts: List[Alert]) -> Tuple[str, bool]: - add_alerts_to_incident_by_incident_id(incident.tenant_id, incident.id, alerts) - return incident.id, True - - -def create_new_incident(component: Set[str], alerts: List[Alert], - tenant_id: str) -> Tuple[str, bool]: - incident_start_time = min(alert.timestamp for alert in alerts if alert.fingerprint in component) - incident_start_time = incident_start_time.replace(microsecond=0) - - incident = create_incident_from_dict(tenant_id, - {"ai_generated_name": f"Incident started at {incident_start_time}", - "generated_summary": "Summarization is Disabled", - "is_predicted": True}) - add_alerts_to_incident_by_incident_id( - tenant_id, incident.id, [ - alert.id for alert in alerts if alert.fingerprint in component],) - return incident.id, False - - -async def schedule_incident_processing(pool: ArqRedis, tenant_id: str, incident_id: str) -> None: - job_summary = await pool.enqueue_job("process_summary_generation", tenant_id=tenant_id, incident_id=incident_id,) - logger.info(f"Summary generation for incident {incident_id} scheduled, job: {job_summary}", extra={ - "algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, "tenant_id": tenant_id, "incident_id": incident_id},) - - job_name = await pool.enqueue_job("process_name_generation", tenant_id=tenant_id, incident_id=incident_id) - logger.info(f"Name generation for incident {incident_id} scheduled, job: {job_name}", extra={ - "algorithm": NAME_GENERATOR_VERBOSE_NAME, "tenant_id": tenant_id, "incident_id": incident_id},) - - -def is_incident_accepting_updates(incident: Incident, current_time: datetime, - incident_validity_threshold: timedelta) -> bool: - return current_time - incident.last_seen_time < incident_validity_threshold - - -def 
get_component_first_seen_time(component: Set[str], alerts: List[Alert]) -> datetime: - return min(alert.timestamp for alert in alerts if alert.fingerprint in component) - - -def process_graph_component(component: Set[str], batch_incidents: List[Incident], batch_alerts: List[Alert], batch_fingerprints: Set[str], - tenant_id: str, min_incident_size: int, incident_validity_threshold: timedelta) -> Tuple[str, bool]: - is_component_merged = False - for incident in batch_incidents: - incident_fingerprints = set(alert.fingerprint for alert in incident.alerts) - if incident_fingerprints.issubset(component): - if not incident_fingerprints.intersection(batch_fingerprints): - continue - logger.info(f"Found possible extension for incident {incident.id}", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - - amendment_time = get_component_first_seen_time(component, batch_alerts) - if is_incident_accepting_updates(incident, amendment_time, incident_validity_threshold): - logger.info(f"Incident {incident.id} is accepting updates.", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - - existing_alert_ids = set([alert.id for alert in incident.alerts]) - appendable_alerts = [alert for alert in batch_alerts if alert.fingerprint in component and not alert.id in existing_alert_ids] - - logger.info(f"Appending {len(appendable_alerts)} alerts to incident {incident.id}", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - is_component_merged = True - return update_existing_incident_inmem(incident, appendable_alerts) - else: - logger.info(f"Incident {incident.id} is not accepting updates. Aborting merge operation.", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - - if not is_component_merged: - if len(component) >= min_incident_size: - logger.info(f"Creating new incident with {len(component)} alerts", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - return create_new_incident_inmem(component, batch_alerts, tenant_id) - else: - return None, False - - -def process_alert_batch(batch_alerts: List[Alert], batch_incidents: list[Incident], tenant_id: str, min_incident_size: int, - incident_validity_threshold: timedelta, pmi_values, fingerpint2idx, pmi_threshold, delete_nodes, knee_threshold) -> Tuple[str, bool]: - - batch_fingerprints = set([alert.fingerprint for alert in batch_alerts]) - - amended_fingerprints = set(batch_fingerprints) - for incident in batch_incidents: - incident_fingerprints = set(alert.fingerprint for alert in incident.alerts) - - amended_fingerprints = incident_fingerprints.union(batch_fingerprints) - - logger.info("Building alert graph", extra={"tenant_id": tenant_id, "algorithm": NAME_GENERATOR_VERBOSE_NAME}) - amended_graph = create_graph(tenant_id, list(amended_fingerprints), pmi_values, - fingerpint2idx, pmi_threshold, delete_nodes, knee_threshold) - - logger.info("Analyzing alert graph", extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - batch_incident_ids_for_processing = [] - batch_new_incidents = [] - batch_updated_incidents = [] - - for component in nx.connected_components(amended_graph): - incident, is_updated = process_graph_component(component, batch_incidents, batch_alerts, batch_fingerprints, tenant_id, min_incident_size, incident_validity_threshold) - if incident: - batch_incident_ids_for_processing.append(incident.id) - if is_updated: - batch_updated_incidents.append(incident) - else: - batch_new_incidents.append(incident) - - return 
batch_incident_ids_for_processing, batch_new_incidents, batch_updated_incidents - - -async def generate_update_incident_summary(ctx, tenant_id: str, incident_id: str): - incident = get_incident_by_id(tenant_id, incident_id) - summary = generate_incident_summary(incident) - - if summary: - update_incident_summary(tenant_id, incident_id, summary) - - return summary - - -async def generate_update_incident_name(ctx, tenant_id: str, incident_id: str): - incident = get_incident_by_id(tenant_id, incident_id) - name = generate_incident_name(incident) - - if name: - update_incident_name(tenant_id, incident_id, name) - - return name - - -def get_last_incidents_inmem(incidents: List[Incident], upper_timestamp: datetime, lower_timestamp: datetime) -> List[Incident]: - return [incident for incident in incidents if lower_timestamp < incident.last_seen_time < upper_timestamp] - - -def add_alerts_to_incident_by_incident_id_inmem(incident: Incident, alerts: List[str]): - incident.alerts.extend(alerts) - return incident - - -def create_incident_from_dict_inmem(tenant_id: str, incident_dict: Dict[str, Any]) -> Incident: - return Incident(tenant_id=tenant_id, **incident_dict) - - -def create_new_incident_inmem(component: Set[str], alerts: List[Alert], tenant_id: str) -> Tuple[Incident, bool]: - incident_start_time = min(alert.timestamp for alert in alerts if alert.fingerprint in component) - incident_start_time = incident_start_time.replace(microsecond=0) - - incident = create_incident_from_dict_inmem(tenant_id, - {"name": f"Incident started at {incident_start_time}", - "description": "Summarization is Disabled", - "is_predicted": True}) - - incident = add_alerts_to_incident_by_incident_id_inmem( - incident, [alert for alert in alerts if alert.fingerprint in component],) - incident.last_seen_time = max([alert.timestamp for alert in incident.alerts]) - - return incident, False - - -def update_existing_incident_inmem(incident: Incident, alerts: List[str]) -> Tuple[str, bool]: - incident = add_alerts_to_incident_by_incident_id_inmem(incident, alerts) - incident.last_seen_time = max([alert.timestamp for alert in incident.alerts]) - return incident, True - - -def update_incident_summary_inmem(incident: Incident, summary: str): - incident.summary = summary - return incident - - -def update_incident_name_inmem(incident: Incident, name: str): - incident.name = name - return incident - - -async def mine_incidents_and_create_objects( - ctx: dict | None, # arq context - tenant_id: str, - alert_lower_timestamp: datetime = None, - alert_upper_timestamp: datetime = None, - use_n_historical_alerts: int = None, - incident_lower_timestamp: datetime = None, - incident_upper_timestamp: datetime = None, - use_n_historical_incidents: int = None, - pmi_threshold: float = None, - delete_nodes: bool = None, - knee_threshold: float = None, - min_incident_size: int = None, - min_alert_number: int = None, - incident_similarity_threshold: float = None, - incident_validity_threshold: timedelta = None, - general_temp_dir: str = None, - alert_validity_threshold: int = None, -) -> Dict[str, List[Incident]]: - """ - This function mines incidents from alerts and creates incidents in the database. 
- - Parameters: - tenant_id (str): tenant id - alert_lower_timestamp (datetime): lower timestamp for alerts - alert_upper_timestamp (datetime): upper timestamp for alerts - use_n_historical_alerts (int): number of historical alerts to use - incident_lower_timestamp (datetime): lower timestamp for incidents - incident_upper_timestamp (datetime): upper timestamp for incidents - use_n_historical_incidents (int): number of historical incidents to use - pmi_threshold (float): PMI threshold used for incident graph edges creation - knee_threshold (float): knee threshold used for incident graph nodes creation - min_incident_size (int): minimum incident size - incident_similarity_threshold (float): incident similarity threshold - - Returns: - Dict[str, List[Incident]]: a dictionary containing the created incidents - """ - # obtain tenant_config - if not general_temp_dir: - general_temp_dir = os.environ.get( - "AI_TEMP_FOLDER", DEFAULT_TEMP_DIR_LOCATION) - - temp_dir = f"{general_temp_dir}/{tenant_id}" - os.makedirs(temp_dir, exist_ok=True) - - tenant_config = get_tenant_config(tenant_id) - - # obtain alert-related parameters - alert_validity_threshold = int(os.environ.get("ALERT_VALIDITY_THRESHOLD", ALERT_VALIDITY_THRESHOLD)) - alert_batch_stride = alert_validity_threshold // STRIDE_DENOMINATOR - - if not alert_upper_timestamp: - alert_upper_timestamp = os.environ.get( - "MINE_ALERT_UPPER_TIMESTAMP", datetime.now()) - - if not alert_lower_timestamp: - if tenant_config.get("last_correlated_batch_start", None): - alert_lower_timestamp = datetime.fromisoformat( - tenant_config.get("last_correlated_batch_start", None)) - - else: - alert_lower_timestamp = None - - if not use_n_historical_alerts: - use_n_historical_alerts = os.environ.get( - "MINE_USE_N_HISTORICAL_ALERTS", - USE_N_HISTORICAL_ALERTS_MINING) - - # obtain incident-related parameters - if not incident_validity_threshold: - incident_validity_threshold = timedelta( - seconds=int(os.environ.get("MINE_INCIDENT_VALIDITY", INCIDENT_VALIDITY_THRESHOLD))) - - if not use_n_historical_incidents: - use_n_historical_incidents = os.environ.get( - "MINE_USE_N_HISTORICAL_INCIDENTS", USE_N_HISTORICAL_INCIDENTS) - - if not incident_similarity_threshold: - incident_similarity_threshold = os.environ.get("INCIDENT_SIMILARITY_THRESHOLD", 0.8) - - if not min_incident_size: - min_incident_size = os.environ.get("MIN_INCIDENT_SIZE", 5) - - if not pmi_threshold: - pmi_threshold = os.environ.get("PMI_THRESHOLD", 0.0) - - if not delete_nodes: - delete_nodes = os.environ.get("DELETE_NODES", False) - - if not knee_threshold: - knee_threshold = os.environ.get("KNEE_THRESHOLD", 0.8) - - status = calculate_pmi_matrix(ctx, tenant_id, min_alert_number=min_alert_number) - if status.get("status") == "failed": - pusher_client = get_pusher_client() - if pusher_client: - log_string = f"{ALGORITHM_VERBOSE_NAME} failed to calculate PMI matrix" - pusher_client.trigger(f"private-{tenant_id}", "ai-logs-change", {"log": "Failed to calculate PMI matrix"}) - - return {"incidents": []} - - elif status.get("status") == "success": - logger.info( - f"Calculating PMI coefficients for alerts finished. PMI matrix is being written to the database. 
Total number of PMI coefficients: {status.get('pmi_matrix').size}", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - - pmi_values = status.get("pmi_matrix") - fingerprints = status.get("pmi_columns") - write_pmi_matrix_to_temp_file(tenant_id, pmi_values, fingerprints, temp_dir) - - logger.info("PMI matrix is written to the database.", extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - fingerprint2idx = {fingerprint: i for i, fingerprint in enumerate(fingerprints)} - logger.info("Getting new alerts and incidents", extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - - alerts = query_alerts(tenant_id, limit=use_n_historical_alerts, upper_timestamp=alert_upper_timestamp, - lower_timestamp=alert_lower_timestamp, sort_ascending=True) - - if not alert_lower_timestamp: - alert_lower_timestamp = min(alert.timestamp for alert in alerts) - - incidents, _ = get_last_incidents(tenant_id, limit=use_n_historical_incidents, upper_timestamp=alert_lower_timestamp + incident_validity_threshold, - lower_timestamp=alert_upper_timestamp - incident_validity_threshold, with_alerts=True) - - n_batches = int(math.ceil((alert_upper_timestamp - alert_lower_timestamp).total_seconds() / alert_batch_stride)) - (STRIDE_DENOMINATOR - 1) - logging.info( - f"Starting alert correlation. Current batch size: {alert_validity_threshold} seconds. Current \ - batch stride: {alert_batch_stride} seconds. Number of batches to process: {n_batches}") - - pool = await get_pool() if not ctx else ctx["redis"] - - new_incident_ids = [] - updated_incident_ids = [] - incident_ids_for_processing = [] - - alert_timestamps = np.array([alert.timestamp.timestamp() for alert in alerts]) - batch_indices = np.arange(0, n_batches) - batch_start_ts = alert_lower_timestamp.timestamp() + np.array([batch_idx * alert_batch_stride for batch_idx in batch_indices]) - batch_end_ts = batch_start_ts + alert_validity_threshold - - start_indices = np.searchsorted(alert_timestamps, batch_start_ts, side='left') - end_indices = np.searchsorted(alert_timestamps, batch_end_ts, side='right') - - for batch_idx, (start_idx, end_idx) in tqdm(enumerate(zip(start_indices, end_indices)), total=n_batches, desc="Processing alert batches.."): - batch_alerts = alerts[start_idx:end_idx] - - logger.info( - f"Processing batch {batch_idx} with start timestamp {datetime.fromtimestamp(batch_start_ts[batch_idx])} \ - and end timestamp {min(datetime.fromtimestamp(batch_end_ts[batch_idx]), alert_upper_timestamp)}. 
Batch size: {len(batch_alerts)}", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - - if len(batch_alerts) == 0: - continue - - batch_incidents = get_last_incidents_inmem(incidents, datetime.fromtimestamp(batch_end_ts[batch_idx]), - datetime.fromtimestamp(batch_start_ts[batch_idx]) - incident_validity_threshold) - - logger.info( - f"Found {len(batch_incidents)} incidents that accept updates by {datetime.fromtimestamp(batch_start_ts[batch_idx])}.", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - - batch_incident_ids_for_processing, batch_new_incidents, batch_updated_incidents = process_alert_batch( - batch_alerts, batch_incidents, tenant_id, min_incident_size, incident_validity_threshold, pmi_values, fingerprint2idx, pmi_threshold, delete_nodes, knee_threshold) - - new_incident_ids.extend([incident.id for incident in batch_new_incidents]) - incidents.extend(batch_new_incidents) - updated_incident_ids.extend([incident.id for incident in batch_updated_incidents]) - incident_ids_for_processing.extend(batch_incident_ids_for_processing) - - logger.info(f"Saving last correlated batch start timestamp: {datetime.isoformat(alert_lower_timestamp + timedelta(seconds= (n_batches - 1) * alert_batch_stride))}", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - tenant_config["last_correlated_batch_start"] = datetime.isoformat(alert_lower_timestamp + timedelta(seconds= (n_batches - 1) * alert_batch_stride)) - write_tenant_config(tenant_id, tenant_config) - - logger.info(f"Writing {len(incidents)} incidents to database", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - db_incident_ids_for_processing = [] - db_new_incident_ids = [] - db_updated_incident_ids = [] - for incident in incidents: - if not get_incident_by_id(tenant_id, incident.id): - incident_dict = { - "ai_generated_name": incident.ai_generated_name, - "generated_summary": incident.generated_summary, - "is_predicted": True, - } - db_incident = create_incident_from_dict(tenant_id, incident_dict) - - incident_id = db_incident.id - else: - incident_id = incident.id - - if incident.id in incident_ids_for_processing: - db_incident_ids_for_processing.append(incident_id) - - if incident.id in new_incident_ids: - db_new_incident_ids.append(incident_id) - - if incident.id in updated_incident_ids: - db_updated_incident_ids.append(incident_id) - - - add_alerts_to_incident_by_incident_id(tenant_id, incident_id, [alert.id for alert in incident.alerts]) - - logger.info(f"Scheduling {len(db_incident_ids_for_processing)} incidents for name / summary generation", - extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - new_incident_count = len(set(new_incident_ids)) - updated_incident_count = len(set(updated_incident_ids).difference(set(new_incident_ids))) - db_incident_ids_for_processing = list(set(db_incident_ids_for_processing)) - for incident_id in db_incident_ids_for_processing: - await schedule_incident_processing(pool, tenant_id, incident_id) - - incident_ids = list(set(db_new_incident_ids + db_updated_incident_ids)) - - pusher_client = get_pusher_client() - if pusher_client: - if new_incident_count > 0 or updated_incident_count > 0: - log_string = f"{ALGORITHM_VERBOSE_NAME} successfully executed. Alerts from {alert_lower_timestamp.replace(microsecond=0)} \ - till {alert_upper_timestamp.replace(microsecond=0)} were processed. Total count of processed alerts: {len(alerts)}. \ - Total count of created incidents: {new_incident_count}. 
Total count of updated incidents: \ - {updated_incident_count}." - elif len(alerts) > 0: - log_string = f'{ALGORITHM_VERBOSE_NAME} successfully executed. Alerts from {alert_lower_timestamp.replace(microsecond=0)} \ - till {alert_upper_timestamp.replace(microsecond=0)} were processed. Total count of processed alerts: {len(alerts)}. \ - Total count of created incidents: {new_incident_count}. Total count of updated incidents: \ - {updated_incident_count}. This may be due to high alert sparsity or low amount of unique \ - alert fingerprints. Adding more alerts, increasing "sliding window size" or decreasing minimal amount of \ - "minimal amount of unique fingerprints in an incident" configuration parameters may help.' - - else: - log_string = f'{ALGORITHM_VERBOSE_NAME} successfully executed. Alerts from {alert_lower_timestamp.replace(microsecond=0)} \ - till {alert_upper_timestamp.replace(microsecond=0)} were processed. Total count of processed alerts: {len(alerts)}. \ - No incidents were created or updated. Add alerts to the system to enable automatic incident creation.' - - pusher_client.trigger(f"private-{tenant_id}", "ai-logs-change", {"log": log_string}) - - logger.info("Client notified on new AI log", extra={"tenant_id": tenant_id, "algorithm": ALGORITHM_VERBOSE_NAME}) - - return {"incidents": [get_incident_by_id(tenant_id, incident_id) - for incident_id in incident_ids]} \ No newline at end of file diff --git a/ee/experimental/statistical_utils.py b/ee/experimental/statistical_utils.py deleted file mode 100644 index 39278a444..000000000 --- a/ee/experimental/statistical_utils.py +++ /dev/null @@ -1,184 +0,0 @@ -import os -import logging - -import numpy as np -import pandas as pd - -from datetime import datetime -from typing import List, Tuple, Dict -from scipy.sparse import csr_matrix - -from keep.api.models.db.alert import Alert - -logger = logging.getLogger(__name__) - -def get_batched_alert_counts(alerts: pd.DataFrame, unique_alert_identifier: str, sliding_window_size: int, step_size: int) -> pd.DataFrame: - """ - This function calculates number of alerts per sliding window. - - Parameters: - alerts (pd.DataFrame): a DataFrame containing alerts - unique_alert_identifier (str): a unique identifier for alerts - sliding_window_size (int): sliding window size in seconds - step_size (int): step size in seconds - - Returns: - rolling_counts (pd.DataFrame): a DataFrame containing the number of alerts per sliding window - """ - - resampled_alert_counts = alerts.set_index('starts_at').resample(f'{step_size}s')[unique_alert_identifier].value_counts().unstack(fill_value=0) - rolling_counts = resampled_alert_counts.rolling(window=f'{sliding_window_size}s', min_periods=1).sum() - - return rolling_counts - - -def get_batched_alert_occurrences(alerts: pd.DataFrame, unique_alert_identifier: str, sliding_window_size: int, step_size: int) -> pd.DataFrame: - """ - This function calculates occurrences of alerts per sliding window. 
- - Parameters: - alerts (pd.DataFrame): a DataFrame containing alerts - unique_alert_identifier (str): a unique identifier for alerts - sliding_window_size (int): sliding window size in seconds - step_size (int): step size in seconds - - Returns: - alert_occurences (pd.DataFrame): a DataFrame containing the occurrences of alerts per sliding window - """ - - alert_counts = get_batched_alert_counts(alerts, unique_alert_identifier, sliding_window_size, step_size) - alert_occurences = pd.DataFrame(np.where(alert_counts > 0, 1, 0), index=alert_counts.index, columns=alert_counts.columns) - - return alert_occurences - -def get_jaccard_scores(P_a: np.array, P_aa: np.array) -> np.array: - """ - This function calculates the Jaccard similarity scores between recurring events. - - Parameters: - P_a (np.array): a 1D array containing the probabilities of events - P_aa (np.array): a 2D array containing the probabilities of joint events - - Returns: - jaccard_matrix (np.array): a 2D array containing the Jaccard similarity scores between events - """ - - P_a_matrix = P_a[:, None] + P_a - union_matrix = P_a_matrix - P_aa - - with np.errstate(divide='ignore', invalid='ignore'): - jaccard_matrix = np.where(union_matrix != 0, P_aa / union_matrix, 0) - - np.fill_diagonal(jaccard_matrix, 1) - - return jaccard_matrix - - -def get_alert_jaccard_matrix(alerts: pd.DataFrame, unique_alert_identifier: str, sliding_window_size: int, step_size: int) -> pd.DataFrame: - """ - This function calculates Jaccard similarity scores between alert groups (fingerprints). - - Parameters: - alerts (pd.DataFrame): a DataFrame containing alerts - unique_alert_identifier (str): a unique identifier for alerts - sliding_window_size (int): sliding window size in seconds - step_size (int): step size in seconds - - Returns: - jaccard_scores_df (pd.DataFrame): a DataFrame containing the Jaccard similarity scores between alert groups - """ - - alert_occurrences_df = get_batched_alert_occurrences(alerts, unique_alert_identifier, sliding_window_size, step_size) - alert_occurrences = alert_occurrences_df.to_numpy() - - alert_probabilities = np.mean(alert_occurrences, axis=0) - joint_alert_occurrences = np.dot(alert_occurrences.T, alert_occurrences) - pairwise_alert_probabilities = joint_alert_occurrences / alert_occurrences.shape[0] - - jaccard_scores = get_jaccard_scores(alert_probabilities, pairwise_alert_probabilities) - jaccard_scores_df = pd.DataFrame(jaccard_scores, index=alert_occurrences_df.columns, columns=alert_occurrences_df.columns) - - return jaccard_scores_df - -def get_alert_pmi_matrix(alerts: List[Alert], - unique_alert_identifier: str, - sliding_window_size: int, - step_size: int, - offload_config: Dict = {}) -> Tuple[np.array, List[str]]: - """ - This funciton calculates PMI scores between alert groups (fingerprints). 
- - Parameters: - alerts List[Alert]: a list containing alerts - unique_alert_identifier (str): a unique identifier for alerts - sliding_window_size (int): sliding window size in seconds - step_size (int): step size in seconds - - Returns: - pmi_matrix (np.array): a 2D array containing the PMI scores between alert fingerprints - alert_occurences_df.columns (List[str]): a list containing the alert fingerprints - """ - - alert_dict = { - 'fingerprint': [alert.fingerprint for alert in alerts], - 'starts_at': [alert.timestamp for alert in alerts], - } - - if offload_config: - temp_dir = offload_config.get('temp_dir', None) - - alert_df = pd.DataFrame(alert_dict) - alert_occurences_df = get_batched_alert_occurrences(alert_df, unique_alert_identifier, sliding_window_size, step_size) - logger.info('Windowed alert occurrences calculated.') - - alert_occurrences = alert_occurences_df.to_numpy() - alert_probabilities = np.mean(alert_occurrences, axis=0) - logger.info('Alert probabilities calculated.') - - alert_occurrences = csr_matrix(alert_occurrences) - - if offload_config: - joint_alert_occurrences = np.memmap(f'{temp_dir}/joint_alert_occurrences.dat', dtype='float16', mode='w+', - shape=(alert_occurrences.shape[1], alert_occurrences.shape[1])) - else: - joint_alert_occurrences = np.zeros((alert_occurrences.shape[1], alert_occurrences.shape[1]), dtype=np.float16) - - joint_alert_occurrences[:] = alert_occurrences.T.dot(alert_occurrences).toarray() - logger.info('Joint alert occurrences calculated.') - - if offload_config: - pairwise_alert_probabilities = np.memmap(f'{temp_dir}/pairwise_alert_probabilities.dat', dtype='float16', mode='w+', - shape=(joint_alert_occurrences.shape[0], joint_alert_occurrences.shape[1])) - else: - pairwise_alert_probabilities = np.zeros((joint_alert_occurrences.shape[0], joint_alert_occurrences.shape[1]), dtype=np.float16) - - pairwise_alert_probabilities[:] = joint_alert_occurrences / alert_occurrences.shape[0] - logger.info('Pairwise alert probabilities calculated.') - - if offload_config: - dense_pmi_matrix = np.memmap(f'{temp_dir}/dense_pmi_matrix.dat', dtype='float16', mode='w+', - shape=(pairwise_alert_probabilities.shape[0], pairwise_alert_probabilities.shape[1])) - else: - dense_pmi_matrix = np.zeros((pairwise_alert_probabilities.shape[0], pairwise_alert_probabilities.shape[1]), dtype=np.float16) - - dense_pmi_matrix[:] = np.log(pairwise_alert_probabilities / - (alert_probabilities[:, None] * alert_probabilities)) - logger.info('PMI matrix calculated.') - - dense_pmi_matrix[np.isnan(dense_pmi_matrix)] = 0 - np.fill_diagonal(dense_pmi_matrix, 0) - pmi_matrix = np.clip(dense_pmi_matrix, -100, 100) - logger.info('PMI matrix modified.') - - if offload_config: - joint_alert_occurrences._mmap.close() - pairwise_alert_probabilities._mmap.close() - dense_pmi_matrix._mmap.close() - - os.remove(f'{temp_dir}/joint_alert_occurrences.dat') - os.remove(f'{temp_dir}/pairwise_alert_probabilities.dat') - os.remove(f'{temp_dir}/dense_pmi_matrix.dat') - - logger.info(f'Temporary files removed from {temp_dir}.') - - return pmi_matrix, alert_occurences_df.columns \ No newline at end of file diff --git a/keep/api/api.py b/keep/api/api.py index d17af4bf6..3c2ed422b 100644 --- a/keep/api/api.py +++ b/keep/api/api.py @@ -15,12 +15,11 @@ import keep.api.logging import keep.api.observability +import keep.api.utils.import_ee from keep.api.arq_worker import get_arq_worker from keep.api.consts import ( - KEEP_ARQ_QUEUE_AI, KEEP_ARQ_QUEUE_BASIC, KEEP_ARQ_TASK_POOL, - KEEP_ARQ_TASK_POOL_AI, 
KEEP_ARQ_TASK_POOL_ALL, KEEP_ARQ_TASK_POOL_BASIC_PROCESSING, KEEP_ARQ_TASK_POOL_NONE, @@ -30,7 +29,6 @@ from keep.api.middlewares import LoggingMiddleware from keep.api.routes import ( actions, - ai, alerts, dashboard, deduplications, @@ -141,7 +139,6 @@ async def root(): app.include_router(healthcheck.router, prefix="/healthcheck", tags=["healthcheck"]) app.include_router(alerts.router, prefix="/alerts", tags=["alerts"]) app.include_router(incidents.router, prefix="/incidents", tags=["incidents"]) - app.include_router(ai.router, prefix="/ai", tags=["ai"]) app.include_router(settings.router, prefix="/settings", tags=["settings"]) app.include_router( workflows.router, prefix="/workflows", tags=["workflows", "alerts"] @@ -217,13 +214,7 @@ async def on_startup(): if KEEP_ARQ_TASK_POOL == KEEP_ARQ_TASK_POOL_ALL: logger.info("Starting all task pools") basic_worker = get_arq_worker(KEEP_ARQ_QUEUE_BASIC) - ai_worker = get_arq_worker(KEEP_ARQ_QUEUE_AI) event_loop.create_task(basic_worker.async_run()) - event_loop.create_task(ai_worker.async_run()) - elif KEEP_ARQ_TASK_POOL == KEEP_ARQ_TASK_POOL_AI: - logger.info("Starting AI task pool") - arq_worker = get_arq_worker(KEEP_ARQ_QUEUE_AI) - event_loop.create_task(arq_worker.async_run()) elif KEEP_ARQ_TASK_POOL == KEEP_ARQ_TASK_POOL_BASIC_PROCESSING: logger.info("Starting Basic Processing task pool") arq_worker = get_arq_worker(KEEP_ARQ_QUEUE_BASIC) diff --git a/keep/api/arq_worker.py b/keep/api/arq_worker.py index 8de5504aa..5a040d124 100644 --- a/keep/api/arq_worker.py +++ b/keep/api/arq_worker.py @@ -2,7 +2,7 @@ from typing import Optional from uuid import uuid4 -from arq import Worker, cron +from arq import Worker from arq.connections import RedisSettings from arq.worker import create_worker from pydantic.utils import import_string @@ -10,15 +10,12 @@ import keep.api.logging from keep.api.consts import ( - KEEP_ARQ_QUEUE_AI, KEEP_ARQ_QUEUE_BASIC, KEEP_ARQ_TASK_POOL, - KEEP_ARQ_TASK_POOL_AI, KEEP_ARQ_TASK_POOL_ALL, KEEP_ARQ_TASK_POOL_BASIC_PROCESSING, ) from keep.api.core.config import config -from keep.api.tasks.process_background_ai_task import process_background_ai_task keep.api.logging.setup_logging() logger = logging.getLogger(__name__) @@ -39,25 +36,6 @@ ), ] -if KEEP_ARQ_TASK_POOL in [KEEP_ARQ_TASK_POOL_ALL, KEEP_ARQ_TASK_POOL_AI]: - all_tasks_for_the_worker += [ - ( - "keep.api.tasks.process_background_ai_task.process_background_ai_task", - KEEP_ARQ_QUEUE_AI, - ), - ( - "keep.api.tasks.process_background_ai_task.process_correlation", - KEEP_ARQ_QUEUE_AI, - ), - ( - "keep.api.tasks.process_background_ai_task.process_summary_generation", - KEEP_ARQ_QUEUE_AI, - ), - ( - "keep.api.tasks.process_background_ai_task.process_name_generation", - KEEP_ARQ_QUEUE_AI, - ), - ] ARQ_BACKGROUND_FUNCTIONS: Optional[CommaSeparatedStrings] = config( "ARQ_BACKGROUND_FUNCTIONS", @@ -90,15 +68,12 @@ def get_arq_worker(queue_name: str) -> Worker: expires = config( "ARQ_EXPIRES", cast=int, default=3600 ) # the default length of time from when a job is expected to start after which the job expires, making it shorter to avoid clogging - expires_ai = config("ARQ_EXPIRES_AI", cast=int, default=3600 * 1000) # generate a worker id so each worker will have a different health check key worker_id = str(uuid4()).replace("-", "") worker = create_worker( WorkerSettings, keep_result=keep_result, - expires_extra_ms=( - expires_ai if KEEP_ARQ_TASK_POOL == KEEP_ARQ_TASK_POOL_AI else expires - ), + expires_extra_ms=expires, queue_name=queue_name, 
health_check_key=f"{queue_name}:{worker_id}:health-check", ) @@ -125,8 +100,7 @@ class WorkerSettings: conn_retries=10, conn_retry_delay=10, ) - # Only if it's an AI-dedicated worker, we can set large timeout, otherwise keeping low to avoid clogging - timeout = 60 * 15 if KEEP_ARQ_TASK_POOL == KEEP_ARQ_TASK_POOL_AI else 30 + timeout = 30 functions: list = FUNCTIONS queue_name: str health_check_interval: int = 10 @@ -134,16 +108,3 @@ class WorkerSettings: def __init__(self, queue_name: str): self.queue_name = queue_name - - cron_jobs = [] - if KEEP_ARQ_TASK_POOL in [KEEP_ARQ_TASK_POOL_ALL, KEEP_ARQ_TASK_POOL_AI]: - cron_jobs.append( - cron( - process_background_ai_task, - minute=at_every_x_minutes(1), - unique=True, - timeout=30, - max_tries=1, - run_at_startup=True, - ) - ) diff --git a/keep/api/bl/incidents_bl.py b/keep/api/bl/incidents_bl.py index 855966bb1..c1ff21a00 100644 --- a/keep/api/bl/incidents_bl.py +++ b/keep/api/bl/incidents_bl.py @@ -12,13 +12,13 @@ from keep.api.arq_pool import get_pool from keep.api.core.db import ( add_alerts_to_incident_by_incident_id, + create_incident_from_dto, delete_incident_by_id, get_incident_alerts_by_incident_id, get_incident_by_id, get_incident_unique_fingerprint_count, remove_alerts_to_incident_by_incident_id, update_incident_from_dto_by_id, - create_incident_from_dto, ) from keep.api.core.elastic import ElasticClient from keep.api.models.alert import IncidentDto, IncidentDtoIn @@ -36,7 +36,6 @@ str(pathlib.Path(__file__).parent.resolve()) + "/../../../ee/experimental" ) sys.path.insert(0, path_with_ee) - from ee.experimental.incident_utils import ALGORITHM_VERBOSE_NAME # noqa else: ALGORITHM_VERBOSE_NAME = NotImplemented diff --git a/keep/api/config.py b/keep/api/config.py index 54387cf86..a27c4b66d 100644 --- a/keep/api/config.py +++ b/keep/api/config.py @@ -6,6 +6,7 @@ from keep.api.core.db_on_start import migrate_db, try_create_single_tenant from keep.api.core.dependencies import SINGLE_TENANT_UUID from keep.identitymanager.identitymanagerfactory import IdentityManagerTypes +from keep.providers.providers_factory import ProvidersFactory PORT = int(os.environ.get("PORT", 8080)) @@ -18,6 +19,10 @@ def on_starting(server=None): logger.info("Keep server starting") migrate_db() + # Load this early and use preloading + # https://www.joelsleppy.com/blog/gunicorn-application-preloading/ + # @tb: 👏 @Matvey-Kuk + ProvidersFactory.get_all_providers() # Create single tenant if it doesn't exist if AUTH_TYPE in [ diff --git a/keep/api/consts.py b/keep/api/consts.py index 3aa27031b..4e04e023a 100644 --- a/keep/api/consts.py +++ b/keep/api/consts.py @@ -37,10 +37,8 @@ KEEP_ARQ_TASK_POOL_NONE = "none" # Arq workers explicitly disabled for this service KEEP_ARQ_TASK_POOL_ALL = "all" # All arq workers enabled for this service KEEP_ARQ_TASK_POOL_BASIC_PROCESSING = "basic_processing" # Everything except AI -KEEP_ARQ_TASK_POOL_AI = "ai" # Only AI # Define queues for different task types KEEP_ARQ_QUEUE_BASIC = "basic_processing" -KEEP_ARQ_QUEUE_AI = "ai_processing" REDIS = os.environ.get("REDIS", "false") == "true" KEEP_ARQ_TASK_POOL = os.environ.get("KEEP_ARQ_TASK_POOL", None) diff --git a/keep/api/core/db.py b/keep/api/core/db.py index 9be0d25f7..b9746866b 100644 --- a/keep/api/core/db.py +++ b/keep/api/core/db.py @@ -15,7 +15,6 @@ from typing import Any, Callable, Dict, List, Tuple, Type, Union from uuid import uuid4 -import numpy as np import validators from dotenv import find_dotenv, load_dotenv from opentelemetry.instrumentation.sqlalchemy import 
SQLAlchemyInstrumentor @@ -59,7 +58,7 @@ from keep.api.models.db.preset import * # pylint: disable=unused-wildcard-import from keep.api.models.db.provider import * # pylint: disable=unused-wildcard-import from keep.api.models.db.rule import * # pylint: disable=unused-wildcard-import -from keep.api.models.db.system import * # pylint: disable=unused-wildcard-import +from keep.api.models.db.system import * # pylint: disable=unused-wildcard-import from keep.api.models.db.tenant import * # pylint: disable=unused-wildcard-import from keep.api.models.db.topology import * # pylint: disable=unused-wildcard-import from keep.api.models.db.workflow import * # pylint: disable=unused-wildcard-import @@ -3937,25 +3936,6 @@ def confirm_predicted_incident_by_id( return incident -def write_pmi_matrix_to_temp_file( - tenant_id: str, pmi_matrix: np.array, fingerprints: List, temp_dir: str -) -> bool: - np.savez( - f"{temp_dir}/pmi_matrix.npz", pmi_matrix=pmi_matrix, fingerprints=fingerprints - ) - return True - - -def get_pmi_values_from_temp_file(temp_dir: str) -> Tuple[np.array, Dict[str, int]]: - npzfile = np.load(f"{temp_dir}/pmi_matrix.npz", allow_pickle=True) - pmi_matrix = npzfile["pmi_matrix"] - fingerprints = npzfile["fingerprints"] - - fingerint2idx = {fingerprint: i for i, fingerprint in enumerate(fingerprints)} - - return pmi_matrix, fingerint2idx - - def get_tenant_config(tenant_id: str) -> dict: with Session(engine) as session: tenant_data = session.exec(select(Tenant).where(Tenant.id == tenant_id)).first() @@ -4488,45 +4468,56 @@ def get_resource_ids_by_resource_type( result = session.exec(query) return result.all() -def get_or_creat_posthog_instance_id( - session: Optional[Session] = None - ): - POSTHOG_INSTANCE_ID_KEY = "posthog_instance_id" - with Session(engine) as session: - system = session.exec(select(System).where(System.name == POSTHOG_INSTANCE_ID_KEY)).first() - if system: - return system.value - - system = System( - id=str(uuid4()), - name=POSTHOG_INSTANCE_ID_KEY, - value=str(uuid4()), - ) - session.add(system) - session.commit() - session.refresh(system) - return system.value - -def get_activity_report( - session: Optional[Session] = None - ): + +def get_or_creat_posthog_instance_id(session: Optional[Session] = None): + POSTHOG_INSTANCE_ID_KEY = "posthog_instance_id" + with Session(engine) as session: + system = session.exec( + select(System).where(System.name == POSTHOG_INSTANCE_ID_KEY) + ).first() + if system: + return system.value + + system = System( + id=str(uuid4()), + name=POSTHOG_INSTANCE_ID_KEY, + value=str(uuid4()), + ) + session.add(system) + session.commit() + session.refresh(system) + return system.value + + +def get_activity_report(session: Optional[Session] = None): from keep.api.models.db.user import User last_24_hours = datetime.utcnow() - timedelta(hours=24) activity_report = {} with Session(engine) as session: - activity_report['tenants_count'] = session.query(Tenant).count() - activity_report['providers_count'] = session.query(Provider).count() - activity_report['users_count'] = session.query(User).count() - activity_report['last_24_hours_incidents_count'] = session.query(Incident).filter( - Incident.creation_time >= last_24_hours).count() - activity_report['last_24_hours_alerts_count'] = session.query(Alert).filter( - Alert.timestamp >= last_24_hours).count() - activity_report['last_24_hours_rules_created'] = session.query(Rule).filter( - Rule.creation_time >= last_24_hours).count() - activity_report['last_24_hours_workflows_created'] = 
session.query(Workflow).filter( - Workflow.creation_time >= last_24_hours).count() - activity_report['last_24_hours_workflows_executed'] = session.query(WorkflowExecution).filter( - WorkflowExecution.started >= last_24_hours).count() - + activity_report["tenants_count"] = session.query(Tenant).count() + activity_report["providers_count"] = session.query(Provider).count() + activity_report["users_count"] = session.query(User).count() + activity_report["last_24_hours_incidents_count"] = ( + session.query(Incident) + .filter(Incident.creation_time >= last_24_hours) + .count() + ) + activity_report["last_24_hours_alerts_count"] = ( + session.query(Alert).filter(Alert.timestamp >= last_24_hours).count() + ) + activity_report["last_24_hours_rules_created"] = ( + session.query(Rule).filter(Rule.creation_time >= last_24_hours).count() + ) + activity_report["last_24_hours_workflows_created"] = ( + session.query(Workflow) + .filter(Workflow.creation_time >= last_24_hours) + .count() + ) + activity_report["last_24_hours_workflows_executed"] = ( + session.query(WorkflowExecution) + .filter(WorkflowExecution.started >= last_24_hours) + .count() + ) + return activity_report diff --git a/keep/api/models/db/migrations/versions/2024-07-29-12-51_c91b348b94f2.py b/keep/api/models/db/migrations/versions/2024-07-29-12-51_c91b348b94f2.py index 181119744..8b1d38d99 100644 --- a/keep/api/models/db/migrations/versions/2024-07-29-12-51_c91b348b94f2.py +++ b/keep/api/models/db/migrations/versions/2024-07-29-12-51_c91b348b94f2.py @@ -23,41 +23,53 @@ # Direct table definition for Incident incident_table = sa.Table( - 'incident', + "incident", migration_metadata, - sa.Column('id', UUID(as_uuid=False), primary_key=True), - sa.Column('description', sa.String), - sa.Column('user_summary', sa.String), + sa.Column("id", UUID(as_uuid=False), primary_key=True), + sa.Column("description", sa.String), + sa.Column("user_summary", sa.String), ) def populate_db(session): # we need to populate the user_summary field with the description - session.execute(sa.update(incident_table).values(user_summary=incident_table.c.description)) + session.execute( + sa.update(incident_table).values(user_summary=incident_table.c.description) + ) session.commit() def depopulate_db(session): # we need to populate the description field with the user_summary - session.execute(sa.update(incident_table).values(description=incident_table.c.user_summary)) + session.execute( + sa.update(incident_table).values(description=incident_table.c.user_summary) + ) session.commit() def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - + session = Session(op.get_bind()) populate_db(session) - - op.drop_column("incident", "description") + + try: + op.drop_column("incident", "description") + except Exception as e: + print(f"Error dropping column description: {e}") # ### end Alembic commands ### def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.add_column("incident", sa.Column("description", sa.VARCHAR(), nullable=False, default="", server_default="")) - + op.add_column( + "incident", + sa.Column( + "description", sa.VARCHAR(), nullable=False, default="", server_default="" + ), + ) + session = Session(op.get_bind()) depopulate_db(session) - + # ### end Alembic commands ### diff --git a/keep/api/routes/ai.py b/keep/api/routes/ai.py deleted file mode 100644 index 3c6b819cf..000000000 --- a/keep/api/routes/ai.py +++ /dev/null @@ -1,35 +0,0 @@ -import logging - -from fastapi import APIRouter, Depends - -from keep.api.core.db import ( - get_alerts_count, - get_first_alert_datetime, - get_incidents_count, -) -from keep.api.utils.import_ee import ALGORITHM_VERBOSE_NAME, is_ee_enabled_for_tenant -from keep.identitymanager.authenticatedentity import AuthenticatedEntity -from keep.identitymanager.identitymanagerfactory import IdentityManagerFactory - -router = APIRouter() -logger = logging.getLogger(__name__) - - -@router.get( - "/stats", - description="Get stats for the AI Landing Page", - include_in_schema=False, -) -def get_stats( - authenticated_entity: AuthenticatedEntity = Depends( - IdentityManagerFactory.get_auth_verifier(["read:alert"]) - ), -): - tenant_id = authenticated_entity.tenant_id - return { - "alerts_count": get_alerts_count(tenant_id), - "first_alert_datetime": get_first_alert_datetime(tenant_id), - "incidents_count": get_incidents_count(tenant_id), - "is_mining_enabled": is_ee_enabled_for_tenant(tenant_id), - "algorithm_verbose_name": str(ALGORITHM_VERBOSE_NAME), - } diff --git a/keep/api/routes/incidents.py b/keep/api/routes/incidents.py index 7b2c02130..5933f6dbb 100644 --- a/keep/api/routes/incidents.py +++ b/keep/api/routes/incidents.py @@ -1,4 +1,3 @@ -import asyncio import logging from datetime import datetime from typing import List @@ -14,7 +13,6 @@ Response, ) from pusher import Pusher -from pydantic import BaseModel, Field # noqa from pydantic.types import UUID from sqlmodel import Session @@ -57,7 +55,6 @@ from keep.api.routes.alerts import _enrich_alert from keep.api.tasks.process_incident_task import process_incident from keep.api.utils.enrichment_helpers import convert_db_alerts_to_dto_alerts -from keep.api.utils.import_ee import mine_incidents_and_create_objects from keep.api.utils.pagination import ( AlertWithIncidentLinkMetadataPaginatedResultsDto, IncidentsPaginatedResultsDto, @@ -739,48 +736,6 @@ async def commit_with_ai( return committed_incidents -### Deprecated? 
### - - -@router.post( - "/mine", - description="Create incidents using historical alerts", - include_in_schema=False, -) -def mine( - authenticated_entity: AuthenticatedEntity = Depends( - IdentityManagerFactory.get_auth_verifier(["write:incident"]) - ), - alert_lower_timestamp: datetime = None, - alert_upper_timestamp: datetime = None, - use_n_historical_alerts: int = None, - incident_lower_timestamp: datetime = None, - incident_upper_timestamp: datetime = None, - use_n_historical_incidents: int = None, - pmi_threshold: float = None, - knee_threshold: float = None, - min_incident_size: int = None, - incident_similarity_threshold: float = None, -) -> dict: - result = asyncio.run( - mine_incidents_and_create_objects( - ctx=None, - tenant_id=authenticated_entity.tenant_id, - alert_lower_timestamp=alert_lower_timestamp, - alert_upper_timestamp=alert_upper_timestamp, - use_n_historical_alerts=use_n_historical_alerts, - incident_lower_timestamp=incident_lower_timestamp, - incident_upper_timestamp=incident_upper_timestamp, - use_n_historical_incidents=use_n_historical_incidents, - pmi_threshold=pmi_threshold, - knee_threshold=knee_threshold, - min_incident_size=min_incident_size, - incident_similarity_threshold=incident_similarity_threshold, - ) - ) - return result - - @router.post( "/{incident_id}/confirm", description="Confirm predicted incident by id", diff --git a/keep/api/tasks/process_background_ai_task.py b/keep/api/tasks/process_background_ai_task.py deleted file mode 100644 index 586bebcd3..000000000 --- a/keep/api/tasks/process_background_ai_task.py +++ /dev/null @@ -1,122 +0,0 @@ -import time -import logging -import datetime - -from keep.api.core.tenant_configuration import TenantConfiguration -from keep.api.utils.import_ee import mine_incidents_and_create_objects, ALGORITHM_VERBOSE_NAME, \ - SUMMARY_GENERATOR_VERBOSE_NAME, NAME_GENERATOR_VERBOSE_NAME, is_ee_enabled_for_tenant, generate_update_incident_summary, generate_update_incident_name -from keep.api.core.db import get_tenants_configurations - -logger = logging.getLogger(__name__) - - -async def process_correlation(ctx, tenant_id:str): - logger.info( - f"Background AI task started, {ALGORITHM_VERBOSE_NAME}", - extra={"algorithm": ALGORITHM_VERBOSE_NAME, "tenant_id": tenant_id}, - ) - start_time = datetime.datetime.now() - await mine_incidents_and_create_objects( - ctx, - tenant_id=tenant_id - ) - end_time = datetime.datetime.now() - logger.info( - f"Background AI task finished, {ALGORITHM_VERBOSE_NAME}, took {(end_time - start_time).total_seconds()} seconds", - extra={ - "algorithm": ALGORITHM_VERBOSE_NAME, - "tenant_id": tenant_id, - "duration_ms": (end_time - start_time).total_seconds() * 1000 - }, - ) - -async def process_summary_generation(ctx, tenant_id: str, incident_id:str): - logger.info( - f"Background summary generation started, {SUMMARY_GENERATOR_VERBOSE_NAME}", - extra={"algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, "incident_id": incident_id}, - ) - - start_time = datetime.datetime.now() - await generate_update_incident_summary( - ctx, - tenant_id=tenant_id, - incident_id=incident_id - ) - end_time = datetime.datetime.now() - logger.info( - f"Background summary generation finished, {SUMMARY_GENERATOR_VERBOSE_NAME}, took {(end_time - start_time).total_seconds()} seconds", - extra={ - "algorithm": SUMMARY_GENERATOR_VERBOSE_NAME, - "incident_id": incident_id, - "duration_ms": (end_time - start_time).total_seconds() * 1000 - }, - ) - -async def process_name_generation(ctx, tenant_id: str, incident_id: str): - logger.info( 
- f"Background name generation started, {NAME_GENERATOR_VERBOSE_NAME}", - extra={"algorithm": NAME_GENERATOR_VERBOSE_NAME, "incident_id": incident_id}, - ) - - start_time = datetime.datetime.now() - await generate_update_incident_name( - ctx, - tenant_id=tenant_id, - incident_id=incident_id - ) - end_time = datetime.datetime.now() - logger.info( - f"Background name generation finished, {NAME_GENERATOR_VERBOSE_NAME}, took {(end_time - start_time).total_seconds()} seconds", - extra={ - "algorithm": NAME_GENERATOR_VERBOSE_NAME, - "incident_id": incident_id, - "duration_ms": (end_time - start_time).total_seconds() * 1000 - }, - ) - - -async def process_background_ai_task( - ctx: dict | None, # arq context - ): - """ - This job will schedule the process_correlation job for each tenant with strict ID's. - This ensures that the job is not scheduled multiple times for the same tenant. - """ - pool = ctx["redis"] - try: - all_jobs = await pool.queued_jobs() - except Exception as e: - logger.error(f"Error getting queued jobs, happens sometimes with unknown reason: {e}") - return None - - tenant_configuration = TenantConfiguration() - - if mine_incidents_and_create_objects is not NotImplemented: - tenants = get_tenants_configurations(only_with_config=True) - for tenant in tenants: - if is_ee_enabled_for_tenant(tenant, tenant_configuration=tenant_configuration): - # Because of https://github.com/python-arq/arq/issues/432 we need to check if the job is already running - # The other option would be to twick "keep_result" but it will make debugging harder - job_prefix = 'process_correlation_tenant_id_' + str(tenant) - jobs_with_same_prefix = [job for job in all_jobs if job.job_id.startswith(job_prefix)] - if len(jobs_with_same_prefix) > 0: - logger.info( - f"No {ALGORITHM_VERBOSE_NAME} for tenant {tenant} scheduled because there is already one running", - extra={"algorithm": ALGORITHM_VERBOSE_NAME, "tenant_id": tenant}, - ) - else: - job = await pool.enqueue_job( - "process_correlation", - tenant_id=tenant, - _job_id=job_prefix + ":" + str(time.time()), # Strict ID ensures uniqueness - _job_try=1 - ) - logger.info( - f"{ALGORITHM_VERBOSE_NAME} for tenant {tenant} scheduled, job: {job}", - extra={"algorithm": ALGORITHM_VERBOSE_NAME, "tenant_id": tenant}, - ) - else: - logger.info( - f"No {ALGORITHM_VERBOSE_NAME} for tenant {tenant} scheduled because EE is disabled for this tenant", - extra={"algorithm": ALGORITHM_VERBOSE_NAME, "tenant_id": tenant}, - ) diff --git a/keep/api/utils/import_ee.py b/keep/api/utils/import_ee.py index de1742c1f..65a8d0db2 100644 --- a/keep/api/utils/import_ee.py +++ b/keep/api/utils/import_ee.py @@ -1,43 +1,37 @@ -import os -import sys +import os import pathlib +import sys from keep.api.core.tenant_configuration import TenantConfiguration EE_ENABLED = os.environ.get("EE_ENABLED", "false") == "true" -EE_PATH = os.environ.get("EE_PATH", "../ee") # Path related to the fastapi root directory +EE_PATH = os.environ.get( + "EE_PATH", "../ee" +) # Path related to the fastapi root directory if EE_ENABLED: path_with_ee = ( - str(pathlib.Path(__file__).parent.resolve()) + - "/../../" + # To go to the fastapi root directory - EE_PATH + - "/../" # To go to the parent directory of the ee directory to allow imports like ee.abc.abc + str(pathlib.Path(__file__).parent.resolve()) + + "/../../" # To go to the fastapi root directory + + EE_PATH + + "/../" # To go to the parent directory of the ee directory to allow imports like ee.abc.abc ) sys.path.insert(0, path_with_ee) - - from 
ee.experimental.incident_utils import mine_incidents_and_create_objects, generate_update_incident_summary, generate_update_incident_name # noqa - from ee.experimental.generative_utils import generate_incident_summary, generate_incident_name, SUMMARY_GENERATOR_VERBOSE_NAME, NAME_GENERATOR_VERBOSE_NAME # noqa - from ee.experimental.incident_utils import ALGORITHM_VERBOSE_NAME # noqa else: - mine_incidents_and_create_objects = NotImplemented - generate_update_incident_summary = NotImplemented - generate_update_incident_name = NotImplemented ALGORITHM_VERBOSE_NAME = NotImplemented SUMMARY_GENERATOR_VERBOSE_NAME = NotImplemented NAME_GENERATOR_VERBOSE_NAME = NotImplemented - + + def is_ee_enabled_for_tenant(tenant_id: str, tenant_configuration=None) -> bool: if not EE_ENABLED: return False - + if tenant_configuration is None: tenant_configuration = TenantConfiguration() - - config = tenant_configuration.get_configuration( - tenant_id, "ee_enabled" - ) + + config = tenant_configuration.get_configuration(tenant_id, "ee_enabled") if config is None: return False - + return bool(config) diff --git a/keep/cli/cli.py b/keep/cli/cli.py index e0c0ad87f..de2ddec83 100644 --- a/keep/cli/cli.py +++ b/keep/cli/cli.py @@ -6,6 +6,7 @@ import typing import uuid from collections import OrderedDict +from dataclasses import _MISSING_TYPE from importlib import metadata import click @@ -16,11 +17,12 @@ from keep.api.core.db_on_start import try_create_single_tenant from keep.api.core.dependencies import SINGLE_TENANT_UUID +from keep.api.core.posthog import posthog_client from keep.cli.click_extensions import NotRequiredIf +from keep.providers.models.provider_config import ProviderScope from keep.providers.providers_factory import ProvidersFactory from keep.workflowmanager.workflowmanager import WorkflowManager from keep.workflowmanager.workflowstore import WorkflowStore -from keep.api.core.posthog import posthog_client load_dotenv(find_dotenv()) @@ -134,6 +136,7 @@ def set_config(self, keep_config: str): or "api" in arguments or "config" in arguments or "version" in arguments + or "build_cache" in arguments ): return @@ -311,9 +314,19 @@ def whoami(info: Info): @cli.command() @click.option("--multi-tenant", is_flag=True, help="Enable multi-tenant mode") -@click.option("--port", "-p", type=int, default=int(os.environ.get("PORT", 8080)), help="The port to run the API on") @click.option( - "--host", "-h", type=str, default=os.environ.get("HOST", "0.0.0.0"), help="The host to run the API on" + "--port", + "-p", + type=int, + default=int(os.environ.get("PORT", 8080)), + help="The port to run the API on", +) +@click.option( + "--host", + "-h", + type=str, + default=os.environ.get("HOST", "0.0.0.0"), + help="The host to run the API on", ) def api(multi_tenant: bool, port: int, host: str): """Start the API.""" @@ -1088,6 +1101,27 @@ def provider(info: Info): pass +@provider.command(name="build_cache", help="Output providers cache for future use") +def build_cache(): + class ProviderEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, ProviderScope): + dct = o.__dict__ + dct.pop("__pydantic_initialised__", None) + return dct + elif isinstance(o, _MISSING_TYPE): + return None + return o.dict() + + logger.info("Building providers cache") + providers_cache = ProvidersFactory.get_all_providers(ignore_cache_file=True) + with open("providers_cache.json", "w") as f: + json.dump(providers_cache, f, cls=ProviderEncoder) + logger.info( + "Providers cache built successfully", extra={"file": "providers_cache.json"} + ) 
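Editor's note: the build_cache command above hinges on a custom json.JSONEncoder: ProviderScope dataclasses are serialized via their __dict__ (dropping pydantic's internal __pydantic_initialised__ flag), dataclasses' MISSING sentinel becomes null, and anything else is assumed to be a pydantic model and serialized with .dict(). Below is a minimal, self-contained sketch of that encoder pattern; Scope is a stand-in plain dataclass for illustration, not Keep's ProviderScope, and the pydantic fallback is replaced by the default JSONEncoder behaviour.

# Sketch of the encoder pattern used by `keep provider build_cache` above.
# `Scope` is a stand-in dataclass, not Keep's ProviderScope.
import json
from dataclasses import MISSING, _MISSING_TYPE, dataclass


@dataclass
class Scope:
    name: str
    description: str = "no description"


class DataclassEncoder(json.JSONEncoder):
    def default(self, o):
        if hasattr(o, "__dataclass_fields__"):
            # Serialize dataclass instances via their attribute dict.
            return o.__dict__
        if isinstance(o, _MISSING_TYPE):
            # Dataclasses' "no default" sentinel -> null, so the cache stays valid JSON.
            return None
        return super().default(o)


print(json.dumps({"scopes": [Scope("authenticated")], "default": MISSING}, cls=DataclassEncoder))
# {"scopes": [{"name": "authenticated", "description": "no description"}], "default": null}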
+ + @provider.command(name="list") @click.option( "--available", @@ -1512,6 +1546,7 @@ def simulate(info: Info, provider_type: str, params: list[str]): else: click.echo(click.style("Alert simulated successfully", bold=True)) + @cli.group() @pass_info def auth(info: Info): diff --git a/keep/identitymanager/identitymanagerfactory.py b/keep/identitymanager/identitymanagerfactory.py index acd2387dd..04077da4a 100644 --- a/keep/identitymanager/identitymanagerfactory.py +++ b/keep/identitymanager/identitymanagerfactory.py @@ -105,13 +105,14 @@ def _load_manager(manager_type: str, manager_class: str, *args, **kwargs): ) # look for the module in ee except ModuleNotFoundError: - module = importlib.import_module( - f"ee.identitymanager.identity_managers.{manager_type}.{manager_type}_{manager_class}" - ) - except ModuleNotFoundError: - raise NotImplementedError( - f"{manager_class.__name__} for {manager_type} not implemented" - ) + try: + module = importlib.import_module( + f"ee.identitymanager.identity_managers.{manager_type}.{manager_type}_{manager_class}" + ) + except ModuleNotFoundError: + raise NotImplementedError( + f"{manager_class} for {manager_type} not implemented" + ) # look for the class that contains the manager_class in its name for _attr in dir(module): if manager_class in _attr.lower() and "base" not in _attr.lower(): @@ -121,7 +122,7 @@ def _load_manager(manager_type: str, manager_class: str, *args, **kwargs): return manager_class(*args, **kwargs) except (ImportError, AttributeError): raise NotImplementedError( - f"{manager_class.__name__} for {manager_type} not implemented" + f"{manager_class} for {manager_type} not implemented" ) @staticmethod diff --git a/keep/providers/elastic_provider/elastic_provider.py b/keep/providers/elastic_provider/elastic_provider.py index a194f00d8..0de3db3a2 100644 --- a/keep/providers/elastic_provider/elastic_provider.py +++ b/keep/providers/elastic_provider/elastic_provider.py @@ -133,13 +133,26 @@ def _query(self, query: str | dict, index: str = None) -> list[str]: def _run_sql_query(self, query: str) -> list[str]: response = self.client.sql.query(body={"query": query}) - import pandas as pd - results = pd.DataFrame(response["rows"]) + # @tb: I removed pandas so if we'll have performance issues we can revert to pandas + # Original pandas implementation: + # import pandas as pd + # results = pd.DataFrame(response["rows"]) + # columns = [col["name"] for col in response["columns"]] + # results.rename( + # columns={i: columns[i] for i in range(len(columns))}, inplace=True + # ) + # return results + + # Convert rows to list of dicts with proper column names columns = [col["name"] for col in response["columns"]] - results.rename( - columns={i: columns[i] for i in range(len(columns))}, inplace=True - ) + results = [] + for row in response["rows"]: + result = {} + for i, value in enumerate(row): + result[columns[i]] = value + results.append(result) + return results def _run_eql_query(self, query: str | dict, index: str) -> list[str]: diff --git a/keep/providers/grafana_provider/grafana_provider.py b/keep/providers/grafana_provider/grafana_provider.py index a343eaa3e..703fac575 100644 --- a/keep/providers/grafana_provider/grafana_provider.py +++ b/keep/providers/grafana_provider/grafana_provider.py @@ -7,7 +7,6 @@ import pydantic import requests -from grafana_api.model import APIEndpoints from packaging.version import Version from keep.api.models.alert import AlertDto, AlertSeverity, AlertStatus @@ -155,7 +154,7 @@ def validate_scopes(self) -> dict[str, bool 
| str]: return validated_scopes def get_alerts_configuration(self, alert_id: str | None = None): - api = f"{self.authentication_config.host}{APIEndpoints.ALERTING_PROVISIONING.value}/alert-rules" + api = f"{self.authentication_config.host}/api/v1/provisioning/alert-rules" headers = {"Authorization": f"Bearer {self.authentication_config.token}"} response = requests.get(api, verify=False, headers=headers) if not response.ok: @@ -172,7 +171,7 @@ def get_alerts_configuration(self, alert_id: str | None = None): def deploy_alert(self, alert: dict, alert_id: str | None = None): self.logger.info("Deploying alert") - api = f"{self.authentication_config.host}{APIEndpoints.ALERTING_PROVISIONING.value}/alert-rules" + api = f"{self.authentication_config.host}/api/v1/provisioning/alert-rules" headers = {"Authorization": f"Bearer {self.authentication_config.token}"} response = requests.post(api, verify=False, json=alert, headers=headers) @@ -244,7 +243,9 @@ def setup_webhook( f"{GrafanaProvider.KEEP_GRAFANA_WEBHOOK_INTEGRATION_NAME}-{tenant_id}" ) headers = {"Authorization": f"Bearer {self.authentication_config.token}"} - contacts_api = f"{self.authentication_config.host}{APIEndpoints.ALERTING_PROVISIONING.value}/contact-points" + contacts_api = ( + f"{self.authentication_config.host}/api/v1/provisioning/contact-points" + ) try: self.logger.info("Getting contact points") all_contact_points = requests.get( @@ -346,7 +347,9 @@ def setup_webhook( # Finally, we need to update the policies to match the webhook if setup_alerts: self.logger.info("Setting up alerts") - policies_api = f"{self.authentication_config.host}{APIEndpoints.ALERTING_PROVISIONING.value}/policies" + policies_api = ( + f"{self.authentication_config.host}/api/v1/provisioning/policies" + ) all_policies = requests.get( policies_api, verify=False, headers=headers ).json() diff --git a/keep/providers/models/provider_config.py b/keep/providers/models/provider_config.py index 341c16485..11060041e 100644 --- a/keep/providers/models/provider_config.py +++ b/keep/providers/models/provider_config.py @@ -1,11 +1,12 @@ """ Provider configuration model. """ + import os -from dataclasses import dataclass from typing import Optional import chevron +from pydantic.dataclasses import dataclass @dataclass diff --git a/keep/providers/providers_factory.py b/keep/providers/providers_factory.py index f4000efb6..16bffd92c 100644 --- a/keep/providers/providers_factory.py +++ b/keep/providers/providers_factory.py @@ -31,6 +31,8 @@ from keep.providers.models.provider_method import ProviderMethodDTO, ProviderMethodParam from keep.secretmanager.secretmanagerfactory import SecretManagerFactory +PROVIDERS_CACHE_FILE = os.environ.get("PROVIDERS_CACHE_FILE", "providers_cache.json") + logger = logging.getLogger(__name__) @@ -221,7 +223,7 @@ def __get_methods(provider_class: BaseProvider) -> list[ProviderMethodDTO]: return methods @staticmethod - def get_all_providers() -> list[Provider]: + def get_all_providers(ignore_cache_file: bool = False) -> list[Provider]: """ Get all the providers. 
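Editor's note: get_all_providers gains an ignore_cache_file flag here, and the next hunk makes it read providers_cache.json (written at image build time by the build_cache CLI command above) before falling back to dynamic provider discovery. A minimal sketch of that read-through file-cache pattern follows, with stand-in names rather than Keep's actual API.

# Sketch of the read-through cache pattern introduced in providers_factory.py.
# ITEMS_CACHE_FILE, compute_items and get_all_items are stand-ins, not Keep's API.
import json
import os

CACHE_FILE = os.environ.get("ITEMS_CACHE_FILE", "items_cache.json")


def compute_items() -> list[dict]:
    # Stand-in for the expensive dynamic discovery step.
    return [{"type": "example", "version": 1}]


def get_all_items(ignore_cache_file: bool = False) -> list[dict]:
    # Serve from the prebuilt cache file when it exists, unless explicitly bypassed.
    if os.path.exists(CACHE_FILE) and not ignore_cache_file:
        with open(CACHE_FILE) as f:
            return json.load(f)
    return compute_items()

Note that the CLI command calls get_all_providers(ignore_cache_file=True), so the cache file itself is always rebuilt from a fresh scan rather than from a stale copy.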
@@ -234,6 +236,22 @@ def get_all_providers() -> list[Provider]: logger.debug("Using cached providers") return ProvidersFactory._loaded_providers_cache + if os.path.exists(PROVIDERS_CACHE_FILE) and not ignore_cache_file: + logger.info( + "Loading providers from cache file", + extra={"file": PROVIDERS_CACHE_FILE}, + ) + with open(PROVIDERS_CACHE_FILE, "r") as f: + providers_cache = json.load(f) + ProvidersFactory._loaded_providers_cache = [ + Provider(**provider) for provider in providers_cache + ] + logger.info( + "Providers loaded from cache file", + extra={"file": PROVIDERS_CACHE_FILE}, + ) + return ProvidersFactory._loaded_providers_cache + logger.info("Loading providers") providers = [] blacklisted_providers = [ diff --git a/keep/workflowmanager/workflowmanager.py b/keep/workflowmanager/workflowmanager.py index a86dbfeac..01d1f9f16 100644 --- a/keep/workflowmanager/workflowmanager.py +++ b/keep/workflowmanager/workflowmanager.py @@ -4,8 +4,6 @@ import typing import uuid -from pandas.core.common import flatten - from keep.api.core.config import config from keep.api.core.db import ( get_enrichment, @@ -115,13 +113,15 @@ def insert_incident(self, tenant_id: str, incident: IncidentDto, trigger: str): if workflow is None: continue - incident_triggers = flatten( - [ - t.get("events", []) - for t in workflow.workflow_triggers - if t["type"] == "incident" - ] - ) + # Using list comprehension instead of pandas flatten() for better performance + # and to avoid pandas dependency + # @tb: I removed pandas so if we'll have performance issues we can revert to pandas + incident_triggers = [ + event + for trigger in workflow.workflow_triggers + if trigger["type"] == "incident" + for event in trigger.get("events", []) + ] if trigger not in incident_triggers: self.logger.debug( diff --git a/poetry.lock b/poetry.lock index cea7ddf06..b4be37586 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1135,25 +1135,6 @@ async = ["aiosonic (==0.15.1)"] tests = ["aiosonic (==0.15.1)", "glom", "jinja2", "mypy", "pytest (<8.0.0)", "pytest-asyncio", "pytest-bdd (==6.0.1)", "pytest-randomly", "pytest-recording", "python-dateutil", "types-python-dateutil", "zstandard"] zstandard = ["zstandard"] -[[package]] -name = "datefinder" -version = "0.7.3" -description = "Extract datetime objects from strings" -optional = false -python-versions = "*" -files = [ - {file = "datefinder-0.7.3-py2.py3-none-any.whl", hash = "sha256:c012e8cf60f8e80ee2df203f69338211ab2a68ec6e8a83c5c1e1f424d5743a9c"}, -] - -[package.dependencies] -python-dateutil = ">=2.4.2" -pytz = "*" -regex = ">=2017.02.08" - -[package.extras] -dev = ["mock", "pylint (==2.1.1)", "pytest (>=2.8.5)", "pytz (>=2015.7)"] -test = ["mock", "pytest (>=2.8.5)", "pytz (>=2015.7)"] - [[package]] name = "decorator" version = "5.1.1" @@ -1771,20 +1752,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] -[[package]] -name = "grafana-api-sdk" -version = "0.1.1" -description = "A Grafana API SDK" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grafana-api-sdk-0.1.1.tar.gz", hash = "sha256:057180fdb057d792e24c201406deb8872f90b6d56d81875f4026570ed934105f"}, - {file = "grafana_api_sdk-0.1.1-py3-none-any.whl", hash = "sha256:d73287617c8aa909f18cbb7d98b45655b3fc9583f673060fe6a8b7a9f8e98824"}, -] - -[package.dependencies] -urllib3 = "*" - [[package]] name = "greenlet" version = "3.0.3" @@ -2680,35 +2647,6 @@ files = [ pyasn1 = ">=0.1.1" PyOpenSSL = "*" -[[package]] -name = 
"nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "networkx" -version = "3.3" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.10" -files = [ - {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"}, - {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, -] - -[package.extras] -default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - [[package]] name = "nodeenv" version = "1.9.1" @@ -2720,60 +2658,6 @@ files = [ {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[[package]] -name = "numpy" -version = "2.0.0" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f"}, - {file = "numpy-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2635dbd200c2d6faf2ef9a0d04f0ecc6b13b3cad54f7c67c61155138835515d2"}, - {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0a43f0974d501842866cc83471bdb0116ba0dffdbaac33ec05e6afed5b615238"}, - {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8d83bb187fb647643bd56e1ae43f273c7f4dbcdf94550d7938cfc32566756514"}, - {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e843d186c8fb1b102bef3e2bc35ef81160ffef3194646a7fdd6a73c6b97196"}, - {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7696c615765091cc5093f76fd1fa069870304beaccfd58b5dcc69e55ef49c1"}, - {file = "numpy-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b4c76e3d4c56f145d41b7b6751255feefae92edbc9a61e1758a98204200f30fc"}, - {file = "numpy-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd3a644e4807e73b4e1867b769fbf1ce8c5d80e7caaef0d90dcdc640dfc9787"}, - {file = "numpy-2.0.0-cp310-cp310-win32.whl", hash = "sha256:cee6cc0584f71adefe2c908856ccc98702baf95ff80092e4ca46061538a2ba98"}, - {file = "numpy-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ed08d2703b5972ec736451b818c2eb9da80d66c3e84aed1deeb0c345fefe461b"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad0c86f3455fbd0de6c31a3056eb822fc939f81b1618f10ff3406971893b62a5"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7f387600d424f91576af20518334df3d97bc76a300a755f9a8d6e4f5cadd289"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_arm64.whl", hash = 
"sha256:34f003cb88b1ba38cb9a9a4a3161c1604973d7f9d5552c38bc2f04f829536609"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b6f6a8f45d0313db07d6d1d37bd0b112f887e1369758a5419c0370ba915b3871"}, - {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f64641b42b2429f56ee08b4f427a4d2daf916ec59686061de751a55aafa22e4"}, - {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7039a136017eaa92c1848152827e1424701532ca8e8967fe480fe1569dae581"}, - {file = "numpy-2.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46e161722e0f619749d1cd892167039015b2c2817296104487cd03ed4a955995"}, - {file = "numpy-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0e50842b2295ba8414c8c1d9d957083d5dfe9e16828b37de883f51fc53c4016f"}, - {file = "numpy-2.0.0-cp311-cp311-win32.whl", hash = "sha256:2ce46fd0b8a0c947ae047d222f7136fc4d55538741373107574271bc00e20e8f"}, - {file = "numpy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd6acc766814ea6443628f4e6751d0da6593dae29c08c0b2606164db026970c"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e"}, - {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2"}, - {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a"}, - {file = "numpy-2.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95"}, - {file = "numpy-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9"}, - {file = "numpy-2.0.0-cp312-cp312-win32.whl", hash = "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54"}, - {file = "numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e61155fae27570692ad1d327e81c6cf27d535a5d7ef97648a17d922224b216de"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4554eb96f0fd263041baf16cf0881b3f5dafae7a59b1049acb9540c4d57bc8cb"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:903703372d46bce88b6920a0cd86c3ad82dae2dbef157b5fc01b70ea1cfc430f"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:3e8e01233d57639b2e30966c63d36fcea099d17c53bf424d77f088b0f4babd86"}, - {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cde1753efe513705a0c6d28f5884e22bdc30438bf0085c5c486cdaff40cd67a"}, - {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821eedb7165ead9eebdb569986968b541f9908979c2da8a4967ecac4439bae3d"}, - {file = "numpy-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:9a1712c015831da583b21c5bfe15e8684137097969c6d22e8316ba66b5baabe4"}, - {file = "numpy-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9c27f0946a3536403efb0e1c28def1ae6730a72cd0d5878db38824855e3afc44"}, - {file = "numpy-2.0.0-cp39-cp39-win32.whl", hash = "sha256:63b92c512d9dbcc37f9d81b123dec99fdb318ba38c8059afc78086fe73820275"}, - {file = "numpy-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f6bed7f840d44c08ebdb73b1825282b801799e325bcbdfa6bc5c370e5aecc65"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9"}, - {file = "numpy-2.0.0.tar.gz", hash = "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864"}, -] - [[package]] name = "oauthlib" version = "3.2.2" @@ -3160,75 +3044,6 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] -[[package]] -name = "pandas" -version = "2.2.2" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, -] - -[package.dependencies] -numpy = {version = ">=1.23.2", markers = "python_version == \"3.11\""} -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy 
(>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - [[package]] name = "paramiko" version = "3.4.0" @@ -3548,83 +3363,78 @@ files = [ [[package]] name = "psycopg2-binary" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = 
"psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = 
"psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = 
"psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, + {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = 
"psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] [[package]] @@ -4336,94 +4146,6 @@ hiredis = {version = ">=1.0.0", optional = true, markers = "extra == \"hiredis\" hiredis = ["hiredis (>=1.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] -[[package]] -name = "regex" -version = "2024.5.15" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, - {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, - {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, - {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, - {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, - {file = 
"regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, - {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, - {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, - {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, - {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, - {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, - {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, -] - [[package]] name = "requests" version = "2.32.3" @@ -4611,56 +4333,6 @@ botocore = ">=1.33.2,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] -[[package]] -name = "scipy" -version = "1.14.1" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.10" -files = [ - {file = "scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389"}, - {file = "scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3"}, - {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0"}, - {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3"}, - {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d"}, - {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69"}, - {file = "scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad"}, - {file = "scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = 
"sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617"}, - {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8"}, - {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37"}, - {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2"}, - {file = "scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2"}, - {file = "scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5"}, - {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc"}, - {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310"}, - {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066"}, - {file = "scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1"}, - {file = "scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73"}, - {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e"}, - {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d"}, - {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e"}, - {file = "scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06"}, - {file = "scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84"}, - {file = "scipy-1.14.1.tar.gz", hash = 
"sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417"}, -] - -[package.dependencies] -numpy = ">=1.23.5,<2.3" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<=7.3.7)", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict (>=2.0)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - [[package]] name = "sendgrid" version = "6.11.0" @@ -5482,4 +5154,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.12" -content-hash = "1f7fba5a2c241db5654d7f84018d40650402bdc53e46ef5b09e7fcf3d6f055a8" +content-hash = "d87b9a24e331d9b1ce1286aa31bf653c563e667ea713ca9a46468d59b8164b77" diff --git a/pyproject.toml b/pyproject.toml index 367bc7944..1950a35f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,8 @@ [tool.poetry] name = "keep" -version = "0.29.5" +version = "0.30.0" description = "Alerting. for developers, by developers." authors = ["Keep Alerting LTD"] -readme = "README.md" packages = [{include = "keep"}] [tool.poetry.dependencies] @@ -18,7 +17,6 @@ python-dotenv = "^0.21.1" pygithub = "^1.57" sentry-sdk = "^1.15.0" pydantic = "^1.10.4" -datefinder = "^0.7.3" mysql-connector-python = "^9.1.0" logmine = "^0.4.1" astunparse = "^1.6.3" @@ -29,17 +27,13 @@ python-telegram-bot = "^20.1" fastapi = "^0.109.1" uvicorn = "^0.20.0" opsgenie-sdk = "^2.1.5" -psycopg2-binary = "^2.9.5" starlette-context = "^0.3.6" -nest-asyncio = "^1.5.6" datadog-api-client = "^2.12.0" sqlmodel = "^0.0.8" -grafana-api-sdk = "^0.1.0" cloud-sql-python-connector = "1.12.0" pymysql = "^1.1.1" google-cloud-secret-manager = "^2.16.1" python-jose = "^3.3.0" -jwcrypto = "^1.5.6" sqlalchemy = "1.4.41" snowflake-connector-python = "3.12.3" openai = "1.37.1" @@ -84,18 +78,14 @@ openshift-client = "^2.0.4" uptime-kuma-api = "^1.2.1" packaging = "^24.0" arq = "^0.26.0" - - alembic = "^1.13.2" -numpy = "^2.0.0" -pandas = "^2.2.2" quickchart-io = "^2.0.0" -scipy = "^1.14.1" -networkx = "^3.3" google-auth = "2.34.0" clickhouse-driver = "^0.2.9" google-cloud-logging = "^3.11.3" json5 = "^0.9.28" + +psycopg2-binary = "^2.9.10" [tool.poetry.group.dev.dependencies] pre-commit = "^3.0.4" pre-commit-hooks = "^4.4.0" @@ -109,8 +99,8 @@ coverage = "^7.2.2" pytest-mock = "^3.11.1" ruff = "^0.1.6" pytest-docker = "^2.0.1" - playwright = "^1.44.0" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/tests/test_alert_correlation.py b/tests/test_alert_correlation.py deleted file mode 100644 index aaa06be16..000000000 --- a/tests/test_alert_correlation.py +++ /dev/null @@ -1,133 +0,0 @@ -import os -import pytest -import random -import numpy as np - -from datetime import datetime, timedelta -from unittest.mock import patch, MagicMock, AsyncMock -from keep.api.models.db.alert import Alert -from keep.api.models.db.tenant import Tenant -from ee.experimental.incident_utils import mine_incidents_and_create_objects, calculate_pmi_matrix, DEFAULT_TEMP_DIR_LOCATION - -random.seed(42) - -@pytest.mark.asyncio -async def 
test_mine_incidents_and_create_objects(db_session, tenant_id='test', n_alerts=10000, n_fingerprints=50): - # Add alerts - current_time = datetime.now() - time_lags = [int(round(random.normalvariate(mu=60*24*30/2, sigma=60*24*30/6))) for _ in range(n_alerts)] - alerts = [ - Alert( - tenant_id=tenant_id, - provider_type="test", - provider_id="test", - event={ - "id": f"test-{i}", - "name": f"Test Alert {i}", - "fingerprint": f"fp-{i % n_fingerprints}", - "lastReceived": (current_time - timedelta(minutes=time_lags[i])).isoformat(), - "severity": "critical", - "source": ["test-source"], - }, - fingerprint=f"fp-{i % n_fingerprints}", - timestamp=current_time - timedelta(minutes=time_lags[i]) - ) - for i in range(n_alerts) - ] - db_session.add_all(alerts) - db_session.commit() - - # add Tenant - tenant = Tenant( - id=tenant_id, - name=tenant_id, - configuration={ - "ee_enabled": True, - } - ) - db_session.add(tenant) - db_session.commit() - - # Mock dependencies and call the function - with patch('ee.experimental.incident_utils.get_pusher_client') as mock_pusher, \ - patch('ee.experimental.incident_utils.get_pool') as mock_get_pool: - - mock_pusher.return_value = MagicMock() - mock_pool = AsyncMock() - mock_get_pool.return_value = mock_pool - - result = await mine_incidents_and_create_objects(None, tenant_id) - - assert result is not None - assert mock_pusher.called - assert mock_get_pool.called - -def test_calculate_pmi_matrix(db_session, tenant_id='test', n_alerts=10000, n_fingerprints=50): - # Add Alerts - current_time = datetime.now() - time_lags = [int(round(random.normalvariate(mu=60*24*30/2, sigma=60*24*30/6))) for _ in range(n_alerts)] - alerts = [ - Alert( - tenant_id=tenant_id, - provider_type="test", - provider_id="test", - event={ - "id": f"test-{i}", - "name": f"Test Alert {i}", - "fingerprint": f"fp-{i % n_fingerprints}", - "lastReceived": (current_time - timedelta(minutes=time_lags[i])).isoformat(), - "severity": "critical", - "source": ["test-source"], - }, - fingerprint=f"fp-{i % n_fingerprints}", - timestamp=current_time - timedelta(minutes=time_lags[i]) - ) - for i in range(n_alerts) - ] - db_session.add_all(alerts) - db_session.commit() - - # add Tenant - tenant = Tenant( - id=tenant_id, - name=tenant_id, - configuration={ - "ee_enabled": True, - } - ) - db_session.add(tenant) - db_session.commit() - - # Call the function - result = calculate_pmi_matrix(None, tenant_id) - - assert result["status"] == "success" - pmi_matrix = result["pmi_matrix"] - fingerprints = result["pmi_columns"] - assert (np.unique(fingerprints) == np.unique([f"fp-{i % n_fingerprints}" for i in range(n_fingerprints)])).all() - assert pmi_matrix.shape == (n_fingerprints, n_fingerprints) - - -@pytest.mark.asyncio -async def test_mine_incidents_and_create_objects_with_no_alerts(db_session, tenant_id='test'): - # add Tenant - tenant = Tenant( - id=tenant_id, - name=tenant_id, - configuration={ - "ee_enabled": True, - } - ) - - with patch('ee.experimental.incident_utils.get_pusher_client') as mock_pusher, \ - patch('ee.experimental.incident_utils.get_pool') as mock_get_pool: - - mock_pusher.return_value = MagicMock() - mock_pool = AsyncMock() - mock_get_pool.return_value = mock_pool - - result = await mine_incidents_and_create_objects(None, tenant_id) - - assert result=={"incidents": []} - -